Diffstat (limited to 'bitbake')
-rw-r--r--  bitbake/.b4-config | 4
-rw-r--r--  bitbake/README | 50
-rw-r--r--  bitbake/SECURITY.md | 24
-rwxr-xr-x  bitbake/bin/bitbake | 7
l---------  bitbake/bin/bitbake-config-build | 1
-rwxr-xr-x  bitbake/bin/bitbake-diffsigs | 68
-rwxr-xr-x  bitbake/bin/bitbake-getvar | 71
-rwxr-xr-x  bitbake/bin/bitbake-hashclient | 351
-rwxr-xr-x  bitbake/bin/bitbake-hashserv | 155
-rwxr-xr-x  bitbake/bin/bitbake-layers | 32
-rwxr-xr-x  bitbake/bin/bitbake-prserv | 114
-rwxr-xr-x  bitbake/bin/bitbake-selftest | 6
-rwxr-xr-x  bitbake/bin/bitbake-server | 24
-rwxr-xr-x  bitbake/bin/bitbake-worker | 171
-rwxr-xr-x  bitbake/bin/git-make-shallow | 40
-rwxr-xr-x  bitbake/bin/toaster | 18
-rwxr-xr-x  bitbake/bin/toaster-eventreplay | 82
-rwxr-xr-x  bitbake/contrib/b4-wrapper-bitbake.py | 40
-rw-r--r--  bitbake/contrib/hashserv/Dockerfile | 8
-rw-r--r--  bitbake/contrib/prserv/Dockerfile | 62
-rw-r--r--  bitbake/contrib/vim/ftdetect/bitbake.vim | 14
-rw-r--r--  bitbake/contrib/vim/indent/bitbake.vim | 6
-rw-r--r--  bitbake/contrib/vim/plugin/newbbappend.vim | 2
-rw-r--r--  bitbake/contrib/vim/syntax/bitbake.vim | 19
-rw-r--r--  bitbake/doc/Makefile | 2
-rw-r--r--  bitbake/doc/README | 8
-rw-r--r--  bitbake/doc/_templates/footer.html | 9
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst | 182
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst | 314
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst | 195
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst | 228
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-library-functions.rst | 59
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst | 731
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst | 91
-rw-r--r--  bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst | 894
-rw-r--r--  bitbake/doc/conf.py | 7
-rw-r--r--  bitbake/doc/index.rst | 2
-rw-r--r--  bitbake/doc/releases.rst | 111
-rw-r--r--  bitbake/lib/bb/COW.py | 12
-rw-r--r--  bitbake/lib/bb/__init__.py | 141
-rwxr-xr-x  bitbake/lib/bb/acl.py | 213
-rw-r--r--  bitbake/lib/bb/asyncrpc/__init__.py | 16
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py | 271
-rw-r--r--  bitbake/lib/bb/asyncrpc/connection.py | 146
-rw-r--r--  bitbake/lib/bb/asyncrpc/exceptions.py | 21
-rw-r--r--  bitbake/lib/bb/asyncrpc/serv.py | 410
-rw-r--r--  bitbake/lib/bb/build.py | 291
-rw-r--r--  bitbake/lib/bb/cache.py | 373
-rw-r--r--  bitbake/lib/bb/checksum.py | 47
-rw-r--r--  bitbake/lib/bb/codeparser.py | 195
-rw-r--r--  bitbake/lib/bb/command.py | 147
-rw-r--r--  bitbake/lib/bb/compress/_pipecompress.py | 196
-rw-r--r--  bitbake/lib/bb/compress/lz4.py | 19
-rw-r--r--  bitbake/lib/bb/compress/zstd.py | 30
-rw-r--r--  bitbake/lib/bb/cooker.py | 904
-rw-r--r--  bitbake/lib/bb/cookerdata.py | 211
-rw-r--r--  bitbake/lib/bb/daemonize.py | 44
-rw-r--r--  bitbake/lib/bb/data.py | 147
-rw-r--r--  bitbake/lib/bb/data_smart.py | 371
-rw-r--r--  bitbake/lib/bb/event.py | 204
-rw-r--r--  bitbake/lib/bb/exceptions.py | 94
-rw-r--r--  bitbake/lib/bb/fetch2/README | 57
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 561
-rw-r--r--  bitbake/lib/bb/fetch2/az.py | 98
-rw-r--r--  bitbake/lib/bb/fetch2/clearcase.py | 6
-rw-r--r--  bitbake/lib/bb/fetch2/crate.py | 150
-rw-r--r--  bitbake/lib/bb/fetch2/gcp.py | 102
-rw-r--r--  bitbake/lib/bb/fetch2/git.py | 669
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py | 159
-rw-r--r--  bitbake/lib/bb/fetch2/gomod.py | 273
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py | 1
-rw-r--r--  bitbake/lib/bb/fetch2/local.py | 25
-rw-r--r--  bitbake/lib/bb/fetch2/npm.py | 83
-rw-r--r--  bitbake/lib/bb/fetch2/npmsw.py | 112
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py | 52
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/s3.py | 43
-rw-r--r--  bitbake/lib/bb/fetch2/sftp.py | 4
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py | 50
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py | 15
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 263
-rwxr-xr-x  bitbake/lib/bb/main.py | 407
-rw-r--r--  bitbake/lib/bb/monitordisk.py | 24
-rw-r--r--  bitbake/lib/bb/msg.py | 38
-rw-r--r--  bitbake/lib/bb/parse/__init__.py | 61
-rw-r--r--  bitbake/lib/bb/parse/ast.py | 217
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py | 125
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py | 58
-rw-r--r--  bitbake/lib/bb/persist_data.py | 301
-rw-r--r--  bitbake/lib/bb/process.py | 9
-rw-r--r--  bitbake/lib/bb/progress.py | 9
-rw-r--r--  bitbake/lib/bb/providers.py | 92
-rw-r--r--  bitbake/lib/bb/runqueue.py | 1146
-rw-r--r--  bitbake/lib/bb/server/process.py | 402
-rw-r--r--  bitbake/lib/bb/server/xmlrpcserver.py | 14
-rw-r--r--  bitbake/lib/bb/siggen.py | 731
-rw-r--r--  bitbake/lib/bb/taskdata.py | 14
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py | 106
-rw-r--r--  bitbake/lib/bb/tests/color.py | 4
-rw-r--r--  bitbake/lib/bb/tests/compression.py | 100
-rw-r--r--  bitbake/lib/bb/tests/cooker.py | 2
-rw-r--r--  bitbake/lib/bb/tests/data.py | 181
-rw-r--r--  bitbake/lib/bb/tests/event.py | 62
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html | 59
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html | 20
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html | 40
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html | 19
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php | 3528
-rw-r--r--  bitbake/lib/bb/tests/fetch.py | 1959
-rw-r--r--  bitbake/lib/bb/tests/parse.py | 303
-rw-r--r--  bitbake/lib/bb/tests/persist_data.py | 129
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb | 0
-rw-r--r--  bitbake/lib/bb/tests/runqueue.py | 65
-rw-r--r--  bitbake/lib/bb/tests/siggen.py | 77
-rw-r--r--  bitbake/lib/bb/tests/utils.py | 39
-rw-r--r--  bitbake/lib/bb/tinfoil.py | 215
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py | 101
-rw-r--r--  bitbake/lib/bb/ui/eventreplay.py | 86
-rw-r--r--  bitbake/lib/bb/ui/knotty.py | 246
-rw-r--r--  bitbake/lib/bb/ui/ncurses.py | 3
-rw-r--r--  bitbake/lib/bb/ui/taskexp.py | 7
-rwxr-xr-x  bitbake/lib/bb/ui/taskexp_ncurses.py | 1511
-rw-r--r--  bitbake/lib/bb/ui/teamcity.py | 5
-rw-r--r--  bitbake/lib/bb/ui/toasterui.py | 2
-rw-r--r--  bitbake/lib/bb/ui/uievent.py | 32
-rw-r--r--  bitbake/lib/bb/ui/uihelper.py | 8
-rw-r--r--  bitbake/lib/bb/utils.py | 962
-rwxr-xr-x  bitbake/lib/bb/xattr.py | 126
-rw-r--r--  bitbake/lib/bblayers/__init__.py | 2
-rw-r--r--  bitbake/lib/bblayers/action.py | 17
-rw-r--r--  bitbake/lib/bblayers/common.py | 2
-rw-r--r--  bitbake/lib/bblayers/layerindex.py | 43
-rw-r--r--  bitbake/lib/bblayers/query.py | 72
-rw-r--r--  bitbake/lib/bs4/AUTHORS | 49
-rw-r--r--  bitbake/lib/bs4/AUTHORS.txt | 43
-rw-r--r--  bitbake/lib/bs4/CHANGELOG (renamed from bitbake/lib/bs4/NEWS.txt) | 779
-rw-r--r--  bitbake/lib/bs4/LICENSE (renamed from bitbake/lib/bs4/COPYING.txt) | 11
-rw-r--r--  bitbake/lib/bs4/__init__.py | 681
-rw-r--r--  bitbake/lib/bs4/builder/__init__.py | 382
-rw-r--r--  bitbake/lib/bs4/builder/_html5lib.py | 251
-rw-r--r--  bitbake/lib/bs4/builder/_htmlparser.py | 433
-rw-r--r--  bitbake/lib/bs4/builder/_lxml.py | 212
-rw-r--r--  bitbake/lib/bs4/css.py | 274
-rw-r--r--  bitbake/lib/bs4/dammit.py | 411
-rw-r--r--  bitbake/lib/bs4/diagnose.py | 84
-rw-r--r--  bitbake/lib/bs4/element.py | 2219
-rw-r--r--  bitbake/lib/bs4/formatter.py | 185
-rw-r--r--  bitbake/lib/bs4/testing.py | 686
-rw-r--r--  bitbake/lib/bs4/tests/__init__.py | 1
-rw-r--r--  bitbake/lib/bs4/tests/test_builder_registry.py | 147
-rw-r--r--  bitbake/lib/bs4/tests/test_docs.py | 32
-rw-r--r--  bitbake/lib/bs4/tests/test_html5lib.py | 98
-rw-r--r--  bitbake/lib/bs4/tests/test_htmlparser.py | 31
-rw-r--r--  bitbake/lib/bs4/tests/test_lxml.py | 70
-rw-r--r--  bitbake/lib/bs4/tests/test_soup.py | 479
-rw-r--r--  bitbake/lib/bs4/tests/test_tree.py | 2004
-rw-r--r--  bitbake/lib/codegen.py | 10
-rw-r--r--  bitbake/lib/hashserv/__init__.py | 175
-rw-r--r--  bitbake/lib/hashserv/client.py | 453
-rw-r--r--  bitbake/lib/hashserv/server.py | 1117
-rw-r--r--  bitbake/lib/hashserv/sqlalchemy.py | 598
-rw-r--r--  bitbake/lib/hashserv/sqlite.py | 579
-rw-r--r--  bitbake/lib/hashserv/tests.py | 1309
-rw-r--r--  bitbake/lib/layerindexlib/__init__.py | 24
-rw-r--r--  bitbake/lib/layerindexlib/cooker.py | 2
-rw-r--r--  bitbake/lib/layerindexlib/restapi.py | 4
-rw-r--r--  bitbake/lib/layerindexlib/tests/restapi.py | 2
-rw-r--r--  bitbake/lib/ply/yacc.py | 11
-rw-r--r--  bitbake/lib/progressbar/progressbar.py | 2
-rw-r--r--  bitbake/lib/prserv/__init__.py | 99
-rw-r--r--  bitbake/lib/prserv/client.py | 72
-rw-r--r--  bitbake/lib/prserv/db.py | 427
-rw-r--r--  bitbake/lib/prserv/serv.py | 684
-rw-r--r--  bitbake/lib/prserv/tests.py | 388
-rw-r--r--  bitbake/lib/pyinotify.py | 44
-rw-r--r--  bitbake/lib/toaster/bldcollector/urls.py | 2
-rw-r--r--  bitbake/lib/toaster/bldcollector/views.py | 3
-rw-r--r--  bitbake/lib/toaster/bldcontrol/localhostbecontroller.py | 4
-rw-r--r--  bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py | 83
-rw-r--r--  bitbake/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py | 48
-rw-r--r--  bitbake/lib/toaster/bldcontrol/models.py | 4
-rw-r--r--  bitbake/lib/toaster/logs/.gitignore | 1
-rwxr-xr-x  bitbake/lib/toaster/manage.py | 2
-rw-r--r--  bitbake/lib/toaster/orm/fixtures/README | 2
-rwxr-xr-x  bitbake/lib/toaster/orm/fixtures/check_fixtures.py | 38
-rwxr-xr-x  bitbake/lib/toaster/orm/fixtures/gen_fixtures.py | 451
-rw-r--r--  bitbake/lib/toaster/orm/fixtures/oe-core.xml | 80
-rw-r--r--  bitbake/lib/toaster/orm/fixtures/poky.xml | 210
-rw-r--r--  bitbake/lib/toaster/orm/fixtures/settings.xml | 4
-rw-r--r--  bitbake/lib/toaster/orm/management/commands/lsupdates.py | 16
-rw-r--r--  bitbake/lib/toaster/orm/migrations/0020_models_bigautofield.py | 173
-rw-r--r--  bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py | 22
-rw-r--r--  bitbake/lib/toaster/orm/models.py | 52
-rw-r--r--  bitbake/lib/toaster/pytest.ini | 16
-rw-r--r--  bitbake/lib/toaster/tests/browser/selenium_helpers_base.py | 88
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_all_builds_page.py | 316
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_all_projects_page.py | 162
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_builddashboard_page.py | 15
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py | 8
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_delete_project.py | 103
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_landing_page.py | 143
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_layerdetails_page.py | 31
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py | 24
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py | 14
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_new_project_page.py | 16
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_project_builds_page.py | 4
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_project_config_page.py | 33
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_sample.py | 10
-rw-r--r--  bitbake/lib/toaster/tests/browser/test_toastertable_ui.py | 11
-rw-r--r--  bitbake/lib/toaster/tests/builds/buildtest.py | 13
-rw-r--r--  bitbake/lib/toaster/tests/builds/test_core_image_min.py | 20
-rw-r--r--  bitbake/lib/toaster/tests/commands/test_loaddata.py | 4
-rw-r--r--  bitbake/lib/toaster/tests/commands/test_lsupdates.py | 3
-rw-r--r--  bitbake/lib/toaster/tests/commands/test_runbuilds.py | 13
-rw-r--r--  bitbake/lib/toaster/tests/db/test_db.py | 3
-rw-r--r--  bitbake/lib/toaster/tests/functional/functional_helpers.py | 168
-rw-r--r--  bitbake/lib/toaster/tests/functional/test_create_new_project.py | 124
-rw-r--r--  bitbake/lib/toaster/tests/functional/test_functional_basic.py | 249
-rw-r--r--  bitbake/lib/toaster/tests/functional/test_project_config.py | 294
-rw-r--r--  bitbake/lib/toaster/tests/functional/test_project_page.py | 775
-rw-r--r--  bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py | 507
-rw-r--r--  bitbake/lib/toaster/tests/functional/utils.py | 86
-rw-r--r--  bitbake/lib/toaster/tests/toaster-tests-requirements.txt | 10
-rw-r--r--  bitbake/lib/toaster/tests/views/test_views.py | 20
-rw-r--r--  bitbake/lib/toaster/toastergui/api.py | 26
-rw-r--r--  bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml | 24
-rw-r--r--  bitbake/lib/toaster/toastergui/forms.py | 14
-rw-r--r--  bitbake/lib/toaster/toastergui/static/css/default.css | 28
-rw-r--r--  bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css | 1
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js (renamed from bitbake/lib/toaster/toastergui/static/js/bootstrap.js) | 431
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js | 6
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js | 7
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map | 1
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js | 4
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/libtoaster.js | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/static/js/projectpage.js | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/base.html | 11
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/base_specific.html | 4
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/command_line_builds.html | 209
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/configvars.html | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/js-unit-tests.html | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/landing.html | 16
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/landing_not_managed.html | 34
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/layerdetails.html | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/mrb_section.html | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html | 8
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/package_detail_base.html | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html | 8
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html | 4
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/project.html | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/project_specific.html | 2
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/projectconf.html | 34
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/recipe.html | 4
-rw-r--r--  bitbake/lib/toaster/toastergui/templates/target.html | 4
-rw-r--r--  bitbake/lib/toaster/toastergui/templatetags/projecttags.py | 5
-rw-r--r--  bitbake/lib/toaster/toastergui/urls.py | 6
-rw-r--r--  bitbake/lib/toaster/toastergui/views.py | 220
-rw-r--r--  bitbake/lib/toaster/toastergui/widgets.py | 10
-rw-r--r--  bitbake/lib/toaster/toastermain/logs.py | 158
-rw-r--r--  bitbake/lib/toaster/toastermain/management/commands/buildimport.py | 4
-rw-r--r--  bitbake/lib/toaster/toastermain/management/commands/checksocket.py | 4
-rw-r--r--  bitbake/lib/toaster/toastermain/settings.py | 86
-rw-r--r--  bitbake/lib/toaster/toastermain/settings_test.py | 4
-rw-r--r--  bitbake/lib/toaster/toastermain/urls.py | 4
-rw-r--r--  bitbake/lib/toaster/tox.ini | 24
-rw-r--r--  bitbake/toaster-requirements.txt | 3
270 files changed, 34005 insertions, 12678 deletions
diff --git a/bitbake/.b4-config b/bitbake/.b4-config
new file mode 100644
index 0000000000..047f0b94a4
--- /dev/null
+++ b/bitbake/.b4-config
@@ -0,0 +1,4 @@
+[b4]
+  send-series-to = bitbake-devel@lists.openembedded.org
+  send-auto-cc-cmd = ./contrib/b4-wrapper-bitbake.py send-auto-cc-cmd
+  prep-pre-flight-checks = disable-needs-checking
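
The new .b4-config wires BitBake into the b4 contribution tool: series are addressed
to bitbake-devel automatically and Cc lists are computed by the wrapper script.
Assuming a stock b4 installation, a typical workflow with this config in place might
look like:

    b4 prep -n my-topic    # create a branch for a new patch series
    b4 send                # mail the series to bitbake-devel per send-series-to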
diff --git a/bitbake/README b/bitbake/README
index 479c376588..e9f4c858ee 100644
--- a/bitbake/README
+++ b/bitbake/README
@@ -7,29 +7,57 @@ One of BitBake's main users, OpenEmbedded, takes this core and builds embedded L
7stacks using a task-oriented approach. 7stacks using a task-oriented approach.
8 8
9For information about Bitbake, see the OpenEmbedded website: 9For information about Bitbake, see the OpenEmbedded website:
10 http://www.openembedded.org/ 10 https://www.openembedded.org/
11 11
12Bitbake plain documentation can be found under the doc directory or its integrated 12Bitbake plain documentation can be found under the doc directory or its integrated
13html version at the Yocto Project website: 13html version at the Yocto Project website:
14 http://yoctoproject.org/documentation 14 https://docs.yoctoproject.org
15
16Bitbake requires Python version 3.8 or newer.
15 17
16Contributing 18Contributing
17------------ 19------------
18 20
19Please refer to 21Please refer to our contributor guide here: https://docs.yoctoproject.org/contributor-guide/
20http://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded 22for full details on how to submit changes.
21for guidelines on how to submit patches, just note that the latter documentation is intended 23
22for OpenEmbedded (and its core) not bitbake patches (bitbake-devel@lists.openembedded.org) 24As a quick guide, patches should be sent to bitbake-devel@lists.openembedded.org
23but in general main guidelines apply. Once the commit(s) have been created, the way to send 25The git command to do that would be:
24the patch is through git-send-email. For example, to send the last commit (HEAD) on current
25branch, type:
26 26
27 git send-email -M -1 --to bitbake-devel@lists.openembedded.org 27 git send-email -M -1 --to bitbake-devel@lists.openembedded.org
28 28
29If you're sending a patch related to the BitBake manual, make sure you copy
30the Yocto Project documentation mailing list:
31
32 git send-email -M -1 --to bitbake-devel@lists.openembedded.org --cc docs@lists.yoctoproject.org
33
29Mailing list: 34Mailing list:
30 35
31 http://lists.openembedded.org/mailman/listinfo/bitbake-devel 36 https://lists.openembedded.org/g/bitbake-devel
32 37
33Source code: 38Source code:
34 39
35 http://git.openembedded.org/bitbake/ 40 https://git.openembedded.org/bitbake/
41
42Testing
43-------
44
45Bitbake has a testsuite located in lib/bb/tests/ whichs aim to try and prevent regressions.
46You can run this with "bitbake-selftest". In particular the fetcher is well covered since
47it has so many corner cases. The datastore has many tests too. Testing with the testsuite is
48recommended before submitting patches, particularly to the fetcher and datastore. We also
49appreciate new test cases and may require them for more obscure issues.
50
51To run the tests "zstd" and "git" must be installed.
52
53The assumption is made that this testsuite is run from an initialized OpenEmbedded build
54environment (i.e. `source oe-init-build-env` is used). If this is not the case, run the
55testsuite as follows:
56
57 export PATH=$(pwd)/bin:$PATH
58 bin/bitbake-selftest
59
60The testsuite can alternatively be executed using pytest, e.g. obtained from PyPI (in this
61case, the PATH is configured automatically):
62
63 pytest
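
Since bitbake-selftest builds on Python's unittest machinery, a single module or test
case can usually be selected instead of the full suite; the selectors below are
illustrative only:

    bin/bitbake-selftest bb.tests.data
    bin/bitbake-selftest bb.tests.fetch.URLHandle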
diff --git a/bitbake/SECURITY.md b/bitbake/SECURITY.md
new file mode 100644
index 0000000000..7d2ce1f631
--- /dev/null
+++ b/bitbake/SECURITY.md
@@ -0,0 +1,24 @@
+How to Report a Potential Vulnerability?
+========================================
+
+If you would like to report a public issue (for example, one with a released
+CVE number), please report it using the
+[https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Security Security Bugzilla].
+If you have a patch ready, submit it following the same procedure as any other
+patch as described in README.md.
+
+If you are dealing with a not-yet released or urgent issue, please send a
+message to security AT yoctoproject DOT org, including as many details as
+possible: the layer or software module affected, the recipe and its version,
+and any example code, if available.
+
+Branches maintained with security fixes
+---------------------------------------
+
+See [https://wiki.yoctoproject.org/wiki/Stable_Release_and_LTS Stable release and LTS]
+for detailed info regarding the policies and maintenance of Stable branches.
+
+The [https://wiki.yoctoproject.org/wiki/Releases Release page] contains a list of all
+releases of the Yocto Project. Versions in grey are no longer actively maintained with
+security patches, but well-tested patches may still be accepted for them for
+significant issues.
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index bc762bfc15..40b5d895c1 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -12,6 +12,8 @@
 
 import os
 import sys
+import warnings
+warnings.simplefilter("default")
 
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                 'lib'))
@@ -23,10 +25,9 @@ except RuntimeError as exc:
 from bb import cookerdata
 from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 
-if sys.getfilesystemencoding() != "utf-8":
-    sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
 
-__version__ = "1.49.2"
+__version__ = "2.15.1"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:
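
The inline encoding check above is replaced by a call to bb.utils.check_system_locale(),
whose body is not shown in this diff. A minimal sketch of what such a helper has to
verify, assuming it preserves the old behaviour, is:

    import sys

    def check_system_locale():
        # Python fixes the filesystem encoding at interpreter startup, so it
        # must already be UTF-8 when BitBake is launched; it cannot be
        # corrected after modules have loaded.
        if sys.getfilesystemencoding() != "utf-8":
            sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).")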
diff --git a/bitbake/bin/bitbake-config-build b/bitbake/bin/bitbake-config-build
new file mode 120000
index 0000000000..11e6df80c4
--- /dev/null
+++ b/bitbake/bin/bitbake-config-build
@@ -0,0 +1 @@
+bitbake-layers
\ No newline at end of file
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
index 19420a2df6..9d6cb8c944 100755
--- a/bitbake/bin/bitbake-diffsigs
+++ b/bitbake/bin/bitbake-diffsigs
@@ -11,6 +11,8 @@
 import os
 import sys
 import warnings
+
+warnings.simplefilter("default")
 import argparse
 import logging
 import pickle
@@ -26,6 +28,7 @@ logger = bb.msg.logger_create(myname)
 
 is_dump = myname == 'bitbake-dumpsig'
 
+
 def find_siginfo(tinfoil, pn, taskname, sigs=None):
     result = None
     tinfoil.set_event_mask(['bb.event.FindSigInfoResult',
@@ -51,6 +54,7 @@ def find_siginfo(tinfoil, pn, taskname, sigs=None):
         sys.exit(2)
     return result
 
+
 def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
     """ Find the most recent signature files for the specified PN/task """
 
@@ -59,22 +63,26 @@ def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
 
     if sig1 and sig2:
         sigfiles = find_siginfo(bbhandler, pn, taskname, [sig1, sig2])
-        if len(sigfiles) == 0:
+        if not sigfiles:
             logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2))
             sys.exit(1)
-        elif not sig1 in sigfiles:
+        elif sig1 not in sigfiles:
            logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig1))
            sys.exit(1)
-        elif not sig2 in sigfiles:
+        elif sig2 not in sigfiles:
            logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
            sys.exit(1)
-        latestfiles = [sigfiles[sig1], sigfiles[sig2]]
+
+        latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
     else:
-        filedates = find_siginfo(bbhandler, pn, taskname)
-        latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
-        if not latestfiles:
+        sigfiles = find_siginfo(bbhandler, pn, taskname)
+        latestsigs = sorted(sigfiles.keys(), key=lambda h: sigfiles[h]['time'])[-2:]
+        if not latestsigs:
            logger.error('No sigdata files found matching %s %s' % (pn, taskname))
            sys.exit(1)
+        latestfiles = [sigfiles[latestsigs[0]]['path']]
+        if len(latestsigs) > 1:
+            latestfiles.append(sigfiles[latestsigs[1]]['path'])
 
     return latestfiles
 
@@ -85,14 +93,14 @@ def recursecb(key, hash1, hash2):
     hashfiles = find_siginfo(tinfoil, key, None, hashes)
 
     recout = []
-    if len(hashfiles) == 0:
+    if not hashfiles:
         recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
-    elif not hash1 in hashfiles:
+    elif hash1 not in hashfiles:
         recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
-    elif not hash2 in hashfiles:
+    elif hash2 not in hashfiles:
         recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
     else:
-        out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
+        out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, color=color)
         for change in out2:
             for line in change.splitlines():
                 recout.append('    ' + line)
@@ -109,36 +117,36 @@ parser.add_argument('-D', '--debug',
 
 if is_dump:
     parser.add_argument("-t", "--task",
                         help="find the signature data file for the last run of the specified task",
                         action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
 
     parser.add_argument("sigdatafile1",
                         help="Signature file to dump. Not used when using -t/--task.",
                         action="store", nargs='?', metavar="sigdatafile")
 else:
     parser.add_argument('-c', '--color',
                         help='Colorize the output (where %(metavar)s is %(choices)s)',
                         choices=['auto', 'always', 'never'], default='auto', metavar='color')
 
     parser.add_argument('-d', '--dump',
                         help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
                         action='store_true')
 
     parser.add_argument("-t", "--task",
                         help="find the signature data files for the last two runs of the specified task and compare them",
                         action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
 
     parser.add_argument("-s", "--signature",
                         help="With -t/--task, specify the signatures to look for instead of taking the last two",
                         action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
 
     parser.add_argument("sigdatafile1",
                         help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
                         action="store", nargs='?')
 
     parser.add_argument("sigdatafile2",
                         help="Second signature file to compare",
                         action="store", nargs='?')
 
options = parser.parse_args()
if is_dump:
@@ -156,7 +164,8 @@ if options.taskargs:
     with bb.tinfoil.Tinfoil() as tinfoil:
         tinfoil.prepare(config_only=True)
         if not options.dump and options.sigargs:
-            files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1])
+            files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0],
+                                      options.sigargs[1])
         else:
             files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
 
@@ -165,7 +174,8 @@
             output = bb.siggen.dump_sigfile(files[-1])
         else:
             if len(files) < 2:
-                logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (options.taskargs[0], options.taskargs[1]))
+                logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (
+                    options.taskargs[0], options.taskargs[1]))
                 sys.exit(1)
 
             # Recurse into signature comparison
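
The find_siginfo_task rework above assumes the command now returns a dict keyed by
signature hash, each entry carrying 'path' and 'time', rather than the old
{path: mtime} mapping. Under that assumed shape, picking the most recent files
reduces to:

    # Hypothetical result, shaped the way the new code indexes it
    sigfiles = {
        "3c9...": {"path": "stamps/.../do_compile.sigdata.3c9...", "time": 1700000000.0},
        "7fe...": {"path": "stamps/.../do_compile.sigdata.7fe...", "time": 1700000100.0},
    }
    latestsigs = sorted(sigfiles, key=lambda h: sigfiles[h]["time"])[-2:]
    latestfiles = [sigfiles[h]["path"] for h in latestsigs]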
diff --git a/bitbake/bin/bitbake-getvar b/bitbake/bin/bitbake-getvar
new file mode 100755
index 0000000000..378fb13572
--- /dev/null
+++ b/bitbake/bin/bitbake-getvar
@@ -0,0 +1,71 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import io
+import os
+import sys
+import warnings
+import logging
+warnings.simplefilter("default")
+
+bindir = os.path.dirname(__file__)
+topdir = os.path.dirname(bindir)
+sys.path[0:0] = [os.path.join(topdir, 'lib')]
+
+import bb.providers
+import bb.tinfoil
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Bitbake Query Variable")
+    parser.add_argument("variable", help="variable name to query")
+    parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
+    parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
+    parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
+    parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
+    parser.add_argument('-q', '--quiet', help='Silence bitbake server logging', action="store_true")
+    parser.add_argument('--ignore-undefined', help='Suppress any errors related to undefined variables', action="store_true")
+    args = parser.parse_args()
+
+    if not args.value:
+        if args.unexpand:
+            sys.exit("--unexpand only makes sense with --value")
+
+        if args.flag:
+            sys.exit("--flag only makes sense with --value")
+
+    quiet = args.quiet or args.value
+    if quiet:
+        logger = logging.getLogger("BitBake")
+        logger.setLevel(logging.WARNING)
+
+    with bb.tinfoil.Tinfoil(tracking=True, setup_logging=not quiet) as tinfoil:
+        if args.recipe:
+            tinfoil.prepare(quiet=3 if quiet else 2)
+            try:
+                d = tinfoil.parse_recipe(args.recipe)
+            except bb.providers.NoProvider as e:
+                sys.exit(str(e))
+        else:
+            tinfoil.prepare(quiet=2, config_only=True)
+            # Expand keys and run anonymous functions to get identical result to
+            # "bitbake -e"
+            d = tinfoil.finalizeData()
+
+        value = None
+        if args.flag:
+            value = d.getVarFlag(args.variable, args.flag, expand=not args.unexpand)
+            if value is None and not args.ignore_undefined:
+                sys.exit(f"The flag '{args.flag}' is not defined for variable '{args.variable}'")
+        else:
+            value = d.getVar(args.variable, expand=not args.unexpand)
+            if value is None and not args.ignore_undefined:
+                sys.exit(f"The variable '{args.variable}' is not defined")
+        if args.value:
+            print(str(value if value is not None else ""))
+        else:
+            bb.data.emit_var(args.variable, d=d, all=True)
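
Given the options defined above, some illustrative invocations of the new tool
(variable and recipe names are examples only):

    bitbake-getvar MACHINE                      # value plus full variable history
    bitbake-getvar --value MACHINE              # just the expanded value
    bitbake-getvar --value --unexpand MACHINE   # the unexpanded value
    bitbake-getvar -r busybox --value PV        # value from one recipe's datastore
    bitbake-getvar --value --flag doc DISTRO    # a single variable flag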
diff --git a/bitbake/bin/bitbake-hashclient b/bitbake/bin/bitbake-hashclient
index a89290217b..b8755c5797 100755
--- a/bitbake/bin/bitbake-hashclient
+++ b/bitbake/bin/bitbake-hashclient
@@ -13,6 +13,12 @@ import pprint
 import sys
 import threading
 import time
+import warnings
+import netrc
+import json
+import statistics
+import textwrap
+warnings.simplefilter("default")
 
 try:
     import tqdm
@@ -34,18 +40,42 @@ except ImportError:
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
 
 import hashserv
+import bb.asyncrpc
 
 DEFAULT_ADDRESS = 'unix://./hashserve.sock'
 METHOD = 'stress.test.method'
 
+def print_user(u):
+    print(f"Username: {u['username']}")
+    if "permissions" in u:
+        print("Permissions: " + " ".join(u["permissions"]))
+    if "token" in u:
+        print(f"Token: {u['token']}")
+
 
 def main():
+    def handle_get(args, client):
+        result = client.get_taskhash(args.method, args.taskhash, all_properties=True)
+        if not result:
+            return 0
+
+        print(json.dumps(result, sort_keys=True, indent=4))
+        return 0
+
+    def handle_get_outhash(args, client):
+        result = client.get_outhash(args.method, args.outhash, args.taskhash)
+        if not result:
+            return 0
+
+        print(json.dumps(result, sort_keys=True, indent=4))
+        return 0
+
     def handle_stats(args, client):
         if args.reset:
             s = client.reset_stats()
         else:
             s = client.get_stats()
-        pprint.pprint(s)
+        print(json.dumps(s, sort_keys=True, indent=4))
         return 0
 
     def handle_stress(args, client):
@@ -53,47 +83,59 @@ def main():
             nonlocal found_hashes
             nonlocal missed_hashes
             nonlocal max_time
+            nonlocal times
 
-        client = hashserv.create_client(args.address)
-
-        for i in range(args.requests):
-            taskhash = hashlib.sha256()
-            taskhash.update(args.taskhash_seed.encode('utf-8'))
-            taskhash.update(str(i).encode('utf-8'))
+        with hashserv.create_client(args.address) as client:
+            for i in range(args.requests):
+                taskhash = hashlib.sha256()
+                taskhash.update(args.taskhash_seed.encode('utf-8'))
+                taskhash.update(str(i).encode('utf-8'))
 
                 start_time = time.perf_counter()
                 l = client.get_unihash(METHOD, taskhash.hexdigest())
                 elapsed = time.perf_counter() - start_time
 
                 with lock:
                     if l:
                         found_hashes += 1
                     else:
                         missed_hashes += 1
 
-                max_time = max(elapsed, max_time)
-                pbar.update()
+                    times.append(elapsed)
+                    pbar.update()
 
     max_time = 0
     found_hashes = 0
     missed_hashes = 0
     lock = threading.Lock()
-    total_requests = args.clients * args.requests
+    times = []
    start_time = time.perf_counter()
-    with ProgressBar(total=total_requests) as pbar:
+    with ProgressBar(total=args.clients * args.requests) as pbar:
        threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)]
        for t in threads:
            t.start()
 
        for t in threads:
            t.join()
+        total_elapsed = time.perf_counter() - start_time
 
-        elapsed = time.perf_counter() - start_time
-        with lock:
-            print("%d requests in %.1fs. %.1f requests per second" % (total_requests, elapsed, total_requests / elapsed))
-            print("Average request time %.8fs" % (elapsed / total_requests))
-            print("Max request time was %.8fs" % max_time)
-            print("Found %d hashes, missed %d" % (found_hashes, missed_hashes))
+    with lock:
+        mean = statistics.mean(times)
+        median = statistics.median(times)
+        stddev = statistics.pstdev(times)
+
+        print(f"Number of clients: {args.clients}")
+        print(f"Requests per client: {args.requests}")
+        print(f"Number of requests: {len(times)}")
+        print(f"Total elapsed time: {total_elapsed:.3f}s")
+        print(f"Total request rate: {len(times)/total_elapsed:.3f} req/s")
+        print(f"Average request time: {mean:.3f}s")
+        print(f"Median request time: {median:.3f}s")
+        print(f"Request time std dev: {stddev:.3f}s")
+        print(f"Maximum request time: {max(times):.3f}s")
+        print(f"Minimum request time: {min(times):.3f}s")
+        print(f"Hashes found: {found_hashes}")
+        print(f"Hashes missed: {missed_hashes}")
 
     if args.report:
         with ProgressBar(total=args.requests) as pbar:
@@ -111,12 +153,173 @@ def main():
             with lock:
                 pbar.update()
 
-    parser = argparse.ArgumentParser(description='Hash Equivalence Client')
+    def handle_remove(args, client):
+        where = {k: v for k, v in args.where}
+        if where:
+            result = client.remove(where)
+            print("Removed %d row(s)" % (result["count"]))
+        else:
+            print("No query specified")
+
+    def handle_clean_unused(args, client):
+        result = client.clean_unused(args.max_age)
+        print("Removed %d rows" % (result["count"]))
+        return 0
+
+    def handle_refresh_token(args, client):
+        r = client.refresh_token(args.username)
+        print_user(r)
+
+    def handle_set_user_permissions(args, client):
+        r = client.set_user_perms(args.username, args.permissions)
+        print_user(r)
+
+    def handle_get_user(args, client):
+        r = client.get_user(args.username)
+        print_user(r)
+
+    def handle_get_all_users(args, client):
+        users = client.get_all_users()
+        print("{username:20}| {permissions}".format(username="Username", permissions="Permissions"))
+        print(("-" * 20) + "+" + ("-" * 20))
+        for u in users:
+            print("{username:20}| {permissions}".format(username=u["username"], permissions=" ".join(u["permissions"])))
+
+    def handle_new_user(args, client):
+        r = client.new_user(args.username, args.permissions)
+        print_user(r)
+
+    def handle_delete_user(args, client):
+        r = client.delete_user(args.username)
+        print_user(r)
+
+    def handle_get_db_usage(args, client):
+        usage = client.get_db_usage()
+        print(usage)
+        tables = sorted(usage.keys())
+        print("{name:20}| {rows:20}".format(name="Table name", rows="Rows"))
+        print(("-" * 20) + "+" + ("-" * 20))
+        for t in tables:
+            print("{name:20}| {rows:<20}".format(name=t, rows=usage[t]["rows"]))
+        print()
+
+        total_rows = sum(t["rows"] for t in usage.values())
+        print(f"Total rows: {total_rows}")
+
+    def handle_get_db_query_columns(args, client):
+        columns = client.get_db_query_columns()
+        print("\n".join(sorted(columns)))
+
+    def handle_gc_status(args, client):
+        result = client.gc_status()
+        if not result["mark"]:
+            print("No Garbage collection in progress")
+            return 0
+
+        print("Current Mark: %s" % result["mark"])
+        print("Total hashes to keep: %d" % result["keep"])
+        print("Total hashes to remove: %s" % result["remove"])
+        return 0
+
+    def handle_gc_mark(args, client):
+        where = {k: v for k, v in args.where}
+        result = client.gc_mark(args.mark, where)
+        print("New hashes marked: %d" % result["count"])
+        return 0
+
+    def handle_gc_mark_stream(args, client):
+        stdin = (l.strip() for l in sys.stdin)
+        marked_hashes = 0
+
+        try:
+            result = client.gc_mark_stream(args.mark, stdin)
+            marked_hashes = result["count"]
+        except ConnectionError:
+            logger.warning(
+                "Server doesn't seem to support `gc-mark-stream`. Sending "
+                "hashes sequentially using `gc-mark` API."
+            )
+            for line in stdin:
+                pairs = line.split()
+                condition = dict(zip(pairs[::2], pairs[1::2]))
+                result = client.gc_mark(args.mark, condition)
+                marked_hashes += result["count"]
+
+        print("New hashes marked: %d" % marked_hashes)
+        return 0
+
+    def handle_gc_sweep(args, client):
+        result = client.gc_sweep(args.mark)
+        print("Removed %d rows" % result["count"])
+        return 0
+
+    def handle_unihash_exists(args, client):
+        result = client.unihash_exists(args.unihash)
+        if args.quiet:
+            return 0 if result else 1
+
+        print("true" if result else "false")
+        return 0
+
+    def handle_ping(args, client):
+        times = []
+        for i in range(1, args.count + 1):
+            if not args.quiet:
+                print(f"Ping {i} of {args.count}... ", end="")
+            start_time = time.perf_counter()
+            client.ping()
+            elapsed = time.perf_counter() - start_time
+            times.append(elapsed)
+            if not args.quiet:
+                print(f"{elapsed:.3f}s")
+
+        mean = statistics.mean(times)
+        median = statistics.median(times)
+        std_dev = statistics.pstdev(times)
+
+        if not args.quiet:
+            print("------------------------")
+        print(f"Number of pings: {len(times)}")
+        print(f"Average round trip time: {mean:.3f}s")
+        print(f"Median round trip time: {median:.3f}s")
+        print(f"Round trip time std dev: {std_dev:.3f}s")
+        print(f"Min time is: {min(times):.3f}s")
+        print(f"Max time is: {max(times):.3f}s")
+        return 0
+
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        description='Hash Equivalence Client',
+        epilog=textwrap.dedent(
+            """
+            Possible ADDRESS options are:
+                unix://PATH         Connect to UNIX domain socket at PATH
+                ws://HOST[:PORT]    Connect to websocket at HOST:PORT (default port is 80)
+                wss://HOST[:PORT]   Connect to secure websocket at HOST:PORT (default port is 443)
+                HOST:PORT           Connect to TCP server at HOST:PORT
+            """
+        ),
+    )
     parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
     parser.add_argument('--log', default='WARNING', help='Set logging level')
+    parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
+    parser.add_argument('--password', '-p', metavar="TOKEN", help="Authenticate using token TOKEN")
+    parser.add_argument('--become', '-b', metavar="USERNAME", help="Impersonate user USERNAME (if allowed) when performing actions")
+    parser.add_argument('--no-netrc', '-n', action="store_false", dest="netrc", help="Do not use .netrc")
 
     subparsers = parser.add_subparsers()
 
+    get_parser = subparsers.add_parser('get', help="Get the unihash for a taskhash")
+    get_parser.add_argument("method", help="Method to query")
+    get_parser.add_argument("taskhash", help="Task hash to query")
+    get_parser.set_defaults(func=handle_get)
+
+    get_outhash_parser = subparsers.add_parser('get-outhash', help="Get output hash information")
+    get_outhash_parser.add_argument("method", help="Method to query")
+    get_outhash_parser.add_argument("outhash", help="Output hash to query")
+    get_outhash_parser.add_argument("taskhash", help="Task hash to query")
+    get_outhash_parser.set_defaults(func=handle_get_outhash)
+
     stats_parser = subparsers.add_parser('stats', help='Show server stats')
     stats_parser.add_argument('--reset', action='store_true',
                               help='Reset server stats')
@@ -135,6 +338,79 @@ def main():
                               help='Include string in outhash')
     stress_parser.set_defaults(func=handle_stress)
 
+    remove_parser = subparsers.add_parser('remove', help="Remove hash entries")
+    remove_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+                               help="Remove entries from table where KEY == VALUE")
+    remove_parser.set_defaults(func=handle_remove)
+
+    clean_unused_parser = subparsers.add_parser('clean-unused', help="Remove unused database entries")
+    clean_unused_parser.add_argument("max_age", metavar="SECONDS", type=int, help="Remove unused entries older than SECONDS old")
+    clean_unused_parser.set_defaults(func=handle_clean_unused)
+
+    refresh_token_parser = subparsers.add_parser('refresh-token', help="Refresh auth token")
+    refresh_token_parser.add_argument("--username", "-u", help="Refresh the token for another user (if authorized)")
+    refresh_token_parser.set_defaults(func=handle_refresh_token)
+
+    set_user_perms_parser = subparsers.add_parser('set-user-perms', help="Set new permissions for user")
+    set_user_perms_parser.add_argument("--username", "-u", help="Username", required=True)
+    set_user_perms_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+    set_user_perms_parser.set_defaults(func=handle_set_user_permissions)
+
+    get_user_parser = subparsers.add_parser('get-user', help="Get user")
+    get_user_parser.add_argument("--username", "-u", help="Username")
+    get_user_parser.set_defaults(func=handle_get_user)
+
+    get_all_users_parser = subparsers.add_parser('get-all-users', help="List all users")
+    get_all_users_parser.set_defaults(func=handle_get_all_users)
+
+    new_user_parser = subparsers.add_parser('new-user', help="Create new user")
+    new_user_parser.add_argument("--username", "-u", help="Username", required=True)
+    new_user_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+    new_user_parser.set_defaults(func=handle_new_user)
+
+    delete_user_parser = subparsers.add_parser('delete-user', help="Delete user")
+    delete_user_parser.add_argument("--username", "-u", help="Username", required=True)
+    delete_user_parser.set_defaults(func=handle_delete_user)
+
+    db_usage_parser = subparsers.add_parser('get-db-usage', help="Database Usage")
+    db_usage_parser.set_defaults(func=handle_get_db_usage)
+
+    db_query_columns_parser = subparsers.add_parser('get-db-query-columns', help="Show columns that can be used in database queries")
+    db_query_columns_parser.set_defaults(func=handle_get_db_query_columns)
+
+    gc_status_parser = subparsers.add_parser("gc-status", help="Show garbage collection status")
+    gc_status_parser.set_defaults(func=handle_gc_status)
+
+    gc_mark_parser = subparsers.add_parser('gc-mark', help="Mark hashes to be kept for garbage collection")
+    gc_mark_parser.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_mark_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+                                help="Keep entries in table where KEY == VALUE")
+    gc_mark_parser.set_defaults(func=handle_gc_mark)
+
+    gc_mark_parser_stream = subparsers.add_parser(
+        'gc-mark-stream',
+        help=(
+            "Mark multiple hashes to be retained for garbage collection. Input should be provided via stdin, "
+            "with each line formatted as key-value pairs separated by spaces, for example 'column1 foo column2 bar'."
+        )
+    )
+    gc_mark_parser_stream.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_mark_parser_stream.set_defaults(func=handle_gc_mark_stream)
+
+    gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
+    gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_sweep_parser.set_defaults(func=handle_gc_sweep)
+
+    unihash_exists_parser = subparsers.add_parser('unihash-exists', help="Check if a unihash is known to the server")
+    unihash_exists_parser.add_argument("--quiet", action="store_true", help="Don't print status. Instead, exit with 0 if unihash exists and 1 if it does not")
+    unihash_exists_parser.add_argument("unihash", help="Unihash to check")
+    unihash_exists_parser.set_defaults(func=handle_unihash_exists)
+
+    ping_parser = subparsers.add_parser('ping', help="Ping server")
+    ping_parser.add_argument("-n", "--count", type=int, help="Number of pings. Default is %(default)s", default=10)
+    ping_parser.add_argument("-q", "--quiet", action="store_true", help="Don't print each ping; only print results")
+    ping_parser.set_defaults(func=handle_ping)
+
     args = parser.parse_args()
 
     logger = logging.getLogger('hashserv')
@@ -148,11 +424,30 @@ def main():
     console.setLevel(level)
     logger.addHandler(console)
 
+    login = args.login
+    password = args.password
+
+    if login is None and args.netrc:
+        try:
+            n = netrc.netrc()
+            auth = n.authenticators(args.address)
+            if auth is not None:
+                login, _, password = auth
+        except FileNotFoundError:
+            pass
+        except netrc.NetrcParseError as e:
+            sys.stderr.write(f"Error parsing {e.filename}:{e.lineno}: {e.msg}\n")
+
     func = getattr(args, 'func', None)
     if func:
-        client = hashserv.create_client(args.address)
-
-        return func(args, client)
+        try:
+            with hashserv.create_client(args.address, login, password) as client:
+                if args.become:
+                    client.become_user(args.become)
+                return func(args, client)
+        except bb.asyncrpc.InvokeError as e:
+            print(f"ERROR: {e}")
+            return 1
 
     return 0
 
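
Some illustrative invocations of the extended client (addresses and credentials are
placeholders):

    bitbake-hashclient --address ws://hashserv.example.com:8686 ping -n 5
    bitbake-hashclient --address unix://./hashserve.sock stats
    bitbake-hashclient --address ws://hashserv.example.com:8686 \
        --login admin --password SECRET new-user -u builder @read @report

Since netrc.authenticators() is looked up with the literal --address string, a
matching ~/.netrc entry would presumably take the form:

    machine ws://hashserv.example.com:8686
    login builder
    password TOKEN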
diff --git a/bitbake/bin/bitbake-hashserv b/bitbake/bin/bitbake-hashserv
index 153f65a378..01503736b9 100755
--- a/bitbake/bin/bitbake-hashserv
+++ b/bitbake/bin/bitbake-hashserv
@@ -10,55 +10,170 @@ import sys
10import logging 10import logging
11import argparse 11import argparse
12import sqlite3 12import sqlite3
13import warnings
13 14
14sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib')) 15warnings.simplefilter("default")
16
17sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
15 18
16import hashserv 19import hashserv
20from hashserv.server import DEFAULT_ANON_PERMS
17 21
18VERSION = "1.0.0" 22VERSION = "1.0.0"
19 23
20DEFAULT_BIND = 'unix://./hashserve.sock' 24DEFAULT_BIND = "unix://./hashserve.sock"
21 25
22 26
23def main(): 27def main():
24 parser = argparse.ArgumentParser(description='Hash Equivalence Reference Server. Version=%s' % VERSION, 28 parser = argparse.ArgumentParser(
25 epilog='''The bind address is the path to a unix domain socket if it is 29 description="Hash Equivalence Reference Server. Version=%s" % VERSION,
26 prefixed with "unix://". Otherwise, it is an IP address 30 formatter_class=argparse.RawTextHelpFormatter,
27 and port in form ADDRESS:PORT. To bind to all addresses, leave 31 epilog="""
28 the ADDRESS empty, e.g. "--bind :8686". To bind to a specific 32The bind address may take one of the following formats:
29 IPv6 address, enclose the address in "[]", e.g. 33 unix://PATH - Bind to unix domain socket at PATH
30 "--bind [::1]:8686"''' 34 ws://ADDRESS:PORT - Bind to websocket on ADDRESS:PORT
31 ) 35 ADDRESS:PORT - Bind to raw TCP socket on ADDRESS:PORT
32 36
33 parser.add_argument('-b', '--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")') 37To bind to all addresses, leave the ADDRESS empty, e.g. "--bind :8686" or
34 parser.add_argument('-d', '--database', default='./hashserv.db', help='Database file (default "%(default)s")') 38"--bind ws://:8686". To bind to a specific IPv6 address, enclose the address in
35 parser.add_argument('-l', '--log', default='WARNING', help='Set logging level') 39"[]", e.g. "--bind [::1]:8686" or "--bind ws://[::1]:8686"
36 parser.add_argument('-u', '--upstream', help='Upstream hashserv to pull hashes from') 40
37 parser.add_argument('-r', '--read-only', action='store_true', help='Disallow write operations from clients') 41Note that the default Anonymous permissions are designed to not break existing
42server instances when upgrading, but are not particularly secure defaults. If
43you want to use authentication, it is recommended that you use "--anon-perms
44@read" to only give anonymous users read access, or "--anon-perms @none" to
45give un-authenticated users no access at all.
46
47Setting "--anon-perms @all" or "--anon-perms @user-admin" is not allowed, since
48this would allow anonymous users to manage all users accounts, which is a bad
49idea.
50
51If you are using user authentication, you should run your server in websockets
52mode with an SSL terminating load balancer in front of it (as this server does
53not implement SSL). Otherwise all usernames and passwords will be transmitted
54in the clear. When configured this way, clients can connect using a secure
55websocket, as in "wss://SERVER:PORT"
56
57The following permissions are supported by the server:
58
59 @none - No permissions
60 @read - The ability to read equivalent hashes from the server
61 @report - The ability to report equivalent hashes to the server
62 @db-admin - Manage the hash database(s). This includes cleaning the
63 database, removing hashes, etc.
64 @user-admin - The ability to manage user accounts. This includes, creating
65 users, deleting users, resetting login tokens, and assigning
66 permissions.
67 @all - All possible permissions, including any that may be added
68 in the future
69 """,
70 )
71
72 parser.add_argument(
73 "-b",
74 "--bind",
75 default=os.environ.get("HASHSERVER_BIND", DEFAULT_BIND),
76 help='Bind address (default $HASHSERVER_BIND, "%(default)s")',
77 )
78 parser.add_argument(
79 "-d",
80 "--database",
81 default=os.environ.get("HASHSERVER_DB", "./hashserv.db"),
82 help='Database file (default $HASHSERVER_DB, "%(default)s")',
83 )
84 parser.add_argument(
85 "-l",
86 "--log",
87 default=os.environ.get("HASHSERVER_LOG_LEVEL", "WARNING"),
88 help='Set logging level (default $HASHSERVER_LOG_LEVEL, "%(default)s")',
89 )
90 parser.add_argument(
91 "-u",
92 "--upstream",
93 default=os.environ.get("HASHSERVER_UPSTREAM", None),
94 help="Upstream hashserv to pull hashes from ($HASHSERVER_UPSTREAM)",
95 )
96 parser.add_argument(
97 "-r",
98 "--read-only",
99 action="store_true",
100 help="Disallow write operations from clients ($HASHSERVER_READ_ONLY)",
101 )
102 parser.add_argument(
103 "--db-username",
104 default=os.environ.get("HASHSERVER_DB_USERNAME", None),
105 help="Database username ($HASHSERVER_DB_USERNAME)",
106 )
107 parser.add_argument(
108 "--db-password",
109 default=os.environ.get("HASHSERVER_DB_PASSWORD", None),
110 help="Database password ($HASHSERVER_DB_PASSWORD)",
111 )
112 parser.add_argument(
113 "--anon-perms",
114 metavar="PERM[,PERM[,...]]",
115 default=os.environ.get("HASHSERVER_ANON_PERMS", ",".join(DEFAULT_ANON_PERMS)),
116 help='Permissions to give anonymous users (default $HASHSERVER_ANON_PERMS, "%(default)s")',
117 )
118 parser.add_argument(
119 "--admin-user",
120 default=os.environ.get("HASHSERVER_ADMIN_USER", None),
121 help="Create default admin user with name ADMIN_USER ($HASHSERVER_ADMIN_USER)",
122 )
123 parser.add_argument(
124 "--admin-password",
125 default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None),
126 help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)",
127 )
128 parser.add_argument(
129 "--reuseport",
130 action="store_true",
131 help="Enable SO_REUSEPORT, allowing multiple servers to bind to the same port for load balancing",
132 )
38 133
39 args = parser.parse_args() 134 args = parser.parse_args()
40 135
41 logger = logging.getLogger('hashserv') 136 logger = logging.getLogger("hashserv")
42 137
43 level = getattr(logging, args.log.upper(), None) 138 level = getattr(logging, args.log.upper(), None)
44 if not isinstance(level, int): 139 if not isinstance(level, int):
45 raise ValueError('Invalid log level: %s' % args.log) 140 raise ValueError(
141 "Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log
142 )
46 143
47 logger.setLevel(level) 144 logger.setLevel(level)
48 console = logging.StreamHandler() 145 console = logging.StreamHandler()
49 console.setLevel(level) 146 console.setLevel(level)
50 logger.addHandler(console) 147 logger.addHandler(console)
51 148
52 server = hashserv.create_server(args.bind, args.database, upstream=args.upstream, read_only=args.read_only) 149 read_only = (os.environ.get("HASHSERVER_READ_ONLY", "0") == "1") or args.read_only
150 if "," in args.anon_perms:
151 anon_perms = args.anon_perms.split(",")
152 else:
153 anon_perms = args.anon_perms.split()
154
155 server = hashserv.create_server(
156 args.bind,
157 args.database,
158 upstream=args.upstream,
159 read_only=read_only,
160 db_username=args.db_username,
161 db_password=args.db_password,
162 anon_perms=anon_perms,
163 admin_username=args.admin_user,
164 admin_password=args.admin_password,
165 reuseport=args.reuseport,
166 )
53 server.serve_forever() 167 server.serve_forever()
54 return 0 168 return 0
55 169
56 170
57if __name__ == '__main__': 171if __name__ == "__main__":
58 try: 172 try:
59 ret = main() 173 ret = main()
60 except Exception: 174 except Exception:
61 ret = 1 175 ret = 1
62 import traceback 176 import traceback
177
63 traceback.print_exc() 178 traceback.print_exc()
64 sys.exit(ret) 179 sys.exit(ret)
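For illustration, the expanded option set could be exercised as follows (a sketch only: the bind address, database path, and credentials are hypothetical, and each flag can equally come from its HASHSERVER_* environment variable):

    bitbake-hashserv --bind 0.0.0.0:8686 --database ./hashserv.db \
        --anon-perms @read --admin-user admin --admin-password secret --log INFO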
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
index ff085d6744..341ecbcd97 100755
--- a/bitbake/bin/bitbake-layers
+++ b/bitbake/bin/bitbake-layers
@@ -14,15 +14,18 @@ import logging
14import os 14import os
15import sys 15import sys
16import argparse 16import argparse
17import warnings
18warnings.simplefilter("default")
17 19
18bindir = os.path.dirname(__file__) 20bindir = os.path.dirname(__file__)
21toolname = os.path.basename(__file__).split(".")[0]
19topdir = os.path.dirname(bindir) 22topdir = os.path.dirname(bindir)
20sys.path[0:0] = [os.path.join(topdir, 'lib')] 23sys.path[0:0] = [os.path.join(topdir, 'lib')]
21 24
22import bb.tinfoil 25import bb.tinfoil
23import bb.msg 26import bb.msg
24 27
25logger = bb.msg.logger_create('bitbake-layers', sys.stdout) 28logger = bb.msg.logger_create(toolname, sys.stdout)
26 29
27def main(): 30def main():
28 parser = argparse.ArgumentParser( 31 parser = argparse.ArgumentParser(
@@ -31,7 +34,7 @@ def main():
31 add_help=False) 34 add_help=False)
32 parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true') 35 parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
33 parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true') 36 parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
34 parser.add_argument('-F', '--force', help='Force add without recipe parse verification', action='store_true') 37 parser.add_argument('-F', '--force', help='Forced execution: can be specified multiple times. -F will force add without recipe parse verification and -FF will additionally force the run without layer parsing.', action='count', default=0)
35 parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR') 38 parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
36 39
37 global_args, unparsed_args = parser.parse_known_args() 40 global_args, unparsed_args = parser.parse_known_args()
@@ -55,22 +58,27 @@ def main():
55 level=logger.getEffectiveLevel()) 58 level=logger.getEffectiveLevel())
56 59
57 plugins = [] 60 plugins = []
58 tinfoil = bb.tinfoil.Tinfoil(tracking=True) 61 with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
59 tinfoil.logger.setLevel(logger.getEffectiveLevel()) 62 tinfoil.logger.setLevel(logger.getEffectiveLevel())
60 try: 63
61 tinfoil.prepare(True) 64 if global_args.force > 1:
62 for path in ([topdir] + 65 bbpaths = []
63 tinfoil.config_data.getVar('BBPATH').split(':')): 66 else:
64 pluginpath = os.path.join(path, 'lib', 'bblayers') 67 tinfoil.prepare(True)
68 bbpaths = tinfoil.config_data.getVar('BBPATH').split(':')
69
70 for path in ([topdir] + bbpaths):
71 pluginbasepath = {"bitbake-layers":'bblayers', 'bitbake-config-build':'bbconfigbuild'}[toolname]
72 pluginpath = os.path.join(path, 'lib', pluginbasepath)
65 bb.utils.load_plugins(logger, plugins, pluginpath) 73 bb.utils.load_plugins(logger, plugins, pluginpath)
66 74
67 registered = False 75 registered = False
68 for plugin in plugins: 76 for plugin in plugins:
77 if hasattr(plugin, 'tinfoil_init') and global_args.force <= 1:
78 plugin.tinfoil_init(tinfoil)
69 if hasattr(plugin, 'register_commands'): 79 if hasattr(plugin, 'register_commands'):
70 registered = True 80 registered = True
71 plugin.register_commands(subparsers) 81 plugin.register_commands(subparsers)
72 if hasattr(plugin, 'tinfoil_init'):
73 plugin.tinfoil_init(tinfoil)
74 82
75 if not registered: 83 if not registered:
76 logger.error("No commands registered - missing plugins?") 84 logger.error("No commands registered - missing plugins?")
@@ -84,8 +92,6 @@ def main():
84 tinfoil.config_data.enableTracking() 92 tinfoil.config_data.enableTracking()
85 93
86 return args.func(args) 94 return args.func(args)
87 finally:
88 tinfoil.shutdown()
89 95
90 96
91if __name__ == "__main__": 97if __name__ == "__main__":
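Since --force is now a counted flag, repeating it raises the force level. Hypothetical invocations, using add-layer as a representative subcommand and an example layer path:

    bitbake-layers -F add-layer ../meta-example     # force add without recipe parse verification
    bitbake-layers -FF add-layer ../meta-example    # additionally skip layer parsing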
diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv
index 1e9b6cbc1b..3992e84eab 100755
--- a/bitbake/bin/bitbake-prserv
+++ b/bitbake/bin/bitbake-prserv
@@ -1,49 +1,103 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright BitBake Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
6import os 8import os
7import sys,logging 9import sys,logging
8import optparse 10import argparse
11import warnings
12warnings.simplefilter("default")
9 13
10sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib')) 14sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
11 15
12import prserv 16import prserv
13import prserv.serv 17import prserv.serv
14 18
15__version__="1.0.0" 19VERSION = "2.0.0"
16 20
17PRHOST_DEFAULT='0.0.0.0' 21PRHOST_DEFAULT="0.0.0.0"
18PRPORT_DEFAULT=8585 22PRPORT_DEFAULT=8585
19 23
24def init_logger(logfile, loglevel):
25 numeric_level = getattr(logging, loglevel.upper(), None)
26 if not isinstance(numeric_level, int):
27 raise ValueError("Invalid log level: %s" % loglevel)
28 FORMAT = "%(asctime)-15s %(message)s"
29 logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
30
20def main(): 31def main():
21 parser = optparse.OptionParser( 32 parser = argparse.ArgumentParser(
22 version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__), 33 description="BitBake PR Server. Version=%s" % VERSION,
23 usage = "%prog < --start | --stop > [options]") 34 formatter_class=argparse.RawTextHelpFormatter)
24 35
25 parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store", 36 parser.add_argument(
26 dest="dbfile", type="string", default="prserv.sqlite3") 37 "-f",
27 parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store", 38 "--file",
28 dest="logfile", type="string", default="prserv.log") 39 default="prserv.sqlite3",
29 parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG", 40 help="database filename (default: prserv.sqlite3)",
30 action = "store", type="string", dest="loglevel", default = "INFO") 41 )
31 parser.add_option("--start", help="start daemon", 42 parser.add_argument(
32 action="store_true", dest="start") 43 "-l",
33 parser.add_option("--stop", help="stop daemon", 44 "--log",
34 action="store_true", dest="stop") 45 default="prserv.log",
28 parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store", 45 help="log filename (default: prserv.log)",
36 dest="host", type="string", default=PRHOST_DEFAULT) 47 )
37 parser.add_option("--port", help="port number(default: 8585)", action="store", 48 parser.add_argument(
38 dest="port", type="int", default=PRPORT_DEFAULT) 49 "--loglevel",
39 50 default="INFO",
40 options, args = parser.parse_args(sys.argv) 51 help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
41 prserv.init_logger(os.path.abspath(options.logfile),options.loglevel) 52 )
42 53 parser.add_argument(
43 if options.start: 54 "--start",
44 ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile)) 55 action="store_true",
45 elif options.stop: 56 help="start daemon",
46 ret=prserv.serv.stop_daemon(options.host, options.port) 57 )
58 parser.add_argument(
59 "--stop",
60 action="store_true",
61 help="stop daemon",
62 )
63 parser.add_argument(
64 "--host",
65 help="ip address to bind",
66 default=PRHOST_DEFAULT,
67 )
68 parser.add_argument(
69 "--port",
70 type=int,
71 default=PRPORT_DEFAULT,
72 help="port number (default: 8585)",
73 )
74 parser.add_argument(
75 "-r",
76 "--read-only",
77 action="store_true",
78 help="open database in read-only mode",
79 )
80 parser.add_argument(
81 "-u",
82 "--upstream",
83 default=os.environ.get("PRSERV_UPSTREAM", None),
84 help="Upstream PR service (host:port)",
85 )
86
87 args = parser.parse_args()
88 init_logger(os.path.abspath(args.log), args.loglevel)
89
90 if args.start:
91 ret=prserv.serv.start_daemon(
92 args.file,
93 args.host,
94 args.port,
95 os.path.abspath(args.log),
96 args.read_only,
97 args.upstream
98 )
99 elif args.stop:
100 ret=prserv.serv.stop_daemon(args.host, args.port)
47 else: 101 else:
48 ret=parser.print_help() 102 ret=parser.print_help()
49 return ret 103 return ret
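Taken together, the converted flags allow invocations such as the following sketch (host and upstream address are hypothetical):

    bitbake-prserv --start --host 0.0.0.0 --port 8585 \
        --read-only --upstream prserv.example.com:8585
    bitbake-prserv --stop --host 0.0.0.0 --port 8585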
diff --git a/bitbake/bin/bitbake-selftest b/bitbake/bin/bitbake-selftest
index 6c0737416b..1b7a783fdc 100755
--- a/bitbake/bin/bitbake-selftest
+++ b/bitbake/bin/bitbake-selftest
@@ -7,12 +7,15 @@
7 7
8import os 8import os
9import sys, logging 9import sys, logging
10import warnings
11warnings.simplefilter("default")
10sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib')) 12sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
11 13
12import unittest 14import unittest
13try: 15try:
14 import bb 16 import bb
15 import hashserv 17 import hashserv
18 import prserv
16 import layerindexlib 19 import layerindexlib
17except RuntimeError as exc: 20except RuntimeError as exc:
18 sys.exit(str(exc)) 21 sys.exit(str(exc))
@@ -25,11 +28,12 @@ tests = ["bb.tests.codeparser",
25 "bb.tests.event", 28 "bb.tests.event",
26 "bb.tests.fetch", 29 "bb.tests.fetch",
27 "bb.tests.parse", 30 "bb.tests.parse",
28 "bb.tests.persist_data",
29 "bb.tests.runqueue", 31 "bb.tests.runqueue",
30 "bb.tests.siggen", 32 "bb.tests.siggen",
31 "bb.tests.utils", 33 "bb.tests.utils",
34 "bb.tests.compression",
32 "hashserv.tests", 35 "hashserv.tests",
36 "prserv.tests",
33 "layerindexlib.tests.layerindexobj", 37 "layerindexlib.tests.layerindexobj",
34 "layerindexlib.tests.restapi", 38 "layerindexlib.tests.restapi",
35 "layerindexlib.tests.cooker"] 39 "layerindexlib.tests.cooker"]
diff --git a/bitbake/bin/bitbake-server b/bitbake/bin/bitbake-server
index ffbc7894ef..a559109e3f 100755
--- a/bitbake/bin/bitbake-server
+++ b/bitbake/bin/bitbake-server
@@ -8,14 +8,17 @@
8import os 8import os
9import sys 9import sys
10import warnings 10import warnings
11warnings.simplefilter("default")
12warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
11import logging 13import logging
12sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib')) 14sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
13 15
14if sys.getfilesystemencoding() != "utf-8": 16import bb
15 sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.") 17
18bb.utils.check_system_locale()
16 19
17# Users shouldn't be running this code directly 20# Users shouldn't be running this code directly
18if len(sys.argv) != 10 or not sys.argv[1].startswith("decafbad"): 21if len(sys.argv) != 11 or not sys.argv[1].startswith("decafbad"):
19 print("bitbake-server is meant for internal execution by bitbake itself, please don't use it standalone.") 22 print("bitbake-server is meant for internal execution by bitbake itself, please don't use it standalone.")
20 sys.exit(1) 23 sys.exit(1)
21 24
@@ -26,20 +29,19 @@ readypipeinfd = int(sys.argv[3])
26logfile = sys.argv[4] 29logfile = sys.argv[4]
27lockname = sys.argv[5] 30lockname = sys.argv[5]
28sockname = sys.argv[6] 31sockname = sys.argv[6]
29timeout = sys.argv[7] 32timeout = float(sys.argv[7])
30xmlrpcinterface = (sys.argv[8], int(sys.argv[9])) 33profile = bool(int(sys.argv[8]))
34xmlrpcinterface = (sys.argv[9], int(sys.argv[10]))
31if xmlrpcinterface[0] == "None": 35if xmlrpcinterface[0] == "None":
32 xmlrpcinterface = (None, xmlrpcinterface[1]) 36 xmlrpcinterface = (None, xmlrpcinterface[1])
33if timeout == "None":
34 timeout = None
35 37
36# Replace standard fds with our own 38# Replace standard fds with our own
37with open('/dev/null', 'r') as si: 39with open('/dev/null', 'r') as si:
38 os.dup2(si.fileno(), sys.stdin.fileno()) 40 os.dup2(si.fileno(), sys.stdin.fileno())
39 41
40so = open(logfile, 'a+') 42with open(logfile, 'a+') as so:
41os.dup2(so.fileno(), sys.stdout.fileno()) 43 os.dup2(so.fileno(), sys.stdout.fileno())
42os.dup2(so.fileno(), sys.stderr.fileno()) 44 os.dup2(so.fileno(), sys.stderr.fileno())
43 45
44# Have stdout and stderr be the same so log output matches chronologically 46# Have stdout and stderr be the same so log output matches chronologically
45# and there aren't two separate buffers 47
@@ -50,5 +52,5 @@ logger = logging.getLogger("BitBake")
50handler = bb.event.LogHandler() 52handler = bb.event.LogHandler()
51logger.addHandler(handler) 53logger.addHandler(handler)
52 54
53bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface) 55bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface, profile)
54 56
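Schematically, the internal invocation now carries ten arguments after the program name (the magic token is abbreviated, sys.argv[2] is assumed to remain the lock fd as in the execServer() call, and all values are hypothetical):

    bitbake-server decafbad... <LOCKFD> <READYPIPEFD> <LOGFILE> <LOCKNAME> <SOCKNAME> \
        <TIMEOUT> <PROFILE 0|1> <XMLRPCHOST|None> <XMLRPCPORT>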
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 7765b9368b..d2b146a6a9 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -1,11 +1,15 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright BitBake Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
6import os 8import os
7import sys 9import sys
8import warnings 10import warnings
11warnings.simplefilter("default")
12warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
9sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib')) 13sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
10from bb import fetch2 14from bb import fetch2
11import logging 15import logging
@@ -16,11 +20,17 @@ import signal
16import pickle 20import pickle
17import traceback 21import traceback
18import queue 22import queue
23import shlex
24import subprocess
25import fcntl
19from multiprocessing import Lock 26from multiprocessing import Lock
20from threading import Thread 27from threading import Thread
21 28
22if sys.getfilesystemencoding() != "utf-8": 29# Remove when we have a minimum of python 3.10
23 sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.") 30if not hasattr(fcntl, 'F_SETPIPE_SZ'):
31 fcntl.F_SETPIPE_SZ = 1031
32
33bb.utils.check_system_locale()
24 34
25# Users shouldn't be running this code directly 35# Users shouldn't be running this code directly
26if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"): 36if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
@@ -40,7 +50,6 @@ if sys.argv[1].startswith("decafbadbad"):
40# updates to log files for use with tail 50# updates to log files for use with tail
41try: 51try:
42 if sys.stdout.name == '<stdout>': 52 if sys.stdout.name == '<stdout>':
43 import fcntl
44 fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL) 53 fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
45 fl |= os.O_SYNC 54 fl |= os.O_SYNC
46 fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl) 55 fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
@@ -52,6 +61,12 @@ logger = logging.getLogger("BitBake")
52 61
53worker_pipe = sys.stdout.fileno() 62worker_pipe = sys.stdout.fileno()
54bb.utils.nonblockingfd(worker_pipe) 63bb.utils.nonblockingfd(worker_pipe)
64# Try to make the pipe buffers larger as it is much more efficient. If we can't
65# e.g. out of buffer space (/proc/sys/fs/pipe-user-pages-soft) then just pass over.
66try:
67 fcntl.fcntl(worker_pipe, fcntl.F_SETPIPE_SZ, 512 * 1024)
68except:
69 pass
55# Need to guard against multiprocessing being used in child processes 70# Need to guard against multiprocessing being used in child processes
56# and multiple processes trying to write to the parent at the same time 71# and multiple processes trying to write to the parent at the same time
57worker_pipe_lock = None 72worker_pipe_lock = None
@@ -87,21 +102,21 @@ def worker_fire_prepickled(event):
87worker_thread_exit = False 102worker_thread_exit = False
88 103
89def worker_flush(worker_queue): 104def worker_flush(worker_queue):
90 worker_queue_int = b"" 105 worker_queue_int = bytearray()
91 global worker_pipe, worker_thread_exit 106 global worker_pipe, worker_thread_exit
92 107
93 while True: 108 while True:
94 try: 109 try:
95 worker_queue_int = worker_queue_int + worker_queue.get(True, 1) 110 worker_queue_int.extend(worker_queue.get(True, 1))
96 except queue.Empty: 111 except queue.Empty:
97 pass 112 pass
98 while (worker_queue_int or not worker_queue.empty()): 113 while (worker_queue_int or not worker_queue.empty()):
99 try: 114 try:
100 (_, ready, _) = select.select([], [worker_pipe], [], 1) 115 (_, ready, _) = select.select([], [worker_pipe], [], 1)
101 if not worker_queue.empty(): 116 if not worker_queue.empty():
102 worker_queue_int = worker_queue_int + worker_queue.get() 117 worker_queue_int.extend(worker_queue.get())
103 written = os.write(worker_pipe, worker_queue_int) 118 written = os.write(worker_pipe, worker_queue_int)
104 worker_queue_int = worker_queue_int[written:] 119 del worker_queue_int[0:written]
105 except (IOError, OSError) as e: 120 except (IOError, OSError) as e:
106 if e.errno != errno.EAGAIN and e.errno != errno.EPIPE: 121 if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
107 raise 122 raise
@@ -117,11 +132,10 @@ def worker_child_fire(event, d):
117 132
118 data = b"<event>" + pickle.dumps(event) + b"</event>" 133 data = b"<event>" + pickle.dumps(event) + b"</event>"
119 try: 134 try:
120 worker_pipe_lock.acquire() 135 with bb.utils.lock_timeout(worker_pipe_lock):
121 while(len(data)): 136 while(len(data)):
122 written = worker_pipe.write(data) 137 written = worker_pipe.write(data)
123 data = data[written:] 138 data = data[written:]
124 worker_pipe_lock.release()
125 except IOError: 139 except IOError:
126 sigterm_handler(None, None) 140 sigterm_handler(None, None)
127 raise 141 raise
@@ -140,44 +154,56 @@ def sigterm_handler(signum, frame):
140 os.killpg(0, signal.SIGTERM) 154 os.killpg(0, signal.SIGTERM)
141 sys.exit() 155 sys.exit()
142 156
143def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False): 157def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
158
159 fn = runtask['fn']
160 task = runtask['task']
161 taskname = runtask['taskname']
162 taskhash = runtask['taskhash']
163 unihash = runtask['unihash']
164 appends = runtask['appends']
165 layername = runtask['layername']
166 taskdepdata = runtask['taskdepdata']
167 quieterrors = runtask['quieterrors']
144 # We need to setup the environment BEFORE the fork, since 168 # We need to setup the environment BEFORE the fork, since
145 # a fork() or exec*() activates PSEUDO... 169 # a fork() or exec*() activates PSEUDO...
146 170
147 envbackup = {} 171 envbackup = {}
172 fakeroot = False
148 fakeenv = {} 173 fakeenv = {}
149 umask = None 174 umask = None
150 175
151 taskdep = workerdata["taskdeps"][fn] 176 uid = os.getuid()
177 gid = os.getgid()
178
179 taskdep = runtask['taskdep']
152 if 'umask' in taskdep and taskname in taskdep['umask']: 180 if 'umask' in taskdep and taskname in taskdep['umask']:
153 umask = taskdep['umask'][taskname] 181 umask = taskdep['umask'][taskname]
154 elif workerdata["umask"]: 182 elif workerdata["umask"]:
155 umask = workerdata["umask"] 183 umask = workerdata["umask"]
156 if umask: 184 if umask:
157 # umask might come in as a number or text string.. 185 # Convert to a python numeric value as it could be a string
158 try: 186 umask = bb.utils.to_filemode(umask)
159 umask = int(umask, 8)
160 except TypeError:
161 pass
162 187
163 dry_run = cfg.dry_run or dry_run_exec 188 dry_run = cfg.dry_run or runtask['dry_run']
164 189
165 # We can't use the fakeroot environment in a dry run as it possibly hasn't been built 190 # We can't use the fakeroot environment in a dry run as it possibly hasn't been built
166 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run: 191 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
167 envvars = (workerdata["fakerootenv"][fn] or "").split() 192 fakeroot = True
168 for key, value in (var.split('=') for var in envvars): 193 envvars = (runtask['fakerootenv'] or "").split()
194 for key, value in (var.split('=',1) for var in envvars):
169 envbackup[key] = os.environ.get(key) 195 envbackup[key] = os.environ.get(key)
170 os.environ[key] = value 196 os.environ[key] = value
171 fakeenv[key] = value 197 fakeenv[key] = value
172 198
173 fakedirs = (workerdata["fakerootdirs"][fn] or "").split() 199 fakedirs = (runtask['fakerootdirs'] or "").split()
174 for p in fakedirs: 200 for p in fakedirs:
175 bb.utils.mkdirhier(p) 201 bb.utils.mkdirhier(p)
176 logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' % 202 logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
177 (fn, taskname, ', '.join(fakedirs))) 203 (fn, taskname, ', '.join(fakedirs)))
178 else: 204 else:
179 envvars = (workerdata["fakerootnoenv"][fn] or "").split() 205 envvars = (runtask['fakerootnoenv'] or "").split()
180 for key, value in (var.split('=') for var in envvars): 206 for key, value in (var.split('=',1) for var in envvars):
181 envbackup[key] = os.environ.get(key) 207 envbackup[key] = os.environ.get(key)
182 os.environ[key] = value 208 os.environ[key] = value
183 fakeenv[key] = value 209 fakeenv[key] = value
@@ -219,19 +245,21 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
219 # Let SIGHUP exit as SIGTERM 245 # Let SIGHUP exit as SIGTERM
220 signal.signal(signal.SIGHUP, sigterm_handler) 246 signal.signal(signal.SIGHUP, sigterm_handler)
221 247
222 # No stdin 248 # No stdin & stdout
223 newsi = os.open(os.devnull, os.O_RDWR) 249 # stdout is used as a status report channel and must not be used by child processes.
224 os.dup2(newsi, sys.stdin.fileno()) 250 dumbio = os.open(os.devnull, os.O_RDWR)
251 os.dup2(dumbio, sys.stdin.fileno())
252 os.dup2(dumbio, sys.stdout.fileno())
225 253
226 if umask: 254 if umask is not None:
227 os.umask(umask) 255 os.umask(umask)
228 256
229 try: 257 try:
230 bb_cache = bb.cache.NoCache(databuilder)
231 (realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn) 258 (realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
232 the_data = databuilder.mcdata[mc] 259 the_data = databuilder.mcdata[mc]
233 the_data.setVar("BB_WORKERCONTEXT", "1") 260 the_data.setVar("BB_WORKERCONTEXT", "1")
234 the_data.setVar("BB_TASKDEPDATA", taskdepdata) 261 the_data.setVar("BB_TASKDEPDATA", taskdepdata)
262 the_data.setVar('BB_CURRENTTASK', taskname.replace("do_", ""))
235 if cfg.limited_deps: 263 if cfg.limited_deps:
236 the_data.setVar("BB_LIMITEDDEPS", "1") 264 the_data.setVar("BB_LIMITEDDEPS", "1")
237 the_data.setVar("BUILDNAME", workerdata["buildname"]) 265 the_data.setVar("BUILDNAME", workerdata["buildname"])
@@ -245,12 +273,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
245 bb.parse.siggen.set_taskhashes(workerdata["newhashes"]) 273 bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
246 ret = 0 274 ret = 0
247 275
248 the_data = bb_cache.loadDataFull(fn, appends) 276 the_data = databuilder.parseRecipe(fn, appends, layername)
249 the_data.setVar('BB_TASKHASH', taskhash) 277 the_data.setVar('BB_TASKHASH', taskhash)
250 the_data.setVar('BB_UNIHASH', unihash) 278 the_data.setVar('BB_UNIHASH', unihash)
279 bb.parse.siggen.setup_datacache_from_datastore(fn, the_data)
251 280
252 bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", ""))) 281 bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
253 282
283 if not bb.utils.to_boolean(the_data.getVarFlag(taskname, 'network')):
284 if bb.utils.is_local_uid(uid):
285 logger.debug("Attempting to disable network for %s" % taskname)
286 bb.utils.disable_network(uid, gid)
287 else:
288 logger.debug("Skipping disable network for %s since %s is not a local uid." % (taskname, uid))
289
254 # exported_vars() returns a generator which *cannot* be passed to os.environ.update() 290 # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
255 # successfully. We also need to unset anything from the environment which shouldn't be there 291 # successfully. We also need to unset anything from the environment which shouldn't be there
256 exports = bb.data.exported_vars(the_data) 292 exports = bb.data.exported_vars(the_data)
@@ -279,10 +315,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
279 if not quieterrors: 315 if not quieterrors:
280 logger.critical(traceback.format_exc()) 316 logger.critical(traceback.format_exc())
281 os._exit(1) 317 os._exit(1)
318
319 sys.stdout.flush()
320 sys.stderr.flush()
321
282 try: 322 try:
283 if dry_run: 323 if dry_run:
284 return 0 324 return 0
285 return bb.build.exec_task(fn, taskname, the_data, cfg.profile) 325 try:
326 ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
327 finally:
328 if fakeroot:
329 fakerootcmd = shlex.split(the_data.getVar("FAKEROOTCMD"))
330 subprocess.run(fakerootcmd + ['-S'], check=True, stdout=subprocess.PIPE)
331 return ret
286 except: 332 except:
287 os._exit(1) 333 os._exit(1)
288 if not profiling: 334 if not profiling:
@@ -314,12 +360,12 @@ class runQueueWorkerPipe():
314 if pipeout: 360 if pipeout:
315 pipeout.close() 361 pipeout.close()
316 bb.utils.nonblockingfd(self.input) 362 bb.utils.nonblockingfd(self.input)
317 self.queue = b"" 363 self.queue = bytearray()
318 364
319 def read(self): 365 def read(self):
320 start = len(self.queue) 366 start = len(self.queue)
321 try: 367 try:
322 self.queue = self.queue + (self.input.read(102400) or b"") 368 self.queue.extend(self.input.read(512*1024) or b"")
323 except (OSError, IOError) as e: 369 except (OSError, IOError) as e:
324 if e.errno != errno.EAGAIN: 370 if e.errno != errno.EAGAIN:
325 raise 371 raise
@@ -347,7 +393,7 @@ class BitbakeWorker(object):
347 def __init__(self, din): 393 def __init__(self, din):
348 self.input = din 394 self.input = din
349 bb.utils.nonblockingfd(self.input) 395 bb.utils.nonblockingfd(self.input)
350 self.queue = b"" 396 self.queue = bytearray()
351 self.cookercfg = None 397 self.cookercfg = None
352 self.databuilder = None 398 self.databuilder = None
353 self.data = None 399 self.data = None
@@ -381,7 +427,7 @@ class BitbakeWorker(object):
381 if len(r) == 0: 427 if len(r) == 0:
382 # EOF on pipe, server must have terminated 428 # EOF on pipe, server must have terminated
383 self.sigterm_exception(signal.SIGTERM, None) 429 self.sigterm_exception(signal.SIGTERM, None)
384 self.queue = self.queue + r 430 self.queue.extend(r)
385 except (OSError, IOError): 431 except (OSError, IOError):
386 pass 432 pass
387 if len(self.queue): 433 if len(self.queue):
@@ -401,19 +447,35 @@ class BitbakeWorker(object):
401 while self.process_waitpid(): 447 while self.process_waitpid():
402 continue 448 continue
403 449
404
405 def handle_item(self, item, func): 450 def handle_item(self, item, func):
406 if self.queue.startswith(b"<" + item + b">"): 451 opening_tag = b"<" + item + b">"
407 index = self.queue.find(b"</" + item + b">") 452 if not self.queue.startswith(opening_tag):
408 while index != -1: 453 return
409 func(self.queue[(len(item) + 2):index]) 454
410 self.queue = self.queue[(index + len(item) + 3):] 455 tag_len = len(opening_tag)
411 index = self.queue.find(b"</" + item + b">") 456 if len(self.queue) < tag_len + 4:
457 # we need to receive more data
458 return
459 header = self.queue[tag_len:tag_len + 4]
460 payload_len = int.from_bytes(header, 'big')
461 # closing tag has length (tag_len + 1)
462 if len(self.queue) < tag_len * 2 + 1 + payload_len:
463 # we need to receive more data
464 return
465
466 index = self.queue.find(b"</" + item + b">")
467 if index != -1:
468 try:
469 func(self.queue[(tag_len + 4):index])
470 except pickle.UnpicklingError:
471 workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
472 raise
473 self.queue = self.queue[(index + len(b"</") + len(item) + len(b">")):]
412 474
413 def handle_cookercfg(self, data): 475 def handle_cookercfg(self, data):
414 self.cookercfg = pickle.loads(data) 476 self.cookercfg = pickle.loads(data)
415 self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True) 477 self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
416 self.databuilder.parseBaseConfiguration() 478 self.databuilder.parseBaseConfiguration(worker=True)
417 self.data = self.databuilder.data 479 self.data = self.databuilder.data
418 480
419 def handle_extraconfigdata(self, data): 481 def handle_extraconfigdata(self, data):
@@ -428,6 +490,7 @@ class BitbakeWorker(object):
428 for mc in self.databuilder.mcdata: 490 for mc in self.databuilder.mcdata:
429 self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"]) 491 self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
430 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"]) 492 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"])
493 self.databuilder.mcdata[mc].setVar("__bbclasstype", "recipe")
431 494
432 def handle_newtaskhashes(self, data): 495 def handle_newtaskhashes(self, data):
433 self.workerdata["newhashes"] = pickle.loads(data) 496 self.workerdata["newhashes"] = pickle.loads(data)
@@ -445,11 +508,15 @@ class BitbakeWorker(object):
445 sys.exit(0) 508 sys.exit(0)
446 509
447 def handle_runtask(self, data): 510 def handle_runtask(self, data):
448 fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data) 511 runtask = pickle.loads(data)
449 workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname)) 512
513 fn = runtask['fn']
514 task = runtask['task']
515 taskname = runtask['taskname']
450 516
451 pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec) 517 workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
452 518
519 pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
453 self.build_pids[pid] = task 520 self.build_pids[pid] = task
454 self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout) 521 self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
455 522
@@ -513,9 +580,11 @@ except BaseException as e:
513 import traceback 580 import traceback
514 sys.stderr.write(traceback.format_exc()) 581 sys.stderr.write(traceback.format_exc())
515 sys.stderr.write(str(e)) 582 sys.stderr.write(str(e))
583finally:
584 worker_thread_exit = True
585 worker_thread.join()
516 586
517worker_thread_exit = True 587workerlog_write("exiting")
518worker_thread.join() 588if not normalexit:
519 589 sys.exit(1)
520workerlog_write("exitting")
521sys.exit(0) 590sys.exit(0)
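The reworked handle_item() expects a four-byte big-endian payload length immediately after the opening tag. A minimal sender-side sketch of that framing, assuming the payload is always a pickle (the tag and payload shown are hypothetical):

    import pickle

    def frame(tag: bytes, obj) -> bytes:
        # <tag> + 4-byte big-endian payload length + pickled payload + </tag>,
        # mirroring the header parsing in BitbakeWorker.handle_item()
        payload = pickle.dumps(obj)
        header = len(payload).to_bytes(4, "big")
        return b"<" + tag + b">" + header + payload + b"</" + tag + b">"

    msg = frame(b"runtask", {"fn": "example.bb", "taskname": "do_compile"})  # hypothetical payload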
diff --git a/bitbake/bin/git-make-shallow b/bitbake/bin/git-make-shallow
index 57069f7edf..e6c180b4d6 100755
--- a/bitbake/bin/git-make-shallow
+++ b/bitbake/bin/git-make-shallow
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright BitBake Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
@@ -16,19 +18,23 @@ import itertools
16import os 18import os
17import subprocess 19import subprocess
18import sys 20import sys
21import warnings
22warnings.simplefilter("default")
19 23
20version = 1.0 24version = 1.0
21 25
22 26
27git_cmd = ['git', '-c', 'safe.bareRepository=all']
28
23def main(): 29def main():
24 if sys.version_info < (3, 4, 0): 30 if sys.version_info < (3, 4, 0):
25 sys.exit('Python 3.4 or greater is required') 31 sys.exit('Python 3.4 or greater is required')
26 32
27 git_dir = check_output(['git', 'rev-parse', '--git-dir']).rstrip() 33 git_dir = check_output(git_cmd + ['rev-parse', '--git-dir']).rstrip()
28 shallow_file = os.path.join(git_dir, 'shallow') 34 shallow_file = os.path.join(git_dir, 'shallow')
29 if os.path.exists(shallow_file): 35 if os.path.exists(shallow_file):
30 try: 36 try:
31 check_output(['git', 'fetch', '--unshallow']) 37 check_output(git_cmd + ['fetch', '--unshallow'])
32 except subprocess.CalledProcessError: 38 except subprocess.CalledProcessError:
33 try: 39 try:
34 os.unlink(shallow_file) 40 os.unlink(shallow_file)
@@ -37,21 +43,21 @@ def main():
37 raise 43 raise
38 44
39 args = process_args() 45 args = process_args()
40 revs = check_output(['git', 'rev-list'] + args.revisions).splitlines() 46 revs = check_output(git_cmd + ['rev-list'] + args.revisions).splitlines()
41 47
42 make_shallow(shallow_file, args.revisions, args.refs) 48 make_shallow(shallow_file, args.revisions, args.refs)
43 49
44 ref_revs = check_output(['git', 'rev-list'] + args.refs).splitlines() 50 ref_revs = check_output(git_cmd + ['rev-list'] + args.refs).splitlines()
45 remaining_history = set(revs) & set(ref_revs) 51 remaining_history = set(revs) & set(ref_revs)
46 for rev in remaining_history: 52 for rev in remaining_history:
47 if check_output(['git', 'rev-parse', '{}^@'.format(rev)]): 53 if check_output(git_cmd + ['rev-parse', '{}^@'.format(rev)]):
48 sys.exit('Error: %s was not made shallow' % rev) 54 sys.exit('Error: %s was not made shallow' % rev)
49 55
50 filter_refs(args.refs) 56 filter_refs(args.refs)
51 57
52 if args.shrink: 58 if args.shrink:
53 shrink_repo(git_dir) 59 shrink_repo(git_dir)
54 subprocess.check_call(['git', 'fsck', '--unreachable']) 60 subprocess.check_call(git_cmd + ['fsck', '--unreachable'])
55 61
56 62
57def process_args(): 63def process_args():
@@ -68,12 +74,12 @@ def process_args():
68 args = parser.parse_args() 74 args = parser.parse_args()
69 75
70 if args.refs: 76 if args.refs:
71 args.refs = check_output(['git', 'rev-parse', '--symbolic-full-name'] + args.refs).splitlines() 77 args.refs = check_output(git_cmd + ['rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
72 else: 78 else:
73 args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit') 79 args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit')
74 80
75 args.refs = list(filter(lambda r: not r.endswith('/HEAD'), args.refs)) 81 args.refs = list(filter(lambda r: not r.endswith('/HEAD'), args.refs))
76 args.revisions = check_output(['git', 'rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines() 82 args.revisions = check_output(git_cmd + ['rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
77 return args 83 return args
78 84
79 85
@@ -91,7 +97,7 @@ def make_shallow(shallow_file, revisions, refs):
91 97
92def get_all_refs(ref_filter=None): 98def get_all_refs(ref_filter=None):
93 """Return all the existing refs in this repository, optionally filtering the refs.""" 99 """Return all the existing refs in this repository, optionally filtering the refs."""
94 ref_output = check_output(['git', 'for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)']) 100 ref_output = check_output(git_cmd + ['for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
95 ref_split = [tuple(iter_extend(l.rsplit('\t'), 3)) for l in ref_output.splitlines()] 101 ref_split = [tuple(iter_extend(l.rsplit('\t'), 3)) for l in ref_output.splitlines()]
96 if ref_filter: 102 if ref_filter:
97 ref_split = (e for e in ref_split if ref_filter(*e)) 103 ref_split = (e for e in ref_split if ref_filter(*e))
@@ -109,8 +115,8 @@ def filter_refs(refs):
109 all_refs = get_all_refs() 115 all_refs = get_all_refs()
110 to_remove = set(all_refs) - set(refs) 116 to_remove = set(all_refs) - set(refs)
111 if to_remove: 117 if to_remove:
112 check_output(['xargs', '-0', '-n', '1', 'git', 'update-ref', '-d', '--no-deref'], 118 check_output(git_cmd + ['update-ref', '--no-deref', '--stdin', '-z'],
113 input=''.join(l + '\0' for l in to_remove)) 119 input=''.join('delete ' + l + '\0\0' for l in to_remove))
114 120
115 121
116def follow_history_intersections(revisions, refs): 122def follow_history_intersections(revisions, refs):
@@ -122,7 +128,7 @@ def follow_history_intersections(revisions, refs):
122 if rev in seen: 128 if rev in seen:
123 continue 129 continue
124 130
125 parents = check_output(['git', 'rev-parse', '%s^@' % rev]).splitlines() 131 parents = check_output(git_cmd + ['rev-parse', '%s^@' % rev]).splitlines()
126 132
127 yield rev 133 yield rev
128 seen.add(rev) 134 seen.add(rev)
@@ -130,12 +136,12 @@ def follow_history_intersections(revisions, refs):
130 if not parents: 136 if not parents:
131 continue 137 continue
132 138
133 check_refs = check_output(['git', 'merge-base', '--independent'] + sorted(refs)).splitlines() 139 check_refs = check_output(git_cmd + ['merge-base', '--independent'] + sorted(refs)).splitlines()
134 for parent in parents: 140 for parent in parents:
135 for ref in check_refs: 141 for ref in check_refs:
136 print("Checking %s vs %s" % (parent, ref)) 142 print("Checking %s vs %s" % (parent, ref))
137 try: 143 try:
138 merge_base = check_output(['git', 'merge-base', parent, ref]).rstrip() 144 merge_base = check_output(git_cmd + ['merge-base', parent, ref]).rstrip()
139 except subprocess.CalledProcessError: 145 except subprocess.CalledProcessError:
140 continue 146 continue
141 else: 147 else:
@@ -155,14 +161,14 @@ def iter_except(func, exception, start=None):
155 161
156def shrink_repo(git_dir): 162def shrink_repo(git_dir):
157 """Shrink the newly shallow repository, removing the unreachable objects.""" 163 """Shrink the newly shallow repository, removing the unreachable objects."""
158 subprocess.check_call(['git', 'reflog', 'expire', '--expire-unreachable=now', '--all']) 164 subprocess.check_call(git_cmd + ['reflog', 'expire', '--expire-unreachable=now', '--all'])
159 subprocess.check_call(['git', 'repack', '-ad']) 165 subprocess.check_call(git_cmd + ['repack', '-ad'])
160 try: 166 try:
161 os.unlink(os.path.join(git_dir, 'objects', 'info', 'alternates')) 167 os.unlink(os.path.join(git_dir, 'objects', 'info', 'alternates'))
162 except OSError as exc: 168 except OSError as exc:
163 if exc.errno != errno.ENOENT: 169 if exc.errno != errno.ENOENT:
164 raise 170 raise
165 subprocess.check_call(['git', 'prune', '--expire', 'now']) 171 subprocess.check_call(git_cmd + ['prune', '--expire', 'now'])
166 172
167 173
168if __name__ == '__main__': 174if __name__ == '__main__':
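The batched deletion feeds git update-ref's NUL-terminated --stdin mode, where each directive is "delete SP <ref> NUL <old-oid> NUL"; the empty <old-oid> produced by the '\0\0' suffix makes each deletion unconditional. For one hypothetical ref the stream is:

    delete refs/heads/example\0\0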
diff --git a/bitbake/bin/toaster b/bitbake/bin/toaster
index 6b90ee187e..f002c8c159 100755
--- a/bitbake/bin/toaster
+++ b/bitbake/bin/toaster
@@ -33,7 +33,7 @@ databaseCheck()
33 $MANAGE migrate --noinput || retval=1 33 $MANAGE migrate --noinput || retval=1
34 34
35 if [ $retval -eq 1 ]; then 35 if [ $retval -eq 1 ]; then
36 echo "Failed migrations, aborting system start" 1>&2 36 echo "Failed migrations, halting system start" 1>&2
37 return $retval 37 return $retval
38 fi 38 fi
39 # Make sure that checksettings can pick up any value for TEMPLATECONF 39 # Make sure that checksettings can pick up any value for TEMPLATECONF
@@ -41,7 +41,7 @@ databaseCheck()
41 $MANAGE checksettings --traceback || retval=1 41 $MANAGE checksettings --traceback || retval=1
42 42
43 if [ $retval -eq 1 ]; then 43 if [ $retval -eq 1 ]; then
44 printf "\nError while checking settings; aborting\n" 44 printf "\nError while checking settings; exiting\n"
45 return $retval 45 return $retval
46 fi 46 fi
47 47
@@ -84,7 +84,7 @@ webserverStartAll()
84 echo "Starting webserver..." 84 echo "Starting webserver..."
85 85
86 $MANAGE runserver --noreload "$ADDR_PORT" \ 86 $MANAGE runserver --noreload "$ADDR_PORT" \
87 </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \ 87 </dev/null >>${TOASTER_LOGS_DIR}/web.log 2>&1 \
88 & echo $! >${BUILDDIR}/.toastermain.pid 88 & echo $! >${BUILDDIR}/.toastermain.pid
89 89
90 sleep 1 90 sleep 1
@@ -181,6 +181,14 @@ WEBSERVER=1
181export TOASTER_BUILDSERVER=1 181export TOASTER_BUILDSERVER=1
182ADDR_PORT="localhost:8000" 182ADDR_PORT="localhost:8000"
183TOASTERDIR=`dirname $BUILDDIR` 183TOASTERDIR=`dirname $BUILDDIR`
184# ${BUILDDIR}/toaster_logs/ is now the default location for toaster logs
185# It is needed by the django-log-viewer integration: https://pypi.org/project/django-log-viewer/
186# If the directory does not exist, create it.
187TOASTER_LOGS_DIR="${BUILDDIR}/toaster_logs/"
188if [ ! -d $TOASTER_LOGS_DIR ]
189then
190 mkdir $TOASTER_LOGS_DIR
191fi
184unset CMD 192unset CMD
185for param in $*; do 193for param in $*; do
186 case $param in 194 case $param in
@@ -248,7 +256,7 @@ fi
248# 3) the sqlite db if that is being used. 256# 3) the sqlite db if that is being used.
249# 4) pid's we need to clean up on exit/shutdown 257# 4) pid's we need to clean up on exit/shutdown
250export TOASTER_DIR=$TOASTERDIR 258export TOASTER_DIR=$TOASTERDIR
251export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE TOASTER_DIR" 259export BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS TOASTER_DIR"
252 260
253# Determine the action. If specified by arguments, fine, if not, toggle it 261# Determine the action. If specified by arguments, fine, if not, toggle it
254if [ "$CMD" = "start" ] ; then 262if [ "$CMD" = "start" ] ; then
@@ -299,7 +307,7 @@ case $CMD in
299 export BITBAKE_UI='toasterui' 307 export BITBAKE_UI='toasterui'
300 if [ $TOASTER_BUILDSERVER -eq 1 ] ; then 308 if [ $TOASTER_BUILDSERVER -eq 1 ] ; then
301 $MANAGE runbuilds \ 309 $MANAGE runbuilds \
302 </dev/null >>${BUILDDIR}/toaster_runbuilds.log 2>&1 \ 310 </dev/null >>${TOASTER_LOGS_DIR}/toaster_runbuilds.log 2>&1 \
303 & echo $! >${BUILDDIR}/.runbuilds.pid 311 & echo $! >${BUILDDIR}/.runbuilds.pid
304 else 312 else
305 echo "Toaster build server not started." 313 echo "Toaster build server not started."
diff --git a/bitbake/bin/toaster-eventreplay b/bitbake/bin/toaster-eventreplay
index 8fa4ab7116..74a319320e 100755
--- a/bitbake/bin/toaster-eventreplay
+++ b/bitbake/bin/toaster-eventreplay
@@ -19,6 +19,8 @@ import sys
19import json 19import json
20import pickle 20import pickle
21import codecs 21import codecs
22import warnings
23warnings.simplefilter("default")
22 24
23from collections import namedtuple 25from collections import namedtuple
24 26
@@ -28,79 +30,23 @@ sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
28 30
29import bb.cooker 31import bb.cooker
30from bb.ui import toasterui 32from bb.ui import toasterui
31 33from bb.ui import eventreplay
32class EventPlayer:
33 """Emulate a connection to a bitbake server."""
34
35 def __init__(self, eventfile, variables):
36 self.eventfile = eventfile
37 self.variables = variables
38 self.eventmask = []
39
40 def waitEvent(self, _timeout):
41 """Read event from the file."""
42 line = self.eventfile.readline().strip()
43 if not line:
44 return
45 try:
46 event_str = json.loads(line)['vars'].encode('utf-8')
47 event = pickle.loads(codecs.decode(event_str, 'base64'))
48 event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
49 if event_name not in self.eventmask:
50 return
51 return event
52 except ValueError as err:
53 print("Failed loading ", line)
54 raise err
55
56 def runCommand(self, command_line):
57 """Emulate running a command on the server."""
58 name = command_line[0]
59
60 if name == "getVariable":
61 var_name = command_line[1]
62 variable = self.variables.get(var_name)
63 if variable:
64 return variable['v'], None
65 return None, "Missing variable %s" % var_name
66
67 elif name == "getAllKeysWithFlags":
68 dump = {}
69 flaglist = command_line[1]
70 for key, val in self.variables.items():
71 try:
72 if not key.startswith("__"):
73 dump[key] = {
74 'v': val['v'],
75 'history' : val['history'],
76 }
77 for flag in flaglist:
78 dump[key][flag] = val[flag]
79 except Exception as err:
80 print(err)
81 return (dump, None)
82
83 elif name == 'setEventMask':
84 self.eventmask = command_line[-1]
85 return True, None
86
87 else:
88 raise Exception("Command %s not implemented" % command_line[0])
89
90 def getEventHandle(self):
91 """
92 This method is called by toasterui.
93 The return value is passed to self.runCommand but not used there.
94 """
95 pass
96 34
97def main(argv): 35def main(argv):
98 with open(argv[-1]) as eventfile: 36 with open(argv[-1]) as eventfile:
99 # load variables from the first line 37 # load variables from the first line
100 variables = json.loads(eventfile.readline().strip())['allvariables'] 38 variables = None
101 39 while line := eventfile.readline().strip():
40 try:
41 variables = json.loads(line)['allvariables']
42 break
43 except (KeyError, json.JSONDecodeError):
44 continue
45 if not variables:
46 sys.exit("Cannot find allvariables entry in event log file %s" % argv[-1])
47 eventfile.seek(0)
102 params = namedtuple('ConfigParams', ['observe_only'])(True) 48 params = namedtuple('ConfigParams', ['observe_only'])(True)
103 player = EventPlayer(eventfile, variables) 49 player = eventreplay.EventPlayer(eventfile, variables)
104 50
105 return toasterui.main(player, player, params) 51 return toasterui.main(player, player, params)
106 52
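The line the replay loop searches for is the JSON header written into the event log; schematically, a hypothetical trimmed entry with the 'v'/'history' fields the UI code reads:

    {"allvariables": {"PN": {"v": "example", "history": []}}}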
diff --git a/bitbake/contrib/b4-wrapper-bitbake.py b/bitbake/contrib/b4-wrapper-bitbake.py
new file mode 100755
index 0000000000..87dff2c3a7
--- /dev/null
+++ b/bitbake/contrib/b4-wrapper-bitbake.py
@@ -0,0 +1,40 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7# This script is to be called by b4:
8# - through b4.send-auto-cc-cmd with "send-auto-cc-cmd" as first argument,
9#
10# When send-auto-cc-cmd is passed:
11#
12# This returns the list of Cc recipients for a patch.
13#
14# This script takes as stdin a patch.
15
16import subprocess
17import sys
18
19cmd = sys.argv[1]
20if cmd != "send-auto-cc-cmd":
21 sys.exit(-1)
22
23patch = sys.stdin.read()
24
25if subprocess.call(["which", "lsdiff"], stdout=subprocess.DEVNULL) != 0:
26 print("lsdiff missing from host, please install patchutils")
27 sys.exit(-1)
28
29files = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1", "--include=doc/*"],
30 input=patch, text=True)
31if len(files):
32 print("docs@lists.yoctoproject.org")
33else:
34# Handle patches made with --no-prefix
35 files = subprocess.check_output(["lsdiff", "--include=doc/*"],
36 input=patch, text=True)
37 if len(files):
38 print("docs@lists.yoctoproject.org")
39
40sys.exit(0)
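A hypothetical .b4-config stanza wiring the wrapper in (b4 passes the hook name as the first argument, matching the check above; the stanza actually shipped in bitbake/.b4-config may differ):

    [b4]
      send-auto-cc-cmd = ./contrib/b4-wrapper-bitbake.py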
diff --git a/bitbake/contrib/hashserv/Dockerfile b/bitbake/contrib/hashserv/Dockerfile
index d6fc728f37..aec1f86fc9 100644
--- a/bitbake/contrib/hashserv/Dockerfile
+++ b/bitbake/contrib/hashserv/Dockerfile
@@ -1,7 +1,7 @@
1# SPDX-License-Identifier: MIT 1# SPDX-License-Identifier: MIT
2# 2#
3# Copyright (c) 2021 Joshua Watt <JPEWhacker@gmail.com> 3# Copyright (c) 2021 Joshua Watt <JPEWhacker@gmail.com>
4# 4#
5# Dockerfile to build a bitbake hash equivalence server container 5# Dockerfile to build a bitbake hash equivalence server container
6# 6#
7# From the root of the bitbake repository, run: 7# From the root of the bitbake repository, run:
@@ -11,9 +11,13 @@
11 11
12FROM alpine:3.13.1 12FROM alpine:3.13.1
13 13
14RUN apk add --no-cache python3 14RUN apk add --no-cache python3 libgcc
15 15
16COPY bin/bitbake-hashserv /opt/bbhashserv/bin/ 16COPY bin/bitbake-hashserv /opt/bbhashserv/bin/
17COPY lib/hashserv /opt/bbhashserv/lib/hashserv/ 17COPY lib/hashserv /opt/bbhashserv/lib/hashserv/
18COPY lib/bb /opt/bbhashserv/lib/bb/
19COPY lib/codegen.py /opt/bbhashserv/lib/codegen.py
20COPY lib/ply /opt/bbhashserv/lib/ply/
21COPY lib/bs4 /opt/bbhashserv/lib/bs4/
18 22
19ENTRYPOINT ["/opt/bbhashserv/bin/bitbake-hashserv"] 23ENTRYPOINT ["/opt/bbhashserv/bin/bitbake-hashserv"]
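With the extra library trees copied in, the container is self-contained and run arguments pass straight through the ENTRYPOINT to bitbake-hashserv; a sketch with hypothetical run flags:

    docker build -f contrib/hashserv/Dockerfile . -t hashserv
    docker run --rm -p 8686:8686 hashserv --bind 0.0.0.0:8686 --anon-perms @read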
diff --git a/bitbake/contrib/prserv/Dockerfile b/bitbake/contrib/prserv/Dockerfile
new file mode 100644
index 0000000000..9585fe3f07
--- /dev/null
+++ b/bitbake/contrib/prserv/Dockerfile
@@ -0,0 +1,62 @@
1# SPDX-License-Identifier: MIT
2#
3# Copyright (c) 2022 Daniel Gomez <daniel@qtec.com>
4#
5# Dockerfile to build a bitbake PR service container
6#
7# From the root of the bitbake repository, run:
8#
9# docker build -f contrib/prserv/Dockerfile . -t prserv
10#
11# Running examples:
12#
13# 1. PR Service in RW mode, port 18585:
14#
15# docker run --detach --tty \
16# --env PORT=18585 \
17# --publish 18585:18585 \
18# --volume $PWD:/var/lib/bbprserv \
19# prserv
20#
21# 2. PR Service in RO mode, default port (8585) and custom LOGFILE:
22#
23# docker run --detach --tty \
24# --env DBMODE="--read-only" \
25# --env LOGFILE=/var/lib/bbprserv/prservro.log \
26# --publish 8585:8585 \
27# --volume $PWD:/var/lib/bbprserv \
28# prserv
29#
30
31FROM alpine:3.14.4
32
33RUN apk add --no-cache python3
34
35COPY bin/bitbake-prserv /opt/bbprserv/bin/
36COPY lib/prserv /opt/bbprserv/lib/prserv/
37COPY lib/bb /opt/bbprserv/lib/bb/
38COPY lib/codegen.py /opt/bbprserv/lib/codegen.py
39COPY lib/ply /opt/bbprserv/lib/ply/
40COPY lib/bs4 /opt/bbprserv/lib/bs4/
41
42ENV PATH=$PATH:/opt/bbprserv/bin
43
44RUN mkdir -p /var/lib/bbprserv
45
46ENV DBFILE=/var/lib/bbprserv/prserv.sqlite3 \
47 LOGFILE=/var/lib/bbprserv/prserv.log \
48 LOGLEVEL=debug \
49 HOST=0.0.0.0 \
50 PORT=8585 \
51 DBMODE=""
52
53ENTRYPOINT [ "/bin/sh", "-c", \
54"bitbake-prserv \
55--file=$DBFILE \
56--log=$LOGFILE \
57--loglevel=$LOGLEVEL \
58--start \
59--host=$HOST \
60--port=$PORT \
61$DBMODE \
62&& tail -f $LOGFILE"]
diff --git a/bitbake/contrib/vim/ftdetect/bitbake.vim b/bitbake/contrib/vim/ftdetect/bitbake.vim
index 09fc4dc74c..427ab5b987 100644
--- a/bitbake/contrib/vim/ftdetect/bitbake.vim
+++ b/bitbake/contrib/vim/ftdetect/bitbake.vim
@@ -11,10 +11,18 @@ if &compatible || version < 600 || exists("b:loaded_bitbake_plugin")
11endif 11endif
12 12
13" .bb, .bbappend and .bbclass 13" .bb, .bbappend and .bbclass
14au BufNewFile,BufRead *.{bb,bbappend,bbclass} set filetype=bitbake 14au BufNewFile,BufRead *.{bb,bbappend,bbclass} setfiletype bitbake
15 15
16" .inc 16" .inc -- meanwhile included upstream
17au BufNewFile,BufRead *.inc set filetype=bitbake 17if !has("patch-9.0.0055")
18 au BufNewFile,BufRead *.inc call s:BBIncDetect()
19 def s:BBIncDetect()
 20 var lines = getline(1) .. getline(2) .. getline(3)
 21 if lines =~# '\<\%(require\|inherit\)\>' || lines =~# '[A-Z][A-Za-z0-9_:${}]*\s\+\%(??\|[?:+]\)\?= '
 22 setfiletype bitbake
23 endif
24 enddef
25endif
18 26
19" .conf 27" .conf
20au BufNewFile,BufRead *.conf 28au BufNewFile,BufRead *.conf
diff --git a/bitbake/contrib/vim/indent/bitbake.vim b/bitbake/contrib/vim/indent/bitbake.vim
index 1381034098..7ee9d69938 100644
--- a/bitbake/contrib/vim/indent/bitbake.vim
+++ b/bitbake/contrib/vim/indent/bitbake.vim
@@ -40,7 +40,7 @@ set cpo&vim
40 40
41let s:maxoff = 50 " maximum number of lines to look backwards for () 41let s:maxoff = 50 " maximum number of lines to look backwards for ()
42 42
43function GetPythonIndent(lnum) 43function! GetBBPythonIndent(lnum)
44 44
45 " If this line is explicitly joined: If the previous line was also joined, 45 " If this line is explicitly joined: If the previous line was also joined,
46 " line it up with that one, otherwise add two 'shiftwidth' 46 " line it up with that one, otherwise add two 'shiftwidth'
@@ -257,7 +257,7 @@ let b:did_indent = 1
257setlocal indentkeys+=0\" 257setlocal indentkeys+=0\"
258 258
259 259
260function BitbakeIndent(lnum) 260function! BitbakeIndent(lnum)
261 if !has('syntax_items') 261 if !has('syntax_items')
262 return -1 262 return -1
263 endif 263 endif
@@ -315,7 +315,7 @@ function BitbakeIndent(lnum)
315 endif 315 endif
316 316
317 if index(["bbPyDefRegion", "bbPyFuncRegion"], name) != -1 317 if index(["bbPyDefRegion", "bbPyFuncRegion"], name) != -1
318 let ret = GetPythonIndent(a:lnum) 318 let ret = GetBBPythonIndent(a:lnum)
319 " Should normally always be indented by at least one shiftwidth; but allow 319 " Should normally always be indented by at least one shiftwidth; but allow
320 " return of -1 (defer to autoindent) or -2 (force indent to 0) 320 " return of -1 (defer to autoindent) or -2 (force indent to 0)
321 if ret == 0 321 if ret == 0
diff --git a/bitbake/contrib/vim/plugin/newbbappend.vim b/bitbake/contrib/vim/plugin/newbbappend.vim
index e04174cf62..3f65f79cdc 100644
--- a/bitbake/contrib/vim/plugin/newbbappend.vim
+++ b/bitbake/contrib/vim/plugin/newbbappend.vim
@@ -20,7 +20,7 @@ fun! NewBBAppendTemplate()
20 set nopaste 20 set nopaste
21 21
22 " New bbappend template 22 " New bbappend template
23 0 put ='FILESEXTRAPATHS_prepend := \"${THISDIR}/${PN}:\"' 23 0 put ='FILESEXTRAPATHS:prepend := \"${THISDIR}/${PN}:\"'
24 2 24 2
25 25
26 if paste == 1 26 if paste == 1
diff --git a/bitbake/contrib/vim/syntax/bitbake.vim b/bitbake/contrib/vim/syntax/bitbake.vim
index f964621ae5..8f39b8f951 100644
--- a/bitbake/contrib/vim/syntax/bitbake.vim
+++ b/bitbake/contrib/vim/syntax/bitbake.vim
@@ -51,9 +51,9 @@ syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ end=+'+
 syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
 syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
 syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
-syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
+syn match bbVarDeref "${[a-zA-Z0-9\-_:\.\/\+]\+}" contained
 syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue
-syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
+syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+][${}a-zA-Z0-9\-_:\.\/\+]*\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbOverrideOperator,bbVarDeref nextgroup=bbVarEq
 syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
 syn region bbVarPyValue start=+${@+ skip=+\\$+ end=+}+ contained contains=@python

@@ -63,13 +63,14 @@ syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*

 " Includes and requires
 syn keyword bbInclude inherit include require contained
-syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref
+syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
 syn match bbIncludeLine "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest

 " Add taks and similar
 syn keyword bbStatement addtask deltask addhandler after before EXPORT_FUNCTIONS contained
-syn match bbStatementRest ".*$" skipwhite contained contains=bbStatement
-syn match bbStatementLine "^\(addtask\|deltask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
+syn match bbStatementRest /[^\\]*$/ skipwhite contained contains=bbStatement,bbVarDeref,bbVarPyValue
+syn region bbStatementRestCont start=/.*\\$/ end=/^[^\\]*$/ contained contains=bbStatement,bbVarDeref,bbVarPyValue,bbContinue keepend
+syn match bbStatementLine "^\(addtask\|deltask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest,bbStatementRestCont

 " OE Important Functions
 syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained
@@ -77,13 +78,15 @@ syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_comp
 " Generic Functions
 syn match bbFunction "\h[0-9A-Za-z_\-\.]*" display contained contains=bbOEFunctions

+syn keyword bbOverrideOperator append prepend remove contained
+
 " BitBake shell metadata
 syn include @shell syntax/sh.vim
 if exists("b:current_syntax")
     unlet b:current_syntax
 endif
 syn keyword bbShFakeRootFlag fakeroot contained
-syn match bbShFuncDef "^\(fakeroot\s*\)\?\([\.0-9A-Za-z_${}\-\.]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbShFuncRegion skipwhite
+syn match bbShFuncDef "^\(fakeroot\s*\)\?\([\.0-9A-Za-z_:${}\-\.]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbOverrideOperator,bbVarDeref,bbDelimiter nextgroup=bbShFuncRegion skipwhite
 syn region bbShFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@shell

 " Python value inside shell functions
@@ -91,7 +94,7 @@ syn region shDeref start=+${@+ skip=+\\$+ excludenl end=+}+ contained co

 " BitBake python metadata
 syn keyword bbPyFlag python contained
-syn match bbPyFuncDef "^\(fakeroot\s*\)\?\(python\)\(\s\+[0-9A-Za-z_${}\-\.]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbPyFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
+syn match bbPyFuncDef "^\(fakeroot\s*\)\?\(python\)\(\s\+[0-9A-Za-z_:${}\-\.]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbPyFlag,bbFunction,bbOverrideOperator,bbVarDeref,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
 syn region bbPyFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@python

 " BitBake 'def'd python functions
@@ -120,7 +123,9 @@ hi def link bbPyFlag Type
 hi def link bbPyDef Statement
 hi def link bbStatement Statement
 hi def link bbStatementRest Identifier
+hi def link bbStatementRestCont Identifier
 hi def link bbOEFunctions Special
 hi def link bbVarPyValue PreProc
+hi def link bbOverrideOperator Operator

 let b:current_syntax = "bb"
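Taken together, the new syntax rules above highlight colon-style overrides and line-continued statements. A hedged sketch of metadata that would exercise them (function, file and task names are hypothetical)::

   do_install:append() {
       install -m 0644 ${WORKDIR}/extra.conf ${D}${sysconfdir}
   }

   addtask do_extra after do_compile \
           before do_install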
diff --git a/bitbake/doc/Makefile b/bitbake/doc/Makefile
index d40f390e2b..996f01b7d5 100644
--- a/bitbake/doc/Makefile
+++ b/bitbake/doc/Makefile
@@ -3,7 +3,7 @@

 # You can set these variables from the command line, and also
 # from the environment for the first two.
-SPHINXOPTS    ?= -j auto
+SPHINXOPTS    ?= -W --keep-going -j auto
 SPHINXBUILD   ?= sphinx-build
 SOURCEDIR     = .
 BUILDDIR      = _build
diff --git a/bitbake/doc/README b/bitbake/doc/README
index 62595820bd..d4f56afa37 100644
--- a/bitbake/doc/README
+++ b/bitbake/doc/README
@@ -8,12 +8,12 @@ Manual Organization

 Folders exist for individual manuals as follows:

-* bitbake-user-manual - The BitBake User Manual
+* bitbake-user-manual --- The BitBake User Manual

 Each folder is self-contained regarding content and figures.

 If you want to find HTML versions of the BitBake manuals on the web,
-go to http://www.openembedded.org/wiki/Documentation.
+go to https://www.openembedded.org/wiki/Documentation.

 Sphinx
 ======
@@ -47,8 +47,8 @@ To install all required packages run:

 To build the documentation locally, run:

- $ cd documentation
- $ make -f Makefile.sphinx html
+ $ cd doc
+ $ make html

 The resulting HTML index page will be _build/html/index.html, and you
 can browse your own copy of the locally generated documentation with
54can browse your own copy of the locally generated documentation with 54can browse your own copy of the locally generated documentation with
diff --git a/bitbake/doc/_templates/footer.html b/bitbake/doc/_templates/footer.html
new file mode 100644
index 0000000000..1398f20d7e
--- /dev/null
+++ b/bitbake/doc/_templates/footer.html
@@ -0,0 +1,9 @@
+<footer>
+  <hr/>
+  <div role="contentinfo">
+    <p>&copy; Copyright {{ copyright }}
+      <br>Last updated on {{ last_updated }} from the <a href="https://git.openembedded.org/bitbake/">bitbake</a> git repository.
+    </p>
+  </div>
+</footer>
+
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
index d74e768f69..d58fbb32ea 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
@@ -16,7 +16,7 @@ data, or simply return information about the execution environment.

 This chapter describes BitBake's execution process from start to finish
 when you use it to create an image. The execution process is launched
-using the following command form: ::
+using the following command form::

    $ bitbake target

@@ -32,7 +32,7 @@ the BitBake command and its options, see ":ref:`The BitBake Command
    your project's ``local.conf`` configuration file.

    A common method to determine this value for your build host is to run
-   the following: ::
+   the following::

      $ grep processor /proc/cpuinfo

@@ -40,7 +40,7 @@ the BitBake command and its options, see ":ref:`The BitBake Command
    the number of processors, which takes into account hyper-threading.
    Thus, a quad-core build host with hyper-threading most likely shows
    eight processors, which is the value you would then assign to
-   ``BB_NUMBER_THREADS``.
+   :term:`BB_NUMBER_THREADS`.

    A possibly simpler solution is that some Linux distributions (e.g.
    Debian and Ubuntu) provide the ``ncpus`` command.
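As a hedged sketch grounded in the hunk above, the eight-processor host it describes would be configured in ``local.conf`` as follows (the value is illustrative)::

   BB_NUMBER_THREADS = "8"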
@@ -65,13 +65,13 @@ data itself is of various types:

 The ``layer.conf`` files are used to construct key variables such as
 :term:`BBPATH` and :term:`BBFILES`.
-``BBPATH`` is used to search for configuration and class files under the
-``conf`` and ``classes`` directories, respectively. ``BBFILES`` is used
+:term:`BBPATH` is used to search for configuration and class files under the
+``conf`` and ``classes`` directories, respectively. :term:`BBFILES` is used
 to locate both recipe and recipe append files (``.bb`` and
 ``.bbappend``). If there is no ``bblayers.conf`` file, it is assumed the
-user has set the ``BBPATH`` and ``BBFILES`` directly in the environment.
+user has set the :term:`BBPATH` and :term:`BBFILES` directly in the environment.

-Next, the ``bitbake.conf`` file is located using the ``BBPATH`` variable
+Next, the ``bitbake.conf`` file is located using the :term:`BBPATH` variable
 that was just constructed. The ``bitbake.conf`` file may also include
 other configuration files using the ``include`` or ``require``
 directives.
@@ -79,8 +79,8 @@ directives.
 Prior to parsing configuration files, BitBake looks at certain
 variables, including:

-- :term:`BB_ENV_WHITELIST`
-- :term:`BB_ENV_EXTRAWHITE`
+- :term:`BB_ENV_PASSTHROUGH`
+- :term:`BB_ENV_PASSTHROUGH_ADDITIONS`
 - :term:`BB_PRESERVE_ENV`
 - :term:`BB_ORIGENV`
 - :term:`BITBAKE_UI`
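The two renamed variables above replace the old whitelist names and are read from BitBake's own environment before parsing starts. A minimal hedged sketch of passing one extra variable through (the variable name is hypothetical)::

   $ export BB_ENV_PASSTHROUGH_ADDITIONS="MY_SETTING"
   $ MY_SETTING="value" bitbake target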
@@ -104,7 +104,7 @@ BitBake first searches the current working directory for an optional
 contain a :term:`BBLAYERS` variable that is a
 space-delimited list of 'layer' directories. Recall that if BitBake
 cannot find a ``bblayers.conf`` file, then it is assumed the user has
-set the ``BBPATH`` and ``BBFILES`` variables directly in the
+set the :term:`BBPATH` and :term:`BBFILES` variables directly in the
 environment.

 For each directory (layer) in this list, a ``conf/layer.conf`` file is
@@ -114,7 +114,7 @@ files automatically set up :term:`BBPATH` and other
 variables correctly for a given build directory.

 BitBake then expects to find the ``conf/bitbake.conf`` file somewhere in
-the user-specified ``BBPATH``. That configuration file generally has
+the user-specified :term:`BBPATH`. That configuration file generally has
 include directives to pull in any other metadata such as files specific
 to the architecture, the machine, the local environment, and so forth.

@@ -135,11 +135,11 @@ The ``base.bbclass`` file is always included. Other classes that are
 specified in the configuration using the
 :term:`INHERIT` variable are also included. BitBake
 searches for class files in a ``classes`` subdirectory under the paths
-in ``BBPATH`` in the same way as configuration files.
+in :term:`BBPATH` in the same way as configuration files.

 A good way to get an idea of the configuration files and the class files
 used in your execution environment is to run the following BitBake
-command: ::
+command::

    $ bitbake -e > mybb.log

@@ -155,7 +155,7 @@ execution environment.
    pair of curly braces in a shell function, the closing curly brace
    must not be located at the start of the line without leading spaces.

-   Here is an example that causes BitBake to produce a parsing error: ::
+   Here is an example that causes BitBake to produce a parsing error::

      fakeroot create_shar() {
         cat << "EOF" > ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
@@ -184,13 +184,13 @@ Locating and Parsing Recipes
 During the configuration phase, BitBake will have set
 :term:`BBFILES`. BitBake now uses it to construct a
 list of recipes to parse, along with any append files (``.bbappend``) to
-apply. ``BBFILES`` is a space-separated list of available files and
-supports wildcards. An example would be: ::
+apply. :term:`BBFILES` is a space-separated list of available files and
+supports wildcards. An example would be::

    BBFILES = "/path/to/bbfiles/*.bb /path/to/appends/*.bbappend"

 BitBake parses each
-recipe and append file located with ``BBFILES`` and stores the values of
+recipe and append file located with :term:`BBFILES` and stores the values of
 various variables into the datastore.

 .. note::
@@ -201,18 +201,18 @@ For each file, a fresh copy of the base configuration is made, then the
 recipe is parsed line by line. Any inherit statements cause BitBake to
 find and then parse class files (``.bbclass``) using
 :term:`BBPATH` as the search path. Finally, BitBake
-parses in order any append files found in ``BBFILES``.
+parses in order any append files found in :term:`BBFILES`.

 One common convention is to use the recipe filename to define pieces of
 metadata. For example, in ``bitbake.conf`` the recipe name and version
 are used to set the variables :term:`PN` and
-:term:`PV`: ::
+:term:`PV`::

-   PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
-   PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
+   PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+   PV = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"

 In this example, a recipe called "something_1.2.3.bb" would set
-``PN`` to "something" and ``PV`` to "1.2.3".
+:term:`PN` to "something" and :term:`PV` to "1.2.3".

 By the time parsing is complete for a recipe, BitBake has a list of
 tasks that the recipe defines and a set of data consisting of keys and
@@ -228,7 +228,7 @@ and then reload it.
 Where possible, subsequent BitBake commands reuse this cache of recipe
 information. The validity of this cache is determined by first computing
 a checksum of the base configuration data (see
-:term:`BB_HASHCONFIG_WHITELIST`) and
+:term:`BB_HASHCONFIG_IGNORE_VARS`) and
 then checking if the checksum matches. If that checksum matches what is
 in the cache and the recipe and class files have not changed, BitBake is
 able to use the cache. BitBake then reloads the cached information about
@@ -238,13 +238,14 @@ Recipe file collections exist to allow the user to have multiple
 repositories of ``.bb`` files that contain the same exact package. For
 example, one could easily use them to make one's own local copy of an
 upstream repository, but with custom modifications that one does not
-want upstream. Here is an example: ::
+want upstream. Here is an example::

    BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb"
    BBFILE_COLLECTIONS = "upstream local"
    BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
    BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
-   BBFILE_PRIORITY_upstream = "5" BBFILE_PRIORITY_local = "10"
+   BBFILE_PRIORITY_upstream = "5"
+   BBFILE_PRIORITY_local = "10"

 .. note::

@@ -259,21 +260,21 @@ Providers

 Assuming BitBake has been instructed to execute a target and that all
 the recipe files have been parsed, BitBake starts to figure out how to
-build the target. BitBake looks through the ``PROVIDES`` list for each
-of the recipes. A ``PROVIDES`` list is the list of names by which the
-recipe can be known. Each recipe's ``PROVIDES`` list is created
+build the target. BitBake looks through the :term:`PROVIDES` list for each
+of the recipes. A :term:`PROVIDES` list is the list of names by which the
+recipe can be known. Each recipe's :term:`PROVIDES` list is created
 implicitly through the recipe's :term:`PN` variable and
 explicitly through the recipe's :term:`PROVIDES`
 variable, which is optional.

-When a recipe uses ``PROVIDES``, that recipe's functionality can be
-found under an alternative name or names other than the implicit ``PN``
+When a recipe uses :term:`PROVIDES`, that recipe's functionality can be
+found under an alternative name or names other than the implicit :term:`PN`
 name. As an example, suppose a recipe named ``keyboard_1.0.bb``
-contained the following: ::
+contained the following::

    PROVIDES += "fullkeyboard"

-The ``PROVIDES``
+The :term:`PROVIDES`
 list for this recipe becomes "keyboard", which is implicit, and
 "fullkeyboard", which is explicit. Consequently, the functionality found
 in ``keyboard_1.0.bb`` can be found under two different names.
@@ -283,14 +284,14 @@ in ``keyboard_1.0.bb`` can be found under two different names.
 Preferences
 ===========

-The ``PROVIDES`` list is only part of the solution for figuring out a
+The :term:`PROVIDES` list is only part of the solution for figuring out a
 target's recipes. Because targets might have multiple providers, BitBake
 needs to prioritize providers by determining provider preferences.

 A common example in which a target has multiple providers is
-"virtual/kernel", which is on the ``PROVIDES`` list for each kernel
+"virtual/kernel", which is on the :term:`PROVIDES` list for each kernel
 recipe. Each machine often selects the best kernel provider by using a
-line similar to the following in the machine configuration file: ::
+line similar to the following in the machine configuration file::

    PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"

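A hedged sketch of the selection mechanism described above (recipe names are hypothetical): if two kernel recipes both declare ``PROVIDES += "virtual/kernel"``, the machine configuration picks one of them with::

   PREFERRED_PROVIDER_virtual/kernel = "linux-custom"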
@@ -308,10 +309,10 @@ specify a particular version. You can influence the order by using the
 :term:`DEFAULT_PREFERENCE` variable.

 By default, files have a preference of "0". Setting
-``DEFAULT_PREFERENCE`` to "-1" makes the recipe unlikely to be used
-unless it is explicitly referenced. Setting ``DEFAULT_PREFERENCE`` to
-"1" makes it likely the recipe is used. ``PREFERRED_VERSION`` overrides
-any ``DEFAULT_PREFERENCE`` setting. ``DEFAULT_PREFERENCE`` is often used
+:term:`DEFAULT_PREFERENCE` to "-1" makes the recipe unlikely to be used
+unless it is explicitly referenced. Setting :term:`DEFAULT_PREFERENCE` to
+"1" makes it likely the recipe is used. :term:`PREFERRED_VERSION` overrides
+any :term:`DEFAULT_PREFERENCE` setting. :term:`DEFAULT_PREFERENCE` is often used
 to mark newer and more experimental recipe versions until they have
 undergone sufficient testing to be considered stable.

@@ -330,7 +331,7 @@ If the first recipe is named ``a_1.1.bb``, then the

 Thus, if a recipe named ``a_1.2.bb`` exists, BitBake will choose 1.2 by
 default. However, if you define the following variable in a ``.conf``
-file that BitBake parses, you can change that preference: ::
+file that BitBake parses, you can change that preference::

    PREFERRED_VERSION_a = "1.1"

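Conversely, and as a hedged sketch grounded in the paragraph above, an experimental ``a_1.2.bb`` could keep 1.1 the default without any ``.conf`` change by carrying::

   DEFAULT_PREFERENCE = "-1"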
@@ -393,7 +394,7 @@ ready to run, those tasks have all their dependencies met, and the
 thread threshold has not been exceeded.

 It is worth noting that you can greatly speed up the build time by
-properly setting the ``BB_NUMBER_THREADS`` variable.
+properly setting the :term:`BB_NUMBER_THREADS` variable.

 As each task completes, a timestamp is written to the directory
 specified by the :term:`STAMP` variable. On subsequent
@@ -434,7 +435,7 @@ BitBake writes a shell script to
 executes the script. The generated shell script contains all the
 exported variables, and the shell functions with all variables expanded.
 Output from the shell script goes to the file
-``${T}/log.do_taskname.pid``. Looking at the expanded shell functions in
+``${``\ :term:`T`\ ``}/log.do_taskname.pid``. Looking at the expanded shell functions in
 the run file and the output in the log files is a useful debugging
 technique.

@@ -476,7 +477,7 @@ changes because it should not affect the output for target packages. The
 simplistic approach for excluding the working directory is to set it to
 some fixed value and create the checksum for the "run" script. BitBake
 goes one step better and uses the
-:term:`BB_HASHBASE_WHITELIST` variable
+:term:`BB_BASEHASH_IGNORE_VARS` variable
 to define a list of variables that should never be included when
 generating the signatures.

@@ -497,7 +498,7 @@ to the task.

 Like the working directory case, situations exist where dependencies
 should be ignored. For these cases, you can instruct the build process
-to ignore a dependency by using a line like the following: ::
+to ignore a dependency by using a line like the following::

    PACKAGE_ARCHS[vardepsexclude] = "MACHINE"

@@ -507,7 +508,7 @@ even if it does reference it.

 Equally, there are cases where we need to add dependencies BitBake is
 not able to find. You can accomplish this by using a line like the
-following: ::
+following::

    PACKAGE_ARCHS[vardeps] = "MACHINE"

@@ -522,7 +523,7 @@ it cannot figure out dependencies.
 Thus far, this section has limited discussion to the direct inputs into
 a task. Information based on direct inputs is referred to as the
 "basehash" in the code. However, there is still the question of a task's
-indirect inputs - the things that were already built and present in the
+indirect inputs --- the things that were already built and present in the
 build directory. The checksum (or signature) for a particular task needs
 to add the hashes of all the tasks on which the particular task depends.
 Choosing which dependencies to add is a policy decision. However, the
@@ -533,11 +534,11 @@ At the code level, there are a variety of ways both the basehash and the
 dependent task hashes can be influenced. Within the BitBake
 configuration file, we can give BitBake some extra information to help
 it construct the basehash. The following statement effectively results
-in a list of global variable dependency excludes - variables never
+in a list of global variable dependency excludes --- variables never
 included in any checksum. This example uses variables from OpenEmbedded
-to help illustrate the concept: ::
+to help illustrate the concept::

-   BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
+   BB_BASEHASH_IGNORE_VARS ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
       SSTATE_DIR THISDIR FILESEXTRAPATHS FILE_DIRNAME HOME LOGNAME SHELL \
       USER FILESPATH STAGING_DIR_HOST STAGING_DIR_TARGET COREBASE PRSERV_HOST \
       PRSERV_DUMPDIR PRSERV_DUMPFILE PRSERV_LOCKDOWN PARALLEL_MAKE \
@@ -551,23 +552,22 @@ through dependency chains are more complex and are generally
 accomplished with a Python function. The code in
 ``meta/lib/oe/sstatesig.py`` shows two examples of this and also
 illustrates how you can insert your own policy into the system if so
-desired. This file defines the two basic signature generators
-OpenEmbedded-Core uses: "OEBasic" and "OEBasicHash". By default, there
+desired. This file defines the basic signature generator
+OpenEmbedded-Core uses: "OEBasicHash". By default, there
 is a dummy "noop" signature handler enabled in BitBake. This means that
 behavior is unchanged from previous versions. ``OE-Core`` uses the
 "OEBasicHash" signature handler by default through this setting in the
-``bitbake.conf`` file: ::
+``bitbake.conf`` file::

    BB_SIGNATURE_HANDLER ?= "OEBasicHash"

-The "OEBasicHash" ``BB_SIGNATURE_HANDLER`` is the same as the "OEBasic"
-version but adds the task hash to the stamp files. This results in any
-metadata change that changes the task hash, automatically causing the
-task to be run again. This removes the need to bump
-:term:`PR` values, and changes to metadata automatically
-ripple across the build.
+The main feature of the "OEBasicHash" :term:`BB_SIGNATURE_HANDLER` is that
+it adds the task hash to the stamp files. Thanks to this, any metadata
+change will change the task hash, automatically causing the task to be run
+again. This removes the need to bump :term:`PR` values, and changes to
+metadata automatically ripple across the build.

-It is also worth noting that the end result of these signature
+It is also worth noting that the end result of signature
 generators is to make some dependency and hash information available to
 the build. This information includes:

@@ -577,10 +577,7 @@ the build. This information includes:
 - ``BB_BASEHASH_``\ *filename:taskname*: The base hashes for each
   dependent task.

-- ``BBHASHDEPS_``\ *filename:taskname*: The task dependencies for
-  each task.
-
-- ``BB_TASKHASH``: The hash of the currently running task.
+- :term:`BB_TASKHASH`: The hash of the currently running task.

 It is worth noting that BitBake's "-S" option lets you debug BitBake's
 processing of signatures. The options passed to -S allow different
@@ -589,10 +586,11 @@ or possibly those defined in the metadata/signature handler itself. The
 simplest parameter to pass is "none", which causes a set of signature
 information to be written out into ``STAMPS_DIR`` corresponding to the
 targets specified. The other currently available parameter is
-"printdiff", which causes BitBake to try to establish the closest
+"printdiff", which causes BitBake to try to establish the most recent
 signature match it can (e.g. in the sstate cache) and then run
-``bitbake-diffsigs`` over the matches to determine the stamps and delta
-where these two stamp trees diverge.
+compare the matched signatures to determine the stamps and delta
+where these two stamp trees diverge. This can be used to determine why
+tasks need to be re-run in situations where that is not expected.

 .. note::

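As hedged usage sketches of the two ``-S`` parameters described above (the target name is hypothetical)::

   $ bitbake -S none mytarget
   $ bitbake -S printdiff mytarget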
@@ -647,13 +645,6 @@ compiled binary. To handle this, BitBake calls the
 each successful setscene task to know whether or not it needs to obtain
 the dependencies of that task.

-Finally, after all the setscene tasks have executed, BitBake calls the
-function listed in
-:term:`BB_SETSCENE_VERIFY_FUNCTION2`
-with the list of tasks BitBake thinks has been "covered". The metadata
-can then ensure that this list is correct and can inform BitBake that it
-wants specific tasks to be run regardless of the setscene result.
-
 You can find more information on setscene metadata in the
 :ref:`bitbake-user-manual/bitbake-user-manual-metadata:task checksums and setscene`
 section.
@@ -666,7 +657,7 @@ builds are when execute, bitbake also supports user defined
 configuration of the `Python
 logging <https://docs.python.org/3/library/logging.html>`__ facilities
 through the :term:`BB_LOGCONFIG` variable. This
-variable defines a json or yaml `logging
+variable defines a JSON or YAML `logging
 configuration <https://docs.python.org/3/library/logging.config.html>`__
 that will be intelligently merged into the default configuration. The
 logging configuration is merged using the following rules:
@@ -700,9 +691,9 @@ logging configuration is merged using the following rules:
    adds a filter called ``BitBake.defaultFilter``, both filters will be
    applied to the logger

-As an example, consider the following user logging configuration file
-which logs all Hash Equivalence related messages of VERBOSE or higher to
-a file called ``hashequiv.log`` ::
+As a first example, you can create a ``hashequiv.json`` user logging
+configuration file to log all Hash Equivalence related messages of ``VERBOSE``
+or higher priority to a file called ``hashequiv.log``::

    {
       "version": 1,
@@ -731,3 +722,40 @@ a file called ``hashequiv.log`` ::
          }
       }
    }
+
+Then set the :term:`BB_LOGCONFIG` variable in ``conf/local.conf``::
+
+   BB_LOGCONFIG = "hashequiv.json"
+
+Another example is this ``warn.json`` file to log all ``WARNING`` and
+higher priority messages to a ``warn.log`` file::
+
+   {
+      "version": 1,
+      "formatters": {
+         "warnlogFormatter": {
+            "()": "bb.msg.BBLogFormatter",
+            "format": "%(levelname)s: %(message)s"
+         }
+      },
+
+      "handlers": {
+         "warnlog": {
+            "class": "logging.FileHandler",
+            "formatter": "warnlogFormatter",
+            "level": "WARNING",
+            "filename": "warn.log"
+         }
+      },
+
+      "loggers": {
+         "BitBake": {
+            "handlers": ["warnlog"]
+         }
+      },
+
+      "@disable_existing_loggers": false
+   }
+
+Note that BitBake's helper classes for structured logging are implemented in
+``lib/bb/msg.py``.
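By analogy with the ``hashequiv.json`` example already shown in this hunk, the new ``warn.json`` configuration would be enabled the same way in ``conf/local.conf``::

   BB_LOGCONFIG = "warn.json"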
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index 6760b10828..a2c2432db1 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -27,7 +27,7 @@ and unpacking the files is often optionally followed by patching.
 Patching, however, is not covered by this module.

 The code to execute the first part of this process, a fetch, looks
-something like the following: ::
+something like the following::

    src_uri = (d.getVar('SRC_URI') or "").split()
    fetcher = bb.fetch2.Fetch(src_uri, d)
@@ -37,12 +37,12 @@ This code sets up an instance of the fetch class. The instance uses a
 space-separated list of URLs from the :term:`SRC_URI`
 variable and then calls the ``download`` method to download the files.

-The instantiation of the fetch class is usually followed by: ::
+The instantiation of the fetch class is usually followed by::

-   rootdir = l.getVar('WORKDIR')
+   rootdir = l.getVar('UNPACKDIR')
    fetcher.unpack(rootdir)

-This code unpacks the downloaded files to the specified by ``WORKDIR``.
+This code unpacks the downloaded files to the specified by ``UNPACKDIR``.

 .. note::

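Putting the two fragments above together, here is a hedged sketch of a complete fetch-and-unpack step as it might appear in a recipe's Python task (the task name and the error handling are illustrative additions, not part of the original text)::

   python do_example_fetch() {
       src_uri = (d.getVar('SRC_URI') or "").split()
       if src_uri:
           try:
               # download into DL_DIR, then extract into UNPACKDIR
               fetcher = bb.fetch2.Fetch(src_uri, d)
               fetcher.download()
               fetcher.unpack(d.getVar('UNPACKDIR'))
           except bb.fetch2.BBFetchException as e:
               bb.fatal("Fetch failed: " + str(e))
   }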
@@ -51,7 +51,7 @@ This code unpacks the downloaded files to the specified by ``WORKDIR``.
    examine the OpenEmbedded class file ``base.bbclass``
    .

-The ``SRC_URI`` and ``WORKDIR`` variables are not hardcoded into the
+The :term:`SRC_URI` and ``UNPACKDIR`` variables are not hardcoded into the
 fetcher, since those fetcher methods can be (and are) called with
 different variable names. In OpenEmbedded for example, the shared state
 (sstate) code uses the fetch module to fetch the sstate files.
@@ -64,38 +64,38 @@ URLs by looking for source files in a specific search order:
   :term:`PREMIRRORS` variable.

 - *Source URI:* If pre-mirrors fail, BitBake uses the original URL (e.g
-  from ``SRC_URI``).
+  from :term:`SRC_URI`).

 - *Mirror Sites:* If fetch failures occur, BitBake next uses mirror
   locations as defined by the :term:`MIRRORS` variable.

 For each URL passed to the fetcher, the fetcher calls the submodule that
 handles that particular URL type. This behavior can be the source of
-some confusion when you are providing URLs for the ``SRC_URI`` variable.
-Consider the following two URLs: ::
+some confusion when you are providing URLs for the :term:`SRC_URI` variable.
+Consider the following two URLs::

-   http://git.yoctoproject.org/git/poky;protocol=git
+   https://git.yoctoproject.org/git/poky;protocol=git
    git://git.yoctoproject.org/git/poky;protocol=http

 In the former case, the URL is passed to the ``wget`` fetcher, which does not
 understand "git". Therefore, the latter case is the correct form since the Git
 fetcher does know how to use HTTP as a transport.

-Here are some examples that show commonly used mirror definitions: ::
+Here are some examples that show commonly used mirror definitions::

    PREMIRRORS ?= "\
-      bzr://.*/.\* http://somemirror.org/sources/ \\n \
-      cvs://.*/.\* http://somemirror.org/sources/ \\n \
-      git://.*/.\* http://somemirror.org/sources/ \\n \
-      hg://.*/.\* http://somemirror.org/sources/ \\n \
-      osc://.*/.\* http://somemirror.org/sources/ \\n \
-      p4://.*/.\* http://somemirror.org/sources/ \\n \
-      svn://.*/.\* http://somemirror.org/sources/ \\n"
+      bzr://.*/.\* http://somemirror.org/sources/ \
+      cvs://.*/.\* http://somemirror.org/sources/ \
+      git://.*/.\* http://somemirror.org/sources/ \
+      hg://.*/.\* http://somemirror.org/sources/ \
+      osc://.*/.\* http://somemirror.org/sources/ \
+      p4://.*/.\* http://somemirror.org/sources/ \
+      svn://.*/.\* http://somemirror.org/sources/"

    MIRRORS =+ "\
-      ftp://.*/.\* http://somemirror.org/sources/ \\n \
-      http://.*/.\* http://somemirror.org/sources/ \\n \
-      https://.*/.\* http://somemirror.org/sources/ \\n"
+      ftp://.*/.\* http://somemirror.org/sources/ \
+      http://.*/.\* http://somemirror.org/sources/ \
+      https://.*/.\* http://somemirror.org/sources/"

 It is useful to note that BitBake
 supports cross-URLs. It is possible to mirror a Git repository on an
@@ -110,26 +110,26 @@ which is specified by the :term:`DL_DIR` variable.
 File integrity is of key importance for reproducing builds. For
 non-local archive downloads, the fetcher code can verify SHA-256 and MD5
 checksums to ensure the archives have been downloaded correctly. You can
-specify these checksums by using the ``SRC_URI`` variable with the
-appropriate varflags as follows: ::
+specify these checksums by using the :term:`SRC_URI` variable with the
+appropriate varflags as follows::

    SRC_URI[md5sum] = "value"
    SRC_URI[sha256sum] = "value"

 You can also specify the checksums as
-parameters on the ``SRC_URI`` as shown below: ::
+parameters on the :term:`SRC_URI` as shown below::

    SRC_URI = "http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d"

 If multiple URIs exist, you can specify the checksums either directly as
 in the previous example, or you can name the URLs. The following syntax
-shows how you name the URIs: ::
+shows how you name the URIs::

    SRC_URI = "http://example.com/foobar.tar.bz2;name=foo"
    SRC_URI[foo.md5sum] = 4a8e0f237e961fd7785d19d07fdb994d

 After a file has been downloaded and
-has had its checksum checked, a ".done" stamp is placed in ``DL_DIR``.
+has had its checksum checked, a ".done" stamp is placed in :term:`DL_DIR`.
 BitBake uses this stamp during subsequent builds to avoid downloading or
 comparing a checksum for the file again.

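Combining the naming and varflag syntax shown above, a hedged sketch for two named URIs (URLs and checksum values are placeholders)::

   SRC_URI = "http://example.com/foo.tar.bz2;name=foo \
              http://example.com/bar.tar.bz2;name=bar"
   SRC_URI[foo.sha256sum] = "value"
   SRC_URI[bar.sha256sum] = "value"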
@@ -144,6 +144,10 @@ download without a checksum triggers an error message. The
 make any attempted network access a fatal error, which is useful for
 checking that mirrors are complete as well as other things.

+If :term:`BB_CHECK_SSL_CERTS` is set to ``0`` then SSL certificate checking will
+be disabled. This variable defaults to ``1`` so SSL certificates are normally
+checked.
+
 .. _bb-the-unpack:

 The Unpack
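The hunk above documents the new :term:`BB_CHECK_SSL_CERTS` switch; a hedged ``local.conf`` sketch for disabling certificate checks (only sensible behind an intercepting proxy)::

   BB_CHECK_SSL_CERTS = "0"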
@@ -163,8 +167,8 @@ govern the behavior of the unpack stage:
 - *dos:* Applies to ``.zip`` and ``.jar`` files and specifies whether
   to use DOS line ending conversion on text files.

-- *basepath:* Instructs the unpack stage to strip the specified
-  directories from the source path when unpacking.
+- *striplevel:* Strip specified number of leading components (levels)
+  from file names on extraction

 - *subdir:* Unpacks the specific URL to the specified subdirectory
   within the root directory.
@@ -204,7 +208,7 @@ time the ``download()`` method is called.
 If you specify a directory, the entire directory is unpacked.

 Here are a couple of example URLs, the first relative and the second
-absolute: ::
+absolute::

    SRC_URI = "file://relativefile.patch"
    SRC_URI = "file:///Users/ich/very_important_software"
@@ -225,7 +229,12 @@ downloaded file is useful for avoiding collisions in
 :term:`DL_DIR` when dealing with multiple files that
 have the same name.

-Some example URLs are as follows: ::
+If a username and password are specified in the ``SRC_URI``, a Basic
+Authorization header will be added to each request, including across redirects.
+To instead limit the Authorization header to the first request, add
+"redirectauth=0" to the list of parameters.
+
+Some example URLs are as follows::

    SRC_URI = "http://oe.handhelds.org/not_there.aac"
    SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
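A hedged sketch of the new ``redirectauth`` parameter in context (host and credentials are hypothetical)::

   SRC_URI = "https://user:password@example.com/releases/foo.tar.gz;redirectauth=0"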
@@ -235,15 +244,13 @@ Some example URLs are as follows: ::

    Because URL parameters are delimited by semi-colons, this can
    introduce ambiguity when parsing URLs that also contain semi-colons,
-   for example:
-   ::
+   for example::

       SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"


    Such URLs should should be modified by replacing semi-colons with '&'
-   characters:
-   ::
+   characters::

       SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47"

@@ -251,8 +258,7 @@ Some example URLs are as follows: ::
    In most cases this should work. Treating semi-colons and '&' in
    queries identically is recommended by the World Wide Web Consortium
    (W3C). Note that due to the nature of the URL, you may have to
-   specify the name of the downloaded file as well:
-   ::
+   specify the name of the downloaded file as well::

       SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47;downloadfilename=myfile.bz2"

@@ -321,7 +327,7 @@ The supported parameters are as follows:

 - *"port":* The port to which the CVS server connects.

-Some example URLs are as follows: ::
+Some example URLs are as follows::

    SRC_URI = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
    SRC_URI = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
@@ -363,7 +369,7 @@ The supported parameters are as follows:
   username is different than the username used in the main URL, which
   is passed to the subversion command.

-Following are three examples using svn: ::
+Following are three examples using svn::

    SRC_URI = "svn://myrepos/proj1;module=vip;protocol=http;rev=667"
    SRC_URI = "svn://myrepos/proj1;module=opie;protocol=svn+ssh"
@@ -390,6 +396,19 @@ This fetcher supports the following parameters:
   protocol is "file". You can also use "http", "https", "ssh" and
   "rsync".

+  .. note::
+
+     When ``protocol`` is "ssh", the URL expected in :term:`SRC_URI` differs
+     from the one that is typically passed to ``git clone`` command and provided
+     by the Git server to fetch from. For example, the URL returned by GitLab
+     server for ``mesa`` when cloning over SSH is
+     ``git@gitlab.freedesktop.org:mesa/mesa.git``, however the expected URL in
+     :term:`SRC_URI` is the following::
+
+        SRC_URI = "git://git@gitlab.freedesktop.org/mesa/mesa.git;branch=main;protocol=ssh;..."
+
+     Note the ``:`` character changed for a ``/`` before the path to the project.
+
 - *"nocheckout":* Tells the fetcher to not checkout source code when
   unpacking when set to "1". Set this option for the URL where there is
   a custom routine to checkout code. The default is "0".
@@ -405,17 +424,17 @@ This fetcher supports the following parameters:

 - *"nobranch":* Tells the fetcher to not check the SHA validation for
   the branch when set to "1". The default is "0". Set this option for
-  the recipe that refers to the commit that is valid for a tag instead
-  of the branch.
+  the recipe that refers to the commit that is valid for any namespace
+  (branch, tag, ...) instead of the branch.

 - *"bareclone":* Tells the fetcher to clone a bare clone into the
   destination directory without checking out a working tree. Only the
   raw Git metadata is provided. This parameter implies the "nocheckout"
   parameter as well.

-- *"branch":* The branch(es) of the Git tree to clone. If unset, this
-  is assumed to be "master". The number of branch parameters much match
-  the number of name parameters.
+- *"branch":* The branch(es) of the Git tree to clone. Unless
+  "nobranch" is set to "1", this is a mandatory parameter. The number of
+  branch parameters must match the number of name parameters.

 - *"rev":* The revision to use for the checkout. The default is
   "master".
@@ -436,10 +455,28 @@ This fetcher supports the following parameters:
   parameter implies no branch and only works when the transfer protocol
   is ``file://``.

-Here are some example URLs: ::
+Here are some example URLs::
+
+   SRC_URI = "git://github.com/fronteed/icheck.git;protocol=https;branch=${PV};tag=${PV}"
+   SRC_URI = "git://github.com/asciidoc/asciidoc-py;protocol=https;branch=main"
+   SRC_URI = "git://git@gitlab.freedesktop.org/mesa/mesa.git;branch=main;protocol=ssh;..."
+
+.. note::

-   SRC_URI = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
-   SRC_URI = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
+   Specifying passwords directly in ``git://`` urls is not supported.
+   There are several reasons: :term:`SRC_URI` is often written out to logs and
+   other places, and that could easily leak passwords; it is also all too
+   easy to share metadata without removing passwords. SSH keys, ``~/.netrc``
+   and ``~/.ssh/config`` files can be used as alternatives.
+
+Using tags with the git fetcher may cause surprising behaviour. Bitbake needs to
+resolve the tag to a specific revision and to do that, it has to connect to and use
+the upstream repository. This is because the revision the tags point at can change and
+we've seen cases of this happening in well known public repositories. This can mean
+many more network connections than expected and recipes may be reparsed at every build.
+Source mirrors will also be bypassed as the upstream repository is the only source
+of truth to resolve the revision accurately. For these reasons, whilst the fetcher
+can support tags, we recommend being specific about revisions in recipes.

 .. _gitsm-fetcher:

@@ -475,7 +512,7 @@ repository.
475 512
476To use this fetcher, make sure your recipe has proper 513To use this fetcher, make sure your recipe has proper
477:term:`SRC_URI`, :term:`SRCREV`, and 514:term:`SRC_URI`, :term:`SRCREV`, and
478:term:`PV` settings. Here is an example: :: 515:term:`PV` settings. Here is an example::
479 516
480 SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module" 517 SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
481 SRCREV = "EXAMPLE_CLEARCASE_TAG" 518 SRCREV = "EXAMPLE_CLEARCASE_TAG"
@@ -484,7 +521,7 @@ To use this fetcher, make sure your recipe has proper
484The fetcher uses the ``rcleartool`` or 521The fetcher uses the ``rcleartool`` or
485``cleartool`` remote client, depending on which one is available. 522``cleartool`` remote client, depending on which one is available.
486 523
487Following are options for the ``SRC_URI`` statement: 524Following are options for the :term:`SRC_URI` statement:
488 525
489- *vob*: The name, which must include the prepending "/" character, 526- *vob*: The name, which must include the prepending "/" character,
490 of the ClearCase VOB. This option is required. 527 of the ClearCase VOB. This option is required.
@@ -497,7 +534,7 @@ Following are options for the ``SRC_URI`` statement:
497 The module and vob options are combined to create the load rule in the 534 The module and vob options are combined to create the load rule in the
498 view config spec. As an example, consider the vob and module values from 535 view config spec. As an example, consider the vob and module values from
499 the SRC_URI statement at the start of this section. Combining those values 536 the SRC_URI statement at the start of this section. Combining those values
500 results in the following: :: 537 results in the following::
501 538
502 load /example_vob/example_module 539 load /example_vob/example_module
503 540
@@ -546,32 +583,32 @@ password if you do not wish to keep those values in a recipe itself. If
546you choose not to use ``P4CONFIG``, or to explicitly set variables that 583you choose not to use ``P4CONFIG``, or to explicitly set variables that
547``P4CONFIG`` can contain, you can specify the ``P4PORT`` value, which is 584``P4CONFIG`` can contain, you can specify the ``P4PORT`` value, which is
548the server's URL and port number, and you can specify a username and 585the server's URL and port number, and you can specify a username and
549password directly in your recipe within ``SRC_URI``. 586password directly in your recipe within :term:`SRC_URI`.
550 587
551Here is an example that relies on ``P4CONFIG`` to specify the server URL 588Here is an example that relies on ``P4CONFIG`` to specify the server URL
552and port, username, and password, and fetches the Head Revision: :: 589and port, username, and password, and fetches the Head Revision::
553 590
554 SRC_URI = "p4://example-depot/main/source/..." 591 SRC_URI = "p4://example-depot/main/source/..."
555 SRCREV = "${AUTOREV}" 592 SRCREV = "${AUTOREV}"
556 PV = "p4-${SRCPV}" 593 PV = "p4-${SRCPV}"
557 S = "${WORKDIR}/p4" 594 S = "${UNPACKDIR}/p4"
558 595
559Here is an example that specifies the server URL and port, username, and 596Here is an example that specifies the server URL and port, username, and
560password, and fetches a Revision based on a Label: :: 597password, and fetches a Revision based on a Label::
561 598
562 P4PORT = "tcp:p4server.example.net:1666" 599 P4PORT = "tcp:p4server.example.net:1666"
563 SRC_URI = "p4://user:passwd@example-depot/main/source/..." 600 SRC_URI = "p4://user:passwd@example-depot/main/source/..."
564 SRCREV = "release-1.0" 601 SRCREV = "release-1.0"
565 PV = "p4-${SRCPV}" 602 PV = "p4-${SRCPV}"
566 S = "${WORKDIR}/p4" 603 S = "${UNPACKDIR}/p4"
567 604
568.. note:: 605.. note::
569 606
570 You should always set S to "${WORKDIR}/p4" in your recipe. 607 You should always set S to "${UNPACKDIR}/p4" in your recipe.
571 608
572By default, the fetcher strips the depot location from the local file paths. In 609By default, the fetcher strips the depot location from the local file paths. In
573the above example, the content of ``example-depot/main/source/`` will be placed 610the above example, the content of ``example-depot/main/source/`` will be placed
574in ``${WORKDIR}/p4``. For situations where preserving parts of the remote depot 611in ``${UNPACKDIR}/p4``. For situations where preserving parts of the remote depot
575paths locally is desirable, the fetcher supports two parameters: 612paths locally is desirable, the fetcher supports two parameters:
576 613
577- *"module":* 614- *"module":*
@@ -583,7 +620,7 @@ paths locally is desirable, the fetcher supports two parameters:
583 paths locally for the specified location, even in combination with the 620 paths locally for the specified location, even in combination with the
584 ``module`` parameter. 621 ``module`` parameter.
585 622
586Here is an example use of the the ``module`` parameter: :: 623Here is an example use of the ``module`` parameter::
587 624
588 SRC_URI = "p4://user:passwd@example-depot/main;module=source/..." 625 SRC_URI = "p4://user:passwd@example-depot/main;module=source/..."
589 626
@@ -591,7 +628,7 @@ In this case, the content of the top-level directory ``source/`` will be fetched
591to ``${P4DIR}``, including the directory itself. The top-level directory will 628to ``${P4DIR}``, including the directory itself. The top-level directory will
592be accessible at ``${P4DIR}/source/``. 629be accessible at ``${P4DIR}/source/``.
593 630
594Here is an example use of the the ``remotepath`` parameter: :: 631Here is an example use of the ``remotepath`` parameter::
595 632
596 SRC_URI = "p4://user:passwd@example-depot/main;module=source/...;remotepath=keep" 633 SRC_URI = "p4://user:passwd@example-depot/main;module=source/...;remotepath=keep"
597 634
@@ -619,11 +656,166 @@ This fetcher supports the following parameters:
619 656
620- *"manifest":* Name of the manifest file (default: ``default.xml``). 657- *"manifest":* Name of the manifest file (default: ``default.xml``).
621 658
622Here are some example URLs: :: 659Here are some example URLs::
623 660
624 SRC_URI = "repo://REPOROOT;protocol=git;branch=some_branch;manifest=my_manifest.xml" 661 SRC_URI = "repo://REPOROOT;protocol=git;branch=some_branch;manifest=my_manifest.xml"
625 SRC_URI = "repo://REPOROOT;protocol=file;branch=some_branch;manifest=my_manifest.xml" 662 SRC_URI = "repo://REPOROOT;protocol=file;branch=some_branch;manifest=my_manifest.xml"
626 663
664.. _az-fetcher:
665
666Az Fetcher (``az://``)
667--------------------------
668
669This submodule fetches data from an
670`Azure Storage account <https://docs.microsoft.com/en-us/azure/storage/>`__.
671It inherits its functionality from the HTTP wget fetcher, but modifies its
672behavior to accommodate the use of a
673`Shared Access Signature (SAS) <https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview>`__
674for non-public data.
675
676Such functionality is controlled by the following variable:
677
678- :term:`AZ_SAS`: The Azure Storage Shared Access Signature provides secure
679 delegated access to resources. If this variable is set, the Az Fetcher will
680 use it when fetching artifacts from the cloud.
681
682You can specify the :term:`AZ_SAS` variable prefixed with a ``?`` as shown below::
683
684 AZ_SAS = "?se=2021-01-01&sp=r&sv=2018-11-09&sr=c&skoid=<skoid>&sig=<signature>"
685
686Here is an example URL::
687
688 SRC_URI = "az://<azure-storage-account>.blob.core.windows.net/<foo_container>/<bar_file>"
689
690The Az Fetcher can also be used when setting mirror definitions with the :term:`PREMIRRORS` variable.
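
For example, a hypothetical premirror entry redirecting HTTPS downloads to an
Azure container could look like this::

   PREMIRRORS:prepend = "https://.*/.* az://<azure-storage-account>.blob.core.windows.net/<mirror_container>/ "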
691
692.. _gcp-fetcher:
693
694GCP Fetcher (``gs://``)
695--------------------------
696
697This submodule fetches data from a
698`Google Cloud Storage Bucket <https://cloud.google.com/storage/docs/buckets>`__.
699It uses the `Google Cloud Storage Python Client <https://cloud.google.com/python/docs/reference/storage/latest>`__
700to check the status of objects in the bucket and download them.
701The use of the Python client makes it substantially faster than using command
702line tools such as gsutil.
703
704The fetcher requires the Google Cloud Storage Python Client to be installed, along
705with the gsutil tool.
706
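As a minimal sketch (assuming a standard Python environment with ``pip``
available), the client can be installed with::

   $ pip install google-cloud-storage
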
707The fetcher requires that the machine has valid credentials for accessing the
708chosen bucket. Instructions for authentication can be found in the
709`Google Cloud documentation <https://cloud.google.com/docs/authentication/provide-credentials-adc#local-dev>`__.
710
711When used from the OpenEmbedded build system, the fetcher can be used for
712fetching sstate artifacts from a GCS bucket by specifying the
713``SSTATE_MIRRORS`` variable as shown below::
714
715 SSTATE_MIRRORS ?= "\
716 file://.* gs://<bucket name>/PATH \
717 "
718
719The fetcher can also be used in recipes::
720
721 SRC_URI = "gs://<bucket name>/<foo_container>/<bar_file>"
722
723However, the checksum of the file should also be provided::
724
725 SRC_URI[sha256sum] = "<sha256 string>"
726
727.. _crate-fetcher:
728
729Crate Fetcher (``crate://``)
730----------------------------
731
732This submodule fetches code for
733`Rust language "crates" <https://doc.rust-lang.org/reference/glossary.html?highlight=crate#crate>`__
734corresponding to Rust libraries and programs to compile. Such crates are typically shared
735on https://crates.io/ but this fetcher supports other crate registries too.
736
737The format for the :term:`SRC_URI` setting must be::
738
739 SRC_URI = "crate://REGISTRY/NAME/VERSION"
740
741Here is an example URL::
742
743 SRC_URI = "crate://crates.io/glob/0.2.11"
744
745.. _npm-fetcher:
746
747NPM Fetcher (``npm://``)
748------------------------
749
750This submodule fetches source code from an
751`NPM <https://en.wikipedia.org/wiki/Npm_(software)>`__
752JavaScript package registry.
753
754The format for the :term:`SRC_URI` setting must be::
755
756 SRC_URI = "npm://some.registry.url;ParameterA=xxx;ParameterB=xxx;..."
757
758This fetcher supports the following parameters:
759
760- *"package":* The NPM package name. This is a mandatory parameter.
761
762- *"version":* The NPM package version. This is a mandatory parameter.
763
764- *"downloadfilename":* Specifies the filename used when storing the downloaded file.
765
766- *"destsuffix":* Specifies the directory to use to unpack the package (default: ``npm``).
767
768Note that the NPM fetcher only fetches the package source itself. The dependencies
769can be fetched through the `npmsw-fetcher`_.
770
771Here is an example URL with both fetchers::
772
773 SRC_URI = " \
774 npm://registry.npmjs.org/;package=cute-files;version=${PV} \
775 npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
776 "
777
778See :yocto_docs:`Creating Node Package Manager (NPM) Packages
779</dev-manual/packages.html#creating-node-package-manager-npm-packages>`
780in the Yocto Project manual for details about using
781:yocto_docs:`devtool <https://docs.yoctoproject.org/ref-manual/devtool-reference.html>`
782to automatically create a recipe from an NPM URL.
783
784.. _npmsw-fetcher:
785
786NPM shrinkwrap Fetcher (``npmsw://``)
787-------------------------------------
788
789This submodule fetches source code from an
790`NPM shrinkwrap <https://docs.npmjs.com/cli/v8/commands/npm-shrinkwrap>`__
791description file, which lists the dependencies
792of an NPM package while locking their versions.
793
794The format for the :term:`SRC_URI` setting must be::
795
796 SRC_URI = "npmsw://some.registry.url;ParameterA=xxx;ParameterB=xxx;..."
797
798This fetcher supports the following parameters:
799
800- *"dev":* Set this parameter to ``1`` to install "devDependencies".
801
802- *"destsuffix":* Specifies the directory to use to unpack the dependencies
803 (``${S}`` by default).
804
805Note that the shrinkwrap file can also be provided by the recipe for
806the package which has such dependencies, for example::
807
808 SRC_URI = " \
809 npm://registry.npmjs.org/;package=cute-files;version=${PV} \
810 npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
811 "
812
813Such a file can automatically be generated using
814:yocto_docs:`devtool <https://docs.yoctoproject.org/ref-manual/devtool-reference.html>`
815as described in the :yocto_docs:`Creating Node Package Manager (NPM) Packages
816</dev-manual/packages.html#creating-node-package-manager-npm-packages>`
817section of the Yocto Project manual.
818
627Other Fetchers 819Other Fetchers
628-------------- 820--------------
629 821
@@ -633,10 +825,10 @@ Fetch submodules also exist for the following:
633 825
634- Mercurial (``hg://``) 826- Mercurial (``hg://``)
635 827
636- npm (``npm://``)
637
638- OSC (``osc://``) 828- OSC (``osc://``)
639 829
830- S3 (``s3://``)
831
640- Secure FTP (``sftp://``) 832- Secure FTP (``sftp://``)
641 833
642- Secure Shell (``ssh://``) 834- Secure Shell (``ssh://``)
@@ -649,4 +841,4 @@ submodules. However, you might find the code helpful and readable.
649Auto Revisions 841Auto Revisions
650============== 842==============
651 843
652We need to document ``AUTOREV`` and ``SRCREV_FORMAT`` here. 844We need to document ``AUTOREV`` and :term:`SRCREV_FORMAT` here.
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
index e3fd321588..654196ca24 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
@@ -18,28 +18,32 @@ it.
18Obtaining BitBake 18Obtaining BitBake
19================= 19=================
20 20
21See the :ref:`bitbake-user-manual/bitbake-user-manual-hello:obtaining bitbake` section for 21See the :ref:`bitbake-user-manual/bitbake-user-manual-intro:obtaining bitbake` section for
22information on how to obtain BitBake. Once you have the source code on 22information on how to obtain BitBake. Once you have the source code on
23your machine, the BitBake directory appears as follows: :: 23your machine, the BitBake directory appears as follows::
24 24
25 $ ls -al 25 $ ls -al
26 total 100 26 total 108
27 drwxrwxr-x. 9 wmat wmat 4096 Jan 31 13:44 . 27 drwxr-xr-x 9 fawkh 10000 4096 feb 24 12:10 .
28 drwxrwxr-x. 3 wmat wmat 4096 Feb 4 10:45 .. 28 drwx------ 36 fawkh 10000 4096 mar 2 17:00 ..
29 -rw-rw-r--. 1 wmat wmat 365 Nov 26 04:55 AUTHORS 29 -rw-r--r-- 1 fawkh 10000 365 feb 24 12:10 AUTHORS
30 drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 bin 30 drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 bin
31 drwxrwxr-x. 4 wmat wmat 4096 Jan 31 13:44 build 31 -rw-r--r-- 1 fawkh 10000 16501 feb 24 12:10 ChangeLog
32 -rw-rw-r--. 1 wmat wmat 16501 Nov 26 04:55 ChangeLog 32 drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 classes
33 drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 classes 33 drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 conf
34 drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 conf 34 drwxr-xr-x 5 fawkh 10000 4096 feb 24 12:10 contrib
35 drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 contrib 35 drwxr-xr-x 6 fawkh 10000 4096 feb 24 12:10 doc
36 -rw-rw-r--. 1 wmat wmat 17987 Nov 26 04:55 COPYING 36 drwxr-xr-x 8 fawkh 10000 4096 mar 2 16:26 .git
37 drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 doc 37 -rw-r--r-- 1 fawkh 10000 31 feb 24 12:10 .gitattributes
38 -rw-rw-r--. 1 wmat wmat 69 Nov 26 04:55 .gitignore 38 -rw-r--r-- 1 fawkh 10000 392 feb 24 12:10 .gitignore
39 -rw-rw-r--. 1 wmat wmat 849 Nov 26 04:55 HEADER 39 drwxr-xr-x 13 fawkh 10000 4096 feb 24 12:11 lib
40 drwxrwxr-x. 5 wmat wmat 4096 Jan 31 13:44 lib 40 -rw-r--r-- 1 fawkh 10000 1224 feb 24 12:10 LICENSE
41 -rw-rw-r--. 1 wmat wmat 195 Nov 26 04:55 MANIFEST.in 41 -rw-r--r-- 1 fawkh 10000 15394 feb 24 12:10 LICENSE.GPL-2.0-only
42 -rw-rw-r--. 1 wmat wmat 2887 Nov 26 04:55 TODO 42 -rw-r--r-- 1 fawkh 10000 1286 feb 24 12:10 LICENSE.MIT
43 -rw-r--r-- 1 fawkh 10000 229 feb 24 12:10 MANIFEST.in
44 -rw-r--r-- 1 fawkh 10000 2413 feb 24 12:10 README
45 -rw-r--r-- 1 fawkh 10000 43 feb 24 12:10 toaster-requirements.txt
46 -rw-r--r-- 1 fawkh 10000 2887 feb 24 12:10 TODO
43 47
44At this point, you should have BitBake cloned to a directory that 48At this point, you should have BitBake cloned to a directory that
45matches the previous listing except for dates and user names. 49matches the previous listing except for dates and user names.
@@ -49,10 +53,10 @@ Setting Up the BitBake Environment
49 53
50First, you need to be sure that you can run BitBake. Set your working 54First, you need to be sure that you can run BitBake. Set your working
51directory to where your local BitBake files are and run the following 55directory to where your local BitBake files are and run the following
52command: :: 56command::
53 57
54 $ ./bin/bitbake --version 58 $ ./bin/bitbake --version
55 BitBake Build Tool Core version 1.23.0, bitbake version 1.23.0 59 BitBake Build Tool Core version 2.3.1
56 60
57The console output tells you what version 61The console output tells you what version
58you are running. 62you are running.
@@ -61,14 +65,14 @@ The recommended method to run BitBake is from a directory of your
61choice. To be able to run BitBake from any directory, you need to add 65choice. To be able to run BitBake from any directory, you need to add
62the executable binary to your shell's environment 66the executable binary to your shell's environment
63``PATH`` variable. First, look at your current ``PATH`` variable by 67``PATH`` variable. First, look at your current ``PATH`` variable by
64entering the following: :: 68entering the following::
65 69
66 $ echo $PATH 70 $ echo $PATH
67 71
68Next, add the directory location 72Next, add the directory location
69for the BitBake binary to the ``PATH``. Here is an example that adds the 73for the BitBake binary to the ``PATH``. Here is an example that adds the
70``/home/scott-lenovo/bitbake/bin`` directory to the front of the 74``/home/scott-lenovo/bitbake/bin`` directory to the front of the
71``PATH`` variable: :: 75``PATH`` variable::
72 76
73 $ export PATH=/home/scott-lenovo/bitbake/bin:$PATH 77 $ export PATH=/home/scott-lenovo/bitbake/bin:$PATH
74 78
@@ -99,7 +103,7 @@ discussion mailing list about the BitBake build tool.
99 103
100 This example was inspired by and drew heavily from 104 This example was inspired by and drew heavily from
101 `Mailing List post - The BitBake equivalent of "Hello, World!" 105 `Mailing List post - The BitBake equivalent of "Hello, World!"
102 <http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html>`_. 106 <https://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html>`_.
103 107
104As stated earlier, the goal of this example is to eventually compile 108As stated earlier, the goal of this example is to eventually compile
105"Hello World". However, it is unknown what BitBake needs and what you 109"Hello World". However, it is unknown what BitBake needs and what you
@@ -116,7 +120,7 @@ Following is the complete "Hello World" example.
116 120
117#. **Create a Project Directory:** First, set up a directory for the 121#. **Create a Project Directory:** First, set up a directory for the
118 "Hello World" project. Here is how you can do so in your home 122 "Hello World" project. Here is how you can do so in your home
119 directory: :: 123 directory::
120 124
121 $ mkdir ~/hello 125 $ mkdir ~/hello
122 $ cd ~/hello 126 $ cd ~/hello
@@ -127,41 +131,26 @@ Following is the complete "Hello World" example.
127 directory is a good way to isolate your project. 131 directory is a good way to isolate your project.
128 132
129#. **Run BitBake:** At this point, you have nothing but a project 133#. **Run BitBake:** At this point, you have nothing but a project
130 directory. Run the ``bitbake`` command and see what it does: :: 134 directory. Run the ``bitbake`` command and see what it does::
131 135
132 $ bitbake 136 $ bitbake
133 The BBPATH variable is not set and bitbake did not 137 ERROR: The BBPATH variable is not set and bitbake did not find a conf/bblayers.conf file in the expected location.
134 find a conf/bblayers.conf file in the expected location.
135 Maybe you accidentally invoked bitbake from the wrong directory? 138 Maybe you accidentally invoked bitbake from the wrong directory?
136 DEBUG: Removed the following variables from the environment:
137 GNOME_DESKTOP_SESSION_ID, XDG_CURRENT_DESKTOP,
138 GNOME_KEYRING_CONTROL, DISPLAY, SSH_AGENT_PID, LANG, no_proxy,
139 XDG_SESSION_PATH, XAUTHORITY, SESSION_MANAGER, SHLVL,
140 MANDATORY_PATH, COMPIZ_CONFIG_PROFILE, WINDOWID, EDITOR,
141 GPG_AGENT_INFO, SSH_AUTH_SOCK, GDMSESSION, GNOME_KEYRING_PID,
142 XDG_SEAT_PATH, XDG_CONFIG_DIRS, LESSOPEN, DBUS_SESSION_BUS_ADDRESS,
143 _, XDG_SESSION_COOKIE, DESKTOP_SESSION, LESSCLOSE, DEFAULTS_PATH,
144 UBUNTU_MENUPROXY, OLDPWD, XDG_DATA_DIRS, COLORTERM, LS_COLORS
145
146 The majority of this output is specific to environment variables that
147 are not directly relevant to BitBake. However, the very first
148 message regarding the ``BBPATH`` variable and the
149 ``conf/bblayers.conf`` file is relevant.
150 139
151 When you run BitBake, it begins looking for metadata files. The 140 When you run BitBake, it begins looking for metadata files. The
152 :term:`BBPATH` variable is what tells BitBake where 141 :term:`BBPATH` variable is what tells BitBake where
153 to look for those files. ``BBPATH`` is not set and you need to set 142 to look for those files. :term:`BBPATH` is not set and you need to set
154 it. Without ``BBPATH``, BitBake cannot find any configuration files 143 it. Without :term:`BBPATH`, BitBake cannot find any configuration files
155 (``.conf``) or recipe files (``.bb``) at all. BitBake also cannot 144 (``.conf``) or recipe files (``.bb``) at all. BitBake also cannot
156 find the ``bitbake.conf`` file. 145 find the ``bitbake.conf`` file.
157 146
158#. **Setting BBPATH:** For this example, you can set ``BBPATH`` in 147#. **Setting BBPATH:** For this example, you can set :term:`BBPATH` in
159 the same manner that you set ``PATH`` earlier in the appendix. You 148 the same manner that you set ``PATH`` earlier in the appendix. You
160 should realize, though, that it is much more flexible to set the 149 should realize, though, that it is much more flexible to set the
161 ``BBPATH`` variable up in a configuration file for each project. 150 :term:`BBPATH` variable up in a configuration file for each project.
162 151
163 From your shell, enter the following commands to set and export the 152 From your shell, enter the following commands to set and export the
164 ``BBPATH`` variable: :: 153 :term:`BBPATH` variable::
165 154
166 $ BBPATH="projectdirectory" 155 $ BBPATH="projectdirectory"
167 $ export BBPATH 156 $ export BBPATH
@@ -175,24 +164,18 @@ Following is the complete "Hello World" example.
175 ("~") character as BitBake does not expand that character as the 164 ("~") character as BitBake does not expand that character as the
176 shell would. 165 shell would.
177 166
178#. **Run BitBake:** Now that you have ``BBPATH`` defined, run the 167#. **Run BitBake:** Now that you have :term:`BBPATH` defined, run the
179 ``bitbake`` command again: :: 168 ``bitbake`` command again::
180 169
181 $ bitbake 170 $ bitbake
182 ERROR: Traceback (most recent call last): 171 ERROR: Unable to parse /home/scott-lenovo/bitbake/lib/bb/parse/__init__.py
183 File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped 172 Traceback (most recent call last):
184 return func(fn, *args) 173 File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 127, in resolve_file(fn='conf/bitbake.conf', d=<bb.data_smart.DataSmart object at 0x7f22919a3df0>):
185 File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 173, in parse_config_file 174 if not newfn:
186 return bb.parse.handle(fn, data, include) 175 > raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
187 File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 99, in handle 176 fn = newfn
188 return h['handle'](fn, data, include) 177 FileNotFoundError: [Errno 2] file conf/bitbake.conf not found in <projectdirectory>
189 File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 120, in handle 178
190 abs_fn = resolve_file(fn, data)
191 File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 117, in resolve_file
192 raise IOError("file %s not found in %s" % (fn, bbpath))
193 IOError: file conf/bitbake.conf not found in /home/scott-lenovo/hello
194
195 ERROR: Unable to parse conf/bitbake.conf: file conf/bitbake.conf not found in /home/scott-lenovo/hello
196 179
197 This sample output shows that BitBake could not find the 180 This sample output shows that BitBake could not find the
198 ``conf/bitbake.conf`` file in the project directory. This file is 181 ``conf/bitbake.conf`` file in the project directory. This file is
@@ -205,18 +188,18 @@ Following is the complete "Hello World" example.
205 recipe files. For this example, you need to create the file in your 188 recipe files. For this example, you need to create the file in your
206 project directory and define some key BitBake variables. For more 189 project directory and define some key BitBake variables. For more
207 information on the ``bitbake.conf`` file, see 190 information on the ``bitbake.conf`` file, see
208 http://git.openembedded.org/bitbake/tree/conf/bitbake.conf. 191 https://git.openembedded.org/bitbake/tree/conf/bitbake.conf.
209 192
210 Use the following commands to create the ``conf`` directory in the 193 Use the following commands to create the ``conf`` directory in the
211 project directory: :: 194 project directory::
212 195
213 $ mkdir conf 196 $ mkdir conf
214 197
215 From within the ``conf`` directory, 198 From within the ``conf`` directory,
216 use some editor to create the ``bitbake.conf`` so that it contains 199 use some editor to create the ``bitbake.conf`` so that it contains
217 the following: :: 200 the following::
218 201
219 PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}" 202 PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
220 203
221 TMPDIR = "${TOPDIR}/tmp" 204 TMPDIR = "${TOPDIR}/tmp"
222 CACHE = "${TMPDIR}/cache" 205 CACHE = "${TMPDIR}/cache"
@@ -226,12 +209,12 @@ Following is the complete "Hello World" example.
226 209
227 .. note:: 210 .. note::
228 211
229 Without a value for PN , the variables STAMP , T , and B , prevent more 212 Without a value for :term:`PN`, the variables :term:`STAMP`, :term:`T`, and :term:`B` prevent more
230 than one recipe from working. You can fix this by either setting PN to 213 than one recipe from working. You can fix this by either setting :term:`PN` to
231 have a value similar to what OpenEmbedded and BitBake use in the default 214 have a value similar to what OpenEmbedded and BitBake use in the default
232 bitbake.conf file (see previous example). Or, by manually updating each 215 ``bitbake.conf`` file (see previous example), or by manually updating each
233 recipe to set PN . You will also need to include PN as part of the STAMP 216 recipe to set :term:`PN`. You will also need to include :term:`PN` as part of the :term:`STAMP`,
234 , T , and B variable definitions in the local.conf file. 217 :term:`T`, and :term:`B` variable definitions in the ``local.conf`` file.
235 218
236 The ``TMPDIR`` variable establishes a directory that BitBake uses 219 The ``TMPDIR`` variable establishes a directory that BitBake uses
237 for build output and intermediate files other than the cached 220 for build output and intermediate files other than the cached
@@ -251,21 +234,17 @@ Following is the complete "Hello World" example.
251 glossary. 234 glossary.
252 235
253#. **Run BitBake:** After making sure that the ``conf/bitbake.conf`` file 236#. **Run BitBake:** After making sure that the ``conf/bitbake.conf`` file
254 exists, you can run the ``bitbake`` command again: :: 237 exists, you can run the ``bitbake`` command again::
255 238
256 $ bitbake 239 $ bitbake
257 ERROR: Traceback (most recent call last): 240 ERROR: Unable to parse /home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py
258 File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped 241 Traceback (most recent call last):
259 return func(fn, *args) 242 File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 67, in inherit(files=['base'], fn='configuration INHERITs', lineno=0, d=<bb.data_smart.DataSmart object at 0x7fab6815edf0>):
260 File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 177, in _inherit 243 if not os.path.exists(file):
261 bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data) 244 > raise ParseError("Could not inherit file %s" % (file), fn, lineno)
262 File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 92, in inherit 245
263 include(fn, file, lineno, d, "inherit") 246 bb.parse.ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
264 File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 100, in include 247
265 raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
266 ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
267
268 ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
269 248
270 In the sample output, 249 In the sample output,
271 BitBake could not find the ``classes/base.bbclass`` file. You need 250 BitBake could not find the ``classes/base.bbclass`` file. You need
@@ -278,20 +257,23 @@ Following is the complete "Hello World" example.
278 in the ``classes`` directory of the project (i.e. ``hello/classes`` 257 in the ``classes`` directory of the project (i.e. ``hello/classes``
279 in this example). 258 in this example).
280 259
281 Create the ``classes`` directory as follows: :: 260 Create the ``classes`` directory as follows::
282 261
283 $ cd $HOME/hello 262 $ cd $HOME/hello
284 $ mkdir classes 263 $ mkdir classes
285 264
286 Move to the ``classes`` directory and then create the 265 Move to the ``classes`` directory and then create the
287 ``base.bbclass`` file by inserting this single line: addtask build 266 ``base.bbclass`` file by inserting this single line::
267
268 addtask build
269
288 The minimal task that BitBake runs is the ``do_build`` task. This is 270 The minimal task that BitBake runs is the ``do_build`` task. This is
289 all the example needs in order to build the project. Of course, the 271 all the example needs in order to build the project. Of course, the
290 ``base.bbclass`` can have much more depending on which build 272 ``base.bbclass`` can have much more depending on which build
291 environments BitBake is supporting. 273 environments BitBake is supporting.
292 274
293#. **Run BitBake:** After making sure that the ``classes/base.bbclass`` 275#. **Run BitBake:** After making sure that the ``classes/base.bbclass``
294 file exists, you can run the ``bitbake`` command again: :: 276 file exists, you can run the ``bitbake`` command again::
295 277
296 $ bitbake 278 $ bitbake
297 Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information. 279 Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.
@@ -314,7 +296,7 @@ Following is the complete "Hello World" example.
314 Minimally, you need a recipe file and a layer configuration file in 296 Minimally, you need a recipe file and a layer configuration file in
315 your layer. The configuration file needs to be in the ``conf`` 297 your layer. The configuration file needs to be in the ``conf``
316 directory inside the layer. Use these commands to set up the layer 298 directory inside the layer. Use these commands to set up the layer
317 and the ``conf`` directory: :: 299 and the ``conf`` directory::
318 300
319 $ cd $HOME 301 $ cd $HOME
320 $ mkdir mylayer 302 $ mkdir mylayer
@@ -322,20 +304,29 @@ Following is the complete "Hello World" example.
322 $ mkdir conf 304 $ mkdir conf
323 305
324 Move to the ``conf`` directory and create a ``layer.conf`` file that has the 306 Move to the ``conf`` directory and create a ``layer.conf`` file that has the
325 following: :: 307 following::
326 308
327 BBPATH .= ":${LAYERDIR}" 309 BBPATH .= ":${LAYERDIR}"
328 BBFILES += "${LAYERDIR}/\*.bb" 310 BBFILES += "${LAYERDIR}/*.bb"
329 BBFILE_COLLECTIONS += "mylayer" 311 BBFILE_COLLECTIONS += "mylayer"
330 `BBFILE_PATTERN_mylayer := "^${LAYERDIR_RE}/" 312 BBFILE_PATTERN_mylayer := "^${LAYERDIR_RE}/"
313 LAYERSERIES_CORENAMES = "hello_world_example"
314 LAYERSERIES_COMPAT_mylayer = "hello_world_example"
331 315
332 For information on these variables, click on :term:`BBFILES`, 316 For information on these variables, click on :term:`BBFILES`,
333 :term:`LAYERDIR`, :term:`BBFILE_COLLECTIONS` or :term:`BBFILE_PATTERN_mylayer <BBFILE_PATTERN>` 317 :term:`LAYERDIR`, :term:`BBFILE_COLLECTIONS`, :term:`BBFILE_PATTERN_mylayer <BBFILE_PATTERN>`
334 to go to the definitions in the glossary. 318 or :term:`LAYERSERIES_COMPAT` to go to the definitions in the glossary.
319
320 .. note::
321
322 We are setting both ``LAYERSERIES_CORENAMES`` and :term:`LAYERSERIES_COMPAT` in this particular case, because we
323 are using BitBake without OpenEmbedded.
324 You should usually just use :term:`LAYERSERIES_COMPAT` to specify the OE-Core versions for which your layer
325 is compatible, and add the meta-openembedded layer to your project.
335 326
336 You need to create the recipe file next. Inside your layer at the 327 You need to create the recipe file next. Inside your layer at the
337 top-level, use an editor and create a recipe file named 328 top-level, use an editor and create a recipe file named
338 ``printhello.bb`` that has the following: :: 329 ``printhello.bb`` that has the following::
339 330
340 DESCRIPTION = "Prints Hello World" 331 DESCRIPTION = "Prints Hello World"
341 PN = 'printhello' 332 PN = 'printhello'
@@ -356,7 +347,7 @@ Following is the complete "Hello World" example.
356 follow the links to the glossary. 347 follow the links to the glossary.
357 348
358#. **Run BitBake With a Target:** Now that a BitBake target exists, run 349#. **Run BitBake With a Target:** Now that a BitBake target exists, run
359 the command and provide that target: :: 350 the command and provide that target::
360 351
361 $ cd $HOME/hello 352 $ cd $HOME/hello
362 $ bitbake printhello 353 $ bitbake printhello
@@ -376,7 +367,7 @@ Following is the complete "Hello World" example.
376 ``hello/conf`` for this example). 367 ``hello/conf`` for this example).
377 368
378 Set your working directory to the ``hello/conf`` directory and then 369 Set your working directory to the ``hello/conf`` directory and then
379 create the ``bblayers.conf`` file so that it contains the following: :: 370 create the ``bblayers.conf`` file so that it contains the following::
380 371
381 BBLAYERS ?= " \ 372 BBLAYERS ?= " \
382 /home/<you>/mylayer \ 373 /home/<you>/mylayer \
@@ -386,15 +377,17 @@ Following is the complete "Hello World" example.
386 377
387#. **Run BitBake With a Target:** Now that you have supplied the 378#. **Run BitBake With a Target:** Now that you have supplied the
388 ``bblayers.conf`` file, run the ``bitbake`` command and provide the 379 ``bblayers.conf`` file, run the ``bitbake`` command and provide the
389 target: :: 380 target::
390 381
391 $ bitbake printhello 382 $ bitbake printhello
383 Loading cache: 100% |
384 Loaded 0 entries from dependency cache.
392 Parsing recipes: 100% |##################################################################################| 385 Parsing recipes: 100% |##################################################################################|
393 Time: 00:00:00
394 Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors. 386 Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
395 NOTE: Resolving any missing task queue dependencies 387 NOTE: Resolving any missing task queue dependencies
396 NOTE: Preparing RunQueue 388 Initialising tasks: 100% |###############################################################################|
397 NOTE: Executing RunQueue Tasks 389 NOTE: No setscene tasks
390 NOTE: Executing Tasks
398 ******************** 391 ********************
399 * * 392 * *
400 * Hello, World! * 393 * Hello, World! *
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
index 6f9d392935..539bb62d81 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
@@ -27,7 +27,7 @@ Linux software stacks using a task-oriented approach.
27Conceptually, BitBake is similar to GNU Make in some regards but has 27Conceptually, BitBake is similar to GNU Make in some regards but has
28significant differences: 28significant differences:
29 29
30- BitBake executes tasks according to provided metadata that builds up 30- BitBake executes tasks according to the provided metadata that builds up
31 the tasks. Metadata is stored in recipe (``.bb``) and related recipe 31 the tasks. Metadata is stored in recipe (``.bb``) and related recipe
32 "append" (``.bbappend``) files, configuration (``.conf``) and 32 "append" (``.bbappend``) files, configuration (``.conf``) and
33 underlying include (``.inc``) files, and in class (``.bbclass``) 33 underlying include (``.inc``) files, and in class (``.bbclass``)
@@ -60,11 +60,10 @@ member Chris Larson split the project into two distinct pieces:
60- OpenEmbedded, a metadata set utilized by BitBake 60- OpenEmbedded, a metadata set utilized by BitBake
61 61
62Today, BitBake is the primary basis of the 62Today, BitBake is the primary basis of the
63`OpenEmbedded <http://www.openembedded.org/>`__ project, which is being 63`OpenEmbedded <https://www.openembedded.org/>`__ project, which is being
64used to build and maintain Linux distributions such as the `Angstrom 64used to build and maintain Linux distributions such as the `Poky
65Distribution <http://www.angstrom-distribution.org/>`__, and which is 65Reference Distribution <https://www.yoctoproject.org/software-item/poky/>`__,
66also being used as the build tool for Linux projects such as the `Yocto 66developed under the umbrella of the `Yocto Project <https://www.yoctoproject.org>`__.
67Project <http://www.yoctoproject.org>`__.
68 67
69Prior to BitBake, no other build tool adequately met the needs of an 68Prior to BitBake, no other build tool adequately met the needs of an
70aspiring embedded Linux distribution. All of the build systems used by 69aspiring embedded Linux distribution. All of the build systems used by
@@ -248,13 +247,13 @@ underlying, similarly-named recipe files.
248 247
249When you name an append file, you can use the "``%``" wildcard character 248When you name an append file, you can use the "``%``" wildcard character
250to allow for matching recipe names. For example, suppose you have an 249to allow for matching recipe names. For example, suppose you have an
251append file named as follows: :: 250append file named as follows::
252 251
253 busybox_1.21.%.bbappend 252 busybox_1.21.%.bbappend
254 253
255That append file 254That append file
256would match any ``busybox_1.21.``\ x\ ``.bb`` version of the recipe. So, 255would match any ``busybox_1.21.``\ x\ ``.bb`` version of the recipe. So,
257the append file would match the following recipe names: :: 256the append file would match the following recipe names::
258 257
259 busybox_1.21.1.bb 258 busybox_1.21.1.bb
260 busybox_1.21.2.bb 259 busybox_1.21.2.bb
@@ -290,7 +289,7 @@ You can obtain BitBake several different ways:
290 are using. The metadata is generally backwards compatible but not 289 are using. The metadata is generally backwards compatible but not
291 forward compatible. 290 forward compatible.
292 291
293 Here is an example that clones the BitBake repository: :: 292 Here is an example that clones the BitBake repository::
294 293
295 $ git clone git://git.openembedded.org/bitbake 294 $ git clone git://git.openembedded.org/bitbake
296 295
@@ -298,7 +297,7 @@ You can obtain BitBake several different ways:
298 Git repository into a directory called ``bitbake``. Alternatively, 297 Git repository into a directory called ``bitbake``. Alternatively,
299 you can designate a directory after the ``git clone`` command if you 298 you can designate a directory after the ``git clone`` command if you
300 want to call the new directory something other than ``bitbake``. Here 299 want to call the new directory something other than ``bitbake``. Here
301 is an example that names the directory ``bbdev``: :: 300 is an example that names the directory ``bbdev``::
302 301
303 $ git clone git://git.openembedded.org/bitbake bbdev 302 $ git clone git://git.openembedded.org/bitbake bbdev
304 303
@@ -317,9 +316,9 @@ You can obtain BitBake several different ways:
317 method for getting BitBake. Cloning the repository makes it easier 316 method for getting BitBake. Cloning the repository makes it easier
318 to update as patches are added to the stable branches. 317 to update as patches are added to the stable branches.
319 318
320 The following example downloads a snapshot of BitBake version 1.17.0: :: 319 The following example downloads a snapshot of BitBake version 1.17.0::
321 320
322 $ wget http://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz 321 $ wget https://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
323 $ tar zxpvf bitbake-1.17.0.tar.gz 322 $ tar zxpvf bitbake-1.17.0.tar.gz
324 323
325 After extraction of the tarball using 324 After extraction of the tarball using
@@ -347,43 +346,87 @@ execution examples.
347Usage and syntax 346Usage and syntax
348---------------- 347----------------
349 348
350Following is the usage and syntax for BitBake: :: 349Following is the usage and syntax for BitBake::
351 350
352 $ bitbake -h 351 $ bitbake -h
353 Usage: bitbake [options] [recipename/target recipe:do_task ...] 352 usage: bitbake [-s] [-e] [-g] [-u UI] [--version] [-h] [-f] [-c CMD]
354 353 [-C INVALIDATE_STAMP] [--runall RUNALL] [--runonly RUNONLY]
355 Executes the specified task (default is 'build') for a given set of target recipes (.bb files). 354 [--no-setscene] [--skip-setscene] [--setscene-only] [-n] [-p]
356 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which 355 [-k] [-P] [-S SIGNATURE_HANDLER] [--revisions-changed]
357 will provide the layer, BBFILES and other configuration information. 356 [-b BUILDFILE] [-D] [-l DEBUG_DOMAINS] [-v] [-q]
357 [-w WRITEEVENTLOG] [-B BIND] [-T SERVER_TIMEOUT]
358 [--remote-server REMOTE_SERVER] [-m] [--token XMLRPCTOKEN]
359 [--observe-only] [--status-only] [--server-only] [-r PREFILE]
360 [-R POSTFILE] [-I EXTRA_ASSUME_PROVIDED]
361 [recipename/target ...]
362
363 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH
364 which will provide the layer, BBFILES and other configuration information.
365
366 General options:
367 recipename/target Execute the specified task (default is 'build') for
368 these target recipes (.bb files).
369 -s, --show-versions Show current and preferred versions of all recipes.
370 -e, --environment Show the global or per-recipe environment complete
371 with information about where variables were
372 set/changed.
373 -g, --graphviz Save dependency tree information for the specified
374 targets in the dot syntax.
375 -u UI, --ui UI The user interface to use (knotty, ncurses, taskexp,
376 taskexp_ncurses or teamcity - default knotty).
377 --version Show programs version and exit.
378 -h, --help Show this help message and exit.
358 379
359 Options: 380 Task control options:
360 --version show program's version number and exit
361 -h, --help show this help message and exit
362 -b BUILDFILE, --buildfile=BUILDFILE
363 Execute tasks from a specific .bb recipe directly.
364 WARNING: Does not handle any dependencies from other
365 recipes.
366 -k, --continue Continue as much as possible after an error. While the
367 target that failed and anything depending on it cannot
368 be built, as much as possible will be built before
369 stopping.
370 -f, --force Force the specified targets/task to run (invalidating 381 -f, --force Force the specified targets/task to run (invalidating
371 any existing stamp file). 382 any existing stamp file).
372 -c CMD, --cmd=CMD Specify the task to execute. The exact options 383 -c CMD, --cmd CMD Specify the task to execute. The exact options
373 available depend on the metadata. Some examples might 384 available depend on the metadata. Some examples might
374 be 'compile' or 'populate_sysroot' or 'listtasks' may 385 be 'compile' or 'populate_sysroot' or 'listtasks' may
375 give a list of the tasks available. 386 give a list of the tasks available.
376 -C INVALIDATE_STAMP, --clear-stamp=INVALIDATE_STAMP 387 -C INVALIDATE_STAMP, --clear-stamp INVALIDATE_STAMP
377 Invalidate the stamp for the specified task such as 388 Invalidate the stamp for the specified task such as
378 'compile' and then run the default task for the 389 'compile' and then run the default task for the
379 specified target(s). 390 specified target(s).
380 -r PREFILE, --read=PREFILE 391 --runall RUNALL Run the specified task for any recipe in the taskgraph
381 Read the specified file before bitbake.conf. 392 of the specified target (even if it wouldn't otherwise
382 -R POSTFILE, --postread=POSTFILE 393 have run).
383 Read the specified file after bitbake.conf. 394 --runonly RUNONLY Run only the specified task within the taskgraph of
384 -v, --verbose Enable tracing of shell tasks (with 'set -x'). Also 395 the specified targets (and any task dependencies those
385 print bb.note(...) messages to stdout (in addition to 396 tasks may have).
386 writing them to ${T}/log.do_&lt;task&gt;). 397 --no-setscene Do not run any setscene tasks. sstate will be ignored
398 and everything needed, built.
399 --skip-setscene Skip setscene tasks if they would be executed. Tasks
400 previously restored from sstate will be kept, unlike
401 --no-setscene.
402 --setscene-only Only run setscene tasks, don't run any real tasks.
403
404 Execution control options:
405 -n, --dry-run Don't execute, just go through the motions.
406 -p, --parse-only Quit after parsing the BB recipes.
407 -k, --continue Continue as much as possible after an error. While the
408 target that failed and anything depending on it cannot
409 be built, as much as possible will be built before
410 stopping.
411 -P, --profile Profile the command and save reports.
412 -S SIGNATURE_HANDLER, --dump-signatures SIGNATURE_HANDLER
413 Dump out the signature construction information, with
414 no task execution. The SIGNATURE_HANDLER parameter is
415 passed to the handler. Two common values are none and
416 printdiff but the handler may define more/less. none
417 means only dump the signature, printdiff means
418 recursively compare the dumped signature with the most
419 recent one in a local build or sstate cache (can be
420 used to find out why tasks re-run when that is not
421 expected)
422 --revisions-changed Set the exit code depending on whether upstream
423 floating revisions have changed or not.
424 -b BUILDFILE, --buildfile BUILDFILE
425 Execute tasks from a specific .bb recipe directly.
426 WARNING: Does not handle any dependencies from other
427 recipes.
428
429 Logging/output control options:
387 -D, --debug Increase the debug level. You can specify this more 430 -D, --debug Increase the debug level. You can specify this more
388 than once. -D sets the debug level to 1, where only 431 than once. -D sets the debug level to 1, where only
389 bb.debug(1, ...) messages are printed to stdout; -DD 432 bb.debug(1, ...) messages are printed to stdout; -DD
@@ -393,62 +436,47 @@ Following is the usage and syntax for BitBake: ::
393 -D only affects output to stdout. All debug messages 436 -D only affects output to stdout. All debug messages
394 are written to ${T}/log.do_taskname, regardless of the 437 are written to ${T}/log.do_taskname, regardless of the
395 debug level. 438 debug level.
439 -l DEBUG_DOMAINS, --log-domains DEBUG_DOMAINS
440 Show debug logging for the specified logging domains.
441 -v, --verbose Enable tracing of shell tasks (with 'set -x'). Also
442 print bb.note(...) messages to stdout (in addition to
443 writing them to ${T}/log.do_<task>).
396 -q, --quiet Output less log message data to the terminal. You can 444 -q, --quiet Output less log message data to the terminal. You can
397 specify this more than once. 445 specify this more than once.
398 -n, --dry-run Don't execute, just go through the motions. 446 -w WRITEEVENTLOG, --write-log WRITEEVENTLOG
399 -S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER 447 Writes the event log of the build to a bitbake event
400 Dump out the signature construction information, with 448 json file. Use '' (empty string) to assign the name
401 no task execution. The SIGNATURE_HANDLER parameter is 449 automatically.
402 passed to the handler. Two common values are none and 450
403 printdiff but the handler may define more/less. none 451 Server options:
404 means only dump the signature, printdiff means compare 452 -B BIND, --bind BIND The name/address for the bitbake xmlrpc server to bind
405 the dumped signature with the cached one.
406 -p, --parse-only Quit after parsing the BB recipes.
407 -s, --show-versions Show current and preferred versions of all recipes.
408 -e, --environment Show the global or per-recipe environment complete
409 with information about where variables were
410 set/changed.
411 -g, --graphviz Save dependency tree information for the specified
412 targets in the dot syntax.
413 -I EXTRA_ASSUME_PROVIDED, --ignore-deps=EXTRA_ASSUME_PROVIDED
414 Assume these dependencies don't exist and are already
415 provided (equivalent to ASSUME_PROVIDED). Useful to
416 make dependency graphs more appealing
417 -l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
418 Show debug logging for the specified logging domains
419 -P, --profile Profile the command and save reports.
420 -u UI, --ui=UI The user interface to use (knotty, ncurses or taskexp
421 - default knotty).
422 --token=XMLRPCTOKEN Specify the connection token to be used when
423 connecting to a remote server.
424 --revisions-changed Set the exit code depending on whether upstream
425 floating revisions have changed or not.
426 --server-only Run bitbake without a UI, only starting a server
427 (cooker) process.
428 -B BIND, --bind=BIND The name/address for the bitbake xmlrpc server to bind
429 to. 453 to.
430 -T SERVER_TIMEOUT, --idle-timeout=SERVER_TIMEOUT 454 -T SERVER_TIMEOUT, --idle-timeout SERVER_TIMEOUT
431 Set timeout to unload bitbake server due to 455 Set timeout to unload bitbake server due to
432 inactivity, set to -1 means no unload, default: 456 inactivity, set to -1 means no unload, default:
433 Environment variable BB_SERVER_TIMEOUT. 457 Environment variable BB_SERVER_TIMEOUT.
434 --no-setscene Do not run any setscene tasks. sstate will be ignored 458 --remote-server REMOTE_SERVER
435 and everything needed, built.
436 --setscene-only Only run setscene tasks, don't run any real tasks.
437 --remote-server=REMOTE_SERVER
438 Connect to the specified server. 459 Connect to the specified server.
439 -m, --kill-server Terminate any running bitbake server. 460 -m, --kill-server Terminate any running bitbake server.
461 --token XMLRPCTOKEN Specify the connection token to be used when
462 connecting to a remote server.
440 --observe-only Connect to a server as an observing-only client. 463 --observe-only Connect to a server as an observing-only client.
441 --status-only Check the status of the remote bitbake server. 464 --status-only Check the status of the remote bitbake server.
442 -w WRITEEVENTLOG, --write-log=WRITEEVENTLOG 465 --server-only Run bitbake without a UI, only starting a server
443 Writes the event log of the build to a bitbake event 466 (cooker) process.
444 json file. Use '' (empty string) to assign the name 467
445 automatically. 468 Configuration options:
446 --runall=RUNALL Run the specified task for any recipe in the taskgraph 469 -r PREFILE, --read PREFILE
447 of the specified target (even if it wouldn't otherwise 470 Read the specified file before bitbake.conf.
448 have run). 471 -R POSTFILE, --postread POSTFILE
449 --runonly=RUNONLY Run only the specified task within the taskgraph of 472 Read the specified file after bitbake.conf.
450 the specified targets (and any task dependencies those 473 -I EXTRA_ASSUME_PROVIDED, --ignore-deps EXTRA_ASSUME_PROVIDED
451 tasks may have). 474 Assume these dependencies don't exist and are already
475 provided (equivalent to ASSUME_PROVIDED). Useful to
476 make dependency graphs more appealing.
477
478..
479 Bitbake help output generated with "stty columns 80; bin/bitbake -h"
452 480
453.. _bitbake-examples: 481.. _bitbake-examples:
454 482
@@ -469,11 +497,11 @@ default task, which is "build". BitBake obeys inter-task dependencies
469when doing so. 497when doing so.
470 498
471The following command runs the build task, which is the default task, on 499The following command runs the build task, which is the default task, on
472the ``foo_1.0.bb`` recipe file: :: 500the ``foo_1.0.bb`` recipe file::
473 501
474 $ bitbake -b foo_1.0.bb 502 $ bitbake -b foo_1.0.bb
475 503
476The following command runs the clean task on the ``foo.bb`` recipe file: :: 504The following command runs the clean task on the ``foo.bb`` recipe file::
477 505
478 $ bitbake -b foo.bb -c clean 506 $ bitbake -b foo.bb -c clean
479 507
@@ -497,13 +525,13 @@ functionality, or when there are multiple versions of a recipe.
497The ``bitbake`` command, when not using "--buildfile" or "-b" only 525The ``bitbake`` command, when not using "--buildfile" or "-b" only
498accepts a "PROVIDES". You cannot provide anything else. By default, a 526accepts a "PROVIDES". You cannot provide anything else. By default, a
499recipe file generally "PROVIDES" its "packagename" as shown in the 527recipe file generally "PROVIDES" its "packagename" as shown in the
500following example: :: 528following example::
501 529
502 $ bitbake foo 530 $ bitbake foo
503 531
504This next example "PROVIDES" the 532This next example "PROVIDES" the
505package name and also uses the "-c" option to tell BitBake to just 533package name and also uses the "-c" option to tell BitBake to just
506execute the ``do_clean`` task: :: 534execute the ``do_clean`` task::
507 535
508 $ bitbake -c clean foo 536 $ bitbake -c clean foo
509 537
@@ -514,7 +542,7 @@ The BitBake command line supports specifying different tasks for
514individual targets when you specify multiple targets. For example, 542individual targets when you specify multiple targets. For example,
515suppose you had two targets (or recipes) ``myfirstrecipe`` and 543suppose you had two targets (or recipes) ``myfirstrecipe`` and
516``mysecondrecipe`` and you needed BitBake to run ``taskA`` for the first 544``mysecondrecipe`` and you needed BitBake to run ``taskA`` for the first
517recipe and ``taskB`` for the second recipe: :: 545recipe and ``taskB`` for the second recipe::
518 546
519 $ bitbake myfirstrecipe:do_taskA mysecondrecipe:do_taskB 547 $ bitbake myfirstrecipe:do_taskA mysecondrecipe:do_taskB
520 548
@@ -534,13 +562,13 @@ current working directory:
534- ``pn-buildlist``: Shows a simple list of targets that are to be 562- ``pn-buildlist``: Shows a simple list of targets that are to be
535 built. 563 built.
536 564
537To stop depending on common depends, use the "-I" depend option and 565To exclude common dependencies, use the ``-I`` option and
538BitBake omits them from the graph. Leaving this information out can 566BitBake omits them from the graph. Leaving this information out can
539produce more readable graphs. This way, you can remove from the graph 567produce more readable graphs. This way, you can remove from the graph
540``DEPENDS`` from inherited classes such as ``base.bbclass``. 568:term:`DEPENDS` from inherited classes such as ``base.bbclass``.
541 569
542Here are two examples that create dependency graphs. The second example 570Here are two examples that create dependency graphs. The second example
543omits depends common in OpenEmbedded from the graph: :: 571omits depends common in OpenEmbedded from the graph::
544 572
545 $ bitbake -g foo 573 $ bitbake -g foo
546 574
@@ -564,7 +592,7 @@ for two separate targets:
564.. image:: figures/bb_multiconfig_files.png 592.. image:: figures/bb_multiconfig_files.png
565 :align: center 593 :align: center
566 594
567The reason for this required file hierarchy is that the ``BBPATH`` 595The reason for this required file hierarchy is that the :term:`BBPATH`
568variable is not constructed until the layers are parsed. Consequently, 596variable is not constructed until the layers are parsed. Consequently,
569using the configuration file as a pre-configuration file is not possible 597using the configuration file as a pre-configuration file is not possible
570unless it is located in the current working directory. 598unless it is located in the current working directory.
@@ -582,17 +610,17 @@ accomplished by setting the
582configuration files for ``target1`` and ``target2`` defined in the build 610configuration files for ``target1`` and ``target2`` defined in the build
583directory. The following statement in the ``local.conf`` file both 611directory. The following statement in the ``local.conf`` file both
584enables BitBake to perform multiple configuration builds and specifies 612enables BitBake to perform multiple configuration builds and specifies
585the two extra multiconfigs: :: 613the two extra multiconfigs::
586 614
587 BBMULTICONFIG = "target1 target2" 615 BBMULTICONFIG = "target1 target2"
588 616
589Once the target configuration files are in place and BitBake has been 617Once the target configuration files are in place and BitBake has been
590enabled to perform multiple configuration builds, use the following 618enabled to perform multiple configuration builds, use the following
591command form to start the builds: :: 619command form to start the builds::
592 620
593 $ bitbake [mc:multiconfigname:]target [[[mc:multiconfigname:]target] ... ] 621 $ bitbake [mc:multiconfigname:]target [[[mc:multiconfigname:]target] ... ]
594 622
595Here is an example for two extra multiconfigs: ``target1`` and ``target2``: :: 623Here is an example for two extra multiconfigs: ``target1`` and ``target2``::
596 624
597 $ bitbake mc::target mc:target1:target mc:target2:target 625 $ bitbake mc::target mc:target1:target mc:target2:target
598 626
@@ -613,12 +641,12 @@ multiconfig.
613 641
614To enable dependencies in a multiple configuration build, you must 642To enable dependencies in a multiple configuration build, you must
615declare the dependencies in the recipe using the following statement 643declare the dependencies in the recipe using the following statement
616form: :: 644form::
617 645
618 task_or_package[mcdepends] = "mc:from_multiconfig:to_multiconfig:recipe_name:task_on_which_to_depend" 646 task_or_package[mcdepends] = "mc:from_multiconfig:to_multiconfig:recipe_name:task_on_which_to_depend"
619 647
620To better show how to use this statement, consider an example with two 648To better show how to use this statement, consider an example with two
621multiconfigs: ``target1`` and ``target2``: :: 649multiconfigs: ``target1`` and ``target2``::
622 650
623 image_task[mcdepends] = "mc:target1:target2:image2:rootfs_task" 651 image_task[mcdepends] = "mc:target1:target2:image2:rootfs_task"
624 652
@@ -629,7 +657,7 @@ completion of the rootfs_task used to build out image2, which is
629associated with the "target2" multiconfig. 657associated with the "target2" multiconfig.
630 658
631Once you set up this dependency, you can build the "target1" multiconfig 659Once you set up this dependency, you can build the "target1" multiconfig
632using a BitBake command as follows: :: 660using a BitBake command as follows::
633 661
634 $ bitbake mc:target1:image1 662 $ bitbake mc:target1:image1
635 663
@@ -639,7 +667,7 @@ the ``rootfs_task`` for the "target2" multiconfig build.
639 667
640Having a recipe depend on the root filesystem of another build might not 668Having a recipe depend on the root filesystem of another build might not
641seem that useful. Consider this change to the statement in the image1 669seem that useful. Consider this change to the statement in the image1
642recipe: :: 670recipe::
643 671
644 image_task[mcdepends] = "mc:target1:target2:image2:image_task" 672 image_task[mcdepends] = "mc:target1:target2:image2:image_task"
645 673
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-library-functions.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-library-functions.rst
new file mode 100644
index 0000000000..09e353945b
--- /dev/null
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-library-functions.rst
@@ -0,0 +1,59 @@
1.. SPDX-License-Identifier: CC-BY-2.5
2
3=================
4Library Functions
5=================
6
7|
8
9This chapter lists common library functions available under the ``lib/``
10directory in BitBake.
11
12These functions can be used in recipes or configuration files with
13:ref:`inline-Python <bitbake-user-manual/bitbake-user-manual-metadata:Inline
14Python Variable Expansion>` or :ref:`Python
15<bitbake-user-manual/bitbake-user-manual-metadata:BitBake-Style Python
16Functions>` functions.
17
18Logging utilities
19=================
20
21Different logging utilities can be used from Python code in recipes or
22configuration files.
23
24The strings passed below can use printf-style (``%``) formatting, for example::
25
26 bb.warn("Houston, we have a %s", "bit of a problem")
27
28A pre-formatted string can also be passed directly::
29
30 bb.error("%s, we have a %s" % ("Houston", "big problem"))
31
32Python f-strings may also be used::
33
34 h = "Houston"
35 bb.fatal(f"{h}, we have a critical problem")
36
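These utilities are typically called from BitBake-style Python functions.
Here is a minimal sketch (the task name and message are hypothetical)::

   python do_report() {
       pn = d.getVar("PN")
       bb.note("building %s" % pn)
       bb.plain("printed without a log-level prefix")
   }
   addtask report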
37.. automodule:: bb
38 :members:
39 debug,
40 error,
41 erroronce,
42 fatal,
43 note,
44 plain,
45 verbnote,
46 warn,
47 warnonce,
48
49``bb.utils``
50============
51
52.. automodule:: bb.utils
53 :members:
54 :exclude-members:
55 LogCatcher,
56 PrCtlError,
57 VersionStringException,
58 better_compile,
59 better_exec,
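As an illustration of calling these helpers from recipe code, here is a small
sketch using ``bb.utils.which`` (the task name is hypothetical)::

   python do_find_gcc() {
       gcc = bb.utils.which(d.getVar("PATH"), "gcc")
       if gcc:
           bb.note("gcc found at %s" % gcc)
       else:
           bb.warn("gcc not found in PATH")
   }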
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
index 7ea68ade72..f60a9d8312 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
@@ -26,7 +26,7 @@ assignment. ::
26 VARIABLE = "value" 26 VARIABLE = "value"
27 27
28As expected, if you include leading or 28As expected, if you include leading or
29trailing spaces as part of an assignment, the spaces are retained: :: 29trailing spaces as part of an assignment, the spaces are retained::
30 30
31 VARIABLE = " value" 31 VARIABLE = " value"
32 VARIABLE = "value " 32 VARIABLE = "value "
@@ -40,7 +40,7 @@ blank space (i.e. these are not the same values). ::
40 40
41You can use single quotes instead of double quotes when setting a 41You can use single quotes instead of double quotes when setting a
42variable's value. Doing so allows you to use values that contain the 42variable's value. Doing so allows you to use values that contain the
43double quote character: :: 43double quote character::
44 44
45 VARIABLE = 'I have a " in my value' 45 VARIABLE = 'I have a " in my value'
46 46
@@ -77,7 +77,7 @@ occurs, you can use BitBake to check the actual value of the suspect
77variable. You can make these checks for both configuration and recipe 77variable. You can make these checks for both configuration and recipe
78level changes: 78level changes:
79 79
80- For configuration changes, use the following: :: 80- For configuration changes, use the following::
81 81
82 $ bitbake -e 82 $ bitbake -e
83 83
@@ -91,9 +91,10 @@ level changes:
91 Variables that are exported to the environment are preceded by the 91 Variables that are exported to the environment are preceded by the
92 string "export" in the command's output. 92 string "export" in the command's output.
93 93
94- For recipe changes, use the following: :: 94- To find changes to a given variable in a specific recipe, use the
95 following::
95 96
96 $ bitbake recipe -e \| grep VARIABLE=" 97 $ bitbake recipename -e | grep VARIABLENAME=\"
97 98
98 This command checks to see if the variable actually makes 99 This command checks to see if the variable actually makes
99 it into a specific recipe. 100 it into a specific recipe.
@@ -103,20 +104,20 @@ Line Joining
103 104
104Outside of :ref:`functions <bitbake-user-manual/bitbake-user-manual-metadata:functions>`, 105Outside of :ref:`functions <bitbake-user-manual/bitbake-user-manual-metadata:functions>`,
105BitBake joins any line ending in 106BitBake joins any line ending in
106a backslash character ("\") with the following line before parsing 107a backslash character ("\\") with the following line before parsing
107statements. The most common use for the "\" character is to split 108statements. The most common use for the "\\" character is to split
108variable assignments over multiple lines, as in the following example: :: 109variable assignments over multiple lines, as in the following example::
109 110
110 FOO = "bar \ 111 FOO = "bar \
111 baz \ 112 baz \
112 qaz" 113 qaz"
113 114
114Both the "\" character and the newline 115Both the "\\" character and the newline
115character that follow it are removed when joining lines. Thus, no 116character that follow it are removed when joining lines. Thus, no
116newline characters end up in the value of ``FOO``. 117newline characters end up in the value of ``FOO``.
117 118
118Consider this additional example where the two assignments both assign 119Consider this additional example where the two assignments both assign
119"barbaz" to ``FOO``: :: 120"barbaz" to ``FOO``::
120 121
121 FOO = "barbaz" 122 FOO = "barbaz"
122 FOO = "bar\ 123 FOO = "bar\
@@ -124,7 +125,7 @@ Consider this additional example where the two assignments both assign
124 125
125.. note:: 126.. note::
126 127
127 BitBake does not interpret escape sequences like "\n" in variable 128 BitBake does not interpret escape sequences like "\\n" in variable
128 values. For these to have an effect, the value must be passed to some 129 values. For these to have an effect, the value must be passed to some
129 utility that interprets escape sequences, such as 130 utility that interprets escape sequences, such as
130 ``printf`` or ``echo -n``. 131 ``printf`` or ``echo -n``.
@@ -149,7 +150,7 @@ The "=" operator does not immediately expand variable references in the
149right-hand side. Instead, expansion is deferred until the variable 150right-hand side. Instead, expansion is deferred until the variable
150assigned to is actually used. The result depends on the current values 151assigned to is actually used. The result depends on the current values
151of the referenced variables. The following example should clarify this 152of the referenced variables. The following example should clarify this
152behavior: :: 153behavior::
153 154
154 A = "${B} baz" 155 A = "${B} baz"
155 B = "${C} bar" 156 B = "${C} bar"
@@ -158,7 +159,7 @@ behavior: ::
158 C = "qux" 159 C = "qux"
159 *At this point, ${A} equals "qux bar baz"* 160 *At this point, ${A} equals "qux bar baz"*
160 B = "norf" 161 B = "norf"
161 *At this point, ${A} equals "norf baz"\* 162 *At this point, ${A} equals "norf baz"*
162 163
163Contrast this behavior with the 164Contrast this behavior with the
164:ref:`bitbake-user-manual/bitbake-user-manual-metadata:immediate variable 165:ref:`bitbake-user-manual/bitbake-user-manual-metadata:immediate variable
@@ -177,7 +178,7 @@ Setting a default value (?=)
177You can use the "?=" operator to achieve a "softer" assignment for a 178You can use the "?=" operator to achieve a "softer" assignment for a
178variable. This type of assignment allows you to define a variable if it 179variable. This type of assignment allows you to define a variable if it
179is undefined when the statement is parsed, but to leave the value alone 180is undefined when the statement is parsed, but to leave the value alone
180if the variable has a value. Here is an example: :: 181if the variable has a value. Here is an example::
181 182
182 A ?= "aval" 183 A ?= "aval"
183 184
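A quick illustration of that precedence (the values are arbitrary)::

   A = "preset"
   A ?= "aval"
   # A expands to "preset"; the "?=" assignment is skipped because A is set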
@@ -194,28 +195,51 @@ value. However, if ``A`` is not set, the variable is set to "aval".
194Setting a weak default value (??=) 195Setting a weak default value (??=)
195---------------------------------- 196----------------------------------
196 197
197It is possible to use a "weaker" assignment than in the previous section 198The weak default value of a variable is the value which that variable
198by using the "??=" operator. This assignment behaves identical to "?=" 199will expand to if no value has been assigned to it via any of the other
199except that the assignment is made at the end of the parsing process 200assignment operators. The "??=" operator takes effect immediately, replacing
200rather than immediately. Consequently, when multiple "??=" assignments 201any previously defined weak default value. Here is an example::
201exist, the last one is used. Also, any "=" or "?=" assignment will
202override the value set with "??=". Here is an example: ::
203 202
204 A ??= "somevalue" 203 W ??= "x"
205 A ??= "someothervalue" 204 A := "${W}" # Immediate variable expansion
205 W ??= "y"
206 B := "${W}" # Immediate variable expansion
207 W ??= "z"
208 C = "${W}"
209 W ?= "i"
206 210
207If ``A`` is set before the above statements are 211After parsing we will have::
208parsed, the variable retains its value. If ``A`` is not set, the
209variable is set to "someothervalue".
210 212
211Again, this assignment is a "lazy" or "weak" assignment because it does 213 A = "x"
212not occur until the end of the parsing process. 214 B = "y"
215 C = "i"
216 W = "i"
217
218Appending and prepending with the non-override syntax will not substitute the weak
219default value, which means that after parsing::
220
221 W ??= "x"
222 W += "y"
223
224we will have::
225
226 W = " y"
227
228On the other hand, override-style appends/prepends/removes are applied after
229any active weak default value has been substituted::
230
231 W ??= "x"
232 W:append = "y"
233
234After parsing we will have::
235
236 W = "xy"
213 237
214Immediate variable expansion (:=) 238Immediate variable expansion (:=)
215--------------------------------- 239---------------------------------
216 240
217The ":=" operator results in a variable's contents being expanded 241The ":=" operator results in a variable's contents being expanded
218immediately, rather than when the variable is actually used: :: 242immediately, rather than when the variable is actually used::
219 243
220 T = "123" 244 T = "123"
221 A := "test ${T}" 245 A := "test ${T}"
@@ -225,7 +249,7 @@ immediately, rather than when the variable is actually used: ::
225 C := "${C}append" 249 C := "${C}append"
226 250
227In this example, ``A`` contains "test 123", even though the final value 251In this example, ``A`` contains "test 123", even though the final value
228of ``T`` is "456". The variable ``B`` will end up containing "456 252of :term:`T` is "456". The variable :term:`B` will end up containing "456
229cvalappend". This is because references to undefined variables are 253cvalappend". This is because references to undefined variables are
230preserved as-is during (immediate) expansion. This is in contrast to GNU 254preserved as-is during (immediate) expansion. This is in contrast to GNU
231Make, where undefined variables expand to nothing. The variable ``C`` 255Make, where undefined variables expand to nothing. The variable ``C``
@@ -241,14 +265,14 @@ the "+=" and "=+" operators. These operators insert a space between the
241current value and prepended or appended value. 265current value and prepended or appended value.
242 266
243These operators take immediate effect during parsing. Here are some 267These operators take immediate effect during parsing. Here are some
244examples: :: 268examples::
245 269
246 B = "bval" 270 B = "bval"
247 B += "additionaldata" 271 B += "additionaldata"
248 C = "cval" 272 C = "cval"
249 C =+ "test" 273 C =+ "test"
250 274
251The variable ``B`` contains "bval additionaldata" and ``C`` contains "test 275The variable :term:`B` contains "bval additionaldata" and ``C`` contains "test
252cval". 276cval".
253 277
254.. _appending-and-prepending-without-spaces: 278.. _appending-and-prepending-without-spaces:
@@ -260,14 +284,14 @@ If you want to append or prepend values without an inserted space, use
260the ".=" and "=." operators. 284the ".=" and "=." operators.
261 285
262These operators take immediate effect during parsing. Here are some 286These operators take immediate effect during parsing. Here are some
263examples: :: 287examples::
264 288
265 B = "bval" 289 B = "bval"
266 B .= "additionaldata" 290 B .= "additionaldata"
267 C = "cval" 291 C = "cval"
268 C =. "test" 292 C =. "test"
269 293
270The variable ``B`` contains "bvaladditionaldata" and ``C`` contains 294The variable :term:`B` contains "bvaladditionaldata" and ``C`` contains
271"testcval". 295"testcval".
272 296
273Appending and Prepending (Override Style Syntax) 297Appending and Prepending (Override Style Syntax)
@@ -278,16 +302,16 @@ style syntax. When you use this syntax, no spaces are inserted.
278 302
279These operators differ from the ":=", ".=", "=.", "+=", and "=+" 303These operators differ from the ":=", ".=", "=.", "+=", and "=+"
280operators in that their effects are applied at variable expansion time 304operators in that their effects are applied at variable expansion time
281rather than being immediately applied. Here are some examples: :: 305rather than being immediately applied. Here are some examples::
282 306
283 B = "bval" 307 B = "bval"
284 B_append = " additional data" 308 B:append = " additional data"
285 C = "cval" 309 C = "cval"
286 C_prepend = "additional data " 310 C:prepend = "additional data "
287 D = "dval" 311 D = "dval"
288 D_append = "additional data" 312 D:append = "additional data"
289 313
290The variable ``B`` 314The variable :term:`B`
291becomes "bval additional data" and ``C`` becomes "additional data cval". 315becomes "bval additional data" and ``C`` becomes "additional data cval".
292The variable ``D`` becomes "dvaladditional data". 316The variable ``D`` becomes "dvaladditional data".
293 317
@@ -295,6 +319,10 @@ The variable ``D`` becomes "dvaladditional data".
295 319
296 You must control all spacing when you use the override syntax. 320 You must control all spacing when you use the override syntax.
297 321
322.. note::
323
324 The overrides are applied in the following order: ":append", ":prepend", ":remove".
325
298It is also possible to append and prepend to shell functions and 326It is also possible to append and prepend to shell functions and
299BitBake-style Python functions. See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:shell functions`" and ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:bitbake-style python functions`" 327BitBake-style Python functions. See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:shell functions`" and ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:bitbake-style python functions`"
300sections for examples. 328sections for examples.
@@ -306,16 +334,17 @@ Removal (Override Style Syntax)
306 334
307You can remove values from lists using the removal override style 335You can remove values from lists using the removal override style
308syntax. Specifying a value for removal causes all occurrences of that 336syntax. Specifying a value for removal causes all occurrences of that
309value to be removed from the variable. 337value to be removed from the variable. Unlike ":append" and ":prepend",
338there is no need to add a leading or trailing space to the value.
310 339
311When you use this syntax, BitBake expects one or more strings. 340When you use this syntax, BitBake expects one or more strings.
312Surrounding spaces and spacing are preserved. Here is an example: :: 341Surrounding spaces and spacing are preserved. Here is an example::
313 342
314 FOO = "123 456 789 123456 123 456 123 456" 343 FOO = "123 456 789 123456 123 456 123 456"
315 FOO_remove = "123" 344 FOO:remove = "123"
316 FOO_remove = "456" 345 FOO:remove = "456"
317 FOO2 = " abc def ghi abcdef abc def abc def def" 346 FOO2 = " abc def ghi abcdef abc def abc def def"
318 FOO2_remove = "\ 347 FOO2:remove = "\
319 def \ 348 def \
320 abc \ 349 abc \
321 ghi \ 350 ghi \
@@ -324,40 +353,62 @@ Surrounding spaces and spacing are preserved. Here is an example: ::
324The variable ``FOO`` becomes 353The variable ``FOO`` becomes
325" 789 123456 " and ``FOO2`` becomes " abcdef ". 354" 789 123456 " and ``FOO2`` becomes " abcdef ".
326 355
327Like "_append" and "_prepend", "_remove" is applied at variable 356Like ":append" and ":prepend", ":remove" is applied at variable
328expansion time. 357expansion time.
329 358
359.. note::
360
361 The overrides are applied in the following order: ":append", ":prepend", ":remove".
362 This implies it is not possible to re-append previously removed strings.
363 However, you can undo a ":remove" by using an intermediate variable whose
364 content is passed to the ":remove", so that modifying the intermediate
365 variable is equivalent to keeping the string in::
366
367 FOOREMOVE = "123 456 789"
368 FOO:remove = "${FOOREMOVE}"
369 ...
370 FOOREMOVE = "123 789"
371
372 This expands to ``FOO:remove = "123 789"``.
373
374.. note::
375
376 Override application order may not match variable parse history, i.e.
377 the output of ``bitbake -e`` may contain ":remove" before ":append",
378 but the string will still be removed, because ":remove" is handled
379 last.
380
330Override Style Operation Advantages 381Override Style Operation Advantages
331----------------------------------- 382-----------------------------------
332 383
333An advantage of the override style operations "_append", "_prepend", and 384An advantage of the override style operations ":append", ":prepend", and
334"_remove" as compared to the "+=" and "=+" operators is that the 385":remove" as compared to the "+=" and "=+" operators is that the
335override style operators provide guaranteed operations. For example, 386override style operators provide guaranteed operations. For example,
336consider a class ``foo.bbclass`` that needs to add the value "val" to 387consider a class ``foo.bbclass`` that needs to add the value "val" to
337the variable ``FOO``, and a recipe that uses ``foo.bbclass`` as follows: :: 388the variable ``FOO``, and a recipe that uses ``foo.bbclass`` as follows::
338 389
339 inherit foo 390 inherit foo
340 FOO = "initial" 391 FOO = "initial"
341 392
342If ``foo.bbclass`` uses the "+=" operator, 393If ``foo.bbclass`` uses the "+=" operator,
343as follows, then the final value of ``FOO`` will be "initial", which is 394as follows, then the final value of ``FOO`` will be "initial", which is
344not what is desired: :: 395not what is desired::
345 396
346 FOO += "val" 397 FOO += "val"
347 398
348If, on the other hand, ``foo.bbclass`` 399If, on the other hand, ``foo.bbclass``
349uses the "_append" operator, then the final value of ``FOO`` will be 400uses the ":append" operator, then the final value of ``FOO`` will be
350"initial val", as intended: :: 401"initial val", as intended::
351 402
352 FOO_append = " val" 403 FOO:append = " val"
353 404
354.. note:: 405.. note::
355 406
356 It is never necessary to use "+=" together with "_append". The following 407 It is never necessary to use "+=" together with ":append". The following
357 sequence of assignments appends "barbaz" to FOO: :: 408 sequence of assignments appends "barbaz" to FOO::
358 409
359 FOO_append = "bar" 410 FOO:append = "bar"
360 FOO_append = "baz" 411 FOO:append = "baz"
361 412
362 413
363 The only effect of changing the second assignment in the previous 414 The only effect of changing the second assignment in the previous
@@ -378,10 +429,10 @@ You can find more out about variable flags in general in the
378 429
379You can define, append, and prepend values to variable flags. All the 430You can define, append, and prepend values to variable flags. All the
380standard syntax operations previously mentioned work for variable flags 431standard syntax operations previously mentioned work for variable flags
381except for override style syntax (i.e. "_prepend", "_append", and 432except for override style syntax (i.e. ":prepend", ":append", and
382"_remove"). 433":remove").
383 434
384Here are some examples showing how to set variable flags: :: 435Here are some examples showing how to set variable flags::
385 436
386 FOO[a] = "abc" 437 FOO[a] = "abc"
387 FOO[b] = "123" 438 FOO[b] = "123"
@@ -393,15 +444,21 @@ respectively. The ``[a]`` flag becomes "abc 456".
393 444
394There is no need to pre-define variable flags. You can simply start using 445There is no need to pre-define variable flags. You can simply start using
395them. One extremely common application is to attach some brief 446them. One extremely common application is to attach some brief
396documentation to a BitBake variable as follows: :: 447documentation to a BitBake variable as follows::
397 448
398 CACHE[doc] = "The directory holding the cache of the metadata." 449 CACHE[doc] = "The directory holding the cache of the metadata."
399 450
451.. note::
452
453 Variable flag names starting with an underscore (``_``) character
454 are allowed but are ignored by ``d.getVarFlags("VAR")``
455 in Python code. Such flag names are used internally by BitBake.
456
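A short sketch of that behavior from anonymous Python (the flag names are
arbitrary)::

   python () {
       d.setVarFlag("FOO", "_hidden", "1")
       d.setVarFlag("FOO", "visible", "1")
       # getVarFlags() omits flag names starting with "_"
       bb.note("flags: %s" % sorted((d.getVarFlags("FOO") or {}).keys()))
   }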
400Inline Python Variable Expansion 457Inline Python Variable Expansion
401-------------------------------- 458--------------------------------
402 459
403You can use inline Python variable expansion to set variables. Here is 460You can use inline Python variable expansion to set variables. Here is
404an example: :: 461an example::
405 462
406 DATE = "${@time.strftime('%Y%m%d',time.gmtime())}" 463 DATE = "${@time.strftime('%Y%m%d',time.gmtime())}"
407 464
@@ -410,21 +467,21 @@ This example results in the ``DATE`` variable being set to the current date.
410Probably the most common use of this feature is to extract the value of 467Probably the most common use of this feature is to extract the value of
411variables from BitBake's internal data dictionary, ``d``. The following 468variables from BitBake's internal data dictionary, ``d``. The following
412lines select the values of a package name and its version number, 469lines select the values of a package name and its version number,
413respectively: :: 470respectively::
414 471
415 PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}" 472 PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
416 PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}" 473 PV = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
417 474
418.. note:: 475.. note::
419 476
420 Inline Python expressions work just like variable expansions insofar as the 477 Inline Python expressions work just like variable expansions insofar as the
421 "=" and ":=" operators are concerned. Given the following assignment, foo() 478 "=" and ":=" operators are concerned. Given the following assignment, foo()
422 is called each time FOO is expanded: :: 479 is called each time FOO is expanded::
423 480
424 FOO = "${@foo()}" 481 FOO = "${@foo()}"
425 482
426 Contrast this with the following immediate assignment, where foo() is only 483 Contrast this with the following immediate assignment, where foo() is only
427 called once, while the assignment is parsed: :: 484 called once, while the assignment is parsed::
428 485
429 FOO := "${@foo()}" 486 FOO := "${@foo()}"
430 487
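Inline expansion also combines naturally with helper functions such as
``bb.utils.contains``. In this hypothetical example, ``EXTRA_FLAGS`` depends
on whether "debug" appears in ``MY_FEATURES``::

   MY_FEATURES = "debug logging"
   EXTRA_FLAGS = "${@bb.utils.contains('MY_FEATURES', 'debug', '-g -O0', '-O2', d)}"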
@@ -437,7 +494,7 @@ Unsetting variables
437 494
438It is possible to completely remove a variable or a variable flag from 495It is possible to completely remove a variable or a variable flag from
439BitBake's internal data dictionary by using the "unset" keyword. Here is 496BitBake's internal data dictionary by using the "unset" keyword. Here is
440an example: :: 497an example::
441 498
442 unset DATE 499 unset DATE
443 unset do_fetch[noexec] 500 unset do_fetch[noexec]
@@ -452,7 +509,7 @@ When specifying pathnames for use with BitBake, do not use the tilde
452cause BitBake to not recognize the path since BitBake does not expand 509cause BitBake to not recognize the path since BitBake does not expand
453this character in the same way a shell would. 510this character in the same way a shell would.
454 511
455Instead, provide a fuller path as the following example illustrates: :: 512Instead, provide a fuller path as the following example illustrates::
456 513
457 BBLAYERS ?= " \ 514 BBLAYERS ?= " \
458 /home/scott-lenovo/LayerA \ 515 /home/scott-lenovo/LayerA \
@@ -463,7 +520,7 @@ Exporting Variables to the Environment
463 520
464You can export variables to the environment of running tasks by using 521You can export variables to the environment of running tasks by using
465the ``export`` keyword. For example, in the following example, the 522the ``export`` keyword. For example, in the following example, the
466``do_foo`` task prints "value from the environment" when run: :: 523``do_foo`` task prints "value from the environment" when run::
467 524
468 export ENV_VARIABLE 525 export ENV_VARIABLE
469 ENV_VARIABLE = "value from the environment" 526 ENV_VARIABLE = "value from the environment"
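A task matching that description simply references the variable from shell.
A minimal sketch::

   do_foo() {
       bbplain "$ENV_VARIABLE"
   }
   addtask foo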
@@ -481,7 +538,7 @@ It does not matter whether ``export ENV_VARIABLE`` appears before or
481after assignments to ``ENV_VARIABLE``. 538after assignments to ``ENV_VARIABLE``.
482 539
483It is also possible to combine ``export`` with setting a value for the 540It is also possible to combine ``export`` with setting a value for the
484variable. Here is an example: :: 541variable. Here is an example::
485 542
486 export ENV_VARIABLE = "variable-value" 543 export ENV_VARIABLE = "variable-value"
487 544
@@ -496,78 +553,78 @@ Conditional Syntax (Overrides)
496 553
497BitBake uses :term:`OVERRIDES` to control what 554BitBake uses :term:`OVERRIDES` to control what
498variables are overridden after BitBake parses recipes and configuration 555variables are overridden after BitBake parses recipes and configuration
499files. This section describes how you can use ``OVERRIDES`` as 556files. This section describes how you can use :term:`OVERRIDES` as
500conditional metadata, talks about key expansion in relationship to 557conditional metadata, talks about key expansion in relationship to
501``OVERRIDES``, and provides some examples to help with understanding. 558:term:`OVERRIDES`, and provides some examples to help with understanding.
502 559
503Conditional Metadata 560Conditional Metadata
504-------------------- 561--------------------
505 562
506You can use ``OVERRIDES`` to conditionally select a specific version of 563You can use :term:`OVERRIDES` to conditionally select a specific version of
507a variable and to conditionally append or prepend the value of a 564a variable and to conditionally append or prepend the value of a
508variable. 565variable.
509 566
510.. note:: 567.. note::
511 568
512 Overrides can only use lower-case characters. Additionally, 569 Overrides can only use lower-case characters, digits and dashes.
513 underscores are not permitted in override names as they are used to 570 In particular, colons are not permitted in override names as they are used to
514 separate overrides from each other and from the variable name. 571 separate overrides from each other and from the variable name.
515 572
516- *Selecting a Variable:* The ``OVERRIDES`` variable is a 573- *Selecting a Variable:* The :term:`OVERRIDES` variable is a
517 colon-character-separated list that contains items for which you want 574 colon-character-separated list that contains items for which you want
518 to satisfy conditions. Thus, if you have a variable that is 575 to satisfy conditions. Thus, if you have a variable that is
519 conditional on "arm", and "arm" is in ``OVERRIDES``, then the 576 conditional on "arm", and "arm" is in :term:`OVERRIDES`, then the
520 "arm"-specific version of the variable is used rather than the 577 "arm"-specific version of the variable is used rather than the
521 non-conditional version. Here is an example: :: 578 non-conditional version. Here is an example::
522 579
523 OVERRIDES = "architecture:os:machine" 580 OVERRIDES = "architecture:os:machine"
524 TEST = "default" 581 TEST = "default"
525 TEST_os = "osspecific" 582 TEST:os = "osspecific"
526 TEST_nooverride = "othercondvalue" 583 TEST:nooverride = "othercondvalue"
527 584
528 In this example, the ``OVERRIDES`` 585 In this example, the :term:`OVERRIDES`
529 variable lists three overrides: "architecture", "os", and "machine". 586 variable lists three overrides: "architecture", "os", and "machine".
530 The variable ``TEST`` by itself has a default value of "default". You 587 The variable ``TEST`` by itself has a default value of "default". You
531 select the os-specific version of the ``TEST`` variable by appending 588 select the os-specific version of the ``TEST`` variable by appending
532 the "os" override to the variable (i.e. ``TEST_os``). 589 the "os" override to the variable (i.e. ``TEST:os``).
533 590
534 To better understand this, consider a practical example that assumes 591 To better understand this, consider a practical example that assumes
535 an OpenEmbedded metadata-based Linux kernel recipe file. The 592 an OpenEmbedded metadata-based Linux kernel recipe file. The
536 following lines from the recipe file first set the kernel branch 593 following lines from the recipe file first set the kernel branch
537 variable ``KBRANCH`` to a default value, then conditionally override 594 variable ``KBRANCH`` to a default value, then conditionally override
538 that value based on the architecture of the build: :: 595 that value based on the architecture of the build::
539 596
540 KBRANCH = "standard/base" 597 KBRANCH = "standard/base"
541 KBRANCH_qemuarm = "standard/arm-versatile-926ejs" 598 KBRANCH:qemuarm = "standard/arm-versatile-926ejs"
542 KBRANCH_qemumips = "standard/mti-malta32" 599 KBRANCH:qemumips = "standard/mti-malta32"
543 KBRANCH_qemuppc = "standard/qemuppc" 600 KBRANCH:qemuppc = "standard/qemuppc"
544 KBRANCH_qemux86 = "standard/common-pc/base" 601 KBRANCH:qemux86 = "standard/common-pc/base"
545 KBRANCH_qemux86-64 = "standard/common-pc-64/base" 602 KBRANCH:qemux86-64 = "standard/common-pc-64/base"
546 KBRANCH_qemumips64 = "standard/mti-malta64" 603 KBRANCH:qemumips64 = "standard/mti-malta64"
547 604
548- *Appending and Prepending:* BitBake also supports append and prepend 605- *Appending and Prepending:* BitBake also supports append and prepend
549 operations to variable values based on whether a specific item is 606 operations to variable values based on whether a specific item is
550 listed in ``OVERRIDES``. Here is an example: :: 607 listed in :term:`OVERRIDES`. Here is an example::
551 608
552 DEPENDS = "glibc ncurses" 609 DEPENDS = "glibc ncurses"
553 OVERRIDES = "machine:local" 610 OVERRIDES = "machine:local"
554 DEPENDS_append_machine = "libmad" 611 DEPENDS:append:machine = "libmad"
555 612
556 In this example, ``DEPENDS`` becomes "glibc ncurses libmad". 613 In this example, :term:`DEPENDS` becomes "glibc ncurses libmad".
557 614
558 Again, using an OpenEmbedded metadata-based kernel recipe file as an 615 Again, using an OpenEmbedded metadata-based kernel recipe file as an
559 example, the following lines will conditionally append to the 616 example, the following lines will conditionally append to the
560 ``KERNEL_FEATURES`` variable based on the architecture: :: 617 ``KERNEL_FEATURES`` variable based on the architecture::
561 618
562 KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}" 619 KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
563 KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc" 620 KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
564 KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc" 621 KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
565 622
566- *Setting a Variable for a Single Task:* BitBake supports setting a 623- *Setting a Variable for a Single Task:* BitBake supports setting a
567 variable just for the duration of a single task. Here is an example: :: 624 variable just for the duration of a single task. Here is an example::
568 625
569 FOO_task-configure = "val 1" 626 FOO:task-configure = "val 1"
570 FOO_task-compile = "val 2" 627 FOO:task-compile = "val 2"
571 628
572 In the 629 In the
573 previous example, ``FOO`` has the value "val 1" while the 630 previous example, ``FOO`` has the value "val 1" while the
@@ -580,15 +637,25 @@ variable.
580 ``do_compile`` task. 637 ``do_compile`` task.
581 638
582 You can also use this syntax with other combinations (e.g. 639 You can also use this syntax with other combinations (e.g.
583 "``_prepend``") as shown in the following example: :: 640 "``:prepend``") as shown in the following example::
641
642 EXTRA_OEMAKE:prepend:task-compile = "${PARALLEL_MAKE} "
643
644.. note::
584 645
585 EXTRA_OEMAKE_prepend_task-compile = "${PARALLEL_MAKE} " 646 Before BitBake 1.52 (Honister 3.4), the syntax for :term:`OVERRIDES`
647 used ``_`` instead of ``:``, so you will still find a lot of documentation
648 using ``_append``, ``_prepend``, and ``_remove``, for example.
649
650 For details, see the
651 :yocto_docs:`Overrides Syntax Changes </migration-guides/migration-3.4.html#override-syntax-changes>`
652 section in the Yocto Project manual migration notes.
586 653
587Key Expansion 654Key Expansion
588------------- 655-------------
589 656
590Key expansion happens when the BitBake datastore is finalized. To better 657Key expansion happens when the BitBake datastore is finalized. To better
591understand this, consider the following example: :: 658understand this, consider the following example::
592 659
593 A${B} = "X" 660 A${B} = "X"
594 B = "2" 661 B = "2"
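At finalization, the key ``A${B}`` expands to ``A2``, and its value replaces
anything assigned directly to ``A2`` during parsing. A sketch of the net
effect, with a direct assignment added for illustration::

   A${B} = "X"
   B = "2"
   A2 = "Y"
   # after finalization, A2 expands to "X"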
@@ -612,57 +679,57 @@ users.
612 679
613There is often confusion concerning the order in which overrides and 680There is often confusion concerning the order in which overrides and
614various "append" operators take effect. Recall that an append or prepend 681various "append" operators take effect. Recall that an append or prepend
615operation using "_append" and "_prepend" does not result in an immediate 682operation using ":append" and ":prepend" does not result in an immediate
616assignment as would "+=", ".=", "=+", or "=.". Consider the following 683assignment as would "+=", ".=", "=+", or "=.". Consider the following
617example: :: 684example::
618 685
619 OVERRIDES = "foo" 686 OVERRIDES = "foo"
620 A = "Z" 687 A = "Z"
621 A_foo_append = "X" 688 A:foo:append = "X"
622 689
623For this case, 690For this case,
624``A`` is unconditionally set to "Z" and "X" is unconditionally and 691``A`` is unconditionally set to "Z" and "X" is unconditionally and
625immediately appended to the variable ``A_foo``. Because overrides have 692immediately appended to the variable ``A:foo``. Because overrides have
626not been applied yet, ``A_foo`` is set to "X" due to the append and 693not been applied yet, ``A:foo`` is set to "X" due to the append and
627``A`` simply equals "Z". 694``A`` simply equals "Z".
628 695
629Applying overrides, however, changes things. Since "foo" is listed in 696Applying overrides, however, changes things. Since "foo" is listed in
630``OVERRIDES``, the conditional variable ``A`` is replaced with the "foo" 697:term:`OVERRIDES`, the conditional variable ``A`` is replaced with the "foo"
631version, which is equal to "X". So effectively, ``A_foo`` replaces 698version, which is equal to "X". So effectively, ``A:foo`` replaces
632``A``. 699``A``.
633 700
634This next example changes the order of the override and the append: :: 701This next example changes the order of the override and the append::
635 702
636 OVERRIDES = "foo" 703 OVERRIDES = "foo"
637 A = "Z" 704 A = "Z"
638 A_append_foo = "X" 705 A:append:foo = "X"
639 706
640For this case, before 707For this case, before
641overrides are handled, ``A`` is set to "Z" and ``A_append_foo`` is set 708overrides are handled, ``A`` is set to "Z" and ``A:append:foo`` is set
642to "X". Once the override for "foo" is applied, however, ``A`` gets 709to "X". Once the override for "foo" is applied, however, ``A`` gets
643appended with "X". Consequently, ``A`` becomes "ZX". Notice that spaces 710appended with "X". Consequently, ``A`` becomes "ZX". Notice that spaces
644are not appended. 711are not appended.
645 712
646This next example has the order of the appends and overrides reversed 713This next example has the order of the appends and overrides reversed
647back as in the first example: :: 714back as in the first example::
648 715
649 OVERRIDES = "foo" 716 OVERRIDES = "foo"
650 A = "Y" 717 A = "Y"
651 A_foo_append = "Z" 718 A:foo:append = "Z"
652 A_foo_append = "X" 719 A:foo:append = "X"
653 720
654For this case, before any overrides are resolved, 721For this case, before any overrides are resolved,
655``A`` is set to "Y" using an immediate assignment. After this immediate 722``A`` is set to "Y" using an immediate assignment. After this immediate
656assignment, ``A_foo`` is set to "Z", and then further appended with "X" 723assignment, ``A:foo`` is set to "Z", and then further appended with "X"
657leaving the variable set to "ZX". Finally, applying the override for 724leaving the variable set to "ZX". Finally, applying the override for
658"foo" results in the conditional variable ``A`` becoming "ZX" (i.e. 725"foo" results in the conditional variable ``A`` becoming "ZX" (i.e.
659``A`` is replaced with ``A_foo``). 726``A`` is replaced with ``A:foo``).
660 727
661This final example mixes in some varying operators: :: 728This final example mixes in some varying operators::
662 729
663 A = "1" 730 A = "1"
664 A_append = "2" 731 A:append = "2"
665 A_append = "3" 732 A:append = "3"
666 A += "4" 733 A += "4"
667 A .= "5" 734 A .= "5"
668 735
@@ -670,7 +737,7 @@ For this case, the type of append
670operators affects the order of assignments as BitBake passes 737operators affects the order of assignments as BitBake passes
671through the code multiple times. Initially, ``A`` is set to "1 45" 738through the code multiple times. Initially, ``A`` is set to "1 45"
672because of the three statements that use immediate operators. After 739because of the three statements that use immediate operators. After
673these assignments are made, BitBake applies the "_append" operations. 740these assignments are made, BitBake applies the ":append" operations.
674Those operations result in ``A`` becoming "1 4523". 741Those operations result in ``A`` becoming "1 4523".
675 742
676Sharing Functionality 743Sharing Functionality
@@ -686,8 +753,10 @@ share the task.
686 753
687This section presents the mechanisms BitBake provides to allow you to 754This section presents the mechanisms BitBake provides to allow you to
688share functionality between recipes. Specifically, the mechanisms 755share functionality between recipes. Specifically, the mechanisms
689include ``include``, ``inherit``, ``INHERIT``, and ``require`` 756include ``include``, ``inherit``, :term:`INHERIT`, and ``require``
690directives. 757directives. There is also a higher-level abstraction called
758``configuration fragments`` that is enabled with the ``addfragments``
759directive.
691 760
692Locating Include and Class Files 761Locating Include and Class Files
693-------------------------------- 762--------------------------------
@@ -702,7 +771,9 @@ current directory for ``include`` and ``require`` directives.
702 771
703In order for include and class files to be found by BitBake, they need 772In order for include and class files to be found by BitBake, they need
704to be located in a "classes" subdirectory that can be found in 773to be located in a "classes" subdirectory that can be found in
705``BBPATH``. 774:term:`BBPATH`.
775
776.. _ref-bitbake-user-manual-metadata-inherit:
706 777
707``inherit`` Directive 778``inherit`` Directive
708--------------------- 779---------------------
@@ -720,12 +791,12 @@ file and then have your recipe inherit that class file.
720 791
721As an example, your recipes could use the following directive to inherit 792As an example, your recipes could use the following directive to inherit
722an ``autotools.bbclass`` file. The class file would contain common 793an ``autotools.bbclass`` file. The class file would contain common
723functionality for using Autotools that could be shared across recipes: :: 794functionality for using Autotools that could be shared across recipes::
724 795
725 inherit autotools 796 inherit autotools
726 797
727In this case, BitBake would search for the directory 798In this case, BitBake would search for the directory
728``classes/autotools.bbclass`` in ``BBPATH``. 799``classes/autotools.bbclass`` in :term:`BBPATH`.
729 800
730.. note:: 801.. note::
731 802
@@ -734,7 +805,7 @@ In this case, BitBake would search for the directory
734 805
735If you want to use the directive to inherit multiple classes, separate 806If you want to use the directive to inherit multiple classes, separate
736them with spaces. The following example shows how to inherit both the 807them with spaces. The following example shows how to inherit both the
737``buildhistory`` and ``rm_work`` classes: :: 808``buildhistory`` and ``rm_work`` classes::
738 809
739 inherit buildhistory rm_work 810 inherit buildhistory rm_work
740 811
@@ -742,19 +813,43 @@ An advantage with the inherit directive as compared to both the
742:ref:`include <bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>` and :ref:`require <bitbake-user-manual/bitbake-user-manual-metadata:\`\`require\`\` directive>` 813:ref:`include <bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>` and :ref:`require <bitbake-user-manual/bitbake-user-manual-metadata:\`\`require\`\` directive>`
743directives is that you can inherit class files conditionally. You can 814directives is that you can inherit class files conditionally. You can
744accomplish this by using a variable expression after the ``inherit`` 815accomplish this by using a variable expression after the ``inherit``
745statement. Here is an example: :: 816statement.
817
818To inherit classes conditionally, prefer the :ref:`inherit_defer
819<ref-bitbake-user-manual-metadata-inherit-defer>` directive, as
820:ref:`inherit_defer <ref-bitbake-user-manual-metadata-inherit-defer>` is
821evaluated at the end of parsing.
822
823.. _ref-bitbake-user-manual-metadata-inherit-defer:
824
825``inherit_defer`` Directive
826~~~~~~~~~~~~~~~~~~~~~~~~~~~
827
828The :ref:`inherit_defer <ref-bitbake-user-manual-metadata-inherit-defer>`
829directive works like the :ref:`inherit
830<ref-bitbake-user-manual-metadata-inherit>` directive, except that it is only
831evaluated at the end of parsing. Its usage is recommended when a conditional
832expression is used.
746 833
747 inherit ${VARNAME} 834This allows conditional expressions to be evaluated "late", meaning changes to
835the variable after the line is parsed will take effect. With the :ref:`inherit
836<ref-bitbake-user-manual-metadata-inherit>` directive this is not the case.
837
838Here is an example::
839
840 inherit_defer ${VARNAME}
748 841
749If ``VARNAME`` is 842If ``VARNAME`` is
750going to be set, it needs to be set before the ``inherit`` statement is 843going to be set, it needs to be set before parsing of the recipe
751parsed. One way to achieve a conditional inherit in this case is to use 844completes. One way to achieve a conditional inherit in this case is to use
752overrides: :: 845overrides::
753 846
754 VARIABLE = "" 847 VARIABLE = ""
755 VARIABLE_someoverride = "myclass" 848 VARIABLE:someoverride = "myclass"
756 849
757Another method is by using anonymous Python. Here is an example: :: 850Another method is by using :ref:`anonymous Python
851<bitbake-user-manual/bitbake-user-manual-metadata:Anonymous Python Functions>`.
852Here is an example::
758 853
759 python () { 854 python () {
760 if condition == value: 855 if condition == value:
@@ -763,11 +858,14 @@ Another method is by using anonymous Python. Here is an example: ::
763 d.setVar('VARIABLE', '') 858 d.setVar('VARIABLE', '')
764 } 859 }
765 860
766Alternatively, you could use an in-line Python expression in the 861Alternatively, you could use an inline Python expression in the
767following form: :: 862following form::
863
864 inherit_defer ${@'classname' if condition else ''}
768 865
769 inherit ${@'classname' if condition else ''} 866Or::
770 inherit ${@functionname(params)} 867
868 inherit_defer ${@bb.utils.contains('VARIABLE', 'something', 'classname', '', d)}
771 869
772In all cases, if the expression evaluates to an 870In all cases, if the expression evaluates to an
773empty string, the statement does not trigger a syntax error because it 871empty string, the statement does not trigger a syntax error because it
@@ -780,7 +878,7 @@ BitBake understands the ``include`` directive. This directive causes
780BitBake to parse whatever file you specify, and to insert that file at 878BitBake to parse whatever file you specify, and to insert that file at
781that location. The directive is much like its equivalent in Make except 879that location. The directive is much like its equivalent in Make except
782that if the path specified on the include line is a relative path, 880that if the path specified on the include line is a relative path,
783BitBake locates the first file it can find within ``BBPATH``. 881BitBake locates the first file it can find within :term:`BBPATH`.
784 882
785The include directive is a more generic method of including 883The include directive is a more generic method of including
786functionality as compared to the :ref:`inherit <bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` directive>` 884functionality as compared to the :ref:`inherit <bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` directive>`
@@ -790,7 +888,7 @@ encapsulated functionality or configuration that does not suit a
790``.bbclass`` file. 888``.bbclass`` file.
791 889
792As an example, suppose you needed a recipe to include some self-test 890As an example, suppose you needed a recipe to include some self-test
793definitions: :: 891definitions::
794 892
795 include test_defs.inc 893 include test_defs.inc
796 894
@@ -802,6 +900,33 @@ definitions: ::
802 of include . Doing so makes sure that an error is produced if the file cannot 900 of include . Doing so makes sure that an error is produced if the file cannot
803 be found. 901 be found.
804 902
903``include_all`` Directive
904-------------------------
905
906The ``include_all`` directive works like the :ref:`include
907<bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>`
908directive but will include all of the files that match the specified path in
909the enabled layers (layers part of :term:`BBLAYERS`).
910
911For example, let's say a ``maintainers.inc`` file is present in different layers
912and is conventionally placed in the ``conf/distro/include`` directory of each
913layer. In that case the ``include_all`` directive can be used to include
914the ``maintainers.inc`` file for all of these layers::
915
916 include_all conf/distro/include/maintainers.inc
917
918In other words, the ``maintainers.inc`` file for each layer is included through
919the :ref:`include <bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>`
920directive.
921
922BitBake will iterate through the colon-separated :term:`BBPATH` list to look for
923matching files to include, from left to right. As a consequence, matching files
924are included in that order.
925
926As the ``include_all`` directive uses the :ref:`include
927<bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>`
928directive in the background, no error is produced if no files are matched.
929
805.. _require-inclusion: 930.. _require-inclusion:
806 931
807``require`` Directive 932``require`` Directive
@@ -822,7 +947,7 @@ does not suit a ``.bbclass`` file.
822 947
823Similar to how BitBake handles :ref:`include <bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>`, if 948Similar to how BitBake handles :ref:`include <bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>`, if
824the path specified on the require line is a relative path, BitBake 949the path specified on the require line is a relative path, BitBake
825locates the first file it can find within ``BBPATH``. 950locates the first file it can find within :term:`BBPATH`.
826 951
827As an example, suppose you have two versions of a recipe (e.g. 952As an example, suppose you have two versions of a recipe (e.g.
828``foo_1.2.2.bb`` and ``foo_2.0.0.bb``) where each version contains some 953``foo_1.2.2.bb`` and ``foo_2.0.0.bb``) where each version contains some
@@ -831,7 +956,7 @@ include file named ``foo.inc`` that contains the common definitions
831needed to build "foo". You need to be sure ``foo.inc`` is located in the 956needed to build "foo". You need to be sure ``foo.inc`` is located in the
832same directory as your two recipe files as well. Once these conditions 957same directory as your two recipe files as well. Once these conditions
833are set up, you can share the functionality using a ``require`` 958are set up, you can share the functionality using a ``require``
834directive from within each recipe: :: 959directive from within each recipe::
835 960
836 require foo.inc 961 require foo.inc
837 962
@@ -844,14 +969,14 @@ class. BitBake only supports this directive when used within a
844configuration file. 969configuration file.
845 970
846As an example, suppose you needed to inherit a class file called 971As an example, suppose you needed to inherit a class file called
847``abc.bbclass`` from a configuration file as follows: :: 972``abc.bbclass`` from a configuration file as follows::
848 973
849 INHERIT += "abc" 974 INHERIT += "abc"
850 975
851This configuration directive causes the named class to be inherited at 976This configuration directive causes the named class to be inherited at
852the point of the directive during parsing. As with the ``inherit`` 977the point of the directive during parsing. As with the ``inherit``
853directive, the ``.bbclass`` file must be located in a "classes" 978directive, the ``.bbclass`` file must be located in a "classes"
854subdirectory in one of the directories specified in ``BBPATH``. 979subdirectory in one of the directories specified in :term:`BBPATH`.
855 980
856.. note:: 981.. note::
857 982
@@ -862,10 +987,69 @@ subdirectory in one of the directories specified in ``BBPATH``.
862If you want to use the directive to inherit multiple classes, you can 987If you want to use the directive to inherit multiple classes, you can
863provide them on the same line in the ``local.conf`` file. Use spaces to 988provide them on the same line in the ``local.conf`` file. Use spaces to
864separate the classes. The following example shows how to inherit both 989separate the classes. The following example shows how to inherit both
865the ``autotools`` and ``pkgconfig`` classes: :: 990the ``autotools`` and ``pkgconfig`` classes::
866 991
867 INHERIT += "autotools pkgconfig" 992 INHERIT += "autotools pkgconfig"
868 993
994``addfragments`` Directive
995--------------------------
996
997This directive allows fine-tuning local configurations with configuration
998snippets contained in layers in a structured, controlled way. Typically it would
999go into ``bitbake.conf``, for example::
1000
1001 addfragments conf/fragments OE_FRAGMENTS OE_FRAGMENTS_METADATA_VARS OE_BUILTIN_FRAGMENTS
1002
1003``addfragments`` takes four parameters:
1004
1005- path prefix for fragment files inside the layer file tree that BitBake
1006 uses to construct full paths to the fragment files
1007
1008- name of variable that holds the list of enabled fragments in an
1009 active build
1010
1011- name of variable that contains a list of variable names containing
1012 fragment-specific metadata (such as descriptions)
1013
1014- name of variable that contains definitions for built-in fragments
1015
1016This allows listing enabled configuration fragments in the ``OE_FRAGMENTS``
1017variable like this::
1018
1019 OE_FRAGMENTS = "core/domain/somefragment core/someotherfragment anotherlayer/anotherdomain/anotherfragment"
1020
1021Fragment names listed in this variable must be prefixed by the layer name
1022where a fragment file is located, defined by :term:`BBFILE_COLLECTIONS` in ``layer.conf``.
1023
1024The implementation then expands this list into
1025:ref:`require <bitbake-user-manual/bitbake-user-manual-metadata:\`\`require\`\` directive>`
1026directives with full paths to respective layers::
1027
1028 require /path/to/core-layer/conf/fragments/domain/somefragment.conf
1029 require /path/to/core-layer/conf/fragments/someotherfragment.conf
1030 require /path/to/another-layer/conf/fragments/anotherdomain/anotherfragment.conf
1031
1032The variable containing a list of fragment metadata variables could look like this::
1033
1034 OE_FRAGMENTS_METADATA_VARS = "BB_CONF_FRAGMENT_SUMMARY BB_CONF_FRAGMENT_DESCRIPTION"
1035
1036The implementation will add a flag containing the fragment name to each of those variables
1037when parsing fragments, so that the variables are namespaced by fragment name, and do not override
1038each other when several fragments are enabled.
1039
1040The variable containing built-in fragment definitions could look like this::
1041
1042 OE_BUILTIN_FRAGMENTS = "someprefix:SOMEVARIABLE anotherprefix:ANOTHERVARIABLE"
1043
1044and then, if ``someprefix/somevalue`` is added to the variable that holds the
1045list of enabled fragments::
1046
1047 OE_FRAGMENTS = "... someprefix/somevalue"
1048
1049BitBake will treat that as a direct value assignment in its configuration::
1050
1051 SOMEVARIABLE = "somevalue"
1052
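Putting this together, a hypothetical fragment file referenced through the
mechanism above might look like the following sketch (the metadata variable
names match the ``OE_FRAGMENTS_METADATA_VARS`` example above; the feature
variable is purely illustrative)::

    # conf/fragments/domain/somefragment.conf
    BB_CONF_FRAGMENT_SUMMARY = "Enable feature X"
    BB_CONF_FRAGMENT_DESCRIPTION = "Longer description of what enabling feature X changes"
    SOMEFEATURE = "1"
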
869Functions 1053Functions
870========= 1054=========
871 1055
@@ -893,9 +1077,9 @@ Regardless of the type of function, you can only define them in class
893Shell Functions 1077Shell Functions
894--------------- 1078---------------
895 1079
896Functions written in shell script and executed either directly as 1080Functions written in shell script are executed either directly as
897functions, tasks, or both. They can also be called by other shell 1081functions, tasks, or both. They can also be called by other shell
898functions. Here is an example shell function definition: :: 1082functions. Here is an example shell function definition::
899 1083
900 some_function () { 1084 some_function () {
901 echo "Hello World" 1085 echo "Hello World"
@@ -907,19 +1091,19 @@ rules. The scripts are executed by ``/bin/sh``, which may not be a bash
907shell but might be something such as ``dash``. You should not use 1091shell but might be something such as ``dash``. You should not use
908Bash-specific syntax (bashisms). 1092Bash-specific syntax (bashisms).
909 1093
910Overrides and override-style operators like ``_append`` and ``_prepend`` 1094Overrides and override-style operators like ``:append`` and ``:prepend``
911can also be applied to shell functions. Most commonly, this application 1095can also be applied to shell functions. Most commonly, this application
912would be used in a ``.bbappend`` file to modify functions in the main 1096would be used in a ``.bbappend`` file to modify functions in the main
913recipe. It can also be used to modify functions inherited from classes. 1097recipe. It can also be used to modify functions inherited from classes.
914 1098
915As an example, consider the following: :: 1099As an example, consider the following::
916 1100
917 do_foo() { 1101 do_foo() {
918 bbplain first 1102 bbplain first
919 fn 1103 fn
920 } 1104 }
921 1105
922 fn_prepend() { 1106 fn:prepend() {
923 bbplain second 1107 bbplain second
924 } 1108 }
925 1109
@@ -927,11 +1111,11 @@ As an example, consider the following: ::
927 bbplain third 1111 bbplain third
928 } 1112 }
929 1113
930 do_foo_append() { 1114 do_foo:append() {
931 bbplain fourth 1115 bbplain fourth
932 } 1116 }
933 1117
934Running ``do_foo`` prints the following: :: 1118Running ``do_foo`` prints the following::
935 1119
936 recipename do_foo: first 1120 recipename do_foo: first
937 recipename do_foo: second 1121 recipename do_foo: second
@@ -943,7 +1127,7 @@ Running ``do_foo`` prints the following: ::
943 Overrides and override-style operators can be applied to any shell 1127 Overrides and override-style operators can be applied to any shell
944 function, not just :ref:`tasks <bitbake-user-manual/bitbake-user-manual-metadata:tasks>`. 1128 function, not just :ref:`tasks <bitbake-user-manual/bitbake-user-manual-metadata:tasks>`.
945 1129
946You can use the ``bitbake -e`` recipename command to view the final 1130You can use the ``bitbake -e recipename`` command to view the final
947assembled function after all overrides have been applied. 1131assembled function after all overrides have been applied.
948 1132
949BitBake-Style Python Functions 1133BitBake-Style Python Functions
@@ -952,7 +1136,7 @@ BitBake-Style Python Functions
952These functions are written in Python and executed by BitBake or other 1136These functions are written in Python and executed by BitBake or other
953Python functions using ``bb.build.exec_func()``. 1137Python functions using ``bb.build.exec_func()``.
954 1138
955An example BitBake function is: :: 1139An example BitBake function is::
956 1140
957 python some_python_function () { 1141 python some_python_function () {
958 d.setVar("TEXT", "Hello World") 1142 d.setVar("TEXT", "Hello World")
@@ -975,9 +1159,9 @@ import these modules. Also in these types of functions, the datastore
975Similar to shell functions, you can also apply overrides and 1159Similar to shell functions, you can also apply overrides and
976override-style operators to BitBake-style Python functions. 1160override-style operators to BitBake-style Python functions.
977 1161
978As an example, consider the following: :: 1162As an example, consider the following::
979 1163
980 python do_foo_prepend() { 1164 python do_foo:prepend() {
981 bb.plain("first") 1165 bb.plain("first")
982 } 1166 }
983 1167
@@ -985,17 +1169,17 @@ As an example, consider the following: ::
985 bb.plain("second") 1169 bb.plain("second")
986 } 1170 }
987 1171
988 python do_foo_append() { 1172 python do_foo:append() {
989 bb.plain("third") 1173 bb.plain("third")
990 } 1174 }
991 1175
992Running ``do_foo`` prints the following: :: 1176Running ``do_foo`` prints the following::
993 1177
994 recipename do_foo: first 1178 recipename do_foo: first
995 recipename do_foo: second 1179 recipename do_foo: second
996 recipename do_foo: third 1180 recipename do_foo: third
997 1181
998You can use the ``bitbake -e`` recipename command to view 1182You can use the ``bitbake -e recipename`` command to view
999the final assembled function after all overrides have been applied. 1183the final assembled function after all overrides have been applied.
1000 1184
1001Python Functions 1185Python Functions
@@ -1004,7 +1188,7 @@ Python Functions
1004These functions are written in Python and are executed by other Python 1188These functions are written in Python and are executed by other Python
1005code. Examples of Python functions are utility functions that you intend 1189code. Examples of Python functions are utility functions that you intend
1006to call from in-line Python or from within other Python functions. Here 1190to call from in-line Python or from within other Python functions. Here
1007is an example: :: 1191is an example::
1008 1192
1009 def get_depends(d): 1193 def get_depends(d):
1010 if d.getVar('SOMECONDITION'): 1194 if d.getVar('SOMECONDITION'):
@@ -1015,7 +1199,7 @@ is an example: ::
1015 SOMECONDITION = "1" 1199 SOMECONDITION = "1"
1016 DEPENDS = "${@get_depends(d)}" 1200 DEPENDS = "${@get_depends(d)}"
1017 1201
1018This would result in ``DEPENDS`` containing ``dependencywithcond``. 1202This would result in :term:`DEPENDS` containing ``dependencywithcond``.
1019 1203
1020Here are some things to know about Python functions: 1204Here are some things to know about Python functions:
1021 1205
@@ -1056,7 +1240,7 @@ functions and regular Python functions defined with "def":
1056- Regular Python functions are called with the usual Python syntax. 1240- Regular Python functions are called with the usual Python syntax.
1057 BitBake-style Python functions are usually tasks and are called 1241 BitBake-style Python functions are usually tasks and are called
1058 directly by BitBake, but can also be called manually from Python code 1242 directly by BitBake, but can also be called manually from Python code
1059 by using the ``bb.build.exec_func()`` function. Here is an example: :: 1243 by using the ``bb.build.exec_func()`` function. Here is an example::
1060 1244
1061 bb.build.exec_func("my_bitbake_style_function", d) 1245 bb.build.exec_func("my_bitbake_style_function", d)
1062 1246
@@ -1094,7 +1278,7 @@ Sometimes it is useful to set variables or perform other operations
1094programmatically during parsing. To do this, you can define special 1278programmatically during parsing. To do this, you can define special
1095Python functions, called anonymous Python functions, that run at the end 1279Python functions, called anonymous Python functions, that run at the end
1096of parsing. For example, the following conditionally sets a variable 1280of parsing. For example, the following conditionally sets a variable
1097based on the value of another variable: :: 1281based on the value of another variable::
1098 1282
1099 python () { 1283 python () {
1100 if d.getVar('SOMEVAR') == 'value': 1284 if d.getVar('SOMEVAR') == 'value':
@@ -1107,7 +1291,7 @@ the name "__anonymous", rather than no name.
1107Anonymous Python functions always run at the end of parsing, regardless 1291Anonymous Python functions always run at the end of parsing, regardless
1108of where they are defined. If a recipe contains many anonymous 1292of where they are defined. If a recipe contains many anonymous
1109functions, they run in the same order as they are defined within the 1293functions, they run in the same order as they are defined within the
1110recipe. As an example, consider the following snippet: :: 1294recipe. As an example, consider the following snippet::
1111 1295
1112 python () { 1296 python () {
1113 d.setVar('FOO', 'foo 2') 1297 d.setVar('FOO', 'foo 2')
@@ -1122,7 +1306,7 @@ recipe. As an example, consider the following snippet: ::
1122 BAR = "bar 1" 1306 BAR = "bar 1"
1123 1307
1124The previous example is conceptually 1308The previous example is conceptually
1125equivalent to the following snippet: :: 1309equivalent to the following snippet::
1126 1310
1127 FOO = "foo 1" 1311 FOO = "foo 1"
1128 BAR = "bar 1" 1312 BAR = "bar 1"
@@ -1134,12 +1318,12 @@ equivalent to the following snippet: ::
1134values set for the variables within the anonymous functions become 1318values set for the variables within the anonymous functions become
1135available to tasks, which always run after parsing. 1319available to tasks, which always run after parsing.
1136 1320
1137Overrides and override-style operators such as "``_append``" are applied 1321Overrides and override-style operators such as "``:append``" are applied
1138before anonymous functions run. In the following example, ``FOO`` ends 1322before anonymous functions run. In the following example, ``FOO`` ends
1139up with the value "foo from anonymous": :: 1323up with the value "foo from anonymous"::
1140 1324
1141 FOO = "foo" 1325 FOO = "foo"
1142 FOO_append = " from outside" 1326 FOO:append = " from outside"
1143 1327
1144 python () { 1328 python () {
1145 d.setVar("FOO", "foo from anonymous") 1329 d.setVar("FOO", "foo from anonymous")
@@ -1164,7 +1348,7 @@ To understand the benefits of this feature, consider the basic scenario
1164where a class defines a task function and your recipe inherits the 1348where a class defines a task function and your recipe inherits the
1165class. In this basic scenario, your recipe inherits the task function as 1349class. In this basic scenario, your recipe inherits the task function as
1166defined in the class. If desired, your recipe can add to the start and 1350defined in the class. If desired, your recipe can add to the start and
1167end of the function by using the "_prepend" or "_append" operations 1351end of the function by using the ":prepend" or ":append" operations
1168respectively, or it can redefine the function completely. However, if it 1352respectively, or it can redefine the function completely. However, if it
1169redefines the function, there is no means for it to call the class 1353redefines the function, there is no means for it to call the class
1170version of the function. ``EXPORT_FUNCTIONS`` provides a mechanism that 1354version of the function. ``EXPORT_FUNCTIONS`` provides a mechanism that
@@ -1173,24 +1357,24 @@ version of the function.
1173 1357
1174To make use of this technique, you need the following things in place: 1358To make use of this technique, you need the following things in place:
1175 1359
1176- The class needs to define the function as follows: :: 1360- The class needs to define the function as follows::
1177 1361
1178 classname_functionname 1362 classname_functionname
1179 1363
1180 For example, if you have a class file 1364 For example, if you have a class file
1181 ``bar.bbclass`` and a function named ``do_foo``, the class must 1365 ``bar.bbclass`` and a function named ``do_foo``, the class must
1182 define the function as follows: :: 1366 define the function as follows::
1183 1367
1184 bar_do_foo 1368 bar_do_foo
1185 1369
1186- The class needs to contain the ``EXPORT_FUNCTIONS`` statement as 1370- The class needs to contain the ``EXPORT_FUNCTIONS`` statement as
1187 follows: :: 1371 follows::
1188 1372
1189 EXPORT_FUNCTIONS functionname 1373 EXPORT_FUNCTIONS functionname
1190 1374
1191 For example, continuing with 1375 For example, continuing with
1192 the same example, the statement in the ``bar.bbclass`` would be as 1376 the same example, the statement in the ``bar.bbclass`` would be as
1193 follows: :: 1377 follows::
1194 1378
1195 EXPORT_FUNCTIONS do_foo 1379 EXPORT_FUNCTIONS do_foo
1196 1380
@@ -1199,7 +1383,7 @@ To make use of this technique, you need the following things in place:
1199 class version of the function, it should call ``bar_do_foo``. 1383 class version of the function, it should call ``bar_do_foo``.
1200 Assuming ``do_foo`` was a shell function and ``EXPORT_FUNCTIONS`` was 1384 Assuming ``do_foo`` was a shell function and ``EXPORT_FUNCTIONS`` was
1201 used as above, the recipe's function could conditionally call the 1385 used as above, the recipe's function could conditionally call the
1202 class version of the function as follows: :: 1386 class version of the function as follows::
1203 1387
1204 do_foo() { 1388 do_foo() {
1205 if [ somecondition ] ; then 1389 if [ somecondition ] ; then
@@ -1233,11 +1417,11 @@ Tasks are either :ref:`shell functions <bitbake-user-manual/bitbake-user-manual-
1233that have been promoted to tasks by using the ``addtask`` command. The 1417that have been promoted to tasks by using the ``addtask`` command. The
1234``addtask`` command can also optionally describe dependencies between 1418``addtask`` command can also optionally describe dependencies between
1235the task and other tasks. Here is an example that shows how to define a 1419the task and other tasks. Here is an example that shows how to define a
1236task and declare some dependencies: :: 1420task and declare some dependencies::
1237 1421
1238 python do_printdate () { 1422 python do_printdate () {
1239 import time 1423 import datetime
1240 print time.strftime('%Y%m%d', time.gmtime()) 1424 bb.plain('Date: %s' % (datetime.date.today()))
1241 } 1425 }
1242 addtask printdate after do_fetch before do_build 1426 addtask printdate after do_fetch before do_build
1243 1427
@@ -1264,12 +1448,12 @@ Additionally, the ``do_printdate`` task becomes dependent upon the
1264 rerun for experimentation purposes, you can make BitBake always 1448 rerun for experimentation purposes, you can make BitBake always
1265 consider the task "out-of-date" by using the 1449 consider the task "out-of-date" by using the
1266 :ref:`[nostamp] <bitbake-user-manual/bitbake-user-manual-metadata:Variable Flags>` 1450 :ref:`[nostamp] <bitbake-user-manual/bitbake-user-manual-metadata:Variable Flags>`
1267 variable flag, as follows: :: 1451 variable flag, as follows::
1268 1452
1269 do_printdate[nostamp] = "1" 1453 do_printdate[nostamp] = "1"
1270 1454
1271 You can also explicitly run the task and provide the 1455 You can also explicitly run the task and provide the
1272 -f option as follows: :: 1456 -f option as follows::
1273 1457
1274 $ bitbake recipe -c printdate -f 1458 $ bitbake recipe -c printdate -f
1275 1459
@@ -1278,7 +1462,7 @@ Additionally, the ``do_printdate`` task becomes dependent upon the
1278 name. 1462 name.
1279 1463
1280You might wonder about the practical effects of using ``addtask`` 1464You might wonder about the practical effects of using ``addtask``
1281without specifying any dependencies as is done in the following example: :: 1465without specifying any dependencies as is done in the following example::
1282 1466
1283 addtask printdate 1467 addtask printdate
1284 1468
@@ -1286,7 +1470,7 @@ In this example, assuming dependencies have not been
1286added through some other means, the only way to run the task is by 1470added through some other means, the only way to run the task is by
1287explicitly selecting it with ``bitbake`` recipe ``-c printdate``. You 1471explicitly selecting it with ``bitbake`` recipe ``-c printdate``. You
1288can use the ``do_listtasks`` task to list all tasks defined in a recipe 1472can use the ``do_listtasks`` task to list all tasks defined in a recipe
1289as shown in the following example: :: 1473as shown in the following example::
1290 1474
1291 $ bitbake recipe -c listtasks 1475 $ bitbake recipe -c listtasks
1292 1476
@@ -1296,12 +1480,23 @@ For more information on task dependencies, see the
1296See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:variable flags`" section for information 1480See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:variable flags`" section for information
1297on variable flags you can use with tasks. 1481on variable flags you can use with tasks.
1298 1482
1483.. note::
1484
1485 While it's infrequent, it's possible to define multiple tasks as
1486 dependencies when calling ``addtask``. For example, here's a snippet
1487 from the OpenEmbedded class file ``package_tar.bbclass``::
1488
1489 addtask package_write_tar before do_build after do_packagedata do_package
1490
1491 Note how the ``package_write_tar`` task has to wait until both
1492 ``do_packagedata`` and ``do_package`` complete.
1493
1299Deleting a Task 1494Deleting a Task
1300--------------- 1495---------------
1301 1496
1302As well as being able to add tasks, you can delete them. Simply use the 1497As well as being able to add tasks, you can delete them. Simply use the
1303``deltask`` command to delete a task. For example, to delete the example 1498``deltask`` command to delete a task. For example, to delete the example
1304task used in the previous sections, you would use: :: 1499task used in the previous sections, you would use::
1305 1500
1306 deltask printdate 1501 deltask printdate
1307 1502
@@ -1317,7 +1512,7 @@ to run before ``do_a``.
1317 1512
1318If you want dependencies such as these to remain intact, use the 1513If you want dependencies such as these to remain intact, use the
1319``[noexec]`` varflag to disable the task instead of using the 1514``[noexec]`` varflag to disable the task instead of using the
1320``deltask`` command to delete it: :: 1515``deltask`` command to delete it::
1321 1516
1322 do_b[noexec] = "1" 1517 do_b[noexec] = "1"
1323 1518
@@ -1331,8 +1526,8 @@ the build machine cannot influence the build.
1331.. note:: 1526.. note::
1332 1527
1333 By default, BitBake cleans the environment to include only those 1528 By default, BitBake cleans the environment to include only those
1334 things exported or listed in its whitelist to ensure that the build 1529 things exported or listed in its passthrough list to ensure that the
1335 environment is reproducible and consistent. You can prevent this 1530 build environment is reproducible and consistent. You can prevent this
1336 "cleaning" by setting the :term:`BB_PRESERVE_ENV` variable. 1531 "cleaning" by setting the :term:`BB_PRESERVE_ENV` variable.
1337 1532
1338Consequently, if you do want something to get passed into the build task 1533Consequently, if you do want something to get passed into the build task
@@ -1340,14 +1535,14 @@ environment, you must take these two steps:
1340 1535
1341#. Tell BitBake to load what you want from the environment into the 1536#. Tell BitBake to load what you want from the environment into the
1342 datastore. You can do so through the 1537 datastore. You can do so through the
1343 :term:`BB_ENV_WHITELIST` and 1538 :term:`BB_ENV_PASSTHROUGH` and
1344 :term:`BB_ENV_EXTRAWHITE` variables. For 1539 :term:`BB_ENV_PASSTHROUGH_ADDITIONS` variables. For
1345 example, assume you want to prevent the build system from accessing 1540 example, assume you want to prevent the build system from accessing
1346 your ``$HOME/.ccache`` directory. The following command "whitelists" 1541 your ``$HOME/.ccache`` directory. The following command adds
1347 the environment variable ``CCACHE_DIR`` causing BitBake to allow that 1542 the environment variable ``CCACHE_DIR`` to BitBake's passthrough
1348 variable into the datastore: :: 1543 list to allow that variable into the datastore::
1349 1544
1350 export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE CCACHE_DIR" 1545 export BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS CCACHE_DIR"
1351 1546
1352#. Tell BitBake to export what you have loaded into the datastore to the 1547#. Tell BitBake to export what you have loaded into the datastore to the
1353 task environment of every running task. Loading something from the 1548 task environment of every running task. Loading something from the
@@ -1355,7 +1550,7 @@ environment, you must take these two steps:
1355 available in the datastore. To export it to the task environment of 1550 available in the datastore. To export it to the task environment of
1356 every running task, use a command similar to the following in your 1551 every running task, use a command similar to the following in your
1357 local configuration file ``local.conf`` or your distribution 1552 local configuration file ``local.conf`` or your distribution
1358 configuration file: :: 1553 configuration file::
1359 1554
1360 export CCACHE_DIR 1555 export CCACHE_DIR
1361 1556
@@ -1364,17 +1559,17 @@ environment, you must take these two steps:
1364 A side effect of the previous steps is that BitBake records the 1559 A side effect of the previous steps is that BitBake records the
1365 variable as a dependency of the build process in things like the 1560 variable as a dependency of the build process in things like the
1366 setscene checksums. If doing so results in unnecessary rebuilds of 1561 setscene checksums. If doing so results in unnecessary rebuilds of
1367 tasks, you can whitelist the variable so that the setscene code 1562 tasks, you can also flag the variable so that the setscene code
1368 ignores the dependency when it creates checksums. 1563 ignores the dependency when it creates checksums.
1369 1564
1370Sometimes, it is useful to be able to obtain information from the 1565Sometimes, it is useful to be able to obtain information from the
1371original execution environment. BitBake saves a copy of the original 1566original execution environment. BitBake saves a copy of the original
1372environment into a special variable named :term:`BB_ORIGENV`. 1567environment into a special variable named :term:`BB_ORIGENV`.
1373 1568
1374The ``BB_ORIGENV`` variable returns a datastore object that can be 1569The :term:`BB_ORIGENV` variable returns a datastore object that can be
1375queried using the standard datastore operators such as 1570queried using the standard datastore operators such as
1376``getVar(, False)``. The datastore object is useful, for example, to 1571``getVar(, False)``. The datastore object is useful, for example, to
1377find the original ``DISPLAY`` variable. Here is an example: :: 1572find the original ``DISPLAY`` variable. Here is an example::
1378 1573
1379 origenv = d.getVar("BB_ORIGENV", False) 1574 origenv = d.getVar("BB_ORIGENV", False)
1380 bar = origenv.getVar("BAR", False) 1575 bar = origenv.getVar("BAR", False)
@@ -1387,7 +1582,7 @@ Variable Flags
1387 1582
1388Variable flags (varflags) help control a task's functionality and 1583Variable flags (varflags) help control a task's functionality and
1389dependencies. BitBake reads and writes varflags to the datastore using 1584dependencies. BitBake reads and writes varflags to the datastore using
1390the following command forms: :: 1585the following command forms::
1391 1586
1392 variable = d.getVarFlags("variable") 1587 variable = d.getVarFlags("variable")
1393 self.d.setVarFlags("FOO", {"func": True}) 1588 self.d.setVarFlags("FOO", {"func": True})
@@ -1418,12 +1613,35 @@ functionality of the task:
1418 directory listed is used as the current working directory for the 1613 directory listed is used as the current working directory for the
1419 task. 1614 task.
1420 1615
1616- ``[file-checksums]``: Controls the file dependencies for a task. The
1617 baseline file list is the set of files associated with
1618 :term:`SRC_URI`. It may be used to set additional dependencies on
1619 files not associated with :term:`SRC_URI`.
1620
1621 Each value in the list is a file-boolean pair, where the first
1622 element is the file name and the second indicates whether or not it
1623 physically exists on the filesystem::
1624
1625 do_configure[file-checksums] += "${MY_DIRPATH}/my-file.txt:True"
1626
1627 It is important to record any paths which the task looked at and
1628 which didn't exist. This means that if these do exist at a later
1629 time, the task can be rerun with the new additional files. The
1630 "exists" True or False value after the path allows this to be
1631 handled.
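
  For instance, a sketch recording both an existing input file and a
  path that was checked but found absent (the paths are illustrative)::

     do_configure[file-checksums] += "${WORKDIR}/site.conf:True ${WORKDIR}/site-local.conf:False"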
1632
1421- ``[lockfiles]``: Specifies one or more lockfiles to lock while the 1633- ``[lockfiles]``: Specifies one or more lockfiles to lock while the
1422 task executes. Only one task may hold a lockfile, and any task that 1634 task executes. Only one task may hold a lockfile, and any task that
1423 attempts to lock an already locked file will block until the lock is 1635 attempts to lock an already locked file will block until the lock is
1424 released. You can use this variable flag to accomplish mutual 1636 released. You can use this variable flag to accomplish mutual
1425 exclusion. 1637 exclusion.
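
  For example, a minimal hypothetical sketch serializing two deployment
  tasks against a shared lock file::

     do_deploy[lockfiles] = "${TOPDIR}/deploy.lock"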
1426 1638
1639- ``[network]``: When set to "1", allows a task to access the network. By
1640 default, only the ``do_fetch`` task is granted network access. Recipes
1641 shouldn't access the network outside of ``do_fetch`` as it usually
1642 undermines fetcher source mirroring, image and licence manifests, software
1643 auditing and supply chain security.
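
  As a sketch, a hypothetical task that must contact a license server
  could be granted access explicitly::

     do_license_check[network] = "1"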
1644
1427- ``[noexec]``: When set to "1", marks the task as being empty, with 1645- ``[noexec]``: When set to "1", marks the task as being empty, with
1428 no execution required. You can use the ``[noexec]`` flag to set up 1646 no execution required. You can use the ``[noexec]`` flag to set up
1429 tasks as dependency placeholders, or to disable tasks defined 1647 tasks as dependency placeholders, or to disable tasks defined
@@ -1456,7 +1674,7 @@ functionality of the task:
1456 can result in unpredictable behavior. 1674 can result in unpredictable behavior.
1457 1675
1458 - Setting the varflag to a value greater than the value used in 1676 - Setting the varflag to a value greater than the value used in
1459 the ``BB_NUMBER_THREADS`` variable causes ``number_threads`` to 1677 the :term:`BB_NUMBER_THREADS` variable causes ``number_threads`` to
1460 have no effect. 1678 have no effect.
1461 1679
1462- ``[postfuncs]``: List of functions to call after the completion of 1680- ``[postfuncs]``: List of functions to call after the completion of
@@ -1526,7 +1744,7 @@ intent is to make it easy to do things like email notification on build
1526failures. 1744failures.
1527 1745
1528Following is an example event handler that prints the name of the event 1746Following is an example event handler that prints the name of the event
1529and the content of the ``FILE`` variable: :: 1747and the content of the :term:`FILE` variable::
1530 1748
1531 addhandler myclass_eventhandler 1749 addhandler myclass_eventhandler
1532 python myclass_eventhandler() { 1750 python myclass_eventhandler() {
@@ -1565,11 +1783,11 @@ might have an interest in viewing:
1565 1783
1566- ``bb.event.ConfigParsed()``: Fired when the base configuration; which 1784- ``bb.event.ConfigParsed()``: Fired when the base configuration; which
1567 consists of ``bitbake.conf``, ``base.bbclass`` and any global 1785 consists of ``bitbake.conf``, ``base.bbclass`` and any global
1568 ``INHERIT`` statements; has been parsed. You can see multiple such 1786 :term:`INHERIT` statements; has been parsed. You can see multiple such
1569 events when each of the workers parse the base configuration or if 1787 events when each of the workers parse the base configuration or if
1570 the server changes configuration and reparses. Any given datastore 1788 the server changes configuration and reparses. Any given datastore
1571 only has one such event executed against it, however. If 1789 only has one such event executed against it, however. If
1572 ```BB_INVALIDCONF`` <#>`__ is set in the datastore by the event 1790 :term:`BB_INVALIDCONF` is set in the datastore by the event
1573 handler, the configuration is reparsed and a new event triggered, 1791 handler, the configuration is reparsed and a new event triggered,
1574 allowing the metadata to update configuration. 1792 allowing the metadata to update configuration.
1575 1793
@@ -1636,13 +1854,18 @@ user interfaces:
1636 1854
1637.. _variants-class-extension-mechanism: 1855.. _variants-class-extension-mechanism:
1638 1856
1639Variants - Class Extension Mechanism 1857Variants --- Class Extension Mechanism
1640==================================== 1858======================================
1859
1860BitBake supports multiple incarnations of a recipe file via the
1861:term:`BBCLASSEXTEND` variable.
1862
1863The :term:`BBCLASSEXTEND` variable is a space separated list of classes used
1864to "extend" the recipe for each variant. Here is an example that results in a
1865second incarnation of the current recipe being available. This second
1866incarnation will have the "native" class inherited. ::
1641 1867
1642BitBake supports two features that facilitate creating from a single 1868 BBCLASSEXTEND = "native"
1643recipe file multiple incarnations of that recipe file where all
1644incarnations are buildable. These features are enabled through the
1645:term:`BBCLASSEXTEND` and :term:`BBVERSIONS` variables.
1646 1869
1647.. note:: 1870.. note::
1648 1871
@@ -1652,34 +1875,6 @@ incarnations are buildable. These features are enabled through the
1652 class. For specific examples, see the OE-Core native, nativesdk, and 1875 class. For specific examples, see the OE-Core native, nativesdk, and
1653 multilib classes. 1876 multilib classes.
1654 1877
1655- ``BBCLASSEXTEND``: This variable is a space separated list of
1656 classes used to "extend" the recipe for each variant. Here is an
1657 example that results in a second incarnation of the current recipe
1658 being available. This second incarnation will have the "native" class
1659 inherited. ::
1660
1661 BBCLASSEXTEND = "native"
1662
1663- ``BBVERSIONS``: This variable allows a single recipe to build
1664 multiple versions of a project from a single recipe file. You can
1665 also specify conditional metadata (using the
1666 :term:`OVERRIDES` mechanism) for a single
1667 version, or an optionally named range of versions. Here is an
1668 example: ::
1669
1670 BBVERSIONS = "1.0 2.0 git"
1671 SRC_URI_git = "git://someurl/somepath.git"
1672
1673 BBVERSIONS = "1.0.[0-6]:1.0.0+ 1.0.[7-9]:1.0.7+"
1674 SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;patch=1"
1675
1676 The name of the range defaults to the original version of the recipe. For
1677 example, in OpenEmbedded, the recipe file ``foo_1.0.0+.bb`` creates a default
1678 name range of ``1.0.0+``. This is useful because the range name is not only
1679 placed into overrides, but it is also made available for the metadata to use
1680 in the variable that defines the base recipe versions for use in ``file://``
1681 search paths (:term:`FILESPATH`).
1682
1683Dependencies 1878Dependencies
1684============ 1879============
1685 1880
@@ -1708,7 +1903,7 @@ Dependencies Internal to the ``.bb`` File
1708BitBake uses the ``addtask`` directive to manage dependencies that are 1903BitBake uses the ``addtask`` directive to manage dependencies that are
1709internal to a given recipe file. You can use the ``addtask`` directive 1904internal to a given recipe file. You can use the ``addtask`` directive
1710to indicate when a task is dependent on other tasks or when other tasks 1905to indicate when a task is dependent on other tasks or when other tasks
1711depend on that recipe. Here is an example: :: 1906depend on that recipe. Here is an example::
1712 1907
1713 addtask printdate after do_fetch before do_build 1908 addtask printdate after do_fetch before do_build
1714 1909
@@ -1732,7 +1927,7 @@ task depends on the completion of the ``do_printdate`` task.
1732 1927
1733 - The directive ``addtask mytask after do_configure`` by itself 1928 - The directive ``addtask mytask after do_configure`` by itself
1734 never causes ``do_mytask`` to run. ``do_mytask`` can still be run 1929 never causes ``do_mytask`` to run. ``do_mytask`` can still be run
1735 manually as follows: :: 1930 manually as follows::
1736 1931
1737 $ bitbake recipe -c mytask 1932 $ bitbake recipe -c mytask
1738 1933
@@ -1745,13 +1940,13 @@ Build Dependencies
1745 1940
1746BitBake uses the :term:`DEPENDS` variable to manage 1941BitBake uses the :term:`DEPENDS` variable to manage
1747build time dependencies. The ``[deptask]`` varflag for tasks signifies 1942build time dependencies. The ``[deptask]`` varflag for tasks signifies
1748the task of each item listed in ``DEPENDS`` that must complete before 1943the task of each item listed in :term:`DEPENDS` that must complete before
1749that task can be executed. Here is an example: :: 1944that task can be executed. Here is an example::
1750 1945
1751 do_configure[deptask] = "do_populate_sysroot" 1946 do_configure[deptask] = "do_populate_sysroot"
1752 1947
1753In this example, the ``do_populate_sysroot`` task 1948In this example, the ``do_populate_sysroot`` task
1754of each item in ``DEPENDS`` must complete before ``do_configure`` can 1949of each item in :term:`DEPENDS` must complete before ``do_configure`` can
1755execute. 1950execute.
1756 1951
1757Runtime Dependencies 1952Runtime Dependencies
@@ -1760,8 +1955,8 @@ Runtime Dependencies
1760BitBake uses the :term:`PACKAGES`, :term:`RDEPENDS`, and :term:`RRECOMMENDS` 1955BitBake uses the :term:`PACKAGES`, :term:`RDEPENDS`, and :term:`RRECOMMENDS`
1761variables to manage runtime dependencies. 1956variables to manage runtime dependencies.
1762 1957
1763The ``PACKAGES`` variable lists runtime packages. Each of those packages 1958The :term:`PACKAGES` variable lists runtime packages. Each of those packages
1764can have ``RDEPENDS`` and ``RRECOMMENDS`` runtime dependencies. The 1959can have :term:`RDEPENDS` and :term:`RRECOMMENDS` runtime dependencies. The
1765``[rdeptask]`` flag for tasks is used to signify the task of each item 1960``[rdeptask]`` flag for tasks is used to signify the task of each item
1766runtime dependency which must have completed before that task can be 1961runtime dependency which must have completed before that task can be
1767executed. :: 1962executed. ::
@@ -1769,9 +1964,9 @@ executed. ::
1769 do_package_qa[rdeptask] = "do_packagedata" 1964 do_package_qa[rdeptask] = "do_packagedata"
1770 1965
1771In the previous 1966In the previous
1772example, the ``do_packagedata`` task of each item in ``RDEPENDS`` must 1967example, the ``do_packagedata`` task of each item in :term:`RDEPENDS` must
1773have completed before ``do_package_qa`` can execute. 1968have completed before ``do_package_qa`` can execute.
1774Although ``RDEPENDS`` contains entries from the 1969Although :term:`RDEPENDS` contains entries from the
1775runtime dependency namespace, BitBake knows how to map them back 1970runtime dependency namespace, BitBake knows how to map them back
1776to the build-time dependency namespace, in which the tasks are defined. 1971to the build-time dependency namespace, in which the tasks are defined.
1777 1972
@@ -1788,7 +1983,7 @@ dependencies are discovered and added.
1788 1983
1789The ``[recrdeptask]`` flag is most commonly used in high-level recipes 1984The ``[recrdeptask]`` flag is most commonly used in high-level recipes
1790that need to wait for some task to finish "globally". For example, 1985that need to wait for some task to finish "globally". For example,
1791``image.bbclass`` has the following: :: 1986``image.bbclass`` has the following::
1792 1987
1793 do_rootfs[recrdeptask] += "do_packagedata" 1988 do_rootfs[recrdeptask] += "do_packagedata"
1794 1989
@@ -1797,7 +1992,7 @@ the current recipe and all recipes reachable (by way of dependencies)
1797from the image recipe must run before the ``do_rootfs`` task can run. 1992from the image recipe must run before the ``do_rootfs`` task can run.
1798 1993
1799BitBake allows a task to recursively depend on itself by 1994BitBake allows a task to recursively depend on itself by
1800referencing itself in the task list: :: 1995referencing itself in the task list::
1801 1996
1802 do_a[recrdeptask] = "do_a do_b" 1997 do_a[recrdeptask] = "do_a do_b"
1803 1998
@@ -1814,7 +2009,7 @@ Inter-Task Dependencies
1814BitBake uses the ``[depends]`` flag in a more generic form to manage 2009BitBake uses the ``[depends]`` flag in a more generic form to manage
1815inter-task dependencies. This more generic form allows for 2010inter-task dependencies. This more generic form allows for
1816inter-dependency checks for specific tasks rather than checks for the 2011inter-dependency checks for specific tasks rather than checks for the
1817data in ``DEPENDS``. Here is an example: :: 2012data in :term:`DEPENDS`. Here is an example::
1818 2013
1819 do_patch[depends] = "quilt-native:do_populate_sysroot" 2014 do_patch[depends] = "quilt-native:do_populate_sysroot"
1820 2015
@@ -1894,11 +2089,35 @@ access. Here is a list of available operations:
1894Other Functions 2089Other Functions
1895--------------- 2090---------------
1896 2091
1897You can find many other functions that can be called from Python by 2092Other functions are documented in the
1898looking at the source code of the ``bb`` module, which is in 2093:doc:`/bitbake-user-manual/bitbake-user-manual-library-functions` document.
1899``bitbake/lib/bb``. For example, ``bitbake/lib/bb/utils.py`` includes 2094
1900the commonly used functions ``bb.utils.contains()`` and 2095Extending Python Library Code
1901``bb.utils.mkdirhier()``, which come with docstrings. 2096-----------------------------
2097
2098If you wish to add your own Python library code (e.g. to provide
2099functions/classes you can use from Python functions in the metadata)
2100you can do so from any layer using the ``addpylib`` directive.
2101This directive is typically added to your layer configuration
2102(``conf/layer.conf``), although it will be handled in any ``.conf`` file.
2103
2104Usage is of the form::
2105
2106 addpylib <directory> <namespace>
2107
2108Where <directory> specifies the directory to add to the library path.
2109The specified <namespace> is imported automatically, and if the imported
2110module specifies an attribute named ``BBIMPORTS``, that list of
2111sub-modules is iterated and imported too.
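
As an illustration, a hypothetical layer shipping Python helpers under its
``lib/`` directory could register them as follows (the layer layout, module
and function names are invented for this sketch)::

    # conf/layer.conf
    addpylib ${LAYERDIR}/lib mylayerlib

    # lib/mylayerlib/__init__.py
    BBIMPORTS = ["utils"]

    # lib/mylayerlib/utils.py
    def greeting(name):
        return "Hello %s" % name

Metadata Python code could then call ``mylayerlib.utils.greeting(d.getVar('PN'))``.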
2112
2113Testing and Debugging BitBake Python code
2114-----------------------------------------
2115
2116The OpenEmbedded build system implements a convenient ``pydevshell`` target which
2117you can use to access the BitBake datastore and experiment with your own Python
2118code. See :yocto_docs:`Using a Python Development Shell
2119</dev-manual/python-development-shell.html#using-a-python-development-shell>` in the Yocto
2120Project manual for details.
1902 2121
1903Task Checksums and Setscene 2122Task Checksums and Setscene
1904=========================== 2123===========================
@@ -1909,7 +2128,7 @@ To help understand how BitBake does this, the section assumes an
1909OpenEmbedded metadata-based example. 2128OpenEmbedded metadata-based example.
1910 2129
1911These checksums are stored in :term:`STAMP`. You can 2130These checksums are stored in :term:`STAMP`. You can
1912examine the checksums using the following BitBake command: :: 2131examine the checksums using the following BitBake command::
1913 2132
1914 $ bitbake-dumpsigs 2133 $ bitbake-dumpsigs
1915 2134
@@ -1932,16 +2151,6 @@ The following list describes related variables:
1932 Specifies a function BitBake calls that determines whether BitBake 2151 Specifies a function BitBake calls that determines whether BitBake
1933 requires a setscene dependency to be met. 2152 requires a setscene dependency to be met.
1934 2153
1935- :term:`BB_SETSCENE_VERIFY_FUNCTION2`:
1936 Specifies a function to call that verifies the list of planned task
1937 execution before the main task execution happens.
1938
1939- :term:`BB_STAMP_POLICY`: Defines the mode
1940 for comparing timestamps of stamp files.
1941
1942- :term:`BB_STAMP_WHITELIST`: Lists stamp
1943 files that are looked at when the stamp policy is "whitelist".
1944
1945- :term:`BB_TASKHASH`: Within an executing task, 2154- :term:`BB_TASKHASH`: Within an executing task,
1946 this variable holds the hash of the task as returned by the currently 2155 this variable holds the hash of the task as returned by the currently
1947 enabled signature generator. 2156 enabled signature generator.
@@ -1956,7 +2165,7 @@ Wildcard Support in Variables
1956============================= 2165=============================
1957 2166
1958Support for wildcard use in variables varies depending on the context in 2167Support for wildcard use in variables varies depending on the context in
1959which it is used. For example, some variables and file names allow 2168which it is used. For example, some variables and filenames allow
1960limited use of wildcards through the "``%``" and "``*``" characters. 2169limited use of wildcards through the "``%``" and "``*``" characters.
1961Other variables or names support Python's 2170Other variables or names support Python's
1962`glob <https://docs.python.org/3/library/glob.html>`_ syntax, 2171`glob <https://docs.python.org/3/library/glob.html>`_ syntax,
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst
new file mode 100644
index 0000000000..e9c454ba11
--- /dev/null
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst
@@ -0,0 +1,91 @@
1.. SPDX-License-Identifier: CC-BY-2.5
2
3================
4Variable Context
5================
6
7|
8
9Variables might only have an impact or can be used in certain contexts. Some
10should only be used in global files like ``.conf``, while others are intended only
11for local files like ``.bb``. This chapter aims to describe some important variable
12contexts.
13
14.. _ref-varcontext-configuration:
15
16BitBake's own configuration
17===========================
18
19Variables starting with ``BB_`` usually configure the behaviour of BitBake itself.
20For example, one could configure:
21
22- System resources, like disk space to be used (:term:`BB_DISKMON_DIRS`),
23 or the number of tasks to be run in parallel by BitBake (:term:`BB_NUMBER_THREADS`).
24
25- How the fetchers shall behave, e.g., :term:`BB_FETCH_PREMIRRORONLY` is used
26 by BitBake to determine if BitBake's fetcher shall search only
27 :term:`PREMIRRORS` for files.
28
29Those variables are usually configured globally.
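
For example, a local configuration file might set (the values here are
illustrative only)::

   BB_NUMBER_THREADS = "8"
   BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G"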
30
31BitBake configuration
32=====================
33
34There are several kinds of such variables:
35
36- Like :term:`B` or :term:`T`, that are used to specify directories used by
37 BitBake during the build of a particular recipe. Those variables are
38 specified in ``bitbake.conf``. Some, like :term:`B`, are quite often
39 overwritten in recipes.
40
41- Starting with ``FAKEROOT``, to configure how the ``fakeroot`` command is
42 handled. Those are usually set by ``bitbake.conf`` and might get adapted in a
43 ``bbclass``.
44
45- Detailing where BitBake will store and fetch information from, for
46 data reuse between build runs like :term:`CACHE`, :term:`DL_DIR` or
47 :term:`PERSISTENT_DIR`. Those are usually global.
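
For instance, shared download and persistence locations are commonly set
once, globally (the paths are illustrative)::

   DL_DIR = "${TOPDIR}/downloads"
   PERSISTENT_DIR = "${TOPDIR}/cache"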
48
49
50Layers and files
51================
52
53Variables starting with ``LAYER`` configure how BitBake handles layers.
54Additionally, variables starting with ``BB`` configure how layers and files are
55handled. For example:
56
57- :term:`LAYERDEPENDS` is used to configure on which layers a given layer
58 depends.
59
60- The configured layers are contained in :term:`BBLAYERS` and files in
61 :term:`BBFILES`.
62
63Those variables are often used in the files ``layer.conf`` and ``bblayers.conf``.
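
As a sketch, a ``bblayers.conf`` enabling two hypothetical layers could
contain::

   BBLAYERS = " \
       /path/to/meta-core \
       /path/to/meta-custom \
   "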
64
65Recipes and packages
66====================
67
68Variables handling recipes and packages can be split into:
69
70- :term:`PN`, :term:`PV` or :term:`PF` for example, contain information about
71 the name or revision of a recipe or package. Usually, the default set in
72 ``bitbake.conf`` is used, but those are from time to time overwritten in
73 recipes.
74
75- :term:`SUMMARY`, :term:`DESCRIPTION`, :term:`LICENSE` or :term:`HOMEPAGE`
76 contain the expected information and should be set specifically for every
77 recipe.
78
79- In recipes, variables are also used to control build and runtime
80 dependencies between recipes/packages with other recipes/packages. The
81 most common should be: :term:`PROVIDES`, :term:`RPROVIDES`, :term:`DEPENDS`,
82 and :term:`RDEPENDS`.
83
84- There are further variables starting with ``SRC`` that specify the sources in
85 a recipe like :term:`SRC_URI` or :term:`SRCDATE`. Those are also usually set
86 in recipes.
87
88- Which version or provider of a recipe should be given preference when
89 multiple recipes would provide the same item is controlled by variables
90 starting with ``PREFERRED_``. Those are normally set in the configuration
91 files of a ``MACHINE`` or ``DISTRO``.
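
As a combined sketch, a minimal hypothetical recipe could set several of
these variables together (all values are invented)::

   SUMMARY = "Example utility"
   DESCRIPTION = "A longer description of the example utility."
   HOMEPAGE = "https://example.com/exampleutil"
   LICENSE = "MIT"

   DEPENDS = "zlib"

   SRC_URI = "https://example.com/downloads/exampleutil-${PV}.tar.gz"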
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
index 6469f9d1a4..6be8dbbf63 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
@@ -23,22 +23,31 @@ overview of their function and contents.
23 systems extend the functionality of the variable as it is 23 systems extend the functionality of the variable as it is
24 described here in this glossary. 24 described here in this glossary.
25 25
26 - Finally, there are variables mentioned in this glossary that do
27 not appear in the BitBake glossary. These other variables are
28 variables used in systems that use BitBake.
29
30.. glossary:: 26.. glossary::
27 :sorted:
31 28
32 :term:`ASSUME_PROVIDED` 29 :term:`ASSUME_PROVIDED`
33 Lists recipe names (:term:`PN` values) BitBake does not 30 Lists recipe names (:term:`PN` values) BitBake does not
34 attempt to build. Instead, BitBake assumes these recipes have already 31 attempt to build. Instead, BitBake assumes these recipes have already
35 been built. 32 been built.
36 33
37 In OpenEmbedded-Core, ``ASSUME_PROVIDED`` mostly specifies native 34 In OpenEmbedded-Core, :term:`ASSUME_PROVIDED` mostly specifies native
38 tools that should not be built. An example is ``git-native``, which 35 tools that should not be built. An example is ``git-native``, which
39 when specified allows for the Git binary from the host to be used 36 when specified allows for the Git binary from the host to be used
40 rather than building ``git-native``. 37 rather than building ``git-native``.
41 38
39 :term:`AZ_SAS`
40 Azure Storage Shared Access Signature, when using the
41 :ref:`Azure Storage fetcher <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`.
42 This variable can be defined to be used by the fetcher to authenticate
43 and gain access to non-public artifacts::
44
45 AZ_SAS = ""se=2021-01-01&sp=r&sv=2018-11-09&sr=c&skoid=<skoid>&sig=<signature>""
46
47 For more information see Microsoft's Azure Storage documentation at
48 https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview
49
50
42 :term:`B` 51 :term:`B`
43 The directory in which BitBake executes functions during a recipe's 52 The directory in which BitBake executes functions during a recipe's
44 build process. 53 build process.
@@ -74,14 +83,41 @@ overview of their function and contents.
74 83
75 - Attempts to access networks not in the host list cause a failure. 84 - Attempts to access networks not in the host list cause a failure.
76 85
77 Using ``BB_ALLOWED_NETWORKS`` in conjunction with 86 Using :term:`BB_ALLOWED_NETWORKS` in conjunction with
78 :term:`PREMIRRORS` is very useful. Adding the 87 :term:`PREMIRRORS` is very useful. Adding the
79 host you want to use to ``PREMIRRORS`` results in the source code 88 host you want to use to :term:`PREMIRRORS` results in the source code
80 being fetched from an allowed location and avoids raising an error 89 being fetched from an allowed location and avoids raising an error
81 when a host that is not allowed is in a 90 when a host that is not allowed is in a
82 :term:`SRC_URI` statement. This is because the 91 :term:`SRC_URI` statement. This is because the
83 fetcher does not attempt to use the host listed in ``SRC_URI`` after 92 fetcher does not attempt to use the host listed in :term:`SRC_URI` after
84 a successful fetch from the ``PREMIRRORS`` occurs. 93 a successful fetch from the :term:`PREMIRRORS` occurs.
94
95 :term:`BB_BASEHASH_IGNORE_VARS`
96 Lists variables that are excluded from checksum and dependency data.
97 Variables that are excluded can therefore change without affecting
98 the checksum mechanism. A common example would be the variable for
99 the path of the build. BitBake's output should not (and usually does
100 not) depend on the directory in which it was built.
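
      For example, to additionally exclude a hypothetical site-local
      variable from the checksums::

         BB_BASEHASH_IGNORE_VARS += "MY_SITE_BUILD_TAG"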
101
102 :term:`BB_CACHEDIR`
103 Specifies the code parser cache directory (distinct from :term:`CACHE`
104 and :term:`PERSISTENT_DIR` although they can be set to the same value
105 if desired). The default value is "${TOPDIR}/cache".
106
107 :term:`BB_CHECK_SSL_CERTS`
108 Specifies if SSL certificates should be checked when fetching. The default
109 value is ``1`` and certificates are not checked if the value is set to ``0``.
110
111 :term:`BB_HASH_CODEPARSER_VALS`
112 Specifies values for variables to use when populating the codeparser cache.
113 This can be used selectively to set dummy values for variables to avoid
114 the codeparser cache growing on every parse. Variables that would typically
115 be included are those where the value is not significant for where the
116 codeparser cache is used (i.e. when calculating variable dependencies for
117 code fragments.) The value is space-separated without quoting values, for
118 example::
119
120 BB_HASH_CODEPARSER_VALS = "T=/ WORKDIR=/ DATE=1234 TIME=1234"
85 121
86 :term:`BB_CONSOLELOG` 122 :term:`BB_CONSOLELOG`
87 Specifies the path to a log file into which BitBake's user interface 123 Specifies the path to a log file into which BitBake's user interface
@@ -91,17 +127,10 @@ overview of their function and contents.
91 Contains the name of the currently running task. The name does not 127 Contains the name of the currently running task. The name does not
92 include the ``do_`` prefix. 128 include the ``do_`` prefix.
93 129
94 :term:`BB_DANGLINGAPPENDS_WARNONLY` 130 :term:`BB_CURRENT_MC`
95 Defines how BitBake handles situations where an append file 131 Contains the name of the current multiconfig a task is being run under.
96 (``.bbappend``) has no corresponding recipe file (``.bb``). This 132 The name is taken from the multiconfig configuration file (a file
97 condition often occurs when layers get out of sync (e.g. ``oe-core`` 133 ``mc1.conf`` would make this variable equal to ``mc1``).
98 bumps a recipe version and the old recipe no longer exists and the
99 other layer has not been updated to the new version of the recipe
100 yet).
101
102 The default fatal behavior is safest because it is the sane reaction
103 given something is out of sync. It is important to realize when your
104 changes are no longer being applied.
105 134
106 :term:`BB_DEFAULT_TASK` 135 :term:`BB_DEFAULT_TASK`
107 The default task to use when none is specified (e.g. with the ``-c`` 136 The default task to use when none is specified (e.g. with the ``-c``
@@ -117,14 +146,14 @@ overview of their function and contents.
117 you to control the build based on these parameters. 146 you to control the build based on these parameters.
118 147
119 Disk space monitoring is disabled by default. When setting this 148 Disk space monitoring is disabled by default. When setting this
120 variable, use the following form: :: 149 variable, use the following form::
121 150
122 BB_DISKMON_DIRS = "<action>,<dir>,<threshold> [...]" 151 BB_DISKMON_DIRS = "<action>,<dir>,<threshold> [...]"
123 152
124 where: 153 where:
125 154
126 <action> is: 155 <action> is:
127 ABORT: Immediately abort the build when 156 HALT: Immediately halt the build when
128 a threshold is broken. 157 a threshold is broken.
129 STOPTASKS: Stop the build after the currently 158 STOPTASKS: Stop the build after the currently
130 executing tasks have finished when 159 executing tasks have finished when
@@ -153,48 +182,48 @@ overview of their function and contents.
153 not specify G, M, or K, Kbytes is assumed by 182 not specify G, M, or K, Kbytes is assumed by
154 default. Do not use GB, MB, or KB. 183 default. Do not use GB, MB, or KB.
155 184
156 Here are some examples: :: 185 Here are some examples::
157 186
158 BB_DISKMON_DIRS = "ABORT,${TMPDIR},1G,100K WARN,${SSTATE_DIR},1G,100K" 187 BB_DISKMON_DIRS = "HALT,${TMPDIR},1G,100K WARN,${SSTATE_DIR},1G,100K"
159 BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G" 188 BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G"
160 BB_DISKMON_DIRS = "ABORT,${TMPDIR},,100K" 189 BB_DISKMON_DIRS = "HALT,${TMPDIR},,100K"
161 190
162 The first example works only if you also set the 191 The first example works only if you also set the
163 :term:`BB_DISKMON_WARNINTERVAL` 192 :term:`BB_DISKMON_WARNINTERVAL`
164 variable. This example causes the build system to immediately abort 193 variable. This example causes the build system to immediately halt
165 when either the disk space in ``${TMPDIR}`` drops below 1 Gbyte or 194 when either the disk space in ``${TMPDIR}`` drops below 1 Gbyte or
166 the available free inodes drops below 100 Kbytes. Because two 195 the available free inodes drops below 100 Kbytes. Because two
167 directories are provided with the variable, the build system also 196 directories are provided with the variable, the build system also
168 issues a warning when the disk space in the ``${SSTATE_DIR}`` 197 issues a warning when the disk space in the ``${SSTATE_DIR}``
169 directory drops below 1 Gbyte or the number of free inodes drops 198 directory drops below 1 Gbyte or the number of free inodes drops
170 below 100 Kbytes. Subsequent warnings are issued during intervals as 199 below 100 Kbytes. Subsequent warnings are issued during intervals as
171 defined by the ``BB_DISKMON_WARNINTERVAL`` variable. 200 defined by the :term:`BB_DISKMON_WARNINTERVAL` variable.
172 201
173 The second example stops the build after all currently executing 202 The second example stops the build after all currently executing
174 tasks complete when the minimum disk space in the ``${TMPDIR}`` 203 tasks complete when the minimum disk space in the ``${TMPDIR}``
175 directory drops below 1 Gbyte. No disk monitoring occurs for the free 204 directory drops below 1 Gbyte. No disk monitoring occurs for the free
176 inodes in this case. 205 inodes in this case.
177 206
178 The final example immediately aborts the build when the number of 207 The final example immediately halts the build when the number of
179 free inodes in the ``${TMPDIR}`` directory drops below 100 Kbytes. No 208 free inodes in the ``${TMPDIR}`` directory drops below 100 Kbytes. No
180 disk space monitoring for the directory itself occurs in this case. 209 disk space monitoring for the directory itself occurs in this case.
181 210
182 :term:`BB_DISKMON_WARNINTERVAL` 211 :term:`BB_DISKMON_WARNINTERVAL`
183 Defines the disk space and free inode warning intervals. 212 Defines the disk space and free inode warning intervals.
184 213
185 If you are going to use the ``BB_DISKMON_WARNINTERVAL`` variable, you 214 If you are going to use the :term:`BB_DISKMON_WARNINTERVAL` variable, you
186 must also use the :term:`BB_DISKMON_DIRS` 215 must also use the :term:`BB_DISKMON_DIRS`
187 variable and define its action as "WARN". During the build, 216 variable and define its action as "WARN". During the build,
188 subsequent warnings are issued each time disk space or number of free 217 subsequent warnings are issued each time disk space or number of free
189 inodes is further reduced by the respective interval. 218 inodes is further reduced by the respective interval.
190 219
191 If you do not provide a ``BB_DISKMON_WARNINTERVAL`` variable and you 220 If you do not provide a :term:`BB_DISKMON_WARNINTERVAL` variable and you
192 do use ``BB_DISKMON_DIRS`` with the "WARN" action, the disk 221 do use :term:`BB_DISKMON_DIRS` with the "WARN" action, the disk
193 monitoring interval defaults to the following: 222 monitoring interval defaults to the following:
194 BB_DISKMON_WARNINTERVAL = "50M,5K" 223 BB_DISKMON_WARNINTERVAL = "50M,5K"
195 224
196 When specifying the variable in your configuration file, use the 225 When specifying the variable in your configuration file, use the
197 following form: :: 226 following form::
198 227
199 BB_DISKMON_WARNINTERVAL = "<disk_space_interval>,<disk_inode_interval>" 228 BB_DISKMON_WARNINTERVAL = "<disk_space_interval>,<disk_inode_interval>"
200 229
@@ -210,7 +239,7 @@ overview of their function and contents.
210 G, M, or K for Gbytes, Mbytes, or Kbytes, 239 G, M, or K for Gbytes, Mbytes, or Kbytes,
211 respectively. You cannot use GB, MB, or KB. 240 respectively. You cannot use GB, MB, or KB.
212 241
213 Here is an example: :: 242 Here is an example::
214 243
215 BB_DISKMON_DIRS = "WARN,${SSTATE_DIR},1G,100K" 244 BB_DISKMON_DIRS = "WARN,${SSTATE_DIR},1G,100K"
216 BB_DISKMON_WARNINTERVAL = "50M,5K" 245 BB_DISKMON_WARNINTERVAL = "50M,5K"
@@ -222,23 +251,23 @@ overview of their function and contents.
222 based on the interval occur each time a respective interval is 251 based on the interval occur each time a respective interval is
223 reached beyond the initial warning (i.e. 1 Gbytes and 100 Kbytes). 252 reached beyond the initial warning (i.e. 1 Gbytes and 100 Kbytes).
224 253
225 :term:`BB_ENV_WHITELIST` 254 :term:`BB_ENV_PASSTHROUGH`
226 Specifies the internal whitelist of variables to allow through from 255 Specifies the internal list of variables to allow through from
227 the external environment into BitBake's datastore. If the value of 256 the external environment into BitBake's datastore. If the value of
228 this variable is not specified (which is the default), the following 257 this variable is not specified (which is the default), the following
229 list is used: :term:`BBPATH`, :term:`BB_PRESERVE_ENV`, 258 list is used: :term:`BBPATH`, :term:`BB_PRESERVE_ENV`,
230 :term:`BB_ENV_WHITELIST`, and :term:`BB_ENV_EXTRAWHITE`. 259 :term:`BB_ENV_PASSTHROUGH`, and :term:`BB_ENV_PASSTHROUGH_ADDITIONS`.
231 260
232 .. note:: 261 .. note::
233 262
234 You must set this variable in the external environment in order 263 You must set this variable in the external environment in order
235 for it to work. 264 for it to work.
236 265
237 :term:`BB_ENV_EXTRAWHITE` 266 :term:`BB_ENV_PASSTHROUGH_ADDITIONS`
238 Specifies an additional set of variables to allow through (whitelist) 267 Specifies an additional set of variables to allow through from the
239 from the external environment into BitBake's datastore. This list of 268 external environment into BitBake's datastore. This list of variables
240 variables is on top of the internal list set in 269 is on top of the internal list set in
241 :term:`BB_ENV_WHITELIST`. 270 :term:`BB_ENV_PASSTHROUGH`.
242 271
243 .. note:: 272 .. note::
244 273
@@ -254,76 +283,102 @@ overview of their function and contents.
254 :term:`BB_FILENAME` 283 :term:`BB_FILENAME`
255 Contains the filename of the recipe that owns the currently running 284 Contains the filename of the recipe that owns the currently running
256 task. For example, if the ``do_fetch`` task that resides in the 285 task. For example, if the ``do_fetch`` task that resides in the
256 ``my-recipe.bb`` file is executing, the ``BB_FILENAME`` variable contains 286 ``my-recipe.bb`` file is executing, the :term:`BB_FILENAME` variable contains
258 "/foo/path/my-recipe.bb". 287 "/foo/path/my-recipe.bb".
259 288
260 :term:`BBFILES_DYNAMIC` 289 :term:`BB_GENERATE_MIRROR_TARBALLS`
261 Activates content depending on presence of identified layers. You 290 Causes tarballs of the Git repositories, including the Git metadata,
262 identify the layers by the collections that the layers define. 291 to be placed in the :term:`DL_DIR` directory. Anyone
292 wishing to create a source mirror would want to enable this variable.
263 293
264 Use the ``BBFILES_DYNAMIC`` variable to avoid ``.bbappend`` files whose 294 For performance reasons, creating and placing tarballs of the Git
265 corresponding ``.bb`` file is in a layer that attempts to modify other 295 repositories is not the default action by BitBake. ::
266 layers through ``.bbappend`` but does not want to introduce a hard
267 dependency on those other layers.
268 296
269 Additionally you can prefix the rule with "!" to add ``.bbappend`` and 297 BB_GENERATE_MIRROR_TARBALLS = "1"
270 ``.bb`` files in case a layer is not present. Use this to avoid a hard
271 dependency on those other layers.
272 298
273 Use the following form for ``BBFILES_DYNAMIC``: :: 299 :term:`BB_GENERATE_SHALLOW_TARBALLS`
300 Setting this variable to "1" when :term:`BB_GIT_SHALLOW` is also set to
301 "1" causes bitbake to generate shallow mirror tarballs when fetching git
302 repositories. The number of commits included in the shallow mirror
303 tarballs is controlled by :term:`BB_GIT_SHALLOW_DEPTH`.
274 304
275 collection_name:filename_pattern 305 If both :term:`BB_GIT_SHALLOW` and :term:`BB_GENERATE_MIRROR_TARBALLS` are
306 enabled, BitBake will generate shallow mirror tarballs by default for Git
307 repositories. This separate variable exists so that shallow tarball
308 generation can be enabled without needing to also enable normal mirror
309 generation if it is not desired.
276 310
277 The following example identifies two collection names and two filename 311 For example usage, see :term:`BB_GIT_SHALLOW`.
278 patterns: ::
279 312
280 BBFILES_DYNAMIC += "\ 313 :term:`BB_GIT_DEFAULT_DESTSUFFIX`
281 clang-layer:${LAYERDIR}/bbappends/meta-clang/*/*/*.bbappend \ 314 The default destination directory where the Git fetcher unpacks the
282 core:${LAYERDIR}/bbappends/openembedded-core/meta/*/*/*.bbappend \ 315 source code. If this variable is not set, the source code is unpacked in a
283 " 316 directory named "git".
284 317
285 When the collection name is prefixed with "!" it will add the file pattern in case 318 :term:`BB_GIT_SHALLOW`
286 the layer is absent: :: 319 Setting this variable to "1" enables the support for fetching, using and
320 generating mirror tarballs of `shallow git repositories <https://riptutorial.com/git/example/4584/shallow-clone>`_.
321 The external `git-make-shallow <https://git.openembedded.org/bitbake/tree/bin/git-make-shallow>`_
322 script is used for shallow mirror tarball creation.
287 323
288 BBFILES_DYNAMIC += "\ 324 When :term:`BB_GIT_SHALLOW` is enabled, BitBake will attempt to fetch a shallow
289 !clang-layer:${LAYERDIR}/backfill/meta-clang/*/*/*.bb \ 325 mirror tarball. If the shallow mirror tarball cannot be fetched, it will
290 " 326 try to fetch the full mirror tarball and use that.
291 327
292 This next example shows an error message that occurs because invalid 328 This setting causes an initial shallow clone instead of an initial full bare clone.
293 entries are found, which cause parsing to abort: :: 329 The amount of data transferred during the initial clone will be significantly reduced.
294 330
295 ERROR: BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not: 331 However, every time the source revision (referenced in :term:`SRCREV`)
296 /work/my-layer/bbappends/meta-security-isafw/*/*/*.bbappend 332 changes, regardless of whether the cache within the download directory
297 /work/my-layer/bbappends/openembedded-core/meta/*/*/*.bbappend 333 (defined by :term:`DL_DIR`) has been cleaned up or not,
334 the data transfer may be significantly higher because entirely
335 new shallow clones are required for each source revision change.
298 336
299 :term:`BB_GENERATE_MIRROR_TARBALLS` 337 Over time, numerous shallow clones may cumulatively transfer
300 Causes tarballs of the Git repositories, including the Git metadata, 338 the same amount of data as an initial full bare clone.
301 to be placed in the :term:`DL_DIR` directory. Anyone 339 This is especially the case with very large repositories.
302 wishing to create a source mirror would want to enable this variable.
303 340
304 For performance reasons, creating and placing tarballs of the Git 341 Existing initial full bare clones, created without this setting,
305 repositories is not the default action by BitBake. :: 342 will still be utilized.
306 343
307 BB_GENERATE_MIRROR_TARBALLS = "1" 344 If the Git error "Server does not allow request for unadvertised object"
345 occurs, an initial full bare clone is fetched automatically.
346 This may happen if the Git server does not allow the request
347 or if the Git client has issues with this functionality.
308 348
309 :term:`BB_HASHCONFIG_WHITELIST` 349 See also :term:`BB_GIT_SHALLOW_DEPTH` and
310 Lists variables that are excluded from base configuration checksum, 350 :term:`BB_GENERATE_SHALLOW_TARBALLS`.
311 which is used to determine if the cache can be reused.
312 351
313 One of the ways BitBake determines whether to re-parse the main 352 Example usage::
314 metadata is through checksums of the variables in the datastore of
315 the base configuration data. There are variables that you typically
316 want to exclude when checking whether or not to re-parse and thus
317 rebuild the cache. As an example, you would usually exclude ``TIME``
318 and ``DATE`` because these variables are always changing. If you did
319 not exclude them, BitBake would never reuse the cache.
320 353
321 :term:`BB_HASHBASE_WHITELIST` 354 BB_GIT_SHALLOW ?= "1"
322 Lists variables that are excluded from checksum and dependency data. 355
323 Variables that are excluded can therefore change without affecting 356 # Keep only the top commit
324 the checksum mechanism. A common example would be the variable for 357 BB_GIT_SHALLOW_DEPTH ?= "1"
325 the path of the build. BitBake's output should not (and usually does 358
326 not) depend on the directory in which it was built. 359 # This defaults to enabled if both BB_GIT_SHALLOW and
360 # BB_GENERATE_MIRROR_TARBALLS are enabled
361 BB_GENERATE_SHALLOW_TARBALLS ?= "1"
362
363 :term:`BB_GIT_SHALLOW_DEPTH`
364 When used with :term:`BB_GENERATE_SHALLOW_TARBALLS`, this variable sets
365 the number of commits to include in generated shallow mirror tarballs.
366 With a depth of 1, only the commit referenced in :term:`SRCREV` is
367 included in the shallow mirror tarball. Increasing the depth includes
368 additional parent commits, working back through the commit history.
369
370 If this variable is unset, BitBake will default to a depth of 1 when
371 generating shallow mirror tarballs.
372
373 For example usage, see :term:`BB_GIT_SHALLOW`.
374
375 :term:`BB_GLOBAL_PYMODULES`
376 Specifies the list of Python modules to place in the global namespace.
377 It is intended that only the core layer should set this and it is meant
378 to be a very small list, typically just ``os`` and ``sys``.
379 :term:`BB_GLOBAL_PYMODULES` is expected to be set before the first
380 ``addpylib`` directive.
381 See also ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:extending python library code`".
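A minimal sketch, using the two modules the entry above names as typical::

   BB_GLOBAL_PYMODULES = "os sys"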
327 382
328 :term:`BB_HASHCHECK_FUNCTION` 383 :term:`BB_HASHCHECK_FUNCTION`
329 Specifies the name of the function to call during the "setscene" part 384 Specifies the name of the function to call during the "setscene" part
@@ -339,12 +394,66 @@ overview of their function and contents.
339 However, the more accurate the data returned, the more efficient the 394 However, the more accurate the data returned, the more efficient the
340 build will be. 395 build will be.
341 396
397 :term:`BB_HASHCONFIG_IGNORE_VARS`
398 Lists variables that are excluded from base configuration checksum,
399 which is used to determine if the cache can be reused.
400
401 One of the ways BitBake determines whether to re-parse the main
402 metadata is through checksums of the variables in the datastore of
403 the base configuration data. There are variables that you typically
404 want to exclude when checking whether or not to re-parse and thus
405 rebuild the cache. As an example, you would usually exclude ``TIME``
406 and ``DATE`` because these variables are always changing. If you did
407 not exclude them, BitBake would never reuse the cache.
408
409 :term:`BB_HASHSERVE`
410 Specifies the Hash Equivalence server to use.
411
412 If set to ``auto``, BitBake automatically starts its own server
413 over a UNIX domain socket. Optionally, this server can be connected
414 to an upstream one by setting :term:`BB_HASHSERVE_UPSTREAM`.
415
416 If set to ``unix://path``, BitBake will connect to an existing
417 hash server available over a UNIX domain socket.
418
419 If set to ``host:port``, BitBake will connect to a remote server on the
420 specified host. This allows multiple clients to share the same
421 hash equivalence data.
422
423 The remote server can be started manually through
424 the ``bin/bitbake-hashserv`` script provided by BitBake,
425 which supports UNIX domain sockets too. This script also allows
426 you to start the server in read-only mode, to avoid accepting
427 equivalences that correspond to Shared State caches that are
428 only available on specific clients.
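A hedged sketch of the three forms described above (the socket path and host name are hypothetical)::

   BB_HASHSERVE = "auto"
   # Alternatively, an existing server over a UNIX domain socket:
   # BB_HASHSERVE = "unix:///var/run/hashserve.sock"
   # Or a remote server shared between clients:
   # BB_HASHSERVE = "hashserv.example.com:8686"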
429
430 :term:`BB_HASHSERVE_UPSTREAM`
431 Specifies an upstream Hash Equivalence server.
432
433 This optional setting is only useful when a local Hash Equivalence
434 server is started (setting :term:`BB_HASHSERVE` to ``auto``),
435 and you wish the local server to query an upstream server for
436 Hash Equivalence data.
437
438 Example usage::
439
440 BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686"
441
342 :term:`BB_INVALIDCONF` 442 :term:`BB_INVALIDCONF`
343 Used in combination with the ``ConfigParsed`` event to trigger 443 Used in combination with the ``ConfigParsed`` event to trigger
344 re-parsing the base metadata (i.e. all the recipes). The 444 re-parsing the base metadata (i.e. all the recipes). The
345 ``ConfigParsed`` event can set the variable to trigger the re-parse. 445 ``ConfigParsed`` event can set the variable to trigger the re-parse.
346 You must be careful to avoid recursive loops with this functionality. 446 You must be careful to avoid recursive loops with this functionality.
347 447
448 :term:`BB_LOADFACTOR_MAX`
449 Setting this to a value will cause BitBake to check the system load
450 average before executing new tasks. If the load average is above
451 the number of CPUs multiplied by this factor, no new task will be started
452 unless there is no task executing. A value of "1.5" has been found to
453 work reasonably well. This is helpful for systems which don't have pressure
454 regulation enabled; pressure regulation is more granular. Pressure values
455 take precedence over the load factor.
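For instance, using the value the text above reports to work reasonably well::

   BB_LOADFACTOR_MAX = "1.5"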
456
348 :term:`BB_LOGCONFIG` 457 :term:`BB_LOGCONFIG`
349 Specifies the name of a config file that contains the user logging 458 Specifies the name of a config file that contains the user logging
350 configuration. See 459 configuration. See
@@ -353,15 +462,28 @@ overview of their function and contents.
353 462
354 :term:`BB_LOGFMT` 463 :term:`BB_LOGFMT`
355 Specifies the name of the log files saved into 464 Specifies the name of the log files saved into
356 ``${``\ :term:`T`\ ``}``. By default, the ``BB_LOGFMT`` 465 ``${``\ :term:`T`\ ``}``. By default, the :term:`BB_LOGFMT`
357 variable is undefined and the log file names get created using the 466 variable is undefined and the log filenames get created using the
358 following form: :: 467 following form::
359 468
360 log.{task}.{pid} 469 log.{task}.{pid}
361 470
362 If you want to force log files to take a specific name, you can set this 471 If you want to force log files to take a specific name, you can set this
363 variable in a configuration file. 472 variable in a configuration file.
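As a sketch, a format reusing the ``{task}`` and ``{pid}`` substitutions from the default form above (the ``.txt`` suffix is a hypothetical choice)::

   BB_LOGFMT = "log.{task}.{pid}.txt"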
364 473
474 :term:`BB_MULTI_PROVIDER_ALLOWED`
475 Allows you to suppress BitBake warnings caused when building two
476 separate recipes that provide the same output.
477
478 BitBake normally issues a warning when building two different recipes
479 where each provides the same output. This scenario is usually
480 something the user does not want. However, cases do exist where it
481 makes sense, particularly in the ``virtual/*`` namespace. You can use
482 this variable to suppress BitBake's warnings.
483
484 To use the variable, list provider names (e.g. recipe names,
485 ``virtual/kernel``, and so forth).
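A minimal sketch, listing one provider from the ``virtual/*`` namespace mentioned above::

   BB_MULTI_PROVIDER_ALLOWED += "virtual/kernel"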
486
365 :term:`BB_NICE_LEVEL` 487 :term:`BB_NICE_LEVEL`
366 Allows BitBake to run at a specific priority (i.e. nice level). 488 Allows BitBake to run at a specific priority (i.e. nice level).
367 System permissions usually mean that BitBake can reduce its priority 489 System permissions usually mean that BitBake can reduce its priority
@@ -377,19 +499,20 @@ overview of their function and contents.
377 running builds when not connected to the Internet, and when operating 499 running builds when not connected to the Internet, and when operating
378 in certain kinds of firewall environments. 500 in certain kinds of firewall environments.
379 501
502 :term:`BB_NUMBER_PARSE_THREADS`
503 Sets the number of threads BitBake uses when parsing. By default, the
504 number of threads is equal to the number of cores on the system.
505
380 :term:`BB_NUMBER_THREADS` 506 :term:`BB_NUMBER_THREADS`
381 The maximum number of tasks BitBake should run in parallel at any one 507 The maximum number of tasks BitBake should run in parallel at any one
382 time. If your host development system supports multiple cores, a good 508 time. If your host development system supports multiple cores, a good
383 rule of thumb is to set this variable to twice the number of cores. 509 rule of thumb is to set this variable to twice the number of cores.
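For example, following the rule of thumb above on a hypothetical four-core host::

   # Twice the number of cores on a four-core machine
   BB_NUMBER_THREADS = "8"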
384 510
385 :term:`BB_NUMBER_PARSE_THREADS`
386 Sets the number of threads BitBake uses when parsing. By default, the
387 number of threads is equal to the number of cores on the system.
388
389 :term:`BB_ORIGENV` 511 :term:`BB_ORIGENV`
390 Contains a copy of the original external environment in which BitBake 512 Contains a copy of the original external environment in which BitBake
391 was run. The copy is taken before any whitelisted variable values are 513 was run. The copy is taken before any variable values configured to
392 filtered into BitBake's datastore. 514 pass through from the external environment are filtered into BitBake's
515 datastore.
393 516
394 .. note:: 517 .. note::
395 518
@@ -397,21 +520,130 @@ overview of their function and contents.
397 queried using the normal datastore operations. 520 queried using the normal datastore operations.
398 521
399 :term:`BB_PRESERVE_ENV` 522 :term:`BB_PRESERVE_ENV`
400 Disables whitelisting and instead allows all variables through from 523 Disables environment filtering and instead allows all variables through
401 the external environment into BitBake's datastore. 524 from the external environment into BitBake's datastore.
402 525
403 .. note:: 526 .. note::
404 527
405 You must set this variable in the external environment in order 528 You must set this variable in the external environment in order
406 for it to work. 529 for it to work.
407 530
531 :term:`BB_PRESSURE_MAX_CPU`
532 Specifies a maximum CPU pressure threshold, above which BitBake's
533 scheduler will not start new tasks (providing there is at least
534 one active task). If no value is set, CPU pressure is not
535 monitored when starting tasks.
536
537 The pressure data is calculated based upon what Linux kernels since
538 version 4.20 expose under ``/proc/pressure``. The threshold represents
539 the difference in "total" pressure from the previous second. The
540 minimum value is 1.0 (extremely slow builds) and the maximum is
541 1000000 (a pressure value unlikely to ever be reached). See
542 https://docs.kernel.org/accounting/psi.html for more information.
543
544 A default value to limit the CPU pressure, to be set in ``conf/local.conf``,
545 could be::
546
547 BB_PRESSURE_MAX_CPU = "15000"
548
549 Multiple values should be tested on the build host to determine what suits
550 best, depending on the need for performance versus load average during
551 the build.
552
553 .. note::
554
555 You may see numerous messages printed by BitBake if the value of
556 :term:`BB_PRESSURE_MAX_CPU` is too low::
557
558 Pressure status changed to CPU: True, IO: False, Mem: False (CPU: 1105.9/2.0, IO: 0.0/2.0, Mem: 0.0/2.0) - using 1/64 bitbake threads
559
560 This means that the :term:`BB_PRESSURE_MAX_CPU` should be increased to
561 a reasonable value for limiting the CPU pressure on the system.
562 Monitor the varying value after ``CPU:`` above to set a sensible value.
563
564 :term:`BB_PRESSURE_MAX_IO`
565 Specifies a maximum I/O pressure threshold, above which BitBake's
566 scheduler will not start new tasks (providing there is at least
567 one active task). If no value is set, I/O pressure is not
568 monitored when starting tasks.
569
570 The pressure data is calculated based upon what Linux kernels since
571 version 4.20 expose under ``/proc/pressure``. The threshold represents
572 the difference in "total" pressure from the previous second. The
573 minimum value is 1.0 (extremely slow builds) and the maximum is
574 1000000 (a pressure value unlikely to ever be reached). See
575 https://docs.kernel.org/accounting/psi.html for more information.
576
577 At this point in time, experiments show that IO pressure tends to
578 be short-lived and regulating just the CPU with
579 :term:`BB_PRESSURE_MAX_CPU` can help to reduce it.
580
581 A default value to limit the IO pressure, to be set in ``conf/local.conf``,
582 could be::
583
584 BB_PRESSURE_MAX_IO = "15000"
585
586 Multiple values should be tested on the build host to determine what suits
587 best, depending on the need for performance versus I/O usage during the
588 build.
589
590 .. note::
591
592 You may see numerous messages printed by BitBake if the value of
593 :term:`BB_PRESSURE_MAX_IO` is too low::
594
595 Pressure status changed to CPU: None, IO: True, Mem: False (CPU: 2236.0/None, IO: 153.6/2.0, Mem: 0.0/2.0) - using 19/64 bitbake threads
596
597 This means that the :term:`BB_PRESSURE_MAX_IO` should be increased to
598 a reasonable value for limiting the I/O pressure on the system.
599 Monitor the varying value after ``IO:`` above to set a sensible value.
600
601 :term:`BB_PRESSURE_MAX_MEMORY`
602 Specifies a maximum memory pressure threshold, above which BitBake's
603 scheduler will not start new tasks (providing there is at least
604 one active task). If no value is set, memory pressure is not
605 monitored when starting tasks.
606
607 The pressure data is calculated based upon what Linux kernels since
608 version 4.20 expose under ``/proc/pressure``. The threshold represents
609 the difference in "total" pressure from the previous second. The
610 minimum value is 1.0 (extremely slow builds) and the maximum is
611 1000000 (a pressure value unlikely to ever be reached). See
612 https://docs.kernel.org/accounting/psi.html for more information.
613
614 Memory pressure is experienced when time is spent swapping,
615 refaulting pages from the page cache or performing direct reclaim.
616 This is why memory pressure is rarely seen, but setting this variable
617 might be useful as a last resort to prevent OOM errors if they are
618 occurring during builds.
619
620 A default value to limit the memory pressure, to be set in
621 ``conf/local.conf``, could be::
622
623 BB_PRESSURE_MAX_MEMORY = "15000"
624
625 Multiple values should be tested on the build host to determine what suits
626 best, depending on the need for performance versus memory consumption
627 during the build.
628
629 .. note::
630
631 You may see numerous messages printed by BitBake if the value of
632 :term:`BB_PRESSURE_MAX_MEMORY` is too low::
633
634 Pressure status changed to CPU: None, IO: False, Mem: True (CPU: 29.5/None, IO: 0.0/2.0, Mem: 2553.3/2.0) - using 17/64 bitbake threads
635
636 This means that the :term:`BB_PRESSURE_MAX_MEMORY` should be increased to
637 a reasonable value for limiting the memory pressure on the system.
638 Monitor the varying value after ``Mem:`` above to set a sensible value.
639
408 :term:`BB_RUNFMT` 640 :term:`BB_RUNFMT`
409 Specifies the name of the executable script files (i.e. run files) 641 Specifies the name of the executable script files (i.e. run files)
410 saved into ``${``\ :term:`T`\ ``}``. By default, the 642 saved into ``${``\ :term:`T`\ ``}``. By default, the
411 ``BB_RUNFMT`` variable is undefined and the run file names get 643 :term:`BB_RUNFMT` variable is undefined and the run filenames get
412 created using the following form: :: 644 created using the following form::
413 645
414 run.{task}.{pid} 646 run.{func}.{pid}
415 647
416 If you want to force run files to take a specific name, you can set this 648 If you want to force run files to take a specific name, you can set this
417 variable in a configuration file. 649 variable in a configuration file.
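As a sketch, a format reusing the ``{func}`` and ``{pid}`` substitutions from the default form above (the ``.sh`` suffix is a hypothetical choice)::

   BB_RUNFMT = "run.{func}.{pid}.sh"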
@@ -425,14 +657,14 @@ overview of their function and contents.
425 Selects the name of the scheduler to use for the scheduling of 657 Selects the name of the scheduler to use for the scheduling of
426 BitBake tasks. Three options exist: 658 BitBake tasks. Three options exist:
427 659
428 - *basic* - The basic framework from which everything derives. Using 660 - *basic* --- the basic framework from which everything derives. Using
429 this option causes tasks to be ordered numerically as they are 661 this option causes tasks to be ordered numerically as they are
430 parsed. 662 parsed.
431 663
432 - *speed* - Executes tasks first that have more tasks depending on 664 - *speed* --- executes tasks first that have more tasks depending on
433 them. The "speed" option is the default. 665 them. The "speed" option is the default.
434 666
435 - *completion* - Causes the scheduler to try to complete a given 667 - *completion* --- causes the scheduler to try to complete a given
436 recipe once its build has started. 668 recipe once its build has started.
437 669
438 :term:`BB_SCHEDULERS` 670 :term:`BB_SCHEDULERS`
@@ -452,17 +684,6 @@ overview of their function and contents.
452 The function specified by this variable returns a "True" or "False" 684 The function specified by this variable returns a "True" or "False"
453 depending on whether the dependency needs to be met. 685 depending on whether the dependency needs to be met.
454 686
455 :term:`BB_SETSCENE_VERIFY_FUNCTION2`
456 Specifies a function to call that verifies the list of planned task
457 execution before the main task execution happens. The function is
458 called once BitBake has a list of setscene tasks that have run and
459 either succeeded or failed.
460
461 The function allows for a task list check to see if they make sense.
462 Even if BitBake was planning to skip a task, the returned value of
463 the function can force BitBake to run the task, which is necessary
464 under certain metadata defined circumstances.
465
466 :term:`BB_SIGNATURE_EXCLUDE_FLAGS` 687 :term:`BB_SIGNATURE_EXCLUDE_FLAGS`
467 Lists variable flags (varflags) that can be safely excluded from 688 Lists variable flags (varflags) that can be safely excluded from
468 checksum and dependency data for keys in the datastore. When 689 checksum and dependency data for keys in the datastore. When
@@ -485,40 +706,17 @@ overview of their function and contents.
485 :term:`BB_SRCREV_POLICY` 706 :term:`BB_SRCREV_POLICY`
486 Defines the behavior of the fetcher when it interacts with source 707 Defines the behavior of the fetcher when it interacts with source
487 control systems and dynamic source revisions. The 708 control systems and dynamic source revisions. The
488 ``BB_SRCREV_POLICY`` variable is useful when working without a 709 :term:`BB_SRCREV_POLICY` variable is useful when working without a
489 network. 710 network.
490 711
491 The variable can be set using one of two policies: 712 The variable can be set using one of two policies:
492 713
493 - *cache* - Retains the value the system obtained previously rather 714 - *cache* --- retains the value the system obtained previously rather
494 than querying the source control system each time. 715 than querying the source control system each time.
495 716
496 - *clear* - Queries the source controls system every time. With this 717 - *clear* --- queries the source controls system every time. With this
497 policy, there is no cache. The "clear" policy is the default. 718 policy, there is no cache. The "clear" policy is the default.
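For instance, to favor offline operation by reusing previously obtained revisions::

   BB_SRCREV_POLICY = "cache"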
498 719
499 :term:`BB_STAMP_POLICY`
500 Defines the mode used for how timestamps of stamp files are compared.
501 You can set the variable to one of the following modes:
502
503 - *perfile* - Timestamp comparisons are only made between timestamps
504 of a specific recipe. This is the default mode.
505
506 - *full* - Timestamp comparisons are made for all dependencies.
507
508 - *whitelist* - Identical to "full" mode except timestamp
509 comparisons are made for recipes listed in the
510 :term:`BB_STAMP_WHITELIST` variable.
511
512 .. note::
513
514 Stamp policies are largely obsolete with the introduction of
515 setscene tasks.
516
517 :term:`BB_STAMP_WHITELIST`
518 Lists files whose stamp file timestamps are compared when the stamp
519 policy mode is set to "whitelist". For information on stamp policies,
520 see the :term:`BB_STAMP_POLICY` variable.
521
522 :term:`BB_STRICT_CHECKSUM` 720 :term:`BB_STRICT_CHECKSUM`
523 Sets a more strict checksum mechanism for non-local URLs. Setting 721 Sets a more strict checksum mechanism for non-local URLs. Setting
524 this variable to a value causes BitBake to report an error if it 722 this variable to a value causes BitBake to report an error if it
@@ -529,7 +727,7 @@ overview of their function and contents.
529 Allows adjustment of a task's Input/Output priority. During 727 Allows adjustment of a task's Input/Output priority. During
530 Autobuilder testing, random failures can occur for tasks due to I/O 728 Autobuilder testing, random failures can occur for tasks due to I/O
531 starvation. These failures occur during various QEMU runtime 729 starvation. These failures occur during various QEMU runtime
532 timeouts. You can use the ``BB_TASK_IONICE_LEVEL`` variable to adjust 730 timeouts. You can use the :term:`BB_TASK_IONICE_LEVEL` variable to adjust
533 the I/O priority of these tasks. 731 the I/O priority of these tasks.
534 732
535 .. note:: 733 .. note::
@@ -537,7 +735,7 @@ overview of their function and contents.
537 This variable works similarly to the :term:`BB_TASK_NICE_LEVEL` 735 This variable works similarly to the :term:`BB_TASK_NICE_LEVEL`
538 variable except with a task's I/O priorities. 736 variable except with a task's I/O priorities.
539 737
540 Set the variable as follows: :: 738 Set the variable as follows::
541 739
542 BB_TASK_IONICE_LEVEL = "class.prio" 740 BB_TASK_IONICE_LEVEL = "class.prio"
543 741
@@ -555,7 +753,7 @@ overview of their function and contents.
555 In order for your I/O priority settings to take effect, you need the 753 In order for your I/O priority settings to take effect, you need the
556 Completely Fair Queuing (CFQ) Scheduler selected for the backing block 754 Completely Fair Queuing (CFQ) Scheduler selected for the backing block
557 device. To select the scheduler, use the following command form where 755 device. To select the scheduler, use the following command form where
558 device is the device (e.g. sda, sdb, and so forth): :: 756 device is the device (e.g. sda, sdb, and so forth)::
559 757
560 $ sudo sh -c "echo cfq > /sys/block/device/queue/scheduler" 758 $ sudo sh -c "echo cfq > /sys/block/device/queue/scheduler"
561 759
@@ -564,7 +762,7 @@ overview of their function and contents.
564 762
565 You can use this variable in combination with task overrides to raise 763 You can use this variable in combination with task overrides to raise
566 or lower priorities of specific tasks. For example, on the `Yocto 764 or lower priorities of specific tasks. For example, on the `Yocto
567 Project <http://www.yoctoproject.org>`__ autobuilder, QEMU emulation 765 Project <https://www.yoctoproject.org>`__ autobuilder, QEMU emulation
568 in images is given a higher priority as compared to build tasks to 766 in images is given a higher priority as compared to build tasks to
569 ensure that images do not suffer timeouts on loaded systems. 767 ensure that images do not suffer timeouts on loaded systems.
570 768
@@ -572,6 +770,12 @@ overview of their function and contents.
572 Within an executing task, this variable holds the hash of the task as 770 Within an executing task, this variable holds the hash of the task as
573 returned by the currently enabled signature generator. 771 returned by the currently enabled signature generator.
574 772
773 :term:`BB_USE_HOME_NPMRC`
774 Controls whether or not the npm fetcher uses the user's ``.npmrc`` file
775 from their home directory. This can be used for authentication against
776 private NPM registries, among other uses. This is turned off by default
777 and requires the user to explicitly set it to "1" to enable it.
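A minimal sketch::

   BB_USE_HOME_NPMRC = "1"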
778
575 :term:`BB_VERBOSE_LOGS` 779 :term:`BB_VERBOSE_LOGS`
576 Controls how verbose BitBake is during builds. If set, shell scripts 780 Controls how verbose BitBake is during builds. If set, shell scripts
577 echo commands and shell script output appears on standard out 781 echo commands and shell script output appears on standard out
@@ -596,20 +800,20 @@ overview of their function and contents.
596 To build a different variant of the recipe with a minimal amount of 800 To build a different variant of the recipe with a minimal amount of
597 code, it usually is as simple as adding the variable to your recipe. 801 code, it usually is as simple as adding the variable to your recipe.
598 Here are two examples. The "native" variants are from the 802 Here are two examples. The "native" variants are from the
599 OpenEmbedded-Core metadata: :: 803 OpenEmbedded-Core metadata::
600 804
601 BBCLASSEXTEND =+ "native nativesdk" 805 BBCLASSEXTEND =+ "native nativesdk"
602 BBCLASSEXTEND =+ "multilib:multilib_name" 806 BBCLASSEXTEND =+ "multilib:multilib_name"
603 807
604 .. note:: 808 .. note::
605 809
606 Internally, the ``BBCLASSEXTEND`` mechanism generates recipe 810 Internally, the :term:`BBCLASSEXTEND` mechanism generates recipe
607 variants by rewriting variable values and applying overrides such 811 variants by rewriting variable values and applying overrides such
608 as ``_class-native``. For example, to generate a native version of 812 as ``_class-native``. For example, to generate a native version of
609 a recipe, a :term:`DEPENDS` on "foo" is 813 a recipe, a :term:`DEPENDS` on "foo" is
610 rewritten to a ``DEPENDS`` on "foo-native". 814 rewritten to a :term:`DEPENDS` on "foo-native".
611 815
612 Even when using ``BBCLASSEXTEND``, the recipe is only parsed once. 816 Even when using :term:`BBCLASSEXTEND`, the recipe is only parsed once.
613 Parsing once adds some limitations. For example, it is not 817 Parsing once adds some limitations. For example, it is not
614 possible to include a different file depending on the variant, 818 possible to include a different file depending on the variant,
615 since ``include`` statements are processed when the recipe is 819 since ``include`` statements are processed when the recipe is
@@ -639,21 +843,25 @@ overview of their function and contents.
639 :term:`BBFILE_PRIORITY` 843 :term:`BBFILE_PRIORITY`
640 Assigns the priority for recipe files in each layer. 844 Assigns the priority for recipe files in each layer.
641 845
846 This variable is used in the ``conf/layer.conf`` file and must be
847 suffixed with a ``_`` followed by the name of the specific layer (e.g.
848 ``BBFILE_PRIORITY_emenlow``). A colon as the separator is not supported.
849
642 This variable is useful in situations where the same recipe appears 850 This variable is useful in situations where the same recipe appears
643 in more than one layer. Setting this variable allows you to 851 in more than one layer. Setting this variable allows you to
644 prioritize a layer against other layers that contain the same recipe 852 prioritize a layer against other layers that contain the same recipe
645 - effectively letting you control the precedence for the multiple 853 --- effectively letting you control the precedence for the multiple
646 layers. The precedence established through this variable stands 854 layers. The precedence established through this variable stands
647 regardless of a recipe's version (:term:`PV` variable). 855 regardless of a recipe's version (:term:`PV` variable).
648 For example, a layer that has a recipe with a higher ``PV`` value but 856 For example, a layer that has a recipe with a higher :term:`PV` value but
649 for which the ``BBFILE_PRIORITY`` is set to have a lower precedence 857 for which the :term:`BBFILE_PRIORITY` is set to have a lower precedence
650 still has a lower precedence. 858 still has a lower precedence.
651 859
652 A larger value for the ``BBFILE_PRIORITY`` variable results in a 860 A larger value for the :term:`BBFILE_PRIORITY` variable results in a
653 higher precedence. For example, the value 6 has a higher precedence 861 higher precedence. For example, the value 6 has a higher precedence
654 than the value 5. If not specified, the ``BBFILE_PRIORITY`` variable 862 than the value 5. If not specified, the :term:`BBFILE_PRIORITY` variable
655 is set based on layer dependencies (see the ``LAYERDEPENDS`` variable 863 is set based on layer dependencies (see the :term:`LAYERDEPENDS` variable
656 for more information. The default priority, if unspecified for a 864 for more information). The default priority, if unspecified for a
657 layer with no dependencies, is the lowest defined priority + 1 (or 1 865 layer with no dependencies, is the lowest defined priority + 1 (or 1
658 if no priorities are defined). 866 if no priorities are defined).
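As a sketch, in a layer's ``conf/layer.conf`` (the layer name "my-layer" stands in for the real one, and "6" is the precedence value used in the example above)::

   BBFILE_PRIORITY_my-layer = "6"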
659 867
@@ -671,6 +879,45 @@ overview of their function and contents.
671 For details on the syntax, see the documentation by following the 879 For details on the syntax, see the documentation by following the
672 previous link. 880 previous link.
673 881
882 :term:`BBFILES_DYNAMIC`
883 Activates content depending on the presence of identified layers. You
884 identify the layers by the collections that the layers define.
885
886 Use the :term:`BBFILES_DYNAMIC` variable when a layer attempts to modify
887 other layers through ``.bbappend`` files but does not want to introduce a
888 hard dependency on those other layers: the ``.bbappend`` files are then
889 only applied when the corresponding layer is present.
890
891 Additionally, you can prefix the rule with "!" to add ``.bbappend`` and
892 ``.bb`` files in case a layer is not present. Use this to avoid a hard
893 dependency on those other layers.
894
895 Use the following form for :term:`BBFILES_DYNAMIC`::
896
897 collection_name:filename_pattern
898
899 The following example identifies two collection names and two filename
900 patterns::
901
902 BBFILES_DYNAMIC += "\
903 clang-layer:${LAYERDIR}/bbappends/meta-clang/*/*/*.bbappend \
904 core:${LAYERDIR}/bbappends/openembedded-core/meta/*/*/*.bbappend \
905 "
906
907 When the collection name is prefixed with "!" it will add the file pattern in case
908 the layer is absent::
909
910 BBFILES_DYNAMIC += "\
911 !clang-layer:${LAYERDIR}/backfill/meta-clang/*/*/*.bb \
912 "
913
914 This next example shows an error message that occurs because invalid
915 entries are found, which cause parsing to fail::
916
917 ERROR: BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:
918 /work/my-layer/bbappends/meta-security-isafw/*/*/*.bbappend
919 /work/my-layer/bbappends/openembedded-core/meta/*/*/*.bbappend
920
674 :term:`BBINCLUDED` 921 :term:`BBINCLUDED`
675 Contains a space-separated list of all of all files that BitBake's 922 Contains a space-separated list of all of all files that BitBake's
676 parser included during parsing of the current file. 923 parser included during parsing of the current file.
@@ -682,13 +929,13 @@ overview of their function and contents.
682 :term:`BBINCLUDELOGS_LINES` 929 :term:`BBINCLUDELOGS_LINES`
683 If :term:`BBINCLUDELOGS` is set, specifies 930 If :term:`BBINCLUDELOGS` is set, specifies
684 the maximum number of lines from the task log file to print when 931 the maximum number of lines from the task log file to print when
685 reporting a failed task. If you do not set ``BBINCLUDELOGS_LINES``, 932 reporting a failed task. If you do not set :term:`BBINCLUDELOGS_LINES`,
686 the entire log is printed. 933 the entire log is printed.
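A hedged sketch (any non-empty value appears to enable :term:`BBINCLUDELOGS`, and the line count here is an arbitrary illustration)::

   BBINCLUDELOGS = "yes"
   BBINCLUDELOGS_LINES = "20"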
687 934
688 :term:`BBLAYERS` 935 :term:`BBLAYERS`
689 Lists the layers to enable during the build. This variable is defined 936 Lists the layers to enable during the build. This variable is defined
690 in the ``bblayers.conf`` configuration file in the build directory. 937 in the ``bblayers.conf`` configuration file in the build directory.
691 Here is an example: :: 938 Here is an example::
692 939
693 BBLAYERS = " \ 940 BBLAYERS = " \
694 /home/scottrif/poky/meta \ 941 /home/scottrif/poky/meta \
@@ -708,7 +955,7 @@ overview of their function and contents.
708 :term:`BBMASK` 955 :term:`BBMASK`
709 Prevents BitBake from processing recipes and recipe append files. 956 Prevents BitBake from processing recipes and recipe append files.
710 957
711 You can use the ``BBMASK`` variable to "hide" these ``.bb`` and 958 You can use the :term:`BBMASK` variable to "hide" these ``.bb`` and
712 ``.bbappend`` files. BitBake ignores any recipe or recipe append 959 ``.bbappend`` files. BitBake ignores any recipe or recipe append
713 files that match any of the expressions. It is as if BitBake does not 960 files that match any of the expressions. It is as if BitBake does not
714 see them at all. Consequently, matching files are not parsed or 961 see them at all. Consequently, matching files are not parsed or
@@ -722,13 +969,13 @@ overview of their function and contents.
722 969
723 The following example uses a complete regular expression to tell 970 The following example uses a complete regular expression to tell
724 BitBake to ignore all recipe and recipe append files in the 971 BitBake to ignore all recipe and recipe append files in the
725 ``meta-ti/recipes-misc/`` directory: :: 972 ``meta-ti/recipes-misc/`` directory::
726 973
727 BBMASK = "meta-ti/recipes-misc/" 974 BBMASK = "meta-ti/recipes-misc/"
728 975
729 If you want to mask out multiple directories or recipes, you can 976 If you want to mask out multiple directories or recipes, you can
730 specify multiple regular expression fragments. This next example 977 specify multiple regular expression fragments. This next example
731 masks out multiple directories and individual recipes: :: 978 masks out multiple directories and individual recipes::
732 979
733 BBMASK += "/meta-ti/recipes-misc/ meta-ti/recipes-ti/packagegroup/" 980 BBMASK += "/meta-ti/recipes-misc/ meta-ti/recipes-ti/packagegroup/"
734 BBMASK += "/meta-oe/recipes-support/" 981 BBMASK += "/meta-oe/recipes-support/"
@@ -745,11 +992,11 @@ overview of their function and contents.
745 Enables BitBake to perform multiple configuration builds and lists 992 Enables BitBake to perform multiple configuration builds and lists
746 each separate configuration (multiconfig). You can use this variable 993 each separate configuration (multiconfig). You can use this variable
747 to cause BitBake to build multiple targets where each target has a 994 to cause BitBake to build multiple targets where each target has a
748 separate configuration. Define ``BBMULTICONFIG`` in your 995 separate configuration. Define :term:`BBMULTICONFIG` in your
749 ``conf/local.conf`` configuration file. 996 ``conf/local.conf`` configuration file.
750 997
751 As an example, the following line specifies three multiconfigs, each 998 As an example, the following line specifies three multiconfigs, each
752 having a separate configuration file: :: 999 having a separate configuration file::
753 1000
754 BBMULTICONFIG = "configA configB configC" 1001 BBMULTICONFIG = "configA configB configC"
755 1002
@@ -757,20 +1004,20 @@ overview of their function and contents.
757 build directory within a directory named ``conf/multiconfig`` (e.g. 1004 build directory within a directory named ``conf/multiconfig`` (e.g.
758 build_directory\ ``/conf/multiconfig/configA.conf``). 1005 build_directory\ ``/conf/multiconfig/configA.conf``).
759 1006
760 For information on how to use ``BBMULTICONFIG`` in an environment 1007 For information on how to use :term:`BBMULTICONFIG` in an environment
761 that supports building targets with multiple configurations, see the 1008 that supports building targets with multiple configurations, see the
762 ":ref:`bitbake-user-manual/bitbake-user-manual-intro:executing a multiple configuration build`" 1009 ":ref:`bitbake-user-manual/bitbake-user-manual-intro:executing a multiple configuration build`"
763 section. 1010 section.
764 1011
765 :term:`BBPATH` 1012 :term:`BBPATH`
766 Used by BitBake to locate class (``.bbclass``) and configuration 1013 A colon-separated list used by BitBake to locate class (``.bbclass``)
767 (``.conf``) files. This variable is analogous to the ``PATH`` 1014 and configuration (``.conf``) files. This variable is analogous to the
768 variable. 1015 ``PATH`` variable.
769 1016
770 If you run BitBake from a directory outside of the build directory, 1017 If you run BitBake from a directory outside of the build directory,
771 you must be sure to set ``BBPATH`` to point to the build directory. 1018 you must be sure to set :term:`BBPATH` to point to the build directory.
772 Set the variable as you would any environment variable and then run 1019 Set the variable as you would any environment variable and then run
773 BitBake: :: 1020 BitBake::
774 1021
775 $ BBPATH="build_directory" 1022 $ BBPATH="build_directory"
776 $ export BBPATH 1023 $ export BBPATH
@@ -784,16 +1031,6 @@ overview of their function and contents.
784 Allows you to use a configuration file to add to the list of 1031 Allows you to use a configuration file to add to the list of
785 command-line target recipes you want to build. 1032 command-line target recipes you want to build.
786 1033
787 :term:`BBVERSIONS`
788 Allows a single recipe to build multiple versions of a project from a
789 single recipe file. You are also able to specify conditional metadata
790 using the :term:`OVERRIDES` mechanism for a
791 single version or for an optionally named range of versions.
792
793 For more information on ``BBVERSIONS``, see the
794 ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:variants - class extension mechanism`"
795 section.
796
797 :term:`BITBAKE_UI` 1034 :term:`BITBAKE_UI`
798 Used to specify the UI module to use when running BitBake. Using this 1035 Used to specify the UI module to use when running BitBake. Using this
799 variable is equivalent to using the ``-u`` command-line option. 1036 variable is equivalent to using the ``-u`` command-line option.
@@ -825,7 +1062,7 @@ overview of their function and contents.
825 The most common usage of this variable is to set it to "-1" within 1062 The most common usage of this variable is to set it to "-1" within
826 a recipe for a development version of a piece of software. Using the 1063 a recipe for a development version of a piece of software. Using the
827 variable in this way causes the stable version of the recipe to build 1064 variable in this way causes the stable version of the recipe to build
828 by default in the absence of ``PREFERRED_VERSION`` being used to 1065 by default in the absence of :term:`PREFERRED_VERSION` being used to
829 build the development version. 1066 build the development version.
830 1067
831 .. note:: 1068 .. note::
@@ -838,8 +1075,8 @@ overview of their function and contents.
838 Lists a recipe's build-time dependencies (i.e. other recipe files). 1075 Lists a recipe's build-time dependencies (i.e. other recipe files).
839 1076
840 Consider this simple example for two recipes named "a" and "b" that 1077 Consider this simple example for two recipes named "a" and "b" that
841 produce similarly named packages. In this example, the ``DEPENDS`` 1078 produce similarly named packages. In this example, the :term:`DEPENDS`
842 statement appears in the "a" recipe: :: 1079 statement appears in the "a" recipe::
843 1080
844 DEPENDS = "b" 1081 DEPENDS = "b"
845 1082
@@ -856,7 +1093,7 @@ overview of their function and contents.
856 1093
857 :term:`DL_DIR` 1094 :term:`DL_DIR`
858 The central download directory used by the build process to store 1095 The central download directory used by the build process to store
859 downloads. By default, ``DL_DIR`` gets files suitable for mirroring for 1096 downloads. By default, :term:`DL_DIR` gets files suitable for mirroring for
860 everything except Git repositories. If you want tarballs of Git 1097 everything except Git repositories. If you want tarballs of Git
861 repositories, use the :term:`BB_GENERATE_MIRROR_TARBALLS` variable. 1098 repositories, use the :term:`BB_GENERATE_MIRROR_TARBALLS` variable.
862 1099
@@ -867,18 +1104,18 @@ overview of their function and contents.
867 ``bblayers.conf`` configuration file. 1104 ``bblayers.conf`` configuration file.
868 1105
869 To exclude a recipe from a world build using this variable, set the 1106 To exclude a recipe from a world build using this variable, set the
870 variable to "1" in the recipe. 1107 variable to "1" in the recipe. Set it to "0" to add it back to the world build.
871 1108
872 .. note:: 1109 .. note::
873 1110
874 Recipes added to ``EXCLUDE_FROM_WORLD`` may still be built during a world 1111 Recipes added to :term:`EXCLUDE_FROM_WORLD` may still be built during a world
875 build in order to satisfy dependencies of other recipes. Adding a 1112 build in order to satisfy dependencies of other recipes. Adding a
876 recipe to ``EXCLUDE_FROM_WORLD`` only ensures that the recipe is not 1113 recipe to :term:`EXCLUDE_FROM_WORLD` only ensures that the recipe is not
877 explicitly added to the list of build targets in a world build. 1114 explicitly added to the list of build targets in a world build.
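For example, within a recipe::

   EXCLUDE_FROM_WORLD = "1"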
878 1115
879 :term:`FAKEROOT` 1116 :term:`FAKEROOT`
880 Contains the command to use when running a shell script in a fakeroot 1117 Contains the command to use when running a shell script in a fakeroot
881 environment. The ``FAKEROOT`` variable is obsolete and has been 1118 environment. The :term:`FAKEROOT` variable is obsolete and has been
882 replaced by the other ``FAKEROOT*`` variables. See these entries in 1119 replaced by the other ``FAKEROOT*`` variables. See these entries in
883 the glossary for more information. 1120 the glossary for more information.
884 1121
@@ -925,6 +1162,11 @@ overview of their function and contents.
925 environment variable. The value is a colon-separated list of 1162 environment variable. The value is a colon-separated list of
926 directories that are searched left-to-right in order. 1163 directories that are searched left-to-right in order.
927 1164
1165 :term:`FILE_LAYERNAME`
1166 During parsing and task execution, this is set to the name of the
1167 layer containing the recipe file. Code can use this to identify which
1168 layer a recipe is from.
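As an illustrative sketch, a hypothetical diagnostic task in a recipe could read the value from the datastore::

   python do_show_layer() {
       bb.note("Recipe provided by layer: %s" % d.getVar('FILE_LAYERNAME'))
   }
   addtask show_layer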
1169
928 :term:`GITDIR` 1170 :term:`GITDIR`
929 The directory in which a local copy of a Git repository is stored 1171 The directory in which a local copy of a Git repository is stored
930 when it is cloned. 1172 when it is cloned.
@@ -941,9 +1183,9 @@ overview of their function and contents.
941 Causes the named class or classes to be inherited globally. Anonymous 1183 Causes the named class or classes to be inherited globally. Anonymous
942 functions in the class or classes are not executed for the base 1184 functions in the class or classes are not executed for the base
943 configuration and in each individual recipe. The OpenEmbedded build 1185 configuration and in each individual recipe. The OpenEmbedded build
944 system ignores changes to ``INHERIT`` in individual recipes. 1186 system ignores changes to :term:`INHERIT` in individual recipes.
945 1187
946 For more information on ``INHERIT``, see the 1188 For more information on :term:`INHERIT`, see the
947 ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` configuration directive`" 1189 ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` configuration directive`"
948 section. 1190 section.
949 1191
@@ -973,6 +1215,29 @@ overview of their function and contents.
973 variable is not available outside of ``layer.conf`` and references 1215 variable is not available outside of ``layer.conf`` and references
974 are expanded immediately when parsing of the file completes. 1216 are expanded immediately when parsing of the file completes.
975 1217
1218 :term:`LAYERSERIES_COMPAT`
1219 Lists the versions of OpenEmbedded-Core (OE-Core) for which
1220 a layer is compatible. Using the :term:`LAYERSERIES_COMPAT` variable
1221 allows the layer maintainer to indicate which combinations of the
1222 layer and OE-Core can be expected to work. The variable gives the
1223 system a way to detect when a layer has not been tested with new
1224 releases of OE-Core (e.g. the layer is not maintained).
1225
1226 To specify the OE-Core versions for which a layer is compatible, use
1227 this variable in your layer's ``conf/layer.conf`` configuration file.
1228 For the list, use the Yocto Project release name (e.g. "kirkstone",
1229 "mickledore"). To specify multiple OE-Core versions for the layer, use
1230 a space-separated list::
1231
1232 LAYERSERIES_COMPAT_layer_root_name = "kirkstone mickledore"
1233
1234 .. note::
1235
1236 Setting :term:`LAYERSERIES_COMPAT` is required by the Yocto Project
1237 Compatible version 2 standard.
1238 The OpenEmbedded build system produces a warning if the variable
1239 is not set for any given layer.
1240
976 :term:`LAYERVERSION` 1241 :term:`LAYERVERSION`
977 Optionally specifies the version of a layer as a single number. You 1242 Optionally specifies the version of a layer as a single number. You
978 can use this variable within 1243 can use this variable within
@@ -991,29 +1256,16 @@ overview of their function and contents.
991 the build system searches for source code, it first tries the local 1256 the build system searches for source code, it first tries the local
992 download directory. If that location fails, the build system tries 1257 download directory. If that location fails, the build system tries
993 locations defined by :term:`PREMIRRORS`, the 1258 locations defined by :term:`PREMIRRORS`, the
994 upstream source, and then locations specified by ``MIRRORS`` in that 1259 upstream source, and then locations specified by :term:`MIRRORS` in that
995 order. 1260 order.
996 1261
997 :term:`MULTI_PROVIDER_WHITELIST`
998 Allows you to suppress BitBake warnings caused when building two
999 separate recipes that provide the same output.
1000
1001 BitBake normally issues a warning when building two different recipes
1002 where each provides the same output. This scenario is usually
1003 something the user does not want. However, cases do exist where it
1004 makes sense, particularly in the ``virtual/*`` namespace. You can use
1005 this variable to suppress BitBake's warnings.
1006
1007 To use the variable, list provider names (e.g. recipe names,
1008 ``virtual/kernel``, and so forth).
1009
1010 :term:`OVERRIDES` 1262 :term:`OVERRIDES`
1011 BitBake uses ``OVERRIDES`` to control what variables are overridden 1263 A colon-separated list that BitBake uses to control what variables are
1012 after BitBake parses recipes and configuration files. 1264 overridden after BitBake parses recipes and configuration files.
1013 1265
1014 Following is a simple example that uses an overrides list based on 1266 Following is a simple example that uses an overrides list based on
1015 machine architectures: ``OVERRIDES = "arm:x86:mips:powerpc"``. You can 1267 machine architectures: ``OVERRIDES = "arm:x86:mips:powerpc"``. You can
1016 find information on how to use ``OVERRIDES`` in the 1268 find information on how to use :term:`OVERRIDES` in the
1017 ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:conditional syntax 1269 ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:conditional syntax
1018 (overrides)`" section. 1270 (overrides)`" section.
1019 1271
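 As a brief sketch of the effect (the variable names here are
 hypothetical), a conditional version of a variable takes effect when
 its suffix appears in :term:`OVERRIDES`::

    OVERRIDES = "arm:x86"
    TEST = "default"
    TEST:arm = "arm-specific"

 After parsing, ``TEST`` expands to "arm-specific" because "arm" is
 listed in :term:`OVERRIDES`.
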
@@ -1027,11 +1279,11 @@ overview of their function and contents.
1027 :term:`PACKAGES_DYNAMIC` 1279 :term:`PACKAGES_DYNAMIC`
1028 A promise that your recipe satisfies runtime dependencies for 1280 A promise that your recipe satisfies runtime dependencies for
1029 optional modules that are found in other recipes. 1281 optional modules that are found in other recipes.
1030 ``PACKAGES_DYNAMIC`` does not actually satisfy the dependencies, it 1282 :term:`PACKAGES_DYNAMIC` does not actually satisfy the dependencies, it
1031 only states that they should be satisfied. For example, if a hard, 1283 only states that they should be satisfied. For example, if a hard,
1032 runtime dependency (:term:`RDEPENDS`) of another 1284 runtime dependency (:term:`RDEPENDS`) of another
1033 package is satisfied during the build through the 1285 package is satisfied during the build through the
1034 ``PACKAGES_DYNAMIC`` variable, but a package with the module name is 1286 :term:`PACKAGES_DYNAMIC` variable, but a package with the module name is
1035 never actually produced, then the other package will be broken. 1287 never actually produced, then the other package will be broken.
1036 1288
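 As a hypothetical sketch, a recipe that generates plugin packages at
 packaging time could promise them with a regular expression, and
 another recipe could then reference one of those packages at runtime
 (all names are made up)::

    # In the producing recipe
    PACKAGES_DYNAMIC = "^${PN}-plugin-.*"

    # In a consuming recipe
    RDEPENDS:${PN} = "foo-plugin-net"
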
1037 :term:`PE` 1289 :term:`PE`
@@ -1061,7 +1313,7 @@ overview of their function and contents.
1061 recipes provide the same item. You should always suffix the variable 1313 recipes provide the same item. You should always suffix the variable
1062 with the name of the provided item, and you should set it to the 1314 with the name of the provided item, and you should set it to the
1063 :term:`PN` of the recipe to which you want to give 1315 :term:`PN` of the recipe to which you want to give
1064 precedence. Some examples: :: 1316 precedence. Some examples::
1065 1317
1066 PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto" 1318 PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
1067 PREFERRED_PROVIDER_virtual/xserver = "xserver-xf86" 1319 PREFERRED_PROVIDER_virtual/xserver = "xserver-xf86"
@@ -1070,30 +1322,30 @@ overview of their function and contents.
1070 :term:`PREFERRED_PROVIDERS` 1322 :term:`PREFERRED_PROVIDERS`
1071 Determines which recipe should be given preference for cases where 1323 Determines which recipe should be given preference for cases where
1072 multiple recipes provide the same item. Functionally, 1324 multiple recipes provide the same item. Functionally,
1073 ``PREFERRED_PROVIDERS`` is identical to 1325 :term:`PREFERRED_PROVIDERS` is identical to
1074 :term:`PREFERRED_PROVIDER`. However, the ``PREFERRED_PROVIDERS`` variable 1326 :term:`PREFERRED_PROVIDER`. However, the :term:`PREFERRED_PROVIDERS` variable
1075 lets you define preferences for multiple situations using the following 1327 lets you define preferences for multiple situations using the following
1076 form: :: 1328 form::
1077 1329
1078 PREFERRED_PROVIDERS = "xxx:yyy aaa:bbb ..." 1330 PREFERRED_PROVIDERS = "xxx:yyy aaa:bbb ..."
1079 1331
1080 This form is a convenient replacement for the following: :: 1332 This form is a convenient replacement for the following::
1081 1333
1082 PREFERRED_PROVIDER_xxx = "yyy" 1334 PREFERRED_PROVIDER_xxx = "yyy"
1083 PREFERRED_PROVIDER_aaa = "bbb" 1335 PREFERRED_PROVIDER_aaa = "bbb"
1084 1336
1085 :term:`PREFERRED_VERSION` 1337 :term:`PREFERRED_VERSION`
1086 If there are multiple versions of recipes available, this variable 1338 If there are multiple versions of a recipe available, this variable
1087 determines which recipe should be given preference. You must always 1339 determines which version should be given preference. You must always
1088 suffix the variable with the :term:`PN` you want to 1340 suffix the variable with the :term:`PN` you want to
1089 select, and you should set :term:`PV` accordingly for 1341 select, and you should set :term:`PV` accordingly for
1090 precedence. 1342 precedence.
1091 1343
1092 The ``PREFERRED_VERSION`` variable supports limited wildcard use 1344 The :term:`PREFERRED_VERSION` variable supports limited wildcard use
1093 through the "``%``" character. You can use the character to match any 1345 through the "``%``" character. You can use the character to match any
1094 number of characters, which can be useful when specifying versions 1346 number of characters, which can be useful when specifying versions
1095 that contain long revision numbers that potentially change. Here are 1347 that contain long revision numbers that potentially change. Here are
1096 two examples: :: 1348 two examples::
1097 1349
1098 PREFERRED_VERSION_python = "2.7.3" 1350 PREFERRED_VERSION_python = "2.7.3"
1099 PREFERRED_VERSION_linux-yocto = "4.12%" 1351 PREFERRED_VERSION_linux-yocto = "4.12%"
@@ -1104,22 +1356,26 @@ overview of their function and contents.
1104 end of the string. You cannot use the wildcard character in any other 1356 end of the string. You cannot use the wildcard character in any other
1105 location of the string. 1357 location of the string.
1106 1358
1359 If a recipe with the specified version is not available, a warning
1360 message will be shown. See :term:`REQUIRED_VERSION` if you want this
1361 to be an error instead.
1362
1107 :term:`PREMIRRORS` 1363 :term:`PREMIRRORS`
1108 Specifies additional paths from which BitBake gets source code. When 1364 Specifies additional paths from which BitBake gets source code. When
1109 the build system searches for source code, it first tries the local 1365 the build system searches for source code, it first tries the local
1110 download directory. If that location fails, the build system tries 1366 download directory. If that location fails, the build system tries
1111 locations defined by ``PREMIRRORS``, the upstream source, and then 1367 locations defined by :term:`PREMIRRORS`, the upstream source, and then
1112 locations specified by :term:`MIRRORS` in that order. 1368 locations specified by :term:`MIRRORS` in that order.
1113 1369
1114 Typically, you would add a specific server for the build system to 1370 Typically, you would add a specific server for the build system to
1115 attempt before any others by adding something like the following to 1371 attempt before any others by adding something like the following to
1116 your configuration: :: 1372 your configuration::
1117 1373
1118 PREMIRRORS_prepend = "\ 1374 PREMIRRORS:prepend = "\
1119 git://.*/.* http://www.yoctoproject.org/sources/ \n \ 1375 git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
1120 ftp://.*/.* http://www.yoctoproject.org/sources/ \n \ 1376 ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
1121 http://.*/.* http://www.yoctoproject.org/sources/ \n \ 1377 http://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
1122 https://.*/.* http://www.yoctoproject.org/sources/ \n" 1378 https://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
1123 1379
1124 These changes cause the build system to intercept Git, FTP, HTTP, and 1380 These changes cause the build system to intercept Git, FTP, HTTP, and
1125 HTTPS requests and direct them to the ``http://`` sources mirror. You can 1381 HTTPS requests and direct them to the ``http://`` sources mirror. You can
@@ -1128,25 +1384,25 @@ overview of their function and contents.
1128 1384
1129 :term:`PROVIDES` 1385 :term:`PROVIDES`
1130 A list of aliases by which a particular recipe can be known. By 1386 A list of aliases by which a particular recipe can be known. By
1131 default, a recipe's own ``PN`` is implicitly already in its 1387 default, a recipe's own :term:`PN` is implicitly already in its
1132 ``PROVIDES`` list. If a recipe uses ``PROVIDES``, the additional 1388 :term:`PROVIDES` list. If a recipe uses :term:`PROVIDES`, the additional
1133 aliases are synonyms for the recipe and can be useful for satisfying 1389 aliases are synonyms for the recipe and can be useful for satisfying
1134 dependencies of other recipes during the build as specified by 1390 dependencies of other recipes during the build as specified by
1135 ``DEPENDS``. 1391 :term:`DEPENDS`.
1136 1392
1137 Consider the following example ``PROVIDES`` statement from a recipe 1393 Consider the following example :term:`PROVIDES` statement from a recipe
1138 file ``libav_0.8.11.bb``: :: 1394 file ``libav_0.8.11.bb``::
1139 1395
1140 PROVIDES += "libpostproc" 1396 PROVIDES += "libpostproc"
1141 1397
1142 The ``PROVIDES`` statement results in the "libav" recipe also being known 1398 The :term:`PROVIDES` statement results in the "libav" recipe also being known
1143 as "libpostproc". 1399 as "libpostproc".
1144 1400
1145 In addition to providing recipes under alternate names, the 1401 In addition to providing recipes under alternate names, the
1146 ``PROVIDES`` mechanism is also used to implement virtual targets. A 1402 :term:`PROVIDES` mechanism is also used to implement virtual targets. A
1147 virtual target is a name that corresponds to some particular 1403 virtual target is a name that corresponds to some particular
1148 functionality (e.g. a Linux kernel). Recipes that provide the 1404 functionality (e.g. a Linux kernel). Recipes that provide the
1149 functionality in question list the virtual target in ``PROVIDES``. 1405 functionality in question list the virtual target in :term:`PROVIDES`.
1150 Recipes that depend on the functionality in question can include the 1406 Recipes that depend on the functionality in question can include the
1151 virtual target in :term:`DEPENDS` to leave the 1407 virtual target in :term:`DEPENDS` to leave the
1152 choice of provider open. 1408 choice of provider open.
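
 For illustration, a hypothetical kernel recipe, a consumer, and the
 configuration that selects between providers might look as follows
 (the recipe names are made up)::

    # linux-custom_5.10.bb
    PROVIDES += "virtual/kernel"

    # A recipe that needs some kernel, but not a specific one
    DEPENDS += "virtual/kernel"

    # Configuration choosing which provider satisfies the virtual target
    PREFERRED_PROVIDER_virtual/kernel = "linux-custom"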
@@ -1158,12 +1414,12 @@ overview of their function and contents.
1158 :term:`PRSERV_HOST` 1414 :term:`PRSERV_HOST`
1159 The network based :term:`PR` service host and port. 1415 The network based :term:`PR` service host and port.
1160 1416
1161 Following is an example of how the ``PRSERV_HOST`` variable is set: :: 1417 Following is an example of how the :term:`PRSERV_HOST` variable is set::
1162 1418
1163 PRSERV_HOST = "localhost:0" 1419 PRSERV_HOST = "localhost:0"
1164 1420
1165 You must set the variable if you want to automatically start a local PR 1421 You must set the variable if you want to automatically start a local PR
1166 service. You can set ``PRSERV_HOST`` to other values to use a remote PR 1422 service. You can set :term:`PRSERV_HOST` to other values to use a remote PR
1167 service. 1423 service.
1168 1424
1169 :term:`PV` 1425 :term:`PV`
@@ -1175,26 +1431,26 @@ overview of their function and contents.
1175 a package in this list cannot be found during the build, you will get 1431 a package in this list cannot be found during the build, you will get
1176 a build error. 1432 a build error.
1177 1433
1178 Because the ``RDEPENDS`` variable applies to packages being built, 1434 Because the :term:`RDEPENDS` variable applies to packages being built,
1179 you should always use the variable in a form with an attached package 1435 you should always use the variable in a form with an attached package
1180 name. For example, suppose you are building a development package 1436 name. For example, suppose you are building a development package
1181 that depends on the ``perl`` package. In this case, you would use the 1437 that depends on the ``perl`` package. In this case, you would use the
1182 following ``RDEPENDS`` statement: :: 1438 following :term:`RDEPENDS` statement::
1183 1439
1184 RDEPENDS_${PN}-dev += "perl" 1440 RDEPENDS:${PN}-dev += "perl"
1185 1441
1186 In the example, the development package depends on the ``perl`` package. 1442 In the example, the development package depends on the ``perl`` package.
1187 Thus, the ``RDEPENDS`` variable has the ``${PN}-dev`` package name as part 1443 Thus, the :term:`RDEPENDS` variable has the ``${PN}-dev`` package name as part
1188 of the variable. 1444 of the variable.
1189 1445
1190 BitBake supports specifying versioned dependencies. Although the 1446 BitBake supports specifying versioned dependencies. Although the
1191 syntax varies depending on the packaging format, BitBake hides these 1447 syntax varies depending on the packaging format, BitBake hides these
1192 differences from you. Here is the general syntax to specify versions 1448 differences from you. Here is the general syntax to specify versions
1193 with the ``RDEPENDS`` variable: :: 1449 with the :term:`RDEPENDS` variable::
1194 1450
1195 RDEPENDS_${PN} = "package (operator version)" 1451 RDEPENDS:${PN} = "package (operator version)"
1196 1452
1197 For ``operator``, you can specify the following: :: 1453 For ``operator``, you can specify the following::
1198 1454
1199 = 1455 =
1200 < 1456 <
@@ -1203,9 +1459,9 @@ overview of their function and contents.
1203 >= 1459 >=
1204 1460
1205 For example, the following sets up a dependency on version 1.2 or 1461 For example, the following sets up a dependency on version 1.2 or
1206 greater of the package ``foo``: :: 1462 greater of the package ``foo``::
1207 1463
1208 RDEPENDS_${PN} = "foo (>= 1.2)" 1464 RDEPENDS:${PN} = "foo (>= 1.2)"
1209 1465
1210 For information on build-time dependencies, see the :term:`DEPENDS` 1466 For information on build-time dependencies, see the :term:`DEPENDS`
1211 variable. 1467 variable.
@@ -1214,33 +1470,43 @@ overview of their function and contents.
1214 The directory in which a local copy of a ``google-repo`` directory is 1470 The directory in which a local copy of a ``google-repo`` directory is
1215 stored when it is synced. 1471 stored when it is synced.
1216 1472
1473 :term:`REQUIRED_VERSION`
1474 If there are multiple versions of a recipe available, this variable
1475 determines which version should be given preference. :term:`REQUIRED_VERSION`
1476 works in exactly the same manner as :term:`PREFERRED_VERSION`, except
1477 that if the specified version is not available then an error message
1478 is shown and the build fails immediately.
1479
1480 If both :term:`REQUIRED_VERSION` and :term:`PREFERRED_VERSION` are set for
1481 the same recipe, the :term:`REQUIRED_VERSION` value applies.
1482
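 For example, to fail the build unless a 5.15-based version of a
 recipe is available (the recipe name and version are illustrative)::

    REQUIRED_VERSION_linux-yocto = "5.15%"
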
1217 :term:`RPROVIDES` 1483 :term:`RPROVIDES`
1218 A list of package name aliases that a package also provides. These 1484 A list of package name aliases that a package also provides. These
1219 aliases are useful for satisfying runtime dependencies of other 1485 aliases are useful for satisfying runtime dependencies of other
1220 packages both during the build and on the target (as specified by 1486 packages both during the build and on the target (as specified by
1221 ``RDEPENDS``). 1487 :term:`RDEPENDS`).
1222 1488
1223 As with all package-controlling variables, you must always use the 1489 As with all package-controlling variables, you must always use the
1224 variable in conjunction with a package name override. Here is an 1490 variable in conjunction with a package name override. Here is an
1225 example: :: 1491 example::
1226 1492
1227 RPROVIDES_${PN} = "widget-abi-2" 1493 RPROVIDES:${PN} = "widget-abi-2"
1228 1494
1229 :term:`RRECOMMENDS` 1495 :term:`RRECOMMENDS`
1230 A list of packages that extends the usability of a package being 1496 A list of packages that extends the usability of a package being
1231 built. The package being built does not depend on this list of 1497 built. The package being built does not depend on this list of
1232 packages in order to successfully build, but needs them for the 1498 packages in order to successfully build, but needs them for the
1233 extended usability. To specify runtime dependencies for packages, see 1499 extended usability. To specify runtime dependencies for packages, see
1234 the ``RDEPENDS`` variable. 1500 the :term:`RDEPENDS` variable.
1235 1501
1236 BitBake supports specifying versioned recommends. Although the syntax 1502 BitBake supports specifying versioned recommends. Although the syntax
1237 varies depending on the packaging format, BitBake hides these 1503 varies depending on the packaging format, BitBake hides these
1238 differences from you. Here is the general syntax to specify versions 1504 differences from you. Here is the general syntax to specify versions
1239 with the ``RRECOMMENDS`` variable: :: 1505 with the :term:`RRECOMMENDS` variable::
1240 1506
1241 RRECOMMENDS_${PN} = "package (operator version)" 1507 RRECOMMENDS:${PN} = "package (operator version)"
1242 1508
1243 For ``operator``, you can specify the following: :: 1509 For ``operator``, you can specify the following::
1244 1510
1245 = 1511 =
1246 < 1512 <
@@ -1249,76 +1515,114 @@ overview of their function and contents.
1249 >= 1515 >=
1250 1516
1251 For example, the following sets up a recommend on version 1517 For example, the following sets up a recommend on version
1252 1.2 or greater of the package ``foo``: :: 1518 1.2 or greater of the package ``foo``::
1253 1519
1254 RRECOMMENDS_${PN} = "foo (>= 1.2)" 1520 RRECOMMENDS:${PN} = "foo (>= 1.2)"
1255 1521
1256 :term:`SECTION` 1522 :term:`SECTION`
1257 The section in which packages should be categorized. 1523 The section in which packages should be categorized.
1258 1524
1259 :term:`SRC_URI` 1525 :term:`SRC_URI`
1260 The list of source files - local or remote. This variable tells 1526 The list of source files --- local or remote. This variable tells
1261 BitBake which bits to pull for the build and how to pull them. For 1527 BitBake which bits to pull for the build and how to pull them. For
1262 example, if the recipe or append file needs to fetch a single tarball 1528 example, if the recipe or append file needs to fetch a single tarball
1263 from the Internet, the recipe or append file uses a ``SRC_URI`` entry 1529 from the Internet, the recipe or append file uses a :term:`SRC_URI`
1264 that specifies that tarball. On the other hand, if the recipe or 1530 entry that specifies that tarball. On the other hand, if the recipe or
1265 append file needs to fetch a tarball and include a custom file, the 1531 append file needs to fetch a tarball, apply two patches, and include
1266 recipe or append file needs an ``SRC_URI`` variable that specifies 1532 a custom file, the recipe or append file needs an :term:`SRC_URI`
1267 all those sources. 1533 variable that specifies all those sources.
1268 1534
1269 The following list explains the available URI protocols: 1535 The following list explains the available URI protocols. URI
1536 protocols are highly dependent on particular BitBake Fetcher
1537 submodules. Depending on the fetcher BitBake uses, various URL
1538 parameters are employed. For specifics on the supported Fetchers, see
1539 the :ref:`bitbake-user-manual/bitbake-user-manual-fetching:fetchers`
1540 section.
1270 1541
1271 - ``file://`` : Fetches files, which are usually files shipped 1542 - ``az://``: Fetches files from an Azure Storage account using HTTPS.
1272 with the metadata, from the local machine. The path is relative to
1273 the :term:`FILESPATH` variable.
1274 1543
1275 - ``bzr://`` : Fetches files from a Bazaar revision control 1544 - ``bzr://``: Fetches files from a Bazaar revision control
1276 repository. 1545 repository.
1277 1546
1278 - ``git://`` : Fetches files from a Git revision control 1547 - ``ccrc://``: Fetches files from a ClearCase repository.
1548
1549 - ``cvs://``: Fetches files from a CVS revision control
1279 repository. 1550 repository.
1280 1551
1281 - ``osc://`` : Fetches files from an OSC (OpenSUSE Build service) 1552 - ``file://``: Fetches files, which are usually files shipped
1282 revision control repository. 1553 with the Metadata, from the local machine.
1554 The path is relative to the :term:`FILESPATH`
1555 variable. Thus, the build system searches, in order, from the
1556 following directories, which are assumed to be subdirectories of
1557 the directory in which the recipe file (``.bb``) or append file
1558 (``.bbappend``) resides:
1283 1559
1284 - ``repo://`` : Fetches files from a repo (Git) repository. 1560 - ``${BPN}``: the base recipe name without any special suffix
1561 or version numbers.
1285 1562
1286 - ``http://`` : Fetches files from the Internet using HTTP. 1563 - ``${BP}`` (i.e. ``${BPN}-${PV}``): the base recipe name and
1564 version but without any special package name suffix.
1287 1565
1288 - ``https://`` : Fetches files from the Internet using HTTPS. 1566 - ``files``: files within a directory, which is named ``files``
1567 and is also alongside the recipe or append file.
1289 1568
1290 - ``ftp://`` : Fetches files from the Internet using FTP. 1569 - ``ftp://``: Fetches files from the Internet using FTP.
1291 1570
1292 - ``cvs://`` : Fetches files from a CVS revision control 1571 - ``git://``: Fetches files from a Git revision control
1293 repository. 1572 repository.
1294 1573
1295 - ``hg://`` : Fetches files from a Mercurial (``hg``) revision 1574 - ``gitsm://``: Fetches submodules from a Git revision control
1575 repository.
1576
1577 - ``hg://``: Fetches files from a Mercurial (``hg``) revision
1296 control repository. 1578 control repository.
1297 1579
1298 - ``p4://`` : Fetches files from a Perforce (``p4``) revision 1580 - ``http://``: Fetches files from the Internet using HTTP.
1581
1582 - ``https://``: Fetches files from the Internet using HTTPS.
1583
1584 - ``npm://``: Fetches JavaScript modules from a registry.
1585
1586 - ``osc://``: Fetches files from an OSC (OpenSUSE Build service)
1587 revision control repository.
1588
1589 - ``p4://``: Fetches files from a Perforce (``p4``) revision
1299 control repository. 1590 control repository.
1300 1591
1301 - ``ssh://`` : Fetches files from a secure shell. 1592 - ``repo://``: Fetches files from a repo (Git) repository.
1593
1594 - ``ssh://``: Fetches files over a secure shell (SSH) connection.
1302 1595
1303 - ``svn://`` : Fetches files from a Subversion (``svn``) revision 1596 - ``svn://``: Fetches files from a Subversion (``svn``) revision
1304 control repository. 1597 control repository.
1305 1598
1306 Here are some additional options worth mentioning: 1599 Here are some additional options worth mentioning (see the combined example after this list):
1307 1600
1308 - ``unpack`` : Controls whether or not to unpack the file if it is 1601 - ``downloadfilename``: Specifies the filename used when storing
1309 an archive. The default action is to unpack the file. 1602 the downloaded file.
1603
1604 - ``name``: Specifies a name to be used for association with
1605 :term:`SRC_URI` checksums or :term:`SRCREV` when you have more than one
1606 file or git repository specified in :term:`SRC_URI`. For example::
1607
1608 SRC_URI = "git://example.com/foo.git;branch=main;name=first \
1609 git://example.com/bar.git;branch=main;name=second \
1610 http://example.com/file.tar.gz;name=third"
1310 1611
1311 - ``subdir`` : Places the file (or extracts its contents) into the 1612 SRCREV_first = "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"
1613 SRCREV_second = "e242ed3bffccdf271b7fbaf34ed72d089537b42f"
1614 SRC_URI[third.sha256sum] = "13550350a8681c84c861aac2e5b440161c2b33a3e4f302ac680ca5b686de48de"
1615
1616 - ``subdir``: Places the file (or extracts its contents) into the
1312 specified subdirectory. This option is useful for unusual tarballs 1617 specified subdirectory. This option is useful for unusual tarballs
1313 or other archives that do not have their files already in a 1618 or other archives that do not have their files already in a
1314 subdirectory within the archive. 1619 subdirectory within the archive.
1315 1620
1316 - ``name`` : Specifies a name to be used for association with 1621 - ``subpath``: Limits the checkout to a specific subpath of the
1317 ``SRC_URI`` checksums when you have more than one file specified 1622 tree when the Git fetcher is used.
1318 in ``SRC_URI``.
1319 1623
1320 - ``downloadfilename`` : Specifies the filename used when storing 1624 - ``unpack``: Controls whether or not to unpack the file if it is
1321 the downloaded file. 1625 an archive. The default action is to unpack the file.
1322 1626
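 Pulling several of the protocols and options above together, a
 hypothetical recipe could fetch from more than one source at once
 (all URLs, names, and the revision below are made up)::

    SRC_URI = "https://downloads.example.com/foo-${PV}.tar.gz;subdir=${BP};downloadfilename=foo-${PV}-src.tar.gz \
               git://example.com/foo-extras.git;branch=main;name=extras \
               file://build-fix.patch"
    SRCREV_extras = "0123456789abcdef0123456789abcdef01234567"
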
1323 :term:`SRCDATE` 1627 :term:`SRCDATE`
1324 The date of the source code used to build the package. This variable 1628 The date of the source code used to build the package. This variable
@@ -1330,7 +1634,7 @@ overview of their function and contents.
1330 variable applies only when using Subversion, Git, Mercurial and 1634 variable applies only when using Subversion, Git, Mercurial and
1331 Bazaar. If you want to build a fixed revision and you want to avoid 1635 Bazaar. If you want to build a fixed revision and you want to avoid
1332 performing a query on the remote repository every time BitBake parses 1636 performing a query on the remote repository every time BitBake parses
1333 your recipe, you should specify a ``SRCREV`` that is a full revision 1637 your recipe, you should specify a :term:`SRCREV` that is a full revision
1334 identifier and not just a tag. 1638 identifier and not just a tag.
1335 1639
1336 :term:`SRCREV_FORMAT` 1640 :term:`SRCREV_FORMAT`
@@ -1339,10 +1643,10 @@ overview of their function and contents.
1339 :term:`SRC_URI`. 1643 :term:`SRC_URI`.
1340 1644
1341 The system needs help constructing these values under these 1645 The system needs help constructing these values under these
1342 circumstances. Each component in the ``SRC_URI`` is assigned a name 1646 circumstances. Each component in the :term:`SRC_URI` is assigned a name
1343 and these are referenced in the ``SRCREV_FORMAT`` variable. Consider 1647 and these are referenced in the :term:`SRCREV_FORMAT` variable. Consider
1344 an example with URLs named "machine" and "meta". In this case, 1648 an example with URLs named "machine" and "meta". In this case,
1345 ``SRCREV_FORMAT`` could look like "machine_meta" and those names 1649 :term:`SRCREV_FORMAT` could look like "machine_meta" and those names
1346 would have the SCM versions substituted into each position. Only one 1650 would have the SCM versions substituted into each position. Only one
1347 ``AUTOINC`` placeholder is added, if needed, and this placeholder 1651 ``AUTOINC`` placeholder is added, if needed, and this placeholder
1348 is placed at the start of the returned string. 1652 is placed at the start of the returned string.
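
 As a short sketch of the "machine" and "meta" naming described above
 (the URLs are hypothetical)::

    SRC_URI = "git://example.com/machine.git;branch=main;name=machine \
               git://example.com/meta.git;branch=main;name=meta"
    SRCREV_FORMAT = "machine_meta"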
@@ -1354,7 +1658,7 @@ overview of their function and contents.
1354 1658
1355 :term:`STAMPCLEAN` 1659 :term:`STAMPCLEAN`
1356 Specifies the base path used to create recipe stamp files. Unlike the 1660 Specifies the base path used to create recipe stamp files. Unlike the
1357 :term:`STAMP` variable, ``STAMPCLEAN`` can contain 1661 :term:`STAMP` variable, :term:`STAMPCLEAN` can contain
1358 wildcards to match the range of files a clean operation should 1662 wildcards to match the range of files a clean operation should
1359 remove. BitBake uses a clean operation to remove any other stamps it 1663 remove. BitBake uses a clean operation to remove any other stamps it
1360 should be removing when creating a new stamp. 1664 should be removing when creating a new stamp.
diff --git a/bitbake/doc/conf.py b/bitbake/doc/conf.py
index fc2ee08111..f61241e28b 100644
--- a/bitbake/doc/conf.py
+++ b/bitbake/doc/conf.py
@@ -17,6 +17,8 @@
17import sys 17import sys
18import datetime 18import datetime
19 19
20from pathlib import Path
21
20current_version = "dev" 22current_version = "dev"
21 23
22# String used in sidebar 24# String used in sidebar
@@ -47,6 +49,7 @@ extlinks = {
47extensions = [ 49extensions = [
48 'sphinx.ext.autosectionlabel', 50 'sphinx.ext.autosectionlabel',
49 'sphinx.ext.extlinks', 51 'sphinx.ext.extlinks',
52 'sphinx.ext.autodoc',
50] 53]
51autosectionlabel_prefix_document = True 54autosectionlabel_prefix_document = True
52 55
@@ -99,3 +102,7 @@ html_last_updated_fmt = '%b %d, %Y'
99 102
100# Remove the trailing 'dot' in section numbers 103# Remove the trailing 'dot' in section numbers
101html_secnumber_suffix = " " 104html_secnumber_suffix = " "
105
106# autodoc needs the modules available to auto-generate documentation from the
107# code
108sys.path.insert(0, str(Path('..', 'lib').resolve()))
diff --git a/bitbake/doc/index.rst b/bitbake/doc/index.rst
index 3ff8b1580f..546ef36c16 100644
--- a/bitbake/doc/index.rst
+++ b/bitbake/doc/index.rst
@@ -13,8 +13,10 @@ BitBake User Manual
13 bitbake-user-manual/bitbake-user-manual-intro 13 bitbake-user-manual/bitbake-user-manual-intro
14 bitbake-user-manual/bitbake-user-manual-execution 14 bitbake-user-manual/bitbake-user-manual-execution
15 bitbake-user-manual/bitbake-user-manual-metadata 15 bitbake-user-manual/bitbake-user-manual-metadata
16 bitbake-user-manual/bitbake-user-manual-ref-variables-context
16 bitbake-user-manual/bitbake-user-manual-fetching 17 bitbake-user-manual/bitbake-user-manual-fetching
17 bitbake-user-manual/bitbake-user-manual-ref-variables 18 bitbake-user-manual/bitbake-user-manual-ref-variables
19 bitbake-user-manual/bitbake-user-manual-library-functions
18 bitbake-user-manual/bitbake-user-manual-hello 20 bitbake-user-manual/bitbake-user-manual-hello
19 21
20.. toctree:: 22.. toctree::
diff --git a/bitbake/doc/releases.rst b/bitbake/doc/releases.rst
index d68d71599c..676db66ec5 100644
--- a/bitbake/doc/releases.rst
+++ b/bitbake/doc/releases.rst
@@ -1,32 +1,95 @@
1.. SPDX-License-Identifier: CC-BY-2.5 1.. SPDX-License-Identifier: CC-BY-2.5
2 2
3========================= 3=================================
4 Current Release Manuals 4BitBake Supported Release Manuals
5========================= 5=================================
6
7******************************
8Release Series 5.2 (walnascar)
9******************************
10
11- :yocto_docs:`BitBake 2.12 User Manual </bitbake/2.12/>`
12
13*******************************
14Release Series 5.0 (scarthgap)
15*******************************
16
17- :yocto_docs:`BitBake 2.8 User Manual </bitbake/2.8/>`
18
19******************************
20Release Series 4.0 (kirkstone)
21******************************
22
23- :yocto_docs:`BitBake 2.0 User Manual </bitbake/2.0/>`
24
25================================
26BitBake Outdated Release Manuals
27================================
28
29****************************
30Release Series 5.1 (styhead)
31****************************
32
33- :yocto_docs:`BitBake 2.10 User Manual </bitbake/2.10/>`
34
35*******************************
36Release Series 4.3 (nanbield)
37*******************************
38
39- :yocto_docs:`BitBake 2.6 User Manual </bitbake/2.6/>`
40
41*******************************
42Release Series 4.2 (mickledore)
43*******************************
44
45- :yocto_docs:`BitBake 2.4 User Manual </bitbake/2.4/>`
46
47*****************************
48Release Series 4.1 (langdale)
49*****************************
50
51- :yocto_docs:`BitBake 2.2 User Manual </bitbake/2.2/>`
52
53******************************
54Release Series 3.4 (honister)
55******************************
56
57- :yocto_docs:`BitBake 1.52 User Manual </bitbake/1.52/>`
58
59******************************
60Release Series 3.3 (hardknott)
61******************************
62
63- :yocto_docs:`BitBake 1.50 User Manual </bitbake/1.50/>`
64
65*******************************
66Release Series 3.2 (gatesgarth)
67*******************************
68
69- :yocto_docs:`BitBake 1.48 User Manual </bitbake/1.48/>`
6 70
7**************************** 71****************************
83.1 'dunfell' Release Series 72Release Series 3.1 (dunfell)
9**************************** 73****************************
10 74
75- :yocto_docs:`BitBake 1.46 User Manual </bitbake/1.46/>`
11- :yocto_docs:`3.1 BitBake User Manual </3.1/bitbake-user-manual/bitbake-user-manual.html>` 76- :yocto_docs:`3.1 BitBake User Manual </3.1/bitbake-user-manual/bitbake-user-manual.html>`
12- :yocto_docs:`3.1.1 BitBake User Manual </3.1.1/bitbake-user-manual/bitbake-user-manual.html>` 77- :yocto_docs:`3.1.1 BitBake User Manual </3.1.1/bitbake-user-manual/bitbake-user-manual.html>`
13- :yocto_docs:`3.1.2 BitBake User Manual </3.1.2/bitbake-user-manual/bitbake-user-manual.html>` 78- :yocto_docs:`3.1.2 BitBake User Manual </3.1.2/bitbake-user-manual/bitbake-user-manual.html>`
14 79- :yocto_docs:`3.1.3 BitBake User Manual </3.1.3/bitbake-user-manual/bitbake-user-manual.html>`
15==========================
16 Previous Release Manuals
17==========================
18 80
19************************* 81*************************
203.0 'zeus' Release Series 82Release Series 3.0 (zeus)
21************************* 83*************************
22 84
23- :yocto_docs:`3.0 BitBake User Manual </3.0/bitbake-user-manual/bitbake-user-manual.html>` 85- :yocto_docs:`3.0 BitBake User Manual </3.0/bitbake-user-manual/bitbake-user-manual.html>`
24- :yocto_docs:`3.0.1 BitBake User Manual </3.0.1/bitbake-user-manual/bitbake-user-manual.html>` 86- :yocto_docs:`3.0.1 BitBake User Manual </3.0.1/bitbake-user-manual/bitbake-user-manual.html>`
25- :yocto_docs:`3.0.2 BitBake User Manual </3.0.2/bitbake-user-manual/bitbake-user-manual.html>` 87- :yocto_docs:`3.0.2 BitBake User Manual </3.0.2/bitbake-user-manual/bitbake-user-manual.html>`
26- :yocto_docs:`3.0.3 BitBake User Manual </3.0.3/bitbake-user-manual/bitbake-user-manual.html>` 88- :yocto_docs:`3.0.3 BitBake User Manual </3.0.3/bitbake-user-manual/bitbake-user-manual.html>`
89- :yocto_docs:`3.0.4 BitBake User Manual </3.0.4/bitbake-user-manual/bitbake-user-manual.html>`
27 90
28**************************** 91****************************
292.7 'warrior' Release Series 92Release Series 2.7 (warrior)
30**************************** 93****************************
31 94
32- :yocto_docs:`2.7 BitBake User Manual </2.7/bitbake-user-manual/bitbake-user-manual.html>` 95- :yocto_docs:`2.7 BitBake User Manual </2.7/bitbake-user-manual/bitbake-user-manual.html>`
@@ -36,7 +99,7 @@
36- :yocto_docs:`2.7.4 BitBake User Manual </2.7.4/bitbake-user-manual/bitbake-user-manual.html>` 99- :yocto_docs:`2.7.4 BitBake User Manual </2.7.4/bitbake-user-manual/bitbake-user-manual.html>`
37 100
38************************* 101*************************
392.6 'thud' Release Series 102Release Series 2.6 (thud)
40************************* 103*************************
41 104
42- :yocto_docs:`2.6 BitBake User Manual </2.6/bitbake-user-manual/bitbake-user-manual.html>` 105- :yocto_docs:`2.6 BitBake User Manual </2.6/bitbake-user-manual/bitbake-user-manual.html>`
@@ -46,16 +109,16 @@
46- :yocto_docs:`2.6.4 BitBake User Manual </2.6.4/bitbake-user-manual/bitbake-user-manual.html>` 109- :yocto_docs:`2.6.4 BitBake User Manual </2.6.4/bitbake-user-manual/bitbake-user-manual.html>`
47 110
48************************* 111*************************
492.5 'sumo' Release Series 112Release Series 2.5 (sumo)
50************************* 113*************************
51 114
52- :yocto_docs:`2.5 BitBake User Manual </2.5/bitbake-user-manual/bitbake-user-manual.html>` 115- :yocto_docs:`2.5 Documentation </2.5>`
53- :yocto_docs:`2.5.1 BitBake User Manual </2.5.1/bitbake-user-manual/bitbake-user-manual.html>` 116- :yocto_docs:`2.5.1 Documentation </2.5.1>`
54- :yocto_docs:`2.5.2 BitBake User Manual </2.5.2/bitbake-user-manual/bitbake-user-manual.html>` 117- :yocto_docs:`2.5.2 Documentation </2.5.2>`
55- :yocto_docs:`2.5.3 BitBake User Manual </2.5.3/bitbake-user-manual/bitbake-user-manual.html>` 118- :yocto_docs:`2.5.3 Documentation </2.5.3>`
56 119
57************************** 120**************************
582.4 'rocko' Release Series 121Release Series 2.4 (rocko)
59************************** 122**************************
60 123
61- :yocto_docs:`2.4 BitBake User Manual </2.4/bitbake-user-manual/bitbake-user-manual.html>` 124- :yocto_docs:`2.4 BitBake User Manual </2.4/bitbake-user-manual/bitbake-user-manual.html>`
@@ -65,7 +128,7 @@
65- :yocto_docs:`2.4.4 BitBake User Manual </2.4.4/bitbake-user-manual/bitbake-user-manual.html>` 128- :yocto_docs:`2.4.4 BitBake User Manual </2.4.4/bitbake-user-manual/bitbake-user-manual.html>`
66 129
67************************* 130*************************
682.3 'pyro' Release Series 131Release Series 2.3 (pyro)
69************************* 132*************************
70 133
71- :yocto_docs:`2.3 BitBake User Manual </2.3/bitbake-user-manual/bitbake-user-manual.html>` 134- :yocto_docs:`2.3 BitBake User Manual </2.3/bitbake-user-manual/bitbake-user-manual.html>`
@@ -75,7 +138,7 @@
75- :yocto_docs:`2.3.4 BitBake User Manual </2.3.4/bitbake-user-manual/bitbake-user-manual.html>` 138- :yocto_docs:`2.3.4 BitBake User Manual </2.3.4/bitbake-user-manual/bitbake-user-manual.html>`
76 139
77************************** 140**************************
782.2 'morty' Release Series 141Release Series 2.2 (morty)
79************************** 142**************************
80 143
81- :yocto_docs:`2.2 BitBake User Manual </2.2/bitbake-user-manual/bitbake-user-manual.html>` 144- :yocto_docs:`2.2 BitBake User Manual </2.2/bitbake-user-manual/bitbake-user-manual.html>`
@@ -84,7 +147,7 @@
84- :yocto_docs:`2.2.3 BitBake User Manual </2.2.3/bitbake-user-manual/bitbake-user-manual.html>` 147- :yocto_docs:`2.2.3 BitBake User Manual </2.2.3/bitbake-user-manual/bitbake-user-manual.html>`
85 148
86**************************** 149****************************
872.1 'krogoth' Release Series 150Release Series 2.1 (krogoth)
88**************************** 151****************************
89 152
90- :yocto_docs:`2.1 BitBake User Manual </2.1/bitbake-user-manual/bitbake-user-manual.html>` 153- :yocto_docs:`2.1 BitBake User Manual </2.1/bitbake-user-manual/bitbake-user-manual.html>`
@@ -93,7 +156,7 @@
93- :yocto_docs:`2.1.3 BitBake User Manual </2.1.3/bitbake-user-manual/bitbake-user-manual.html>` 156- :yocto_docs:`2.1.3 BitBake User Manual </2.1.3/bitbake-user-manual/bitbake-user-manual.html>`
94 157
95*************************** 158***************************
962.0 'jethro' Release Series 159Release Series 2.0 (jethro)
97*************************** 160***************************
98 161
99- :yocto_docs:`1.9 BitBake User Manual </1.9/bitbake-user-manual/bitbake-user-manual.html>` 162- :yocto_docs:`1.9 BitBake User Manual </1.9/bitbake-user-manual/bitbake-user-manual.html>`
@@ -103,7 +166,7 @@
103- :yocto_docs:`2.0.3 BitBake User Manual </2.0.3/bitbake-user-manual/bitbake-user-manual.html>` 166- :yocto_docs:`2.0.3 BitBake User Manual </2.0.3/bitbake-user-manual/bitbake-user-manual.html>`
104 167
105************************* 168*************************
1061.8 'fido' Release Series 169Release Series 1.8 (fido)
107************************* 170*************************
108 171
109- :yocto_docs:`1.8 BitBake User Manual </1.8/bitbake-user-manual/bitbake-user-manual.html>` 172- :yocto_docs:`1.8 BitBake User Manual </1.8/bitbake-user-manual/bitbake-user-manual.html>`
@@ -111,7 +174,7 @@
111- :yocto_docs:`1.8.2 BitBake User Manual </1.8.2/bitbake-user-manual/bitbake-user-manual.html>` 174- :yocto_docs:`1.8.2 BitBake User Manual </1.8.2/bitbake-user-manual/bitbake-user-manual.html>`
112 175
113************************** 176**************************
1141.7 'dizzy' Release Series 177Release Series 1.7 (dizzy)
115************************** 178**************************
116 179
117- :yocto_docs:`1.7 BitBake User Manual </1.7/bitbake-user-manual/bitbake-user-manual.html>` 180- :yocto_docs:`1.7 BitBake User Manual </1.7/bitbake-user-manual/bitbake-user-manual.html>`
@@ -120,7 +183,7 @@
120- :yocto_docs:`1.7.3 BitBake User Manual </1.7.3/bitbake-user-manual/bitbake-user-manual.html>` 183- :yocto_docs:`1.7.3 BitBake User Manual </1.7.3/bitbake-user-manual/bitbake-user-manual.html>`
121 184
122************************** 185**************************
1231.6 'daisy' Release Series 186Release Series 1.6 (daisy)
124************************** 187**************************
125 188
126- :yocto_docs:`1.6 BitBake User Manual </1.6/bitbake-user-manual/bitbake-user-manual.html>` 189- :yocto_docs:`1.6 BitBake User Manual </1.6/bitbake-user-manual/bitbake-user-manual.html>`
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 23c22b65ef..4af03c54ad 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -3,6 +3,8 @@
3# 3#
4# Copyright (C) 2006 Tim Ansell 4# Copyright (C) 2006 Tim Ansell
5# 5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
6# Please Note: 8# Please Note:
7# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW. 9# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
8# Assign a file to __warn__ to get warnings about slow operations. 10# Assign a file to __warn__ to get warnings about slow operations.
@@ -34,8 +36,9 @@ class COWDictMeta(COWMeta):
34 __marker__ = tuple() 36 __marker__ = tuple()
35 37
36 def __str__(cls): 38 def __str__(cls):
37 # FIXME: I have magic numbers! 39 ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
38 return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3) 40 keys = set(cls.__dict__.keys()) - ignored_keys
41 return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
39 42
40 __repr__ = __str__ 43 __repr__ = __str__
41 44
@@ -159,8 +162,9 @@ class COWDictMeta(COWMeta):
159 162
160class COWSetMeta(COWDictMeta): 163class COWSetMeta(COWDictMeta):
161 def __str__(cls): 164 def __str__(cls):
162 # FIXME: I have magic numbers! 165 ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
163 return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3) 166 keys = set(cls.__dict__.keys()) - ignored_keys
167 return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
164 168
165 __repr__ = __str__ 169 __repr__ = __str__
166 170
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 84a9051c13..bf4c54d829 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,12 +9,19 @@
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11
12__version__ = "1.49.2" 12__version__ = "2.15.1"
13 13
14import sys 14import sys
15if sys.version_info < (3, 5, 0): 15if sys.version_info < (3, 9, 0):
16 raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake") 16 raise RuntimeError("Sorry, python 3.9.0 or later is required for this version of bitbake")
17 17
18if sys.version_info < (3, 10, 0):
19 # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
20 # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
21 # https://bugs.ams1.psf.io/issue42888
22 # so ensure libgcc_s is loaded early on
23 import ctypes
24 libgcc_s = ctypes.CDLL('libgcc_s.so.1')
18 25
19class BBHandledException(Exception): 26class BBHandledException(Exception):
20 """ 27 """
@@ -29,6 +36,7 @@ class BBHandledException(Exception):
29 36
30import os 37import os
31import logging 38import logging
39from collections import namedtuple
32 40
33 41
34class NullHandler(logging.Handler): 42class NullHandler(logging.Handler):
@@ -58,8 +66,12 @@ class BBLoggerMixin(object):
58 if not bb.event.worker_pid: 66 if not bb.event.worker_pid:
59 if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]): 67 if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]):
60 return 68 return
61 if loglevel > bb.msg.loggerDefaultLogLevel: 69 if loglevel < bb.msg.loggerDefaultLogLevel:
62 return 70 return
71
72 if not isinstance(level, int) or not isinstance(msg, str):
73 mainlogger.warning("Invalid arguments in bbdebug: %s" % repr((level, msg,) + args))
74
63 return self.log(loglevel, msg, *args, **kwargs) 75 return self.log(loglevel, msg, *args, **kwargs)
64 76
65 def plain(self, msg, *args, **kwargs): 77 def plain(self, msg, *args, **kwargs):
@@ -71,6 +83,13 @@ class BBLoggerMixin(object):
71 def verbnote(self, msg, *args, **kwargs): 83 def verbnote(self, msg, *args, **kwargs):
72 return self.log(logging.INFO + 2, msg, *args, **kwargs) 84 return self.log(logging.INFO + 2, msg, *args, **kwargs)
73 85
86 def warnonce(self, msg, *args, **kwargs):
87 return self.log(logging.WARNING - 1, msg, *args, **kwargs)
88
89 def erroronce(self, msg, *args, **kwargs):
90 return self.log(logging.ERROR - 1, msg, *args, **kwargs)
91
92
74Logger = logging.getLoggerClass() 93Logger = logging.getLoggerClass()
75class BBLogger(Logger, BBLoggerMixin): 94class BBLogger(Logger, BBLoggerMixin):
76 def __init__(self, name, *args, **kwargs): 95 def __init__(self, name, *args, **kwargs):
@@ -85,26 +104,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
85 self.setup_bblogger(logger.name) 104 self.setup_bblogger(logger.name)
86 super().__init__(logger, *args, **kwargs) 105 super().__init__(logger, *args, **kwargs)
87 106
88 if sys.version_info < (3, 6):
89 # These properties were added in Python 3.6. Add them in older versions
90 # for compatibility
91 @property
92 def manager(self):
93 return self.logger.manager
94
95 @manager.setter
96 def manager(self, value):
97 self.logger.manager = value
98
99 @property
100 def name(self):
101 return self.logger.name
102
103 def __repr__(self):
104 logger = self.logger
105 level = logger.getLevelName(logger.getEffectiveLevel())
106 return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
107
108logging.LoggerAdapter = BBLoggerAdapter 107logging.LoggerAdapter = BBLoggerAdapter
109 108
110logger = logging.getLogger("BitBake") 109logger = logging.getLogger("BitBake")
@@ -130,9 +129,25 @@ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
130 129
131# Messaging convenience functions 130# Messaging convenience functions
132def plain(*args): 131def plain(*args):
132 """
133 Prints a message at "plain" level (higher level than a ``bb.note()``).
134
135 Arguments:
136
137 - ``args``: one or more strings to print.
138 """
133 mainlogger.plain(''.join(args)) 139 mainlogger.plain(''.join(args))
134 140
135def debug(lvl, *args): 141def debug(lvl, *args):
142 """
143 Prints a debug message.
144
145 Arguments:
146
147 - ``lvl``: debug level. Higher value increases the debug level
148 (determined by ``bitbake -D``).
149 - ``args``: one or more strings to print.
150 """
136 if isinstance(lvl, str): 151 if isinstance(lvl, str):
137 mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl) 152 mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
138 args = (lvl,) + args 153 args = (lvl,) + args
@@ -140,27 +155,81 @@ def debug(lvl, *args):
140 mainlogger.bbdebug(lvl, ''.join(args)) 155 mainlogger.bbdebug(lvl, ''.join(args))
141 156
142def note(*args): 157def note(*args):
158 """
159 Prints a message at "note" level.
160
161 Arguments:
162
163 - ``args``: one or more strings to print.
164 """
143 mainlogger.info(''.join(args)) 165 mainlogger.info(''.join(args))
144 166
145#
146# A higher prioity note which will show on the console but isn't a warning
147#
148# Something is happening the user should be aware of but they probably did
149# something to make it happen
150#
151def verbnote(*args): 167def verbnote(*args):
168 """
169 A higher priority note which will show on the console but isn't a warning.
170
171 Use in contexts when something is happening the user should be aware of but
172 they probably did something to make it happen.
173
174 Arguments:
175
176 - ``args``: one or more strings to print.
177 """
152 mainlogger.verbnote(''.join(args)) 178 mainlogger.verbnote(''.join(args))
153 179
154# 180#
155# Warnings - things the user likely needs to pay attention to and fix 181# Warnings - things the user likely needs to pay attention to and fix
156# 182#
157def warn(*args): 183def warn(*args):
184 """
185 Prints a warning message.
186
187 Arguments:
188
189 - ``args``: one or more strings to print.
190 """
158 mainlogger.warning(''.join(args)) 191 mainlogger.warning(''.join(args))
159 192
193def warnonce(*args):
194 """
195 Prints a warning message like ``bb.warn()``, but only prints the message
196 once.
197
198 Arguments:
199
200 - ``args``: one or more strings to print.
201 """
202 mainlogger.warnonce(''.join(args))
203
160def error(*args, **kwargs): 204def error(*args, **kwargs):
205 """
206 Prints an error message.
207
208 Arguments:
209
210 - ``args``: one or more strings to print.
211 """
161 mainlogger.error(''.join(args), extra=kwargs) 212 mainlogger.error(''.join(args), extra=kwargs)
162 213
214def erroronce(*args):
215 """
216 Prints an error message like ``bb.error()``, but only prints the message
217 once.
218
219 Arguments:
220
221 - ``args``: one or more strings to print.
222 """
223 mainlogger.erroronce(''.join(args))
224
163def fatal(*args, **kwargs): 225def fatal(*args, **kwargs):
226 """
227 Prints an error message and stops the BitBake execution.
228
229 Arguments:
230
231 - ``args``: one or more strings to print.
232 """
164 mainlogger.critical(''.join(args), extra=kwargs) 233 mainlogger.critical(''.join(args), extra=kwargs)
165 raise BBHandledException() 234 raise BBHandledException()
166 235
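# A hypothetical usage sketch of the convenience functions documented above:
#
#   import bb
#   bb.note("configuration parsed")
#   bb.warnonce("deprecated override syntax found")  # emitted only once per run
#   bb.fatal("unrecoverable error")                  # logs critical, raises BBHandledException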
@@ -189,7 +258,6 @@ def deprecated(func, name=None, advice=""):
189# For compatibility 258# For compatibility
190def deprecate_import(current, modulename, fromlist, renames = None): 259def deprecate_import(current, modulename, fromlist, renames = None):
191 """Import objects from one module into another, wrapping them with a DeprecationWarning""" 260 """Import objects from one module into another, wrapping them with a DeprecationWarning"""
192 import sys
193 261
194 module = __import__(modulename, fromlist = fromlist) 262 module = __import__(modulename, fromlist = fromlist)
195 for position, objname in enumerate(fromlist): 263 for position, objname in enumerate(fromlist):
@@ -203,3 +271,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):
203 271
204 setattr(sys.modules[current], newname, newobj) 272 setattr(sys.modules[current], newname, newobj)
205 273
274TaskData = namedtuple("TaskData", [
275 "pn",
276 "taskname",
277 "fn",
278 "deps",
279 "provides",
280 "taskhash",
281 "unihash",
282 "hashfn",
283 "taskhash_deps",
284])
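# Hypothetical sketch: code holding task metadata can build one of these
# records by keyword, e.g.
#   td = TaskData(pn="foo", taskname="do_compile", fn="/path/to/foo_1.0.bb",
#                 deps=[], provides=["foo"], taskhash="<hash>", unihash="<hash>",
#                 hashfn="<hashfn>", taskhash_deps=[])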
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py
new file mode 100755
index 0000000000..e9dbdb617f
--- /dev/null
+++ b/bitbake/lib/bb/acl.py
@@ -0,0 +1,213 @@
1#! /usr/bin/env python3
2#
3# Copyright 2023 by Garmin Ltd. or its subsidiaries
4#
5# SPDX-License-Identifier: MIT
6
7
8import sys
9import ctypes
10import os
11import errno
12import pwd
13import grp
14
15libacl = ctypes.CDLL("libacl.so.1", use_errno=True)
16
17
18ACL_TYPE_ACCESS = 0x8000
19ACL_TYPE_DEFAULT = 0x4000
20
21ACL_FIRST_ENTRY = 0
22ACL_NEXT_ENTRY = 1
23
24ACL_UNDEFINED_TAG = 0x00
25ACL_USER_OBJ = 0x01
26ACL_USER = 0x02
27ACL_GROUP_OBJ = 0x04
28ACL_GROUP = 0x08
29ACL_MASK = 0x10
30ACL_OTHER = 0x20
31
32ACL_READ = 0x04
33ACL_WRITE = 0x02
34ACL_EXECUTE = 0x01
35
36acl_t = ctypes.c_void_p
37acl_entry_t = ctypes.c_void_p
38acl_permset_t = ctypes.c_void_p
39acl_perm_t = ctypes.c_uint
40
41acl_tag_t = ctypes.c_int
42
43libacl.acl_free.argtypes = [acl_t]
44
45
46def acl_free(acl):
47 libacl.acl_free(acl)
48
49
50libacl.acl_get_file.restype = acl_t
51libacl.acl_get_file.argtypes = [ctypes.c_char_p, ctypes.c_uint]
52
53
54def acl_get_file(path, typ):
55 acl = libacl.acl_get_file(os.fsencode(path), typ)
56 if acl is None:
57 err = ctypes.get_errno()
58 raise OSError(err, os.strerror(err), str(path))
59
60 return acl
61
62
63libacl.acl_get_entry.argtypes = [acl_t, ctypes.c_int, ctypes.c_void_p]
64
65
66def acl_get_entry(acl, entry_id):
67 entry = acl_entry_t()
68 ret = libacl.acl_get_entry(acl, entry_id, ctypes.byref(entry))
69 if ret < 0:
70 err = ctypes.get_errno()
71 raise OSError(err, os.strerror(err))
72
73 if ret == 0:
74 return None
75
76 return entry
77
78
79libacl.acl_get_tag_type.argtypes = [acl_entry_t, ctypes.c_void_p]
80
81
82def acl_get_tag_type(entry_d):
83 tag = acl_tag_t()
84 ret = libacl.acl_get_tag_type(entry_d, ctypes.byref(tag))
85 if ret < 0:
86 err = ctypes.get_errno()
87 raise OSError(err, os.strerror(err))
88 return tag.value
89
90
91libacl.acl_get_qualifier.restype = ctypes.c_void_p
92libacl.acl_get_qualifier.argtypes = [acl_entry_t]
93
94
95def acl_get_qualifier(entry_d):
96 ret = libacl.acl_get_qualifier(entry_d)
97 if ret is None:
98 err = ctypes.get_errno()
99 raise OSError(err, os.strerror(err))
100 return ctypes.c_void_p(ret)
101
102
103libacl.acl_get_permset.argtypes = [acl_entry_t, ctypes.c_void_p]
104
105
106def acl_get_permset(entry_d):
107 permset = acl_permset_t()
108 ret = libacl.acl_get_permset(entry_d, ctypes.byref(permset))
109 if ret < 0:
110 err = ctypes.get_errno()
111 raise OSError(err, os.strerror(err))
112
113 return permset
114
115
116libacl.acl_get_perm.argtypes = [acl_permset_t, acl_perm_t]
117
118
119def acl_get_perm(permset_d, perm):
120 ret = libacl.acl_get_perm(permset_d, perm)
121 if ret < 0:
122 err = ctypes.get_errno()
123 raise OSError(err, os.strerror(err))
124 return bool(ret)
125
126
127class Entry(object):
128 def __init__(self, tag, qualifier, mode):
129 self.tag = tag
130 self.qualifier = qualifier
131 self.mode = mode
132
133 def __str__(self):
134 typ = ""
135 qual = ""
136 if self.tag == ACL_USER:
137 typ = "user"
138 qual = pwd.getpwuid(self.qualifier).pw_name
139 elif self.tag == ACL_GROUP:
140 typ = "group"
141 qual = grp.getgrgid(self.qualifier).gr_name
142 elif self.tag == ACL_USER_OBJ:
143 typ = "user"
144 elif self.tag == ACL_GROUP_OBJ:
145 typ = "group"
146 elif self.tag == ACL_MASK:
147 typ = "mask"
148 elif self.tag == ACL_OTHER:
149 typ = "other"
150
151 r = "r" if self.mode & ACL_READ else "-"
152 w = "w" if self.mode & ACL_WRITE else "-"
153 x = "x" if self.mode & ACL_EXECUTE else "-"
154
155 return f"{typ}:{qual}:{r}{w}{x}"
156
157
158class ACL(object):
159 def __init__(self, acl):
160 self.acl = acl
161
162 def __del__(self):
163 acl_free(self.acl)
164
165 def entries(self):
166 entry_id = ACL_FIRST_ENTRY
167 while True:
168 entry = acl_get_entry(self.acl, entry_id)
169 if entry is None:
170 break
171
172 permset = acl_get_permset(entry)
173
174 mode = 0
175 for m in (ACL_READ, ACL_WRITE, ACL_EXECUTE):
176 if acl_get_perm(permset, m):
177 mode |= m
178
179 qualifier = None
180 tag = acl_get_tag_type(entry)
181
182 if tag == ACL_USER or tag == ACL_GROUP:
183 qual = acl_get_qualifier(entry)
184 qualifier = ctypes.cast(qual, ctypes.POINTER(ctypes.c_int))[0]
185
186 yield Entry(tag, qualifier, mode)
187
188 entry_id = ACL_NEXT_ENTRY
189
190 @classmethod
191 def from_path(cls, path, typ):
192 acl = acl_get_file(path, typ)
193 return cls(acl)
194
195
196def main():
197 import argparse
198 from pathlib import Path
199
200 parser = argparse.ArgumentParser()
201 parser.add_argument("path", help="File Path", type=Path)
202
203 args = parser.parse_args()
204
205 acl = ACL.from_path(args.path, ACL_TYPE_ACCESS)
206 for entry in acl.entries():
207 print(str(entry))
208
209 return 0
210
211
212if __name__ == "__main__":
213 sys.exit(main())
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
new file mode 100644
index 0000000000..a4371643d7
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -0,0 +1,16 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7
8from .client import AsyncClient, Client
9from .serv import AsyncServer, AsyncServerConnection
10from .connection import DEFAULT_MAX_CHUNK
11from .exceptions import (
12 ClientError,
13 ServerError,
14 ConnectionClosedError,
15 InvokeError,
16)
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
new file mode 100644
index 0000000000..17b72033b9
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -0,0 +1,271 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import abc
8import asyncio
9import json
10import os
11import socket
12import sys
13import re
14import contextlib
15from threading import Thread
16from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
17from .exceptions import ConnectionClosedError, InvokeError
18
19UNIX_PREFIX = "unix://"
20WS_PREFIX = "ws://"
21WSS_PREFIX = "wss://"
22
23ADDR_TYPE_UNIX = 0
24ADDR_TYPE_TCP = 1
25ADDR_TYPE_WS = 2
26
27WEBSOCKETS_MIN_VERSION = (9, 1)
28# Need websockets 10 with python 3.10+
29if sys.version_info >= (3, 10, 0):
30 WEBSOCKETS_MIN_VERSION = (10, 0)
31
32
33def parse_address(addr):
34 if addr.startswith(UNIX_PREFIX):
35 return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
36 elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX):
37 return (ADDR_TYPE_WS, (addr,))
38 else:
39 m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr)
40 if m is not None:
41 host = m.group("host")
42 port = m.group("port")
43 else:
44 host, port = addr.split(":")
45
46 return (ADDR_TYPE_TCP, (host, int(port)))
47
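# Hypothetical examples of the address formats parse_address() accepts
# (paths and hosts are illustrative):
#   parse_address("unix:///run/srv.sock") -> (ADDR_TYPE_UNIX, ("/run/srv.sock",))
#   parse_address("ws://server:8686")     -> (ADDR_TYPE_WS, ("ws://server:8686",))
#   parse_address("[::1]:8686")           -> (ADDR_TYPE_TCP, ("::1", 8686))
#   parse_address("server:8686")          -> (ADDR_TYPE_TCP, ("server", 8686))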
48
49class AsyncClient(object):
50 def __init__(
51 self,
52 proto_name,
53 proto_version,
54 logger,
55 timeout=30,
56 server_headers=False,
57 headers={},
58 ):
59 self.socket = None
60 self.max_chunk = DEFAULT_MAX_CHUNK
61 self.proto_name = proto_name
62 self.proto_version = proto_version
63 self.logger = logger
64 self.timeout = timeout
65 self.needs_server_headers = server_headers
66 self.server_headers = {}
67 self.headers = headers
68
69 async def connect_tcp(self, address, port):
70 async def connect_sock():
71 reader, writer = await asyncio.open_connection(address, port)
72 return StreamConnection(reader, writer, self.timeout, self.max_chunk)
73
74 self._connect_sock = connect_sock
75
76 async def connect_unix(self, path):
77 async def connect_sock():
78 # AF_UNIX has path length issues so chdir here to work around them
79 cwd = os.getcwd()
80 try:
81 os.chdir(os.path.dirname(path))
82 # The socket must be opened synchronously so that the CWD doesn't get
83 # changed out from underneath us, so we pass it in as a sock to asyncio
84 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
85 sock.connect(os.path.basename(path))
86 finally:
87 os.chdir(cwd)
88 reader, writer = await asyncio.open_unix_connection(sock=sock)
89 return StreamConnection(reader, writer, self.timeout, self.max_chunk)
90
91 self._connect_sock = connect_sock
92
93 async def connect_websocket(self, uri):
94 import websockets
95
96 try:
97 version = tuple(
98 int(v)
99 for v in websockets.__version__.split(".")[
100 0 : len(WEBSOCKETS_MIN_VERSION)
101 ]
102 )
103 except ValueError:
104 raise ImportError(
105 f"Unable to parse websockets version '{websockets.__version__}'"
106 )
107
108 if version < WEBSOCKETS_MIN_VERSION:
109 min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
110 raise ImportError(
111 f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
112 )
113
114 async def connect_sock():
115 try:
116 websocket = await websockets.connect(
117 uri,
118 ping_interval=None,
119 open_timeout=self.timeout,
120 )
121 except asyncio.exceptions.TimeoutError:
122 raise ConnectionError("Timeout while connecting to websocket")
123 except (OSError, websockets.InvalidHandshake, websockets.InvalidURI) as exc:
124 raise ConnectionError(f"Could not connect to websocket: {exc}") from exc
125 return WebsocketConnection(websocket, self.timeout)
126
127 self._connect_sock = connect_sock
128
129 async def setup_connection(self):
130 # Send headers
131 await self.socket.send("%s %s" % (self.proto_name, self.proto_version))
132 await self.socket.send(
133 "needs-headers: %s" % ("true" if self.needs_server_headers else "false")
134 )
135 for k, v in self.headers.items():
136 await self.socket.send("%s: %s" % (k, v))
137
138 # End of headers
139 await self.socket.send("")
140
141 self.server_headers = {}
142 if self.needs_server_headers:
143 while True:
144 line = await self.socket.recv()
145 if not line:
146 # End headers
147 break
148 tag, value = line.split(":", 1)
149 self.server_headers[tag.lower()] = value.strip()
150
151 async def get_header(self, tag, default):
152 await self.connect()
153 return self.server_headers.get(tag, default)
154
155 async def connect(self):
156 if self.socket is None:
157 self.socket = await self._connect_sock()
158 await self.setup_connection()
159
160 async def disconnect(self):
161 if self.socket is not None:
162 await self.socket.close()
163 self.socket = None
164
165 async def close(self):
166 await self.disconnect()
167
168 async def _send_wrapper(self, proc):
169 count = 0
170 while True:
171 try:
172 await self.connect()
173 return await proc()
174 except (
175 OSError,
176 ConnectionError,
177 ConnectionClosedError,
178 json.JSONDecodeError,
179 UnicodeDecodeError,
180 ) as e:
181 self.logger.warning("Error talking to server: %s" % e)
182 if count >= 3:
183 if not isinstance(e, ConnectionError):
184 raise ConnectionError(str(e))
185 raise e
186 await self.close()
187 count += 1
188
189 def check_invoke_error(self, msg):
190 if isinstance(msg, dict) and "invoke-error" in msg:
191 raise InvokeError(msg["invoke-error"]["message"])
192
193 async def invoke(self, msg):
194 async def proc():
195 await self.socket.send_message(msg)
196 return await self.socket.recv_message()
197
198 result = await self._send_wrapper(proc)
199 self.check_invoke_error(result)
200 return result
201
202 async def ping(self):
203 return await self.invoke({"ping": {}})
204
205 async def __aenter__(self):
206 return self
207
208 async def __aexit__(self, exc_type, exc_value, traceback):
209 await self.close()
210
211
212class Client(object):
213 def __init__(self):
214 self.client = self._get_async_client()
215 self.loop = asyncio.new_event_loop()
216
217 # Override any pre-existing loop.
218 # Without this, the PR server export selftest triggers a hang
219 # when running with Python 3.7. The drawback is that there is
220 # potential for issues if the PR and hash equiv (or some new)
221 # clients need to both be instantiated in the same process.
222 # This should be revisited if/when Python 3.9 becomes the
223 # minimum required version for BitBake, as it seems not
224 # required (but harmless) with it.
225 asyncio.set_event_loop(self.loop)
226
227 self._add_methods("connect_tcp", "ping")
228
229 @abc.abstractmethod
230 def _get_async_client(self):
231 pass
232
233 def _get_downcall_wrapper(self, downcall):
234 def wrapper(*args, **kwargs):
235 return self.loop.run_until_complete(downcall(*args, **kwargs))
236
237 return wrapper
238
239 def _add_methods(self, *methods):
240 for m in methods:
241 downcall = getattr(self.client, m)
242 setattr(self, m, self._get_downcall_wrapper(downcall))
243
244 def connect_unix(self, path):
245 self.loop.run_until_complete(self.client.connect_unix(path))
246 self.loop.run_until_complete(self.client.connect())
247
248 @property
249 def max_chunk(self):
250 return self.client.max_chunk
251
252 @max_chunk.setter
253 def max_chunk(self, value):
254 self.client.max_chunk = value
255
256 def disconnect(self):
257 self.loop.run_until_complete(self.client.close())
258
259 def close(self):
260 if self.loop:
261 self.loop.run_until_complete(self.client.close())
262 self.loop.run_until_complete(self.loop.shutdown_asyncgens())
263 self.loop.close()
264 self.loop = None
265
266 def __enter__(self):
267 return self
268
269 def __exit__(self, exc_type, exc_value, traceback):
270 self.close()
271 return False
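`Client` is an abstract synchronous facade over `AsyncClient`: a subclass supplies the async implementation via `_get_async_client()`, and `_add_methods()` wraps selected coroutines so each call runs to completion on the private event loop. A minimal sketch of a concrete pair (the `myproto` protocol and the `echo` method are illustrative, not part of BitBake; the real subclasses live in the hash equivalence and PR service code):

```python
import logging

from bb.asyncrpc import AsyncClient, Client

logger = logging.getLogger("myproto")


class MyAsyncClient(AsyncClient):
    def __init__(self):
        super().__init__("myproto", "1.0", logger)

    async def echo(self, value):
        # invoke() reconnects and retries transient transport errors,
        # and raises InvokeError if the server reports "invoke-error".
        return await self.invoke({"echo": {"value": value}})


class MyClient(Client):
    def __init__(self):
        super().__init__()
        # Expose the coroutine as a blocking method on this wrapper;
        # connect_tcp and ping are already wrapped by the base class.
        self._add_methods("echo")

    def _get_async_client(self):
        return MyAsyncClient()


# Usage (assumes a compatible server on the socket):
#   with MyClient() as client:
#       client.connect_unix("/tmp/myproto.sock")
#       print(client.echo("hello"))
```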
diff --git a/bitbake/lib/bb/asyncrpc/connection.py b/bitbake/lib/bb/asyncrpc/connection.py
new file mode 100644
index 0000000000..7f0cf6ba96
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/connection.py
@@ -0,0 +1,146 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import asyncio
8import itertools
9import json
10from datetime import datetime
11from .exceptions import ClientError, ConnectionClosedError
12
13
14# The Python async server defaults to a 64K receive buffer, so we hardcode our
15# maximum chunk size. It would be better if the client and server reported to
16# each other what the maximum chunk sizes were, but that will slow down the
17# connection setup with a round trip delay so I'd rather not do that unless it
18# is necessary
19DEFAULT_MAX_CHUNK = 32 * 1024
20
21
22def chunkify(msg, max_chunk):
23 if len(msg) < max_chunk - 1:
24 yield "".join((msg, "\n"))
25 else:
26 yield "".join((json.dumps({"chunk-stream": None}), "\n"))
27
28 args = [iter(msg)] * (max_chunk - 1)
29 for m in map("".join, itertools.zip_longest(*args, fillvalue="")):
30 yield "".join(itertools.chain(m, "\n"))
31 yield "\n"
32
33
34def json_serialize(obj):
35 if isinstance(obj, datetime):
36 return obj.isoformat()
37 raise TypeError("Type %s not serializable" % type(obj))
38
39
40class StreamConnection(object):
41 def __init__(self, reader, writer, timeout, max_chunk=DEFAULT_MAX_CHUNK):
42 self.reader = reader
43 self.writer = writer
44 self.timeout = timeout
45 self.max_chunk = max_chunk
46
47 @property
48 def address(self):
49 return self.writer.get_extra_info("peername")
50
51 async def send_message(self, msg):
52 for c in chunkify(json.dumps(msg, default=json_serialize), self.max_chunk):
53 self.writer.write(c.encode("utf-8"))
54 await self.writer.drain()
55
56 async def recv_message(self):
57 l = await self.recv()
58
59 m = json.loads(l)
60 if not m:
61 return m
62
63 if "chunk-stream" in m:
64 lines = []
65 while True:
66 l = await self.recv()
67 if not l:
68 break
69 lines.append(l)
70
71 m = json.loads("".join(lines))
72
73 return m
74
75 async def send(self, msg):
76 self.writer.write(("%s\n" % msg).encode("utf-8"))
77 await self.writer.drain()
78
79 async def recv(self):
80 if self.timeout < 0:
81 line = await self.reader.readline()
82 else:
83 try:
84 line = await asyncio.wait_for(self.reader.readline(), self.timeout)
85 except asyncio.TimeoutError:
86 raise ConnectionError("Timed out waiting for data")
87
88 if not line:
89 raise ConnectionClosedError("Connection closed")
90
91 line = line.decode("utf-8")
92
93 if not line.endswith("\n"):
94 raise ConnectionError("Bad message %r" % (line))
95
96 return line.rstrip()
97
98 async def close(self):
99 self.reader = None
100 if self.writer is not None:
101 self.writer.close()
102 self.writer = None
103
104
105class WebsocketConnection(object):
106 def __init__(self, socket, timeout):
107 self.socket = socket
108 self.timeout = timeout
109
110 @property
111 def address(self):
112 return ":".join(str(s) for s in self.socket.remote_address)
113
114 async def send_message(self, msg):
115 await self.send(json.dumps(msg, default=json_serialize))
116
117 async def recv_message(self):
118 m = await self.recv()
119 return json.loads(m)
120
121 async def send(self, msg):
122 import websockets.exceptions
123
124 try:
125 await self.socket.send(msg)
126 except websockets.exceptions.ConnectionClosed:
127 raise ConnectionClosedError("Connection closed")
128
129 async def recv(self):
130 import websockets.exceptions
131
132 try:
133 if self.timeout < 0:
134 return await self.socket.recv()
135
136 try:
137 return await asyncio.wait_for(self.socket.recv(), self.timeout)
138 except asyncio.TimeoutError:
139 raise ConnectionError("Timed out waiting for data")
140 except websockets.exceptions.ConnectionClosed:
141 raise ConnectionClosedError("Connection closed")
142
143 async def close(self):
144 if self.socket is not None:
145 await self.socket.close()
146 self.socket = None
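The wire format implemented here is line-oriented JSON: a message that fits within `max_chunk` is sent as a single JSON line, while a larger one is introduced by a `{"chunk-stream": null}` header, split into `max_chunk - 1` character slices (one per line), and terminated by an empty line, which is exactly what `recv_message()` reassembles. A standalone illustration of `chunkify` with a deliberately tiny `max_chunk`:

```python
import json

from bb.asyncrpc.connection import chunkify

# Small message: a single newline-terminated JSON line.
print(list(chunkify(json.dumps({"ping": {}}), 64)))
# -> ['{"ping": {}}\n']

# Large message relative to max_chunk=8: chunk-stream header,
# 7-character slices, then a bare "\n" terminator.
for line in chunkify(json.dumps({"data": "abcdefghij"}), 8):
    print(repr(line))
```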
diff --git a/bitbake/lib/bb/asyncrpc/exceptions.py b/bitbake/lib/bb/asyncrpc/exceptions.py
new file mode 100644
index 0000000000..ae1043a38b
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/exceptions.py
@@ -0,0 +1,21 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7
8class ClientError(Exception):
9 pass
10
11
12class InvokeError(Exception):
13 pass
14
15
16class ServerError(Exception):
17 pass
18
19
20class ConnectionClosedError(Exception):
21 pass
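These four classes partition the failure modes seen elsewhere in the package: `ConnectionClosedError` for an orderly remote close, `InvokeError` for an error a remote handler raised and relayed back, and `ClientError`/`ServerError` for protocol misuse on either side (the server below raises `ClientError` for an unrecognized command). A hypothetical caller can therefore separate "retry the transport" from "the request itself failed" (the `echo` call is the illustrative method from the client sketch above):

```python
from bb.asyncrpc import ConnectionClosedError, InvokeError

def call_with_reporting(client):
    # Sketch: InvokeError means the remote handler failed; transport
    # errors suggest a reconnect or retry might succeed.
    try:
        return client.echo("hello")
    except InvokeError as e:
        print("request failed on the server: %s" % e)
    except (ConnectionClosedError, ConnectionError) as e:
        print("transport failure, consider retrying: %s" % e)
    return None
```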
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
new file mode 100644
index 0000000000..667217c5c1
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -0,0 +1,410 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import abc
8import asyncio
9import json
10import os
11import signal
12import socket
13import sys
14import multiprocessing
15import logging
16from .connection import StreamConnection, WebsocketConnection
17from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError
18
19
20class ClientLoggerAdapter(logging.LoggerAdapter):
21 def process(self, msg, kwargs):
22 return f"[Client {self.extra['address']}] {msg}", kwargs
23
24
25class AsyncServerConnection(object):
26 # If a handler returns this object (e.g. `return self.NO_RESPONSE`), no
27 # return message will automatically be sent back to the client
28 NO_RESPONSE = object()
29
30 def __init__(self, socket, proto_name, logger):
31 self.socket = socket
32 self.proto_name = proto_name
33 self.handlers = {
34 "ping": self.handle_ping,
35 }
36 self.logger = ClientLoggerAdapter(
37 logger,
38 {
39 "address": socket.address,
40 },
41 )
42 self.client_headers = {}
43
44 async def close(self):
45 await self.socket.close()
46
47 async def handle_headers(self, headers):
48 return {}
49
50 async def process_requests(self):
51 try:
52 self.logger.info("Client %r connected" % (self.socket.address,))
53
54 # Read protocol and version
55 client_protocol = await self.socket.recv()
56 if not client_protocol:
57 return
58
59 (client_proto_name, client_proto_version) = client_protocol.split()
60 if client_proto_name != self.proto_name:
61 self.logger.debug("Rejecting invalid protocol %s" % (client_proto_name))
62 return
63
64 self.proto_version = tuple(int(v) for v in client_proto_version.split("."))
65 if not self.validate_proto_version():
66 self.logger.debug(
67 "Rejecting invalid protocol version %s" % (client_proto_version)
68 )
69 return
70
71 # Read headers
72 self.client_headers = {}
73 while True:
74 header = await self.socket.recv()
75 if not header:
76 # Empty line. End of headers
77 break
78 tag, value = header.split(":", 1)
79 self.client_headers[tag.lower()] = value.strip()
80
81 if self.client_headers.get("needs-headers", "false") == "true":
82 for k, v in (await self.handle_headers(self.client_headers)).items():
83 await self.socket.send("%s: %s" % (k, v))
84 await self.socket.send("")
85
86 # Handle messages
87 while True:
88 d = await self.socket.recv_message()
89 if d is None:
90 break
91 try:
92 response = await self.dispatch_message(d)
93 except InvokeError as e:
94 await self.socket.send_message(
95 {"invoke-error": {"message": str(e)}}
96 )
97 break
98
99 if response is not self.NO_RESPONSE:
100 await self.socket.send_message(response)
101
102 except ConnectionClosedError as e:
103 self.logger.info(str(e))
104 except (ClientError, ConnectionError) as e:
105 self.logger.error(str(e))
106 finally:
107 await self.close()
108
109 async def dispatch_message(self, msg):
110 for k in self.handlers.keys():
111 if k in msg:
112 self.logger.debug("Handling %s" % k)
113 return await self.handlers[k](msg[k])
114
115 raise ClientError("Unrecognized command %r" % msg)
116
117 async def handle_ping(self, request):
118 return {"alive": True}
119
120
121class StreamServer(object):
122 def __init__(self, handler, logger):
123 self.handler = handler
124 self.logger = logger
125 self.closed = False
126
127 async def handle_stream_client(self, reader, writer):
128 # writer.transport.set_write_buffer_limits(0)
129 socket = StreamConnection(reader, writer, -1)
130 if self.closed:
131 await socket.close()
132 return
133
134 await self.handler(socket)
135
136 async def stop(self):
137 self.closed = True
138
139
140class TCPStreamServer(StreamServer):
141 def __init__(self, host, port, handler, logger, *, reuseport=False):
142 super().__init__(handler, logger)
143 self.host = host
144 self.port = port
145 self.reuseport = reuseport
146
147 def start(self, loop):
148 self.server = loop.run_until_complete(
149 asyncio.start_server(
150 self.handle_stream_client,
151 self.host,
152 self.port,
153 reuse_port=self.reuseport,
154 )
155 )
156
157 for s in self.server.sockets:
158 self.logger.debug("Listening on %r" % (s.getsockname(),))
159 # Newer python does this automatically. Do it manually here for
160 # maximum compatibility
161 s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
162 s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
163
164 # Enable keep alives. This prevents broken client connections
165 # from persisting on the server for long periods of time.
166 s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
167 s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
168 s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
169 s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
170
171 name = self.server.sockets[0].getsockname()
172 if self.server.sockets[0].family == socket.AF_INET6:
173 self.address = "[%s]:%d" % (name[0], name[1])
174 else:
175 self.address = "%s:%d" % (name[0], name[1])
176
177 return [self.server.wait_closed()]
178
179 async def stop(self):
180 await super().stop()
181 self.server.close()
182
183 def cleanup(self):
184 pass
185
186
187class UnixStreamServer(StreamServer):
188 def __init__(self, path, handler, logger):
189 super().__init__(handler, logger)
190 self.path = path
191
192 def start(self, loop):
193 cwd = os.getcwd()
194 try:
195 # Work around path length limits in AF_UNIX
196 os.chdir(os.path.dirname(self.path))
197 self.server = loop.run_until_complete(
198 asyncio.start_unix_server(
199 self.handle_stream_client, os.path.basename(self.path)
200 )
201 )
202 finally:
203 os.chdir(cwd)
204
205 self.logger.debug("Listening on %r" % self.path)
206 self.address = "unix://%s" % os.path.abspath(self.path)
207 return [self.server.wait_closed()]
208
209 async def stop(self):
210 await super().stop()
211 self.server.close()
212
213 def cleanup(self):
214 os.unlink(self.path)
215
216
217class WebsocketsServer(object):
218 def __init__(self, host, port, handler, logger, *, reuseport=False):
219 self.host = host
220 self.port = port
221 self.handler = handler
222 self.logger = logger
223 self.reuseport = reuseport
224
225 def start(self, loop):
226 import websockets.server
227
228 self.server = loop.run_until_complete(
229 websockets.server.serve(
230 self.client_handler,
231 self.host,
232 self.port,
233 ping_interval=None,
234 reuse_port=self.reuseport,
235 )
236 )
237
238 for s in self.server.sockets:
239 self.logger.debug("Listening on %r" % (s.getsockname(),))
240
241 # Enable keep alives. This prevents broken client connections
242 # from persisting on the server for long periods of time.
243 s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
244 s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
245 s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
246 s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
247
248 name = self.server.sockets[0].getsockname()
249 if self.server.sockets[0].family == socket.AF_INET6:
250 self.address = "ws://[%s]:%d" % (name[0], name[1])
251 else:
252 self.address = "ws://%s:%d" % (name[0], name[1])
253
254 return [self.server.wait_closed()]
255
256 async def stop(self):
257 self.server.close()
258
259 def cleanup(self):
260 pass
261
262 async def client_handler(self, websocket):
263 socket = WebsocketConnection(websocket, -1)
264 await self.handler(socket)
265
266
267class AsyncServer(object):
268 def __init__(self, logger):
269 self.logger = logger
270 self.loop = None
271 self.run_tasks = []
272
273 def start_tcp_server(self, host, port, *, reuseport=False):
274 self.server = TCPStreamServer(
275 host,
276 port,
277 self._client_handler,
278 self.logger,
279 reuseport=reuseport,
280 )
281
282 def start_unix_server(self, path):
283 self.server = UnixStreamServer(path, self._client_handler, self.logger)
284
285 def start_websocket_server(self, host, port, reuseport=False):
286 self.server = WebsocketsServer(
287 host,
288 port,
289 self._client_handler,
290 self.logger,
291 reuseport=reuseport,
292 )
293
294 async def _client_handler(self, socket):
295 address = socket.address
296 try:
297 client = self.accept_client(socket)
298 await client.process_requests()
299 except Exception as e:
300 import traceback
301
302 self.logger.error(
303 "Error from client %s: %s" % (address, str(e)), exc_info=True
304 )
305 traceback.print_exc()
306 finally:
307 self.logger.debug("Client %s disconnected", address)
308 await socket.close()
309
310 @abc.abstractmethod
311 def accept_client(self, socket):
312 pass
313
314 async def stop(self):
315 self.logger.debug("Stopping server")
316 await self.server.stop()
317
318 def start(self):
319 tasks = self.server.start(self.loop)
320 self.address = self.server.address
321 return tasks
322
323 def signal_handler(self):
324 self.logger.debug("Got exit signal")
325 self.loop.create_task(self.stop())
326
327 def _serve_forever(self, tasks):
328 try:
329 self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
330 self.loop.add_signal_handler(signal.SIGINT, self.signal_handler)
331 self.loop.add_signal_handler(signal.SIGQUIT, self.signal_handler)
332 signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
333
334 self.loop.run_until_complete(asyncio.gather(*tasks))
335
336 self.logger.debug("Server shutting down")
337 finally:
338 self.server.cleanup()
339
340 def serve_forever(self):
341 """
342 Serve requests in the current process
343 """
344 self._create_loop()
345 tasks = self.start()
346 self._serve_forever(tasks)
347 self.loop.close()
348
349 def _create_loop(self):
350 # Create loop and override any loop that may have existed in
351 # a parent process. It is possible that the usecases of
352 # serve_forever might be constrained enough to allow using
353 # get_event_loop here, but better safe than sorry for now.
354 self.loop = asyncio.new_event_loop()
355 asyncio.set_event_loop(self.loop)
356
357 def serve_as_process(self, *, prefunc=None, args=(), log_level=None):
358 """
359 Serve requests in a child process
360 """
361
362 def run(queue):
363 # Create loop and override any loop that may have existed
364 # in a parent process. Without doing this and instead
365 # using get_event_loop, at the very minimum the hashserv
366 # unit tests will hang when running the second test.
367 # This happens since get_event_loop in the spawned server
368 # process for the second testcase ends up with the loop
369 # from the hashserv client created in the unit test process
370 # when running the first testcase. The problem is somewhat
371 # more general, though, as any potential use of asyncio in
372 # Cooker could create a loop that needs to be replaced in this
373 # new process.
374 self._create_loop()
375 try:
376 self.address = None
377 tasks = self.start()
378 finally:
379 # Always put the server address on the queue to wake up the parent task
380 queue.put(self.address)
381 queue.close()
382
383 if prefunc is not None:
384 prefunc(self, *args)
385
386 if log_level is not None:
387 self.logger.setLevel(log_level)
388
389 self._serve_forever(tasks)
390
391 self.loop.run_until_complete(self.loop.shutdown_asyncgens())
392 self.loop.close()
393
394 queue = multiprocessing.Queue()
395
396 # Temporarily block SIGTERM. The server process will inherit this
397 # block which will ensure it doesn't receive the SIGTERM until the
398 # handler is ready for it
399 mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
400 try:
401 self.process = multiprocessing.Process(target=run, args=(queue,))
402 self.process.start()
403
404 self.address = queue.get()
405 queue.close()
406 queue.join_thread()
407
408 return self.process
409 finally:
410 signal.pthread_sigmask(signal.SIG_SETMASK, mask)
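The server side mirrors the client: subclass `AsyncServerConnection` to register message handlers (and implement `validate_proto_version()`, which `process_requests()` calls but the base class does not define), then subclass `AsyncServer` so `accept_client()` wraps each accepted socket in that connection class. A minimal sketch matching the illustrative `myproto` client above:

```python
import logging

from bb.asyncrpc import AsyncServer, AsyncServerConnection

logger = logging.getLogger("myproto")


class MyServerConnection(AsyncServerConnection):
    def __init__(self, socket):
        super().__init__(socket, "myproto", logger)
        # "ping" is pre-registered by the base class.
        self.handlers["echo"] = self.handle_echo

    def validate_proto_version(self):
        return self.proto_version == (1, 0)

    async def handle_echo(self, request):
        return {"value": request["value"]}


class MyServer(AsyncServer):
    def __init__(self):
        super().__init__(logger)

    def accept_client(self, socket):
        return MyServerConnection(socket)


server = MyServer()
server.start_unix_server("/tmp/myproto.sock")
server.serve_forever()  # or serve_as_process() to fork a child server
```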
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index f4f897e41a..40839a81b5 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -20,10 +20,12 @@ import itertools
20import time 20import time
21import re 21import re
22import stat 22import stat
23import datetime
23import bb 24import bb
24import bb.msg 25import bb.msg
25import bb.process 26import bb.process
26import bb.progress 27import bb.progress
28from io import StringIO
27from bb import data, event, utils 29from bb import data, event, utils
28 30
29bblogger = logging.getLogger('BitBake') 31bblogger = logging.getLogger('BitBake')
@@ -176,7 +178,9 @@ class StdoutNoopContextManager:
176 178
177 @property 179 @property
178 def name(self): 180 def name(self):
179 return sys.stdout.name 181 if "name" in dir(sys.stdout):
182 return sys.stdout.name
183 return "<mem>"
180 184
181 185
182def exec_func(func, d, dirs = None): 186def exec_func(func, d, dirs = None):
@@ -193,6 +197,8 @@ def exec_func(func, d, dirs = None):
193 for cdir in d.expand(cleandirs).split(): 197 for cdir in d.expand(cleandirs).split():
194 bb.utils.remove(cdir, True) 198 bb.utils.remove(cdir, True)
195 bb.utils.mkdirhier(cdir) 199 bb.utils.mkdirhier(cdir)
200 if cdir == oldcwd:
201 os.chdir(cdir)
196 202
197 if flags and dirs is None: 203 if flags and dirs is None:
198 dirs = flags.get('dirs') 204 dirs = flags.get('dirs')
@@ -295,9 +301,25 @@ def exec_func_python(func, d, runfile, cwd=None):
295 lineno = int(d.getVarFlag(func, "lineno", False)) 301 lineno = int(d.getVarFlag(func, "lineno", False))
296 bb.methodpool.insert_method(func, text, fn, lineno - 1) 302 bb.methodpool.insert_method(func, text, fn, lineno - 1)
297 303
298 comp = utils.better_compile(code, func, "exec_python_func() autogenerated") 304 if verboseStdoutLogging:
299 utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated") 305 sys.stdout.flush()
306 sys.stderr.flush()
307 currout = sys.stdout
308 currerr = sys.stderr
309 sys.stderr = sys.stdout = execio = StringIO()
310 comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
311 utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
300 finally: 312 finally:
313 if verboseStdoutLogging:
314 execio.flush()
315 logger.plain("%s" % execio.getvalue())
316 sys.stdout = currout
317 sys.stderr = currerr
318 execio.close()
319 # We want any stdout/stderr to be printed before any other log messages to make debugging
320 # more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this.
321 sys.stdout.flush()
322 sys.stderr.flush()
301 bb.debug(2, "Python function %s finished" % func) 323 bb.debug(2, "Python function %s finished" % func)
302 324
303 if cwd and olddir: 325 if cwd and olddir:
@@ -375,7 +397,7 @@ def create_progress_handler(func, progress, logfile, d):
375 # Use specified regex 397 # Use specified regex
376 return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile) 398 return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
377 elif progress.startswith("custom:"): 399 elif progress.startswith("custom:"):
378 # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__ 400 # Use a custom progress handler that was injected via other means
379 import functools 401 import functools
380 from types import ModuleType 402 from types import ModuleType
381 403
@@ -436,7 +458,11 @@ exit $ret
436 if fakerootcmd: 458 if fakerootcmd:
437 cmd = [fakerootcmd, runfile] 459 cmd = [fakerootcmd, runfile]
438 460
439 if verboseStdoutLogging: 461 # We only want to output to logger via LogTee if stdout is sys.__stdout__ (which will either
462 # be real stdout or subprocess PIPE or similar). In other cases we are being run "recursively",
463 # ie. inside another function, in which case stdout is already being captured so we don't
464 # i.e. inside another function, in which case stdout is already being captured so we don't
465 if verboseStdoutLogging and sys.stdout == sys.__stdout__:
440 logfile = LogTee(logger, StdoutNoopContextManager()) 466 logfile = LogTee(logger, StdoutNoopContextManager())
441 else: 467 else:
442 logfile = StdoutNoopContextManager() 468 logfile = StdoutNoopContextManager()
@@ -565,10 +591,8 @@ exit $ret
565def _task_data(fn, task, d): 591def _task_data(fn, task, d):
566 localdata = bb.data.createCopy(d) 592 localdata = bb.data.createCopy(d)
567 localdata.setVar('BB_FILENAME', fn) 593 localdata.setVar('BB_FILENAME', fn)
568 localdata.setVar('BB_CURRENTTASK', task[3:])
569 localdata.setVar('OVERRIDES', 'task-%s:%s' % 594 localdata.setVar('OVERRIDES', 'task-%s:%s' %
570 (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False))) 595 (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
571 localdata.finalize()
572 bb.data.expandKeys(localdata) 596 bb.data.expandKeys(localdata)
573 return localdata 597 return localdata
574 598
@@ -579,7 +603,7 @@ def _exec_task(fn, task, d, quieterr):
579 running it with its own local metadata, and with some useful variables set. 603 running it with its own local metadata, and with some useful variables set.
580 """ 604 """
581 if not d.getVarFlag(task, 'task', False): 605 if not d.getVarFlag(task, 'task', False):
582 event.fire(TaskInvalid(task, d), d) 606 event.fire(TaskInvalid(task, fn, d), d)
583 logger.error("No such task: %s" % task) 607 logger.error("No such task: %s" % task)
584 return 1 608 return 1
585 609
@@ -615,7 +639,8 @@ def _exec_task(fn, task, d, quieterr):
615 logorder = os.path.join(tempdir, 'log.task_order') 639 logorder = os.path.join(tempdir, 'log.task_order')
616 try: 640 try:
617 with open(logorder, 'a') as logorderfile: 641 with open(logorder, 'a') as logorderfile:
618 logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase)) 642 timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
643 logorderfile.write('{0} {1} ({2}): {3}\n'.format(timestamp, task, os.getpid(), logbase))
619 except OSError: 644 except OSError:
620 logger.exception("Opening log file '%s'", logorder) 645 logger.exception("Opening log file '%s'", logorder)
621 pass 646 pass
@@ -682,47 +707,55 @@ def _exec_task(fn, task, d, quieterr):
682 try: 707 try:
683 try: 708 try:
684 event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata) 709 event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata)
685 except (bb.BBHandledException, SystemExit):
686 return 1
687 710
688 try:
689 for func in (prefuncs or '').split(): 711 for func in (prefuncs or '').split():
690 exec_func(func, localdata) 712 exec_func(func, localdata)
691 exec_func(task, localdata) 713 exec_func(task, localdata)
692 for func in (postfuncs or '').split(): 714 for func in (postfuncs or '').split():
693 exec_func(func, localdata) 715 exec_func(func, localdata)
694 except bb.BBHandledException: 716 finally:
695 event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata) 717 # Need to flush and close the logs before sending events where the
696 return 1 718 # UI may try to look at the logs.
697 except Exception as exc: 719 sys.stdout.flush()
698 if quieterr: 720 sys.stderr.flush()
699 event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata) 721
700 else: 722 bblogger.removeHandler(handler)
701 errprinted = errchk.triggered 723
724 # Restore the backup fds
725 os.dup2(osi[0], osi[1])
726 os.dup2(oso[0], oso[1])
727 os.dup2(ose[0], ose[1])
728
729 # Close the backup fds
730 os.close(osi[0])
731 os.close(oso[0])
732 os.close(ose[0])
733
734 logfile.close()
735 if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
736 logger.debug2("Zero size logfn %s, removing", logfn)
737 bb.utils.remove(logfn)
738 bb.utils.remove(loglink)
739 except (Exception, SystemExit) as exc:
740 handled = False
741 if isinstance(exc, bb.BBHandledException):
742 handled = True
743
744 if quieterr:
745 if not handled:
746 logger.warning(str(exc))
747 event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
748 else:
749 errprinted = errchk.triggered
750 # If the output is already on stdout, we've printed the information in the
751 # logs once already so don't duplicate
752 if verboseStdoutLogging or handled:
753 errprinted = True
754 if not handled:
702 logger.error(str(exc)) 755 logger.error(str(exc))
703 event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata) 756 event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
704 return 1 757 return 1
705 finally:
706 sys.stdout.flush()
707 sys.stderr.flush()
708
709 bblogger.removeHandler(handler)
710
711 # Restore the backup fds
712 os.dup2(osi[0], osi[1])
713 os.dup2(oso[0], oso[1])
714 os.dup2(ose[0], ose[1])
715
716 # Close the backup fds
717 os.close(osi[0])
718 os.close(oso[0])
719 os.close(ose[0])
720 758
721 logfile.close()
722 if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
723 logger.debug2("Zero size logfn %s, removing", logfn)
724 bb.utils.remove(logfn)
725 bb.utils.remove(loglink)
726 event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata) 759 event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
727 760
728 if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False): 761 if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
@@ -760,132 +793,92 @@ def exec_task(fn, task, d, profile = False):
760 event.fire(failedevent, d) 793 event.fire(failedevent, d)
761 return 1 794 return 1
762 795
763def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False): 796def _get_cleanmask(taskname, mcfn):
764 """ 797 """
765 Internal stamp helper function 798 Internal stamp helper function to generate stamp cleaning mask
766 Makes sure the stamp directory exists
767 Returns the stamp path+filename 799 Returns the stamp path+filename
768 800
769 In the bitbake core, d can be a CacheData and file_name will be set. 801 In the bitbake core, d can be a CacheData and file_name will be set.
770 When called in task context, d will be a data store, file_name will not be set 802 When called in task context, d will be a data store, file_name will not be set
771 """ 803 """
772 taskflagname = taskname 804 cleanmask = bb.parse.siggen.stampcleanmask_mcfn(taskname, mcfn)
773 if taskname.endswith("_setscene") and taskname != "do_setscene": 805 taskflagname = taskname.replace("_setscene", "")
774 taskflagname = taskname.replace("_setscene", "") 806 if cleanmask:
775 807 return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
776 if file_name: 808 return []
777 stamp = d.stamp[file_name] 809
778 extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or "" 810def clean_stamp_mcfn(task, mcfn):
779 else: 811 cleanmask = _get_cleanmask(task, mcfn)
780 stamp = d.getVar('STAMP') 812 for mask in cleanmask:
781 file_name = d.getVar('BB_FILENAME') 813 for name in glob.glob(mask):
782 extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or "" 814 # Preserve sigdata files in the stamps directory
815 if "sigdata" in name or "sigbasedata" in name:
816 continue
817 # Preserve taint files in the stamps directory
818 if name.endswith('.taint'):
819 continue
820 os.unlink(name)
783 821
784 if baseonly: 822def clean_stamp(task, d):
785 return stamp 823 mcfn = d.getVar('BB_FILENAME')
786 if noextra: 824 clean_stamp_mcfn(task, mcfn)
787 extrainfo = ""
788 825
789 if not stamp: 826def make_stamp_mcfn(task, mcfn):
790 return
791 827
792 stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo) 828 basestamp = bb.parse.siggen.stampfile_mcfn(task, mcfn)
793 829
794 stampdir = os.path.dirname(stamp) 830 stampdir = os.path.dirname(basestamp)
795 if cached_mtime_noerror(stampdir) == 0: 831 if cached_mtime_noerror(stampdir) == 0:
796 bb.utils.mkdirhier(stampdir) 832 bb.utils.mkdirhier(stampdir)
797 833
798 return stamp 834 clean_stamp_mcfn(task, mcfn)
799 835
800def stamp_cleanmask_internal(taskname, d, file_name): 836 # Remove the file and recreate to force timestamp
801 """ 837 # change on broken NFS filesystems
802 Internal stamp helper function to generate stamp cleaning mask 838 if basestamp:
803 Returns the stamp path+filename 839 bb.utils.remove(basestamp)
840 open(basestamp, "w").close()
804 841
805 In the bitbake core, d can be a CacheData and file_name will be set. 842def make_stamp(task, d):
806 When called in task context, d will be a data store, file_name will not be set
807 """ 843 """
808 taskflagname = taskname 844 Creates/updates a stamp for a given task
809 if taskname.endswith("_setscene") and taskname != "do_setscene": 845 """
810 taskflagname = taskname.replace("_setscene", "") 846 mcfn = d.getVar('BB_FILENAME')
811
812 if file_name:
813 stamp = d.stampclean[file_name]
814 extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
815 else:
816 stamp = d.getVar('STAMPCLEAN')
817 file_name = d.getVar('BB_FILENAME')
818 extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
819 847
820 if not stamp: 848 make_stamp_mcfn(task, mcfn)
821 return []
822 849
823 cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo) 850 # If we're in task context, write out a signature file for each task
851 # as it completes
852 if not task.endswith("_setscene"):
853 stampbase = bb.parse.siggen.stampfile_base(mcfn)
854 bb.parse.siggen.dump_sigtask(mcfn, task, stampbase, True)
824 855
825 return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
826 856
827def make_stamp(task, d, file_name = None): 857def find_stale_stamps(task, mcfn):
828 """ 858 current = bb.parse.siggen.stampfile_mcfn(task, mcfn)
829 Creates/updates a stamp for a given task 859 current2 = bb.parse.siggen.stampfile_mcfn(task + "_setscene", mcfn)
830 (d can be a data dict or dataCache) 860 cleanmask = _get_cleanmask(task, mcfn)
831 """ 861 found = []
832 cleanmask = stamp_cleanmask_internal(task, d, file_name)
833 for mask in cleanmask: 862 for mask in cleanmask:
834 for name in glob.glob(mask): 863 for name in glob.glob(mask):
835 # Preserve sigdata files in the stamps directory
836 if "sigdata" in name or "sigbasedata" in name: 864 if "sigdata" in name or "sigbasedata" in name:
837 continue 865 continue
838 # Preserve taint files in the stamps directory
839 if name.endswith('.taint'): 866 if name.endswith('.taint'):
840 continue 867 continue
841 os.unlink(name) 868 if name == current or name == current2:
842 869 continue
843 stamp = stamp_internal(task, d, file_name) 870 logger.debug2("Stampfile %s does not match %s or %s" % (name, current, current2))
844 # Remove the file and recreate to force timestamp 871 found.append(name)
845 # change on broken NFS filesystems 872 return found
846 if stamp:
847 bb.utils.remove(stamp)
848 open(stamp, "w").close()
849
850 # If we're in task context, write out a signature file for each task
851 # as it completes
852 if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
853 stampbase = stamp_internal(task, d, None, True)
854 file_name = d.getVar('BB_FILENAME')
855 bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
856
857def del_stamp(task, d, file_name = None):
858 """
859 Removes a stamp for a given task
860 (d can be a data dict or dataCache)
861 """
862 stamp = stamp_internal(task, d, file_name)
863 bb.utils.remove(stamp)
864 873
865def write_taint(task, d, file_name = None): 874def write_taint(task, d):
866 """ 875 """
867 Creates a "taint" file which will force the specified task and its 876 Creates a "taint" file which will force the specified task and its
868 dependents to be re-run the next time by influencing the value of its 877 dependents to be re-run the next time by influencing the value of its
869 taskhash. 878 taskhash.
870 (d can be a data dict or dataCache)
871 """ 879 """
872 import uuid 880 mcfn = d.getVar('BB_FILENAME')
873 if file_name: 881 bb.parse.siggen.invalidate_task(task, mcfn)
874 taintfn = d.stamp[file_name] + '.' + task + '.taint'
875 else:
876 taintfn = d.getVar('STAMP') + '.' + task + '.taint'
877 bb.utils.mkdirhier(os.path.dirname(taintfn))
878 # The specific content of the taint file is not really important,
879 # we just need it to be random, so a random UUID is used
880 with open(taintfn, 'w') as taintf:
881 taintf.write(str(uuid.uuid4()))
882
883def stampfile(taskname, d, file_name = None, noextra=False):
884 """
885 Return the stamp for a given task
886 (d can be a data dict or dataCache)
887 """
888 return stamp_internal(taskname, d, file_name, noextra=noextra)
889 882
890def add_tasks(tasklist, d): 883def add_tasks(tasklist, d):
891 task_deps = d.getVar('_task_deps', False) 884 task_deps = d.getVar('_task_deps', False)
@@ -910,6 +903,11 @@ def add_tasks(tasklist, d):
910 task_deps[name] = {} 903 task_deps[name] = {}
911 if name in flags: 904 if name in flags:
912 deptask = d.expand(flags[name]) 905 deptask = d.expand(flags[name])
906 if name in ['noexec', 'fakeroot', 'nostamp']:
907 if deptask != '1':
908 bb.warn("In a future version of BitBake, setting the '{}' flag to something other than '1' "
909 "will result in the flag not being set. See YP bug #13808.".format(name))
910
913 task_deps[name][task] = deptask 911 task_deps[name][task] = deptask
914 getTask('mcdepends') 912 getTask('mcdepends')
915 getTask('depends') 913 getTask('depends')
@@ -934,9 +932,13 @@ def add_tasks(tasklist, d):
934 # don't assume holding a reference 932 # don't assume holding a reference
935 d.setVar('_task_deps', task_deps) 933 d.setVar('_task_deps', task_deps)
936 934
935def ensure_task_prefix(name):
936 if name[:3] != "do_":
937 name = "do_" + name
938 return name
939
937def addtask(task, before, after, d): 940def addtask(task, before, after, d):
938 if task[:3] != "do_": 941 task = ensure_task_prefix(task)
939 task = "do_" + task
940 942
941 d.setVarFlag(task, "task", 1) 943 d.setVarFlag(task, "task", 1)
942 bbtasks = d.getVar('__BBTASKS', False) or [] 944 bbtasks = d.getVar('__BBTASKS', False) or []
@@ -948,19 +950,20 @@ def addtask(task, before, after, d):
948 if after is not None: 950 if after is not None:
949 # set up deps for function 951 # set up deps for function
950 for entry in after.split(): 952 for entry in after.split():
953 entry = ensure_task_prefix(entry)
951 if entry not in existing: 954 if entry not in existing:
952 existing.append(entry) 955 existing.append(entry)
953 d.setVarFlag(task, "deps", existing) 956 d.setVarFlag(task, "deps", existing)
954 if before is not None: 957 if before is not None:
955 # set up things that depend on this func 958 # set up things that depend on this func
956 for entry in before.split(): 959 for entry in before.split():
960 entry = ensure_task_prefix(entry)
957 existing = d.getVarFlag(entry, "deps", False) or [] 961 existing = d.getVarFlag(entry, "deps", False) or []
958 if task not in existing: 962 if task not in existing:
959 d.setVarFlag(entry, "deps", [task] + existing) 963 d.setVarFlag(entry, "deps", [task] + existing)
960 964
961def deltask(task, d): 965def deltask(task, d):
962 if task[:3] != "do_": 966 task = ensure_task_prefix(task)
963 task = "do_" + task
964 967
965 bbtasks = d.getVar('__BBTASKS', False) or [] 968 bbtasks = d.getVar('__BBTASKS', False) or []
966 if task in bbtasks: 969 if task in bbtasks:
@@ -1008,6 +1011,8 @@ def tasksbetween(task_start, task_end, d):
1008 def follow_chain(task, endtask, chain=None): 1011 def follow_chain(task, endtask, chain=None):
1009 if not chain: 1012 if not chain:
1010 chain = [] 1013 chain = []
1014 if task in chain:
1015 bb.fatal("Circular task dependency: %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
1011 chain.append(task) 1016 chain.append(task)
1012 for othertask in tasks: 1017 for othertask in tasks:
1013 if othertask == task: 1018 if othertask == task:
@@ -1023,3 +1028,9 @@ def tasksbetween(task_start, task_end, d):
1023 chain.pop() 1028 chain.pop()
1024 follow_chain(task_start, task_end) 1029 follow_chain(task_start, task_end)
1025 return outtasks 1030 return outtasks
1031
1032def listtasks(d):
1033 """
1034 Return the list of tasks in the current recipe.
1035 """
1036 return tuple(d.getVar('__BBTASKS', False) or ())
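This hunk replaces the old `d`-or-`CacheData` stamp helpers (`stamp_internal`, `stamp_cleanmask_internal`, `del_stamp`, `stampfile`) with a simpler split: the `*_mcfn` variants take an explicit multiconfig filename and delegate to the signature generator, while `make_stamp`/`clean_stamp`/`write_taint` take a datastore and read `BB_FILENAME` themselves. `ensure_task_prefix` also centralizes the `do_` normalization previously duplicated in `addtask` and `deltask`. A short sketch of the normalization plus the new helpers (assuming a populated datastore `d`, as in task context):

```python
from bb.build import ensure_task_prefix

# Task names are normalized to a "do_" prefix before use, so
# "compile" and "do_compile" name the same task.
assert ensure_task_prefix("compile") == "do_compile"
assert ensure_task_prefix("do_compile") == "do_compile"

# With a populated datastore d (task context), the new helpers
# operate on the recipe recorded in BB_FILENAME:
#   bb.build.make_stamp("do_compile", d)   # clean old stamps, write a new one
#   bb.build.clean_stamp("do_compile", d)  # remove stamps, keep sigdata/taint
#   bb.build.write_taint("do_compile", d)  # force the task to rerun next time
#   bb.build.listtasks(d)                  # tuple of all defined task names
```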
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index aea2b8bc11..2361c5684d 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -19,14 +19,16 @@
19import os 19import os
20import logging 20import logging
21import pickle 21import pickle
22from collections import defaultdict, Mapping 22from collections import defaultdict
23from collections.abc import Mapping
23import bb.utils 24import bb.utils
24from bb import PrefixLoggerAdapter 25from bb import PrefixLoggerAdapter
25import re 26import re
27import shutil
26 28
27logger = logging.getLogger("BitBake.Cache") 29logger = logging.getLogger("BitBake.Cache")
28 30
29__cache_version__ = "154" 31__cache_version__ = "156"
30 32
31def getCacheFile(path, filename, mc, data_hash): 33def getCacheFile(path, filename, mc, data_hash):
32 mcspec = '' 34 mcspec = ''
@@ -53,12 +55,12 @@ class RecipeInfoCommon(object):
53 55
54 @classmethod 56 @classmethod
55 def pkgvar(cls, var, packages, metadata): 57 def pkgvar(cls, var, packages, metadata):
56 return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata)) 58 return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
57 for pkg in packages) 59 for pkg in packages)
58 60
59 @classmethod 61 @classmethod
60 def taskvar(cls, var, tasks, metadata): 62 def taskvar(cls, var, tasks, metadata):
61 return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata)) 63 return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
62 for task in tasks) 64 for task in tasks)
63 65
64 @classmethod 66 @classmethod
@@ -103,7 +105,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
103 105
104 self.tasks = metadata.getVar('__BBTASKS', False) 106 self.tasks = metadata.getVar('__BBTASKS', False)
105 107
106 self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata) 108 self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
107 self.hashfilename = self.getvar('BB_HASHFILENAME', metadata) 109 self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
108 110
109 self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}} 111 self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
@@ -126,6 +128,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
126 self.inherits = self.getvar('__inherit_cache', metadata, expand=False) 128 self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
127 self.fakerootenv = self.getvar('FAKEROOTENV', metadata) 129 self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
128 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata) 130 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
131 self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
129 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata) 132 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
130 self.extradepsfunc = self.getvar('calculate_extra_depends', metadata) 133 self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)
131 134
@@ -163,6 +166,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
163 cachedata.fakerootenv = {} 166 cachedata.fakerootenv = {}
164 cachedata.fakerootnoenv = {} 167 cachedata.fakerootnoenv = {}
165 cachedata.fakerootdirs = {} 168 cachedata.fakerootdirs = {}
169 cachedata.fakerootlogs = {}
166 cachedata.extradepsfunc = {} 170 cachedata.extradepsfunc = {}
167 171
168 def add_cacheData(self, cachedata, fn): 172 def add_cacheData(self, cachedata, fn):
@@ -212,7 +216,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
212 216
213 # Collect files we may need for possible world-dep 217 # Collect files we may need for possible world-dep
214 # calculations 218 # calculations
215 if not self.not_world: 219 if not bb.utils.to_boolean(self.not_world):
216 cachedata.possible_world.append(fn) 220 cachedata.possible_world.append(fn)
217 #else: 221 #else:
218 # logger.debug2("EXCLUDE FROM WORLD: %s", fn) 222 # logger.debug2("EXCLUDE FROM WORLD: %s", fn)
@@ -231,17 +235,116 @@ class CoreRecipeInfo(RecipeInfoCommon):
231 cachedata.fakerootenv[fn] = self.fakerootenv 235 cachedata.fakerootenv[fn] = self.fakerootenv
232 cachedata.fakerootnoenv[fn] = self.fakerootnoenv 236 cachedata.fakerootnoenv[fn] = self.fakerootnoenv
233 cachedata.fakerootdirs[fn] = self.fakerootdirs 237 cachedata.fakerootdirs[fn] = self.fakerootdirs
238 cachedata.fakerootlogs[fn] = self.fakerootlogs
234 cachedata.extradepsfunc[fn] = self.extradepsfunc 239 cachedata.extradepsfunc[fn] = self.extradepsfunc
235 240
241
242class SiggenRecipeInfo(RecipeInfoCommon):
243 __slots__ = ()
244
245 classname = "SiggenRecipeInfo"
246 cachefile = "bb_cache_" + classname +".dat"
247 # we don't want to show this information in graph files so don't set cachefields
248 #cachefields = []
249
250 def __init__(self, filename, metadata):
251 self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
252 self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
253 self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)
254
255 @classmethod
256 def init_cacheData(cls, cachedata):
257 cachedata.siggen_taskdeps = {}
258 cachedata.siggen_gendeps = {}
259 cachedata.siggen_varvals = {}
260
261 def add_cacheData(self, cachedata, fn):
262 cachedata.siggen_gendeps[fn] = self.siggen_gendeps
263 cachedata.siggen_varvals[fn] = self.siggen_varvals
264 cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps
265
266 # The siggen variable data is large and impacts:
267 # - bitbake's overall memory usage
268 # - the amount of data sent over IPC between parsing processes and the server
269 # - the size of the cache files on disk
270 # - the size of "sigdata" hash information files on disk
271 # The data consists of strings (some large) or frozenset lists of variables
272 # As such, we a) deduplicate the data here and b) pass references to the object on second
273 # and later accesses (e.g. over IPC or when saving into pickle).
274
275 store = {}
276 save_map = {}
277 save_count = 1
278 restore_map = {}
279 restore_count = {}
280
281 @classmethod
282 def reset(cls):
283 # Needs to be called before starting new streamed data in a given process
284 # (e.g. writing out the cache again)
285 cls.save_map = {}
286 cls.save_count = 1
287 cls.restore_map = {}
288
289 @classmethod
290 def _save(cls, deps):
291 ret = []
292 if not deps:
293 return deps
294 for dep in deps:
295 fs = deps[dep]
296 if fs is None:
297 ret.append((dep, None, None))
298 elif fs in cls.save_map:
299 ret.append((dep, None, cls.save_map[fs]))
300 else:
301 cls.save_map[fs] = cls.save_count
302 ret.append((dep, fs, cls.save_count))
303 cls.save_count = cls.save_count + 1
304 return ret
305
306 @classmethod
307 def _restore(cls, deps, pid):
308 ret = {}
309 if not deps:
310 return deps
311 if pid not in cls.restore_map:
312 cls.restore_map[pid] = {}
313 map = cls.restore_map[pid]
314 for dep, fs, mapnum in deps:
315 if fs is None and mapnum is None:
316 ret[dep] = None
317 elif fs is None:
318 ret[dep] = map[mapnum]
319 else:
320 try:
321 fs = cls.store[fs]
322 except KeyError:
323 cls.store[fs] = fs
324 map[mapnum] = fs
325 ret[dep] = fs
326 return ret
327
328 def __getstate__(self):
329 ret = {}
330 for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
331 ret[key] = self._save(self.__dict__[key])
332 ret['pid'] = os.getpid()
333 return ret
334
335 def __setstate__(self, state):
336 pid = state['pid']
337 for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
338 setattr(self, key, self._restore(state[key], pid))
339
340
236def virtualfn2realfn(virtualfn): 341def virtualfn2realfn(virtualfn):
237 """ 342 """
238 Convert a virtual file name to a real one + the associated subclass keyword 343 Convert a virtual file name to a real one + the associated subclass keyword
239 """ 344 """
240 mc = "" 345 mc = ""
241 if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2: 346 if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
242 elems = virtualfn.split(':') 347 (_, mc, virtualfn) = virtualfn.split(':', 2)
243 mc = elems[1]
244 virtualfn = ":".join(elems[2:])
245 348
246 fn = virtualfn 349 fn = virtualfn
247 cls = "" 350 cls = ""
@@ -264,7 +367,7 @@ def realfn2virtual(realfn, cls, mc):
264 367
265def variant2virtual(realfn, variant): 368def variant2virtual(realfn, variant):
266 """ 369 """
267 Convert a real filename + the associated subclass keyword to a virtual filename 370 Convert a real filename + a variant to a virtual filename
268 """ 371 """
269 if variant == "": 372 if variant == "":
270 return realfn 373 return realfn
@@ -275,104 +378,26 @@ def variant2virtual(realfn, variant):
275 return "mc:" + elems[1] + ":" + realfn 378 return "mc:" + elems[1] + ":" + realfn
276 return "virtual:" + variant + ":" + realfn 379 return "virtual:" + variant + ":" + realfn
277 380
278def parse_recipe(bb_data, bbfile, appends, mc=''): 381#
279 """ 382# Cooker calls cacheValid on its recipe list, then either calls loadCached
280 Parse a recipe 383# from its main thread or parses from separate processes to generate an up to
281 """ 384# date cache
282 385#
283 chdir_back = False 386class Cache(object):
284
285 bb_data.setVar("__BBMULTICONFIG", mc)
286
287 # expand tmpdir to include this topdir
288 bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
289 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
290 oldpath = os.path.abspath(os.getcwd())
291 bb.parse.cached_mtime_noerror(bbfile_loc)
292
293 # The ConfHandler first looks if there is a TOPDIR and if not
294 # then it would call getcwd().
295 # Previously, we chdir()ed to bbfile_loc, called the handler
296 # and finally chdir()ed back, a couple of thousand times. We now
297 # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
298 if not bb_data.getVar('TOPDIR', False):
299 chdir_back = True
300 bb_data.setVar('TOPDIR', bbfile_loc)
301 try:
302 if appends:
303 bb_data.setVar('__BBAPPEND', " ".join(appends))
304 bb_data = bb.parse.handle(bbfile, bb_data)
305 if chdir_back:
306 os.chdir(oldpath)
307 return bb_data
308 except:
309 if chdir_back:
310 os.chdir(oldpath)
311 raise
312
313
314
315class NoCache(object):
316
317 def __init__(self, databuilder):
318 self.databuilder = databuilder
319 self.data = databuilder.data
320
321 def loadDataFull(self, virtualfn, appends):
322 """
323 Return a complete set of data for fn.
324 To do this, we need to parse the file.
325 """
326 logger.debug("Parsing %s (full)" % virtualfn)
327 (fn, virtual, mc) = virtualfn2realfn(virtualfn)
328 bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
329 return bb_data[virtual]
330
331 def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
332 """
333 Load and parse one .bb build file
334 Return the data and whether parsing resulted in the file being skipped
335 """
336
337 if virtonly:
338 (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
339 bb_data = self.databuilder.mcdata[mc].createCopy()
340 bb_data.setVar("__ONLYFINALISE", virtual or "default")
341 datastores = parse_recipe(bb_data, bbfile, appends, mc)
342 return datastores
343
344 if mc is not None:
345 bb_data = self.databuilder.mcdata[mc].createCopy()
346 return parse_recipe(bb_data, bbfile, appends, mc)
347
348 bb_data = self.data.createCopy()
349 datastores = parse_recipe(bb_data, bbfile, appends)
350
351 for mc in self.databuilder.mcdata:
352 if not mc:
353 continue
354 bb_data = self.databuilder.mcdata[mc].createCopy()
355 newstores = parse_recipe(bb_data, bbfile, appends, mc)
356 for ns in newstores:
357 datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
358
359 return datastores
360
361class Cache(NoCache):
362 """ 387 """
363 BitBake Cache implementation 388 BitBake Cache implementation
364 """ 389 """
365 def __init__(self, databuilder, mc, data_hash, caches_array): 390 def __init__(self, databuilder, mc, data_hash, caches_array):
366 super().__init__(databuilder) 391 self.databuilder = databuilder
367 data = databuilder.data 392 self.data = databuilder.data
368 393
369 # Pass caches_array information into Cache Constructor 394 # Pass caches_array information into Cache Constructor
370 # It will be used later for deciding whether we 395 # It will be used later for deciding whether we
371 # need extra cache file dump/load support 396 # need extra cache file dump/load support
372 self.mc = mc 397 self.mc = mc
373 self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger) 398 self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else ''), logger)
374 self.caches_array = caches_array 399 self.caches_array = caches_array
375 self.cachedir = data.getVar("CACHE") 400 self.cachedir = self.data.getVar("CACHE")
376 self.clean = set() 401 self.clean = set()
377 self.checked = set() 402 self.checked = set()
378 self.depends_cache = {} 403 self.depends_cache = {}
@@ -382,20 +407,12 @@ class Cache(NoCache):
382 self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+') 407 self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
383 408
384 if self.cachedir in [None, '']: 409 if self.cachedir in [None, '']:
385 self.has_cache = False 410 bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")
386 self.logger.info("Not using a cache. "
387 "Set CACHE = <directory> to enable.")
388 return
389
390 self.has_cache = True
391 411
392 def getCacheFile(self, cachefile): 412 def getCacheFile(self, cachefile):
393 return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash) 413 return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
394 414
395 def prepare_cache(self, progress): 415 def prepare_cache(self, progress):
396 if not self.has_cache:
397 return 0
398
399 loaded = 0 416 loaded = 0
400 417
401 self.cachefile = self.getCacheFile("bb_cache.dat") 418 self.cachefile = self.getCacheFile("bb_cache.dat")
@@ -424,7 +441,7 @@ class Cache(NoCache):
424 else: 441 else:
425 symlink = os.path.join(self.cachedir, "bb_cache.dat") 442 symlink = os.path.join(self.cachedir, "bb_cache.dat")
426 443
427 if os.path.exists(symlink): 444 if os.path.exists(symlink) or os.path.islink(symlink):
428 bb.utils.remove(symlink) 445 bb.utils.remove(symlink)
429 try: 446 try:
430 os.symlink(os.path.basename(self.cachefile), symlink) 447 os.symlink(os.path.basename(self.cachefile), symlink)
@@ -434,9 +451,6 @@ class Cache(NoCache):
434 return loaded 451 return loaded
435 452
436 def cachesize(self): 453 def cachesize(self):
437 if not self.has_cache:
438 return 0
439
440 cachesize = 0 454 cachesize = 0
441 for cache_class in self.caches_array: 455 for cache_class in self.caches_array:
442 cachefile = self.getCacheFile(cache_class.cachefile) 456 cachefile = self.getCacheFile(cache_class.cachefile)
@@ -498,11 +512,11 @@ class Cache(NoCache):
498 512
499 return len(self.depends_cache) 513 return len(self.depends_cache)
500 514
501 def parse(self, filename, appends): 515 def parse(self, filename, appends, layername):
502 """Parse the specified filename, returning the recipe information""" 516 """Parse the specified filename, returning the recipe information"""
503 self.logger.debug("Parsing %s", filename) 517 self.logger.debug("Parsing %s", filename)
504 infos = [] 518 infos = []
505 datastores = self.load_bbfile(filename, appends, mc=self.mc) 519 datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
506 depends = [] 520 depends = []
507 variants = [] 521 variants = []
508 # Process the "real" fn last so we can store variants list 522 # Process the "real" fn last so we can store variants list
@@ -524,43 +538,19 @@ class Cache(NoCache):
524 538
525 return infos 539 return infos
526 540
527 def load(self, filename, appends): 541 def loadCached(self, filename, appends):
528 """Obtain the recipe information for the specified filename, 542 """Obtain the recipe information for the specified filename,
529 using cached values if available, otherwise parsing. 543 using cached values.
530 544 """
531 Note that if it does parse to obtain the info, it will not
532 automatically add the information to the cache or to your
533 CacheData. Use the add or add_info method to do so after
534 running this, or use loadData instead."""
535 cached = self.cacheValid(filename, appends)
536 if cached:
537 infos = []
538 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
539 info_array = self.depends_cache[filename]
540 for variant in info_array[0].variants:
541 virtualfn = variant2virtual(filename, variant)
542 infos.append((virtualfn, self.depends_cache[virtualfn]))
543 else:
544 return self.parse(filename, appends, configdata, self.caches_array)
545
546 return cached, infos
547
548 def loadData(self, fn, appends, cacheData):
549 """Load the recipe info for the specified filename,
550 parsing and adding to the cache if necessary, and adding
551 the recipe information to the supplied CacheData instance."""
552 skipped, virtuals = 0, 0
553 545
554 cached, infos = self.load(fn, appends) 546 infos = []
555 for virtualfn, info_array in infos: 547 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
556 if info_array[0].skipped: 548 info_array = self.depends_cache[filename]
557 self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason) 549 for variant in info_array[0].variants:
558 skipped += 1 550 virtualfn = variant2virtual(filename, variant)
559 else: 551 infos.append((virtualfn, self.depends_cache[virtualfn]))
560 self.add_info(virtualfn, info_array, cacheData, not cached)
561 virtuals += 1
562 552
563 return cached, skipped, virtuals 553 return infos
564 554
565 def cacheValid(self, fn, appends): 555 def cacheValid(self, fn, appends):
566 """ 556 """
@@ -569,10 +559,6 @@ class Cache(NoCache):
569 """ 559 """
570 if fn not in self.checked: 560 if fn not in self.checked:
571 self.cacheValidUpdate(fn, appends) 561 self.cacheValidUpdate(fn, appends)
572
573 # Is cache enabled?
574 if not self.has_cache:
575 return False
576 if fn in self.clean: 562 if fn in self.clean:
577 return True 563 return True
578 return False 564 return False
@@ -582,10 +568,6 @@ class Cache(NoCache):
582 Is the cache valid for fn? 568 Is the cache valid for fn?
583 Make thorough (slower) checks including timestamps. 569 Make thorough (slower) checks including timestamps.
584 """ 570 """
585 # Is cache enabled?
586 if not self.has_cache:
587 return False
588
589 self.checked.add(fn) 571 self.checked.add(fn)
590 572
591 # File isn't in depends_cache 573 # File isn't in depends_cache
@@ -636,7 +618,7 @@ class Cache(NoCache):
636 for f in flist: 618 for f in flist:
637 if not f: 619 if not f:
638 continue 620 continue
639 f, exist = f.split(":") 621 f, exist = f.rsplit(":", 1)
640 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): 622 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
641 self.logger.debug2("%s's file checksum list file %s changed", 623 self.logger.debug2("%s's file checksum list file %s changed",
642 fn, f) 624 fn, f)
@@ -692,10 +674,6 @@ class Cache(NoCache):
692 Save the cache 674 Save the cache
693 Called from the parser when complete (or exiting) 675 Called from the parser when complete (or exiting)
694 """ 676 """
695
696 if not self.has_cache:
697 return
698
699 if self.cacheclean: 677 if self.cacheclean:
700 self.logger.debug2("Cache is clean, not saving.") 678 self.logger.debug2("Cache is clean, not saving.")
701 return 679 return
@@ -716,6 +694,7 @@ class Cache(NoCache):
716 p.dump(info) 694 p.dump(info)
717 695
718 del self.depends_cache 696 del self.depends_cache
697 SiggenRecipeInfo.reset()
719 698
720 @staticmethod 699 @staticmethod
721 def mtime(cachefile): 700 def mtime(cachefile):
@@ -738,26 +717,11 @@ class Cache(NoCache):
738 if watcher: 717 if watcher:
739 watcher(info_array[0].file_depends) 718 watcher(info_array[0].file_depends)
740 719
741 if not self.has_cache:
742 return
743
744 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache: 720 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
745 if parsed: 721 if parsed:
746 self.cacheclean = False 722 self.cacheclean = False
747 self.depends_cache[filename] = info_array 723 self.depends_cache[filename] = info_array
748 724
749 def add(self, file_name, data, cacheData, parsed=None):
750 """
751 Save data we need into the cache
752 """
753
754 realfn = virtualfn2realfn(file_name)[0]
755
756 info_array = []
757 for cache_class in self.caches_array:
758 info_array.append(cache_class(realfn, data))
759 self.add_info(file_name, info_array, cacheData, parsed)
760
761class MulticonfigCache(Mapping): 725class MulticonfigCache(Mapping):
762 def __init__(self, databuilder, data_hash, caches_array): 726 def __init__(self, databuilder, data_hash, caches_array):
763 def progress(p): 727 def progress(p):
@@ -794,6 +758,7 @@ class MulticonfigCache(Mapping):
794 loaded = 0 758 loaded = 0
795 759
796 for c in self.__caches.values(): 760 for c in self.__caches.values():
761 SiggenRecipeInfo.reset()
797 loaded += c.prepare_cache(progress) 762 loaded += c.prepare_cache(progress)
798 previous_progress = current_progress 763 previous_progress = current_progress
799 764
@@ -814,25 +779,6 @@ class MulticonfigCache(Mapping):
814 for k in self.__caches: 779 for k in self.__caches:
815 yield k 780 yield k
816 781
817def init(cooker):
818 """
819 The Objective: Cache the minimum amount of data possible yet get to the
820 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
821
822 To do this, we intercept getVar calls and only cache the variables we see
823 being accessed. We rely on the cache getVar calls being made for all
824 variables bitbake might need to use to reach this stage. For each cached
825 file we need to track:
826
827 * Its mtime
828 * The mtimes of all its dependencies
829 * Whether it caused a parse.SkipRecipe exception
830
831 Files causing parsing errors are evicted from the cache.
832
833 """
834 return Cache(cooker.configuration.data, cooker.configuration.data_hash)
835
836 782
837class CacheData(object): 783class CacheData(object):
838 """ 784 """
@@ -871,11 +817,10 @@ class MultiProcessCache(object):
871 self.cachedata = self.create_cachedata() 817 self.cachedata = self.create_cachedata()
872 self.cachedata_extras = self.create_cachedata() 818 self.cachedata_extras = self.create_cachedata()
873 819
874 def init_cache(self, d, cache_file_name=None): 820 def init_cache(self, cachedir, cache_file_name=None):
875 cachedir = (d.getVar("PERSISTENT_DIR") or 821 if not cachedir:
876 d.getVar("CACHE"))
877 if cachedir in [None, '']:
878 return 822 return
823
879 bb.utils.mkdirhier(cachedir) 824 bb.utils.mkdirhier(cachedir)
880 self.cachefile = os.path.join(cachedir, 825 self.cachefile = os.path.join(cachedir,
881 cache_file_name or self.__class__.cache_file_name) 826 cache_file_name or self.__class__.cache_file_name)
@@ -902,10 +847,24 @@ class MultiProcessCache(object):
902 data = [{}] 847 data = [{}]
903 return data 848 return data
904 849
850 def clear_cache(self):
851 if not self.cachefile:
852 bb.fatal("Can't clear invalid cachefile")
853
854 self.cachedata = self.create_cachedata()
855 self.cachedata_extras = self.create_cachedata()
856 with bb.utils.fileslocked([self.cachefile + ".lock"]):
857 bb.utils.remove(self.cachefile)
858 bb.utils.remove(self.cachefile + "-*")
859
905 def save_extras(self): 860 def save_extras(self):
906 if not self.cachefile: 861 if not self.cachefile:
907 return 862 return
908 863
864 have_data = any(self.cachedata_extras)
865 if not have_data:
866 return
867
909 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True) 868 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
910 869
911 i = os.getpid() 870 i = os.getpid()
@@ -940,6 +899,8 @@ class MultiProcessCache(object):
940 899
941 data = self.cachedata 900 data = self.cachedata
942 901
902 have_data = False
903
943 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: 904 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
944 f = os.path.join(os.path.dirname(self.cachefile), f) 905 f = os.path.join(os.path.dirname(self.cachefile), f)
945 try: 906 try:
@@ -954,12 +915,14 @@ class MultiProcessCache(object):
954 os.unlink(f) 915 os.unlink(f)
955 continue 916 continue
956 917
918 have_data = True
957 self.merge_data(extradata, data) 919 self.merge_data(extradata, data)
958 os.unlink(f) 920 os.unlink(f)
959 921
960 with open(self.cachefile, "wb") as f: 922 if have_data:
961 p = pickle.Pickler(f, -1) 923 with open(self.cachefile, "wb") as f:
962 p.dump([data, self.__class__.CACHE_VERSION]) 924 p = pickle.Pickler(f, -1)
925 p.dump([data, self.__class__.CACHE_VERSION])
963 926
964 bb.utils.unlockfile(glf) 927 bb.utils.unlockfile(glf)
965 928
@@ -1015,3 +978,11 @@ class SimpleCache(object):
1015 p.dump([data, self.cacheversion]) 978 p.dump([data, self.cacheversion])
1016 979
1017 bb.utils.unlockfile(glf) 980 bb.utils.unlockfile(glf)
981
982 def copyfile(self, target):
983 if not self.cachefile:
984 return
985
986 glf = bb.utils.lockfile(self.cachefile + ".lock")
987 shutil.copy(self.cachefile, target)
988 bb.utils.unlockfile(glf)
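The MultiProcessCache changes above make both save paths conditional on real data: save_extras() returns early when a process has no extras, and save_merge() only rewrites the shared file when at least one per-PID extras file was merged in. A minimal standalone sketch of the same pattern (plain pickle and os only; the FooCache name and the absence of bb.utils file locking are simplifications, not the real implementation):

    import os, pickle

    class FooCache:
        # One shared file plus per-process "-<pid>" extras, merged later
        def __init__(self, cachefile):
            self.cachefile = cachefile
            self.cachedata = [{}]         # merged data
            self.cachedata_extras = [{}]  # this process's additions

        def save_extras(self):
            if not any(self.cachedata_extras):
                return  # nothing new, skip the write entirely
            with open("%s-%d" % (self.cachefile, os.getpid()), "wb") as f:
                pickle.dump(self.cachedata_extras, f, -1)

        def save_merge(self):
            have_data = False
            base = os.path.basename(self.cachefile) + "-"
            dirname = os.path.dirname(self.cachefile) or "."
            for name in os.listdir(dirname):
                if not name.startswith(base):
                    continue
                path = os.path.join(dirname, name)
                with open(path, "rb") as fh:
                    extradata = pickle.load(fh)
                self.cachedata[0].update(extradata[0])  # cf. merge_data()
                os.unlink(path)
                have_data = True
            if have_data:
                with open(self.cachefile, "wb") as f:
                    pickle.dump(self.cachedata, f, -1)

Each parser process dumps only its own additions to cachefile-<pid>; the server folds them into the shared file afterwards, so concurrent writers never contend on one file.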
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
index 1d50a26426..3fb39a303e 100644
--- a/bitbake/lib/bb/checksum.py
+++ b/bitbake/lib/bb/checksum.py
@@ -11,10 +11,13 @@ import os
11import stat 11import stat
12import bb.utils 12import bb.utils
13import logging 13import logging
14import re
14from bb.cache import MultiProcessCache 15from bb.cache import MultiProcessCache
15 16
16logger = logging.getLogger("BitBake.Cache") 17logger = logging.getLogger("BitBake.Cache")
17 18
19filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
20
18# mtime cache (non-persistent) 21# mtime cache (non-persistent)
19# based upon the assumption that files do not change during bitbake run 22# based upon the assumption that files do not change during bitbake run
20class FileMtimeCache(object): 23class FileMtimeCache(object):
@@ -50,6 +53,7 @@ class FileChecksumCache(MultiProcessCache):
50 MultiProcessCache.__init__(self) 53 MultiProcessCache.__init__(self)
51 54
52 def get_checksum(self, f): 55 def get_checksum(self, f):
56 f = os.path.normpath(f)
53 entry = self.cachedata[0].get(f) 57 entry = self.cachedata[0].get(f)
54 cmtime = self.mtime_cache.cached_mtime(f) 58 cmtime = self.mtime_cache.cached_mtime(f)
55 if entry: 59 if entry:
@@ -84,22 +88,36 @@ class FileChecksumCache(MultiProcessCache):
84 return None 88 return None
85 return checksum 89 return checksum
86 90
91 #
92 # Changing the format of file-checksums is problematic as both OE and Bitbake have
93 # knowledge of them. We need to encode a new piece of data, the portion of the path
94 # we care about from a checksum perspective. This means that files that change subdirectory
95 # are tracked by the task hashes. To do this, we do something horrible and put a "/./" into
96 # the path. The filesystem handles it but it gives us a marker to know which subsection
97 # of the path to cache.
98 #
87 def checksum_dir(pth): 99 def checksum_dir(pth):
88 # Handle directories recursively 100 # Handle directories recursively
89 if pth == "/": 101 if pth == "/":
90 bb.fatal("Refusing to checksum /") 102 bb.fatal("Refusing to checksum /")
103 pth = pth.rstrip("/")
91 dirchecksums = [] 104 dirchecksums = []
92 for root, dirs, files in os.walk(pth, topdown=True): 105 for root, dirs, files in os.walk(pth, topdown=True):
93 [dirs.remove(d) for d in list(dirs) if d in localdirsexclude] 106 [dirs.remove(d) for d in list(dirs) if d in localdirsexclude]
94 for name in files: 107 for name in files:
95 fullpth = os.path.join(root, name) 108 fullpth = os.path.join(root, name).replace(pth, os.path.join(pth, "."))
96 checksum = checksum_file(fullpth) 109 checksum = checksum_file(fullpth)
97 if checksum: 110 if checksum:
98 dirchecksums.append((fullpth, checksum)) 111 dirchecksums.append((fullpth, checksum))
99 return dirchecksums 112 return dirchecksums
100 113
101 checksums = [] 114 checksums = []
102 for pth in filelist.split(): 115 for pth in filelist_regex.split(filelist):
116 if not pth:
117 continue
118 pth = pth.strip()
119 if not pth:
120 continue
103 exist = pth.split(":")[1] 121 exist = pth.split(":")[1]
104 if exist == "False": 122 if exist == "False":
105 continue 123 continue
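The comment above describes the "/./" marker trick; the effect of the replace() call on a single walked file is easiest to see in isolation (hypothetical paths):

    import os

    pth = "/srv/work/files".rstrip("/")            # directory passed to checksum_dir()
    fullpth = os.path.join(pth, "sub/dir/a.conf")  # one file found by os.walk()
    marked = fullpth.replace(pth, os.path.join(pth, "."))
    print(marked)                  # /srv/work/files/./sub/dir/a.conf
    print(marked.split("/./")[1])  # sub/dir/a.conf -- the suffix tracked for hashing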
@@ -124,3 +142,28 @@ class FileChecksumCache(MultiProcessCache):
124 142
125 checksums.sort(key=operator.itemgetter(1)) 143 checksums.sort(key=operator.itemgetter(1))
126 return checksums 144 return checksums
145
146class RevisionsCache(MultiProcessCache):
147 cache_file_name = "local_srcrevisions.dat"
148 CACHE_VERSION = 1
149
150 def __init__(self):
151 MultiProcessCache.__init__(self)
152
153 def get_revs(self):
154 return self.cachedata[0]
155
156 def get_rev(self, k):
157 if k in self.cachedata_extras[0]:
158 return self.cachedata_extras[0][k]
159 if k in self.cachedata[0]:
160 return self.cachedata[0][k]
161 return None
162
163 def set_rev(self, k, v):
164 self.cachedata[0][k] = v
165 self.cachedata_extras[0][k] = v
166
167 def merge_data(self, source, dest):
168 for h in source[0]:
169 dest[0][h] = source[0][h]
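RevisionsCache stores fetcher revisions through the standard MultiProcessCache flow, so usage is just init, set/get, and a per-process save. A sketch assuming bb is importable; the cache directory and key format here are illustrative only:

    from bb.checksum import RevisionsCache

    revs = RevisionsCache()
    revs.init_cache("/tmp/bb-cachedir", "local_srcrevisions.dat")
    revs.set_rev("git://example.com/repo;branch=main", "0123abcd")
    print(revs.get_rev("git://example.com/repo;branch=main"))  # 0123abcd
    revs.save_extras()  # dump this process's additions for a later save_merge()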
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 25a7ac69d3..4f70cf7fe7 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -25,6 +27,7 @@ import ast
25import sys 27import sys
26import codegen 28import codegen
27import logging 29import logging
30import inspect
28import bb.pysh as pysh 31import bb.pysh as pysh
29import bb.utils, bb.data 32import bb.utils, bb.data
30import hashlib 33import hashlib
@@ -56,10 +59,56 @@ def check_indent(codestr):
56 59
57 return codestr 60 return codestr
58 61
59# A custom getstate/setstate using tuples is actually worth 15% cachesize by 62modulecode_deps = {}
60# avoiding duplication of the attribute names!
61 63
64def add_module_functions(fn, functions, namespace):
65 import os
66 fstat = os.stat(fn)
67 fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime)
68 for f in functions:
69 name = "%s.%s" % (namespace, f)
70 parser = PythonParser(name, logger)
71 try:
72 parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f, func=functions[f])
73 #bb.warn("Cached %s" % f)
74 except KeyError:
75 try:
76 targetfn = inspect.getsourcefile(functions[f])
77 except TypeError:
78 # Builtin
79 continue
80 if fn != targetfn:
81 # Skip references to other modules outside this file
82 #bb.warn("Skipping %s" % name)
83 continue
84 try:
85 lines, lineno = inspect.getsourcelines(functions[f])
86 except TypeError:
87 # Builtin
88 continue
89 src = "".join(lines)
90 parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f, func=functions[f])
91 #bb.warn("Not cached %s" % f)
92 execs = parser.execs.copy()
93 # Expand internal module exec references
94 for e in parser.execs:
95 if e in functions:
96 execs.remove(e)
97 execs.add(namespace + "." + e)
98 visitorcode = None
99 if hasattr(functions[f], 'visitorcode'):
100 visitorcode = getattr(functions[f], "visitorcode")
101 modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy(), parser.extra, visitorcode]
102 #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
103
104def update_module_dependencies(d):
105 for mod in modulecode_deps:
106 excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
107 if excludes:
108 modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3], modulecode_deps[mod][4], modulecode_deps[mod][5]]
62 109
110# A custom getstate/setstate using tuples is actually worth 15% cachesize by
111# avoiding duplication of the attribute names!
63class SetCache(object): 112class SetCache(object):
64 def __init__(self): 113 def __init__(self):
65 self.setcache = {} 114 self.setcache = {}
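add_module_functions() above relies on a two-step protocol: parse_python() is first tried with only a stat-based fixedhash (path, size, mtime, plus the function name) and no source text, and raises KeyError on a cache miss so the caller pays for inspect.getsourcelines() only when needed. A condensed restatement of that flow (parse_module_function is an illustrative wrapper, not a bb API):

    import inspect
    import os
    from bb.codeparser import PythonParser

    def parse_module_function(fn, name, func, logger):
        fstat = os.stat(fn)
        # Identity of the cache entry: path, size, mtime and function name
        fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime) + name
        parser = PythonParser(name, logger)
        try:
            # Cache hit: the function's source text is never read
            parser.parse_python(None, filename=fn, lineno=1,
                                fixedhash=fixedhash, func=func)
        except KeyError:
            # Cache miss: pay for source extraction and parse for real
            lines, lineno = inspect.getsourcelines(func)
            parser.parse_python("".join(lines), filename=fn, lineno=lineno,
                                fixedhash=fixedhash, func=func)
        return parser.references, parser.execs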
@@ -79,21 +128,22 @@ class SetCache(object):
79codecache = SetCache() 128codecache = SetCache()
80 129
81class pythonCacheLine(object): 130class pythonCacheLine(object):
82 def __init__(self, refs, execs, contains): 131 def __init__(self, refs, execs, contains, extra):
83 self.refs = codecache.internSet(refs) 132 self.refs = codecache.internSet(refs)
84 self.execs = codecache.internSet(execs) 133 self.execs = codecache.internSet(execs)
85 self.contains = {} 134 self.contains = {}
86 for c in contains: 135 for c in contains:
87 self.contains[c] = codecache.internSet(contains[c]) 136 self.contains[c] = codecache.internSet(contains[c])
137 self.extra = extra
88 138
89 def __getstate__(self): 139 def __getstate__(self):
90 return (self.refs, self.execs, self.contains) 140 return (self.refs, self.execs, self.contains, self.extra)
91 141
92 def __setstate__(self, state): 142 def __setstate__(self, state):
93 (refs, execs, contains) = state 143 (refs, execs, contains, extra) = state
94 self.__init__(refs, execs, contains) 144 self.__init__(refs, execs, contains, extra)
95 def __hash__(self): 145 def __hash__(self):
96 l = (hash(self.refs), hash(self.execs)) 146 l = (hash(self.refs), hash(self.execs), hash(self.extra))
97 for c in sorted(self.contains.keys()): 147 for c in sorted(self.contains.keys()):
98 l = l + (c, hash(self.contains[c])) 148 l = l + (c, hash(self.contains[c]))
99 return hash(l) 149 return hash(l)
@@ -122,7 +172,7 @@ class CodeParserCache(MultiProcessCache):
122 # so that an existing cache gets invalidated. Additionally you'll need 172 # so that an existing cache gets invalidated. Additionally you'll need
123 # to increment __cache_version__ in cache.py in order to ensure that old 173 # to increment __cache_version__ in cache.py in order to ensure that old
124 # recipe caches don't trigger "Taskhash mismatch" errors. 174 # recipe caches don't trigger "Taskhash mismatch" errors.
125 CACHE_VERSION = 11 175 CACHE_VERSION = 14
126 176
127 def __init__(self): 177 def __init__(self):
128 MultiProcessCache.__init__(self) 178 MultiProcessCache.__init__(self)
@@ -136,8 +186,8 @@ class CodeParserCache(MultiProcessCache):
136 self.pythoncachelines = {} 186 self.pythoncachelines = {}
137 self.shellcachelines = {} 187 self.shellcachelines = {}
138 188
139 def newPythonCacheLine(self, refs, execs, contains): 189 def newPythonCacheLine(self, refs, execs, contains, extra):
140 cacheline = pythonCacheLine(refs, execs, contains) 190 cacheline = pythonCacheLine(refs, execs, contains, extra)
141 h = hash(cacheline) 191 h = hash(cacheline)
142 if h in self.pythoncachelines: 192 if h in self.pythoncachelines:
143 return self.pythoncachelines[h] 193 return self.pythoncachelines[h]
@@ -152,12 +202,12 @@ class CodeParserCache(MultiProcessCache):
152 self.shellcachelines[h] = cacheline 202 self.shellcachelines[h] = cacheline
153 return cacheline 203 return cacheline
154 204
155 def init_cache(self, d): 205 def init_cache(self, cachedir):
156 # Check if we already have the caches 206 # Check if we already have the caches
157 if self.pythoncache: 207 if self.pythoncache:
158 return 208 return
159 209
160 MultiProcessCache.init_cache(self, d) 210 MultiProcessCache.init_cache(self, cachedir)
161 211
162 # cachedata gets re-assigned in the parent 212 # cachedata gets re-assigned in the parent
163 self.pythoncache = self.cachedata[0] 213 self.pythoncache = self.cachedata[0]
@@ -169,8 +219,8 @@ class CodeParserCache(MultiProcessCache):
169 219
170codeparsercache = CodeParserCache() 220codeparsercache = CodeParserCache()
171 221
172def parser_cache_init(d): 222def parser_cache_init(cachedir):
173 codeparsercache.init_cache(d) 223 codeparsercache.init_cache(cachedir)
174 224
175def parser_cache_save(): 225def parser_cache_save():
176 codeparsercache.save_extras() 226 codeparsercache.save_extras()
@@ -195,6 +245,10 @@ class BufferedLogger(Logger):
195 self.target.handle(record) 245 self.target.handle(record)
196 self.buffer = [] 246 self.buffer = []
197 247
248class DummyLogger():
249 def flush(self):
250 return
251
198class PythonParser(): 252class PythonParser():
199 getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional") 253 getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional")
200 getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag") 254 getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
@@ -212,26 +266,34 @@ class PythonParser():
212 funcstr = codegen.to_source(func) 266 funcstr = codegen.to_source(func)
213 argstr = codegen.to_source(arg) 267 argstr = codegen.to_source(arg)
214 except TypeError: 268 except TypeError:
215 self.log.debug(2, 'Failed to convert function and argument to source form') 269 self.log.debug2('Failed to convert function and argument to source form')
216 else: 270 else:
217 self.log.debug(1, self.unhandled_message % (funcstr, argstr)) 271 self.log.debug(self.unhandled_message % (funcstr, argstr))
218 272
219 def visit_Call(self, node): 273 def visit_Call(self, node):
220 name = self.called_node_name(node.func) 274 name = self.called_node_name(node.func)
221 if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs): 275 if name and name in modulecode_deps and modulecode_deps[name][5]:
222 if isinstance(node.args[0], ast.Str): 276 visitorcode = modulecode_deps[name][5]
223 varname = node.args[0].s 277 contains, execs, warn = visitorcode(name, node.args)
224 if name in self.containsfuncs and isinstance(node.args[1], ast.Str): 278 for i in contains:
279 self.contains[i] = contains[i]
280 self.execs |= execs
281 if warn:
282 self.warn(node.func, warn)
283 elif name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
284 if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
285 varname = node.args[0].value
286 if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
225 if varname not in self.contains: 287 if varname not in self.contains:
226 self.contains[varname] = set() 288 self.contains[varname] = set()
227 self.contains[varname].add(node.args[1].s) 289 self.contains[varname].add(node.args[1].value)
228 elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str): 290 elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Constant):
229 if varname not in self.contains: 291 if varname not in self.contains:
230 self.contains[varname] = set() 292 self.contains[varname] = set()
231 self.contains[varname].update(node.args[1].s.split()) 293 self.contains[varname].update(node.args[1].value.split())
232 elif name.endswith(self.getvarflags): 294 elif name.endswith(self.getvarflags):
233 if isinstance(node.args[1], ast.Str): 295 if isinstance(node.args[1], ast.Constant):
234 self.references.add('%s[%s]' % (varname, node.args[1].s)) 296 self.references.add('%s[%s]' % (varname, node.args[1].value))
235 else: 297 else:
236 self.warn(node.func, node.args[1]) 298 self.warn(node.func, node.args[1])
237 else: 299 else:
@@ -239,8 +301,8 @@ class PythonParser():
239 else: 301 else:
240 self.warn(node.func, node.args[0]) 302 self.warn(node.func, node.args[0])
241 elif name and name.endswith(".expand"): 303 elif name and name.endswith(".expand"):
242 if isinstance(node.args[0], ast.Str): 304 if isinstance(node.args[0], ast.Constant):
243 value = node.args[0].s 305 value = node.args[0].value
244 d = bb.data.init() 306 d = bb.data.init()
245 parser = d.expandWithRefs(value, self.name) 307 parser = d.expandWithRefs(value, self.name)
246 self.references |= parser.references 308 self.references |= parser.references
@@ -250,8 +312,8 @@ class PythonParser():
250 self.contains[varname] = set() 312 self.contains[varname] = set()
251 self.contains[varname] |= parser.contains[varname] 313 self.contains[varname] |= parser.contains[varname]
252 elif name in self.execfuncs: 314 elif name in self.execfuncs:
253 if isinstance(node.args[0], ast.Str): 315 if isinstance(node.args[0], ast.Constant):
254 self.var_execs.add(node.args[0].s) 316 self.var_execs.add(node.args[0].value)
255 else: 317 else:
256 self.warn(node.func, node.args[0]) 318 self.warn(node.func, node.args[0])
257 elif name and isinstance(node.func, (ast.Name, ast.Attribute)): 319 elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
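The visit_Call() rewrite also migrates from ast.Str, deprecated since Python 3.8 and removed in 3.12, to ast.Constant. Recognising a string-literal argument now looks like this (standalone sketch):

    import ast

    tree = ast.parse("d.getVar('PN')")
    call = tree.body[0].value          # the ast.Call node
    arg = call.args[0]
    if isinstance(arg, ast.Constant) and isinstance(arg.value, str):
        print("string literal:", arg.value)   # -> PN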
@@ -276,16 +338,24 @@ class PythonParser():
276 self.contains = {} 338 self.contains = {}
277 self.execs = set() 339 self.execs = set()
278 self.references = set() 340 self.references = set()
279 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log) 341 self._log = log
342 # Defer init as expensive
343 self.log = DummyLogger()
280 344
281 self.unhandled_message = "in call of %s, argument '%s' is not a string literal" 345 self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
282 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message) 346 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
283 347
284 def parse_python(self, node, lineno=0, filename="<string>"): 348 # For the python module code it is expensive to have the function text so it is
285 if not node or not node.strip(): 349 # uses a different fixedhash to cache against. We can take the hit on obtaining the
350 # text if it isn't in the cache.
351 def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None, func=None):
352 if not fixedhash and (not node or not node.strip()):
286 return 353 return
287 354
288 h = bbhash(str(node)) 355 if fixedhash:
356 h = fixedhash
357 else:
358 h = bbhash(str(node))
289 359
290 if h in codeparsercache.pythoncache: 360 if h in codeparsercache.pythoncache:
291 self.references = set(codeparsercache.pythoncache[h].refs) 361 self.references = set(codeparsercache.pythoncache[h].refs)
@@ -293,6 +363,7 @@ class PythonParser():
293 self.contains = {} 363 self.contains = {}
294 for i in codeparsercache.pythoncache[h].contains: 364 for i in codeparsercache.pythoncache[h].contains:
295 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i]) 365 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
366 self.extra = codeparsercache.pythoncache[h].extra
296 return 367 return
297 368
298 if h in codeparsercache.pythoncacheextras: 369 if h in codeparsercache.pythoncacheextras:
@@ -301,8 +372,15 @@ class PythonParser():
301 self.contains = {} 372 self.contains = {}
302 for i in codeparsercache.pythoncacheextras[h].contains: 373 for i in codeparsercache.pythoncacheextras[h].contains:
303 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i]) 374 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
375 self.extra = codeparsercache.pythoncacheextras[h].extra
304 return 376 return
305 377
378 if fixedhash and not node:
379 raise KeyError
380
381 # Need to parse so take the hit on the real log buffer
382 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, self._log)
383
306 # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though 384 # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
307 node = "\n" * int(lineno) + node 385 node = "\n" * int(lineno) + node
308 code = compile(check_indent(str(node)), filename, "exec", 386 code = compile(check_indent(str(node)), filename, "exec",
@@ -312,16 +390,27 @@ class PythonParser():
312 if n.__class__.__name__ == "Call": 390 if n.__class__.__name__ == "Call":
313 self.visit_Call(n) 391 self.visit_Call(n)
314 392
393 if func is not None:
394 self.references |= getattr(func, "bb_vardeps", set())
395 self.references -= getattr(func, "bb_vardepsexclude", set())
396
315 self.execs.update(self.var_execs) 397 self.execs.update(self.var_execs)
398 self.extra = None
399 if fixedhash:
400 self.extra = bbhash(str(node))
316 401
317 codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains) 402 codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains, self.extra)
318 403
319class ShellParser(): 404class ShellParser():
320 def __init__(self, name, log): 405 def __init__(self, name, log):
321 self.funcdefs = set() 406 self.funcdefs = set()
322 self.allexecs = set() 407 self.allexecs = set()
323 self.execs = set() 408 self.execs = set()
324 self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log) 409 self._name = name
410 self._log = log
411 # Defer init as expensive
412 self.log = DummyLogger()
413
325 self.unhandled_template = "unable to handle non-literal command '%s'" 414 self.unhandled_template = "unable to handle non-literal command '%s'"
326 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template) 415 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
327 416
@@ -340,6 +429,9 @@ class ShellParser():
340 self.execs = set(codeparsercache.shellcacheextras[h].execs) 429 self.execs = set(codeparsercache.shellcacheextras[h].execs)
341 return self.execs 430 return self.execs
342 431
432 # Need to parse so take the hit on the real log buffer
433 self.log = BufferedLogger('BitBake.Data.%s' % self._name, logging.DEBUG, self._log)
434
343 self._parse_shell(value) 435 self._parse_shell(value)
344 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) 436 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
345 437
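Both parsers now hide BufferedLogger construction behind the no-op DummyLogger, since cache hits never log anything. The shape of the pattern, restated generically (LazyLogParser is illustrative; the real classes are PythonParser and ShellParser):

    import logging
    from bb.codeparser import BufferedLogger, DummyLogger

    class LazyLogParser:
        def __init__(self, name, target):
            self._name = name
            self._target = target
            self.log = DummyLogger()   # cheap placeholder for the cache-hit path

        def parse(self, value, cache):
            if value in cache:
                return cache[value]    # hit: the real logger is never built
            # Only now pay for the buffered logger
            self.log = BufferedLogger('BitBake.Data.%s' % self._name,
                                      logging.DEBUG, self._target)
            result = len(value.split())  # stand-in for the real parsing work
            cache[value] = result
            return result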
@@ -426,19 +518,34 @@ class ShellParser():
426 """ 518 """
427 519
428 words = list(words) 520 words = list(words)
429 for word in list(words): 521 for word in words:
430 wtree = pyshlex.make_wordtree(word[1]) 522 wtree = pyshlex.make_wordtree(word[1])
431 for part in wtree: 523 for part in wtree:
432 if not isinstance(part, list): 524 if not isinstance(part, list):
433 continue 525 continue
434 526
435 if part[0] in ('`', '$('): 527 candidates = [part]
436 command = pyshlex.wordtree_as_string(part[1:-1]) 528
437 self._parse_shell(command) 529 # If command is of type:
438 530 #
439 if word[0] in ("cmd_name", "cmd_word"): 531 # var="... $(cmd [...]) ..."
440 if word in words: 532 #
441 words.remove(word) 533 # Then iterate on what's between the quotes and if we find a
534 # list, make that what we check for below.
535 if len(part) >= 3 and part[0] == '"':
536 for p in part[1:-1]:
537 if isinstance(p, list):
538 candidates.append(p)
539
540 for candidate in candidates:
541 if len(candidate) >= 2:
542 if candidate[0] in ('`', '$('):
543 command = pyshlex.wordtree_as_string(candidate[1:-1])
544 self._parse_shell(command)
545
546 if word[0] in ("cmd_name", "cmd_word"):
547 if word in words:
548 words.remove(word)
442 549
443 usetoken = False 550 usetoken = False
444 for word in words: 551 for word in words:
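With the quoted-word handling above, a command substitution nested inside double quotes is now recursed into rather than skipped. Assuming a working bb checkout on sys.path, the effect should be observable directly through ShellParser:

    import logging
    from bb.codeparser import ShellParser

    logger = logging.getLogger("BitBake")
    parser = ShellParser("do_example", logger)
    execs = parser.parse_shell('FOO="prefix $(date --utc) suffix"\n')
    print(execs)   # now includes "date"; previously missed inside quotes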
@@ -450,7 +557,7 @@ class ShellParser():
450 557
451 cmd = word[1] 558 cmd = word[1]
452 if cmd.startswith("$"): 559 if cmd.startswith("$"):
453 self.log.debug(1, self.unhandled_template % cmd) 560 self.log.debug(self.unhandled_template % cmd)
454 elif cmd == "eval": 561 elif cmd == "eval":
455 command = " ".join(word for _, word in words[1:]) 562 command = " ".join(word for _, word in words[1:])
456 self._parse_shell(command) 563 self._parse_shell(command)
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index dd77cdd6e2..59a979ee90 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -20,9 +20,11 @@ Commands are queued in a CommandQueue
20 20
21from collections import OrderedDict, defaultdict 21from collections import OrderedDict, defaultdict
22 22
23import io
23import bb.event 24import bb.event
24import bb.cooker 25import bb.cooker
25import bb.remotedata 26import bb.remotedata
27import bb.parse
26 28
27class DataStoreConnectionHandle(object): 29class DataStoreConnectionHandle(object):
28 def __init__(self, dsindex=0): 30 def __init__(self, dsindex=0):
@@ -50,23 +52,32 @@ class Command:
50 """ 52 """
51 A queue of asynchronous commands for bitbake 53 A queue of asynchronous commands for bitbake
52 """ 54 """
53 def __init__(self, cooker): 55 def __init__(self, cooker, process_server):
54 self.cooker = cooker 56 self.cooker = cooker
55 self.cmds_sync = CommandsSync() 57 self.cmds_sync = CommandsSync()
56 self.cmds_async = CommandsAsync() 58 self.cmds_async = CommandsAsync()
57 self.remotedatastores = None 59 self.remotedatastores = None
58 60
59 # FIXME Add lock for this 61 self.process_server = process_server
62 # Access with locking using process_server.{get/set/clear}_async_cmd()
60 self.currentAsyncCommand = None 63 self.currentAsyncCommand = None
61 64
62 def runCommand(self, commandline, ro_only = False): 65 def runCommand(self, commandline, process_server, ro_only=False):
63 command = commandline.pop(0) 66 command = commandline.pop(0)
64 67
65 # Ensure cooker is ready for commands 68 # Ensure cooker is ready for commands
66 if command != "updateConfig" and command != "setFeatures": 69 if command not in ["updateConfig", "setFeatures", "ping"]:
67 self.cooker.init_configdata() 70 try:
68 if not self.remotedatastores: 71 self.cooker.init_configdata()
69 self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker) 72 if not self.remotedatastores:
73 self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
74 except (Exception, SystemExit) as exc:
75 import traceback
76 if isinstance(exc, bb.BBHandledException):
77 # We need to start returning real exceptions here. Until we do, we can't
78 # tell if an exception is an instance of bb.BBHandledException
79 return None, "bb.BBHandledException()\n" + traceback.format_exc()
80 return None, traceback.format_exc()
70 81
71 if hasattr(CommandsSync, command): 82 if hasattr(CommandsSync, command):
72 # Can run synchronous commands straight away 83 # Can run synchronous commands straight away
@@ -75,7 +86,6 @@ class Command:
75 if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'): 86 if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'):
76 return None, "Unable to execute non-readonly commands in readonly mode" 87
77 try: 88 try:
78 self.cooker.process_inotify_updates()
79 if getattr(command_method, 'needconfig', True): 89 if getattr(command_method, 'needconfig', True):
80 self.cooker.updateCacheSync() 90 self.cooker.updateCacheSync()
81 result = command_method(self, commandline) 91 result = command_method(self, commandline)
@@ -90,61 +100,57 @@ class Command:
90 return None, traceback.format_exc() 100 return None, traceback.format_exc()
91 else: 101 else:
92 return result, None 102 return result, None
93 if self.currentAsyncCommand is not None:
94 return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
95 if command not in CommandsAsync.__dict__: 103 if command not in CommandsAsync.__dict__:
96 return None, "No such command" 104 return None, "No such command"
97 self.currentAsyncCommand = (command, commandline) 105 if not process_server.set_async_cmd((command, commandline)):
98 self.cooker.idleCallBackRegister(self.cooker.runCommands, self.cooker) 106 return None, "Busy (%s in progress)" % self.process_server.get_async_cmd()[0]
107 self.cooker.idleCallBackRegister(self.runAsyncCommand, process_server)
99 return True, None 108 return True, None
100 109
101 def runAsyncCommand(self): 110 def runAsyncCommand(self, _, process_server, halt):
102 try: 111 try:
103 self.cooker.process_inotify_updates() 112 if self.cooker.state in (bb.cooker.State.ERROR, bb.cooker.State.SHUTDOWN, bb.cooker.State.FORCE_SHUTDOWN):
104 if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
105 # updateCache will trigger a shutdown of the parser 113 # updateCache will trigger a shutdown of the parser
106 # and then raise BBHandledException triggering an exit 114 # and then raise BBHandledException triggering an exit
107 self.cooker.updateCache() 115 self.cooker.updateCache()
108 return False 116 return bb.server.process.idleFinish("Cooker in error state")
109 if self.currentAsyncCommand is not None: 117 cmd = process_server.get_async_cmd()
110 (command, options) = self.currentAsyncCommand 118 if cmd is not None:
119 (command, options) = cmd
111 commandmethod = getattr(CommandsAsync, command) 120 commandmethod = getattr(CommandsAsync, command)
112 needcache = getattr( commandmethod, "needcache" ) 121 needcache = getattr( commandmethod, "needcache" )
113 if needcache and self.cooker.state != bb.cooker.state.running: 122 if needcache and self.cooker.state != bb.cooker.State.RUNNING:
114 self.cooker.updateCache() 123 self.cooker.updateCache()
115 return True 124 return True
116 else: 125 else:
117 commandmethod(self.cmds_async, self, options) 126 commandmethod(self.cmds_async, self, options)
118 return False 127 return False
119 else: 128 else:
120 return False 129 return bb.server.process.idleFinish("Nothing to do, no async command?")
121 except KeyboardInterrupt as exc: 130 except KeyboardInterrupt as exc:
122 self.finishAsyncCommand("Interrupted") 131 return bb.server.process.idleFinish("Interrupted")
123 return False
124 except SystemExit as exc: 132 except SystemExit as exc:
125 arg = exc.args[0] 133 arg = exc.args[0]
126 if isinstance(arg, str): 134 if isinstance(arg, str):
127 self.finishAsyncCommand(arg) 135 return bb.server.process.idleFinish(arg)
128 else: 136 else:
129 self.finishAsyncCommand("Exited with %s" % arg) 137 return bb.server.process.idleFinish("Exited with %s" % arg)
130 return False
131 except Exception as exc: 138 except Exception as exc:
132 import traceback 139 import traceback
133 if isinstance(exc, bb.BBHandledException): 140 if isinstance(exc, bb.BBHandledException):
134 self.finishAsyncCommand("") 141 return bb.server.process.idleFinish("")
135 else: 142 else:
136 self.finishAsyncCommand(traceback.format_exc()) 143 return bb.server.process.idleFinish(traceback.format_exc())
137 return False
138 144
139 def finishAsyncCommand(self, msg=None, code=None): 145 def finishAsyncCommand(self, msg=None, code=None):
146 self.cooker.finishcommand()
147 self.process_server.clear_async_cmd()
140 if msg or msg == "": 148 if msg or msg == "":
141 bb.event.fire(CommandFailed(msg), self.cooker.data) 149 bb.event.fire(CommandFailed(msg), self.cooker.data)
142 elif code: 150 elif code:
143 bb.event.fire(CommandExit(code), self.cooker.data) 151 bb.event.fire(CommandExit(code), self.cooker.data)
144 else: 152 else:
145 bb.event.fire(CommandCompleted(), self.cooker.data) 153 bb.event.fire(CommandCompleted(), self.cooker.data)
146 self.currentAsyncCommand = None
147 self.cooker.finishcommand()
148 154
149 def reset(self): 155 def reset(self):
150 if self.remotedatastores: 156 if self.remotedatastores:
@@ -157,6 +163,14 @@ class CommandsSync:
157 These must not influence any running synchronous command. 163 These must not influence any running synchronous command.
158 """ 164 """
159 165
166 def ping(self, command, params):
167 """
168 Allow a UI to check the server is still alive
169 """
170 return "Still alive!"
171 ping.needconfig = False
172 ping.readonly = True
173
160 def stateShutdown(self, command, params): 174 def stateShutdown(self, command, params):
161 """ 175 """
162 Trigger cooker 'shutdown' mode 176 Trigger cooker 'shutdown' mode
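Since ping needs neither configuration nor the cache, a UI can use it as a cheap liveness probe. One plausible client-side use via tinfoil (the run_command plumbing is standard tinfoil, not part of this change):

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        print(tinfoil.run_command("ping"))   # -> "Still alive!"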
@@ -294,6 +308,11 @@ class CommandsSync:
294 return ret 308 return ret
295 getLayerPriorities.readonly = True 309 getLayerPriorities.readonly = True
296 310
311 def revalidateCaches(self, command, params):
312 """Called by UI clients when metadata may have changed"""
313 command.cooker.revalidateCaches()
314 revalidateCaches.needconfig = False
315
297 def getRecipes(self, command, params): 316 def getRecipes(self, command, params):
298 try: 317 try:
299 mc = params[0] 318 mc = params[0]
@@ -402,15 +421,30 @@ class CommandsSync:
402 return command.cooker.recipecaches[mc].pkg_dp 421 return command.cooker.recipecaches[mc].pkg_dp
403 getDefaultPreference.readonly = True 422 getDefaultPreference.readonly = True
404 423
424
405 def getSkippedRecipes(self, command, params): 425 def getSkippedRecipes(self, command, params):
426 """
427 Get the map of skipped recipes for the specified multiconfig/mc name (`params[0]`).
428
429 Invoked by `bb.tinfoil.Tinfoil.get_skipped_recipes`
430
431 :param command: Internally used parameter.
432 :param params: Parameter array. params[0] is multiconfig/mc name. If not given, then default mc '' is assumed.
433 :return: Dict whose keys are virtualfns and values are `bb.cooker.SkippedPackage`
434 """
435 try:
436 mc = params[0]
437 except IndexError:
438 mc = ''
439
406 # Return list sorted by reverse priority order 440 # Return list sorted by reverse priority order
407 import bb.cache 441 import bb.cache
408 def sortkey(x): 442 def sortkey(x):
409 vfn, _ = x 443 vfn, _ = x
410 realfn, _, mc = bb.cache.virtualfn2realfn(vfn) 444 realfn, _, item_mc = bb.cache.virtualfn2realfn(vfn)
411 return (-command.cooker.collections[mc].calc_bbfile_priority(realfn)[0], vfn) 445 return -command.cooker.collections[item_mc].calc_bbfile_priority(realfn)[0], vfn
412 446
413 skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), key=sortkey)) 447 skipdict = OrderedDict(sorted(command.cooker.skiplist_by_mc[mc].items(), key=sortkey))
414 return list(skipdict.items()) 448 return list(skipdict.items())
415 getSkippedRecipes.readonly = True 449 getSkippedRecipes.readonly = True
416 450
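Per the docstring, the usual entry point is bb.tinfoil.Tinfoil.get_skipped_recipes; a sketch of inspecting the result (attribute names follow bb.cooker.SkippedPackage):

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare()
        skipped = tinfoil.get_skipped_recipes()   # default mc ''
        for virtualfn, info in list(skipped.items())[:5]:
            print(virtualfn, getattr(info, "skipreason", None))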
@@ -500,6 +534,17 @@ class CommandsSync:
500 d = command.remotedatastores[dsindex].varhistory 534 d = command.remotedatastores[dsindex].varhistory
501 return getattr(d, method)(*args, **kwargs) 535 return getattr(d, method)(*args, **kwargs)
502 536
537 def dataStoreConnectorVarHistCmdEmit(self, command, params):
538 dsindex = params[0]
539 var = params[1]
540 oval = params[2]
541 val = params[3]
542 d = command.remotedatastores[params[4]]
543
544 o = io.StringIO()
545 command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d)
546 return o.getvalue()
547
503 def dataStoreConnectorIncHistCmd(self, command, params): 548 def dataStoreConnectorIncHistCmd(self, command, params):
504 dsindex = params[0] 549 dsindex = params[0]
505 method = params[1] 550 method = params[1]
@@ -521,8 +566,8 @@ class CommandsSync:
521 and return a datastore object representing the environment 566 and return a datastore object representing the environment
522 for the recipe. 567 for the recipe.
523 """ 568 """
524 fn = params[0] 569 virtualfn = params[0]
525 mc = bb.runqueue.mc_from_tid(fn) 570 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
526 appends = params[1] 571 appends = params[1]
527 appendlist = params[2] 572 appendlist = params[2]
528 if len(params) > 3: 573 if len(params) > 3:
@@ -537,6 +582,7 @@ class CommandsSync:
537 appendfiles = command.cooker.collections[mc].get_file_appends(fn) 582 appendfiles = command.cooker.collections[mc].get_file_appends(fn)
538 else: 583 else:
539 appendfiles = [] 584 appendfiles = []
585 layername = command.cooker.collections[mc].calc_bbfile_priority(fn)[2]
540 # We are calling bb.cache locally here rather than on the server, 586 # We are calling bb.cache locally here rather than on the server,
541 # but that's OK because it doesn't actually need anything from 587 # but that's OK because it doesn't actually need anything from
542 # the server barring the global datastore (which we have a remote 588 # the server barring the global datastore (which we have a remote
@@ -544,15 +590,21 @@ class CommandsSync:
544 if config_data: 590 if config_data:
545 # We have to use a different function here if we're passing in a datastore 591 # We have to use a different function here if we're passing in a datastore
546 # NOTE: we took a copy above, so we don't do it here again 592 # NOTE: we took a copy above, so we don't do it here again
547 envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)[''] 593 envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)[cls]
548 else: 594 else:
549 # Use the standard path 595 # Use the standard path
550 parser = bb.cache.NoCache(command.cooker.databuilder) 596 envdata = command.cooker.databuilder.parseRecipe(virtualfn, appendfiles, layername)
551 envdata = parser.loadDataFull(fn, appendfiles)
552 idx = command.remotedatastores.store(envdata) 597 idx = command.remotedatastores.store(envdata)
553 return DataStoreConnectionHandle(idx) 598 return DataStoreConnectionHandle(idx)
554 parseRecipeFile.readonly = True 599 parseRecipeFile.readonly = True
555 600
601 def finalizeData(self, command, params):
602 newdata = command.cooker.data.createCopy()
603 bb.data.expandKeys(newdata)
604 bb.parse.ast.runAnonFuncs(newdata)
605 idx = command.remotedatastores.store(newdata)
606 return DataStoreConnectionHandle(idx)
607
556class CommandsAsync: 608class CommandsAsync:
557 """ 609 """
558 A class of asynchronous commands 610 A class of asynchronous commands
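parseRecipeFile now takes a virtual filename and splits it itself; the mapping virtualfn2realfn performs is worth seeing on a concrete (hypothetical) value:

    from bb.cache import virtualfn2realfn

    fn, cls, mc = virtualfn2realfn("mc:initramfs:virtual:native:/path/foo_1.0.bb")
    print(fn)   # /path/foo_1.0.bb
    print(cls)  # native
    print(mc)   # initramfs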
@@ -647,6 +699,16 @@ class CommandsAsync:
647 command.finishAsyncCommand() 699 command.finishAsyncCommand()
648 findFilesMatchingInDir.needcache = False 700 findFilesMatchingInDir.needcache = False
649 701
702 def testCookerCommandEvent(self, command, params):
703 """
704 Dummy command used by OEQA selftest to test tinfoil without IO
705 """
706 pattern = params[0]
707
708 command.cooker.testCookerCommandEvent(pattern)
709 command.finishAsyncCommand()
710 testCookerCommandEvent.needcache = False
711
650 def findConfigFilePath(self, command, params): 712 def findConfigFilePath(self, command, params):
651 """ 713 """
652 Find the path of the requested configuration file 714 Find the path of the requested configuration file
@@ -711,7 +773,7 @@ class CommandsAsync:
711 """ 773 """
712 event = params[0] 774 event = params[0]
713 bb.event.fire(eval(event), command.cooker.data) 775 bb.event.fire(eval(event), command.cooker.data)
714 command.currentAsyncCommand = None 776 command.process_server.clear_async_cmd()
715 triggerEvent.needcache = False 777 triggerEvent.needcache = False
716 778
717 def resetCooker(self, command, params): 779 def resetCooker(self, command, params):
@@ -738,7 +800,14 @@ class CommandsAsync:
738 (mc, pn) = bb.runqueue.split_mc(params[0]) 800 (mc, pn) = bb.runqueue.split_mc(params[0])
739 taskname = params[1] 801 taskname = params[1]
740 sigs = params[2] 802 sigs = params[2]
803 bb.siggen.check_siggen_version(bb.siggen)
741 res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc]) 804 res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc])
742 bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc]) 805 bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc])
743 command.finishAsyncCommand() 806 command.finishAsyncCommand()
744 findSigInfo.needcache = False 807 findSigInfo.needcache = False
808
809 def getTaskSignatures(self, command, params):
810 res = command.cooker.getTaskSignatures(params[0], params[1])
811 bb.event.fire(bb.event.GetTaskSignatureResult(res), command.cooker.data)
812 command.finishAsyncCommand()
813 getTaskSignatures.needcache = True
diff --git a/bitbake/lib/bb/compress/_pipecompress.py b/bitbake/lib/bb/compress/_pipecompress.py
new file mode 100644
index 0000000000..4a403d62cf
--- /dev/null
+++ b/bitbake/lib/bb/compress/_pipecompress.py
@@ -0,0 +1,196 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# Helper library to implement streaming compression and decompression using an
7# external process
8#
9# This library should not be used directly by end users; a wrapper library for
10# the specific compression tool should be created
11
12import builtins
13import io
14import os
15import subprocess
16
17
18def open_wrap(
19 cls, filename, mode="rb", *, encoding=None, errors=None, newline=None, **kwargs
20):
21 """
22 Open a compressed file in binary or text mode.
23
24 Users should not call this directly. A specific compression library can use
25 this helper to provide its own "open" command
26
27 The filename argument can be an actual filename (a str or bytes object), or
28 an existing file object to read from or write to.
29
30 The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for
31 binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is
32 "rb".
33
34 For binary mode, this function is equivalent to the cls constructor:
35 cls(filename, mode). In this case, the encoding, errors and newline
36 arguments must not be provided.
37
38 For text mode, a cls object is created, and wrapped in an
39 io.TextIOWrapper instance with the specified encoding, error handling
40 behavior, and line ending(s).
41 """
42 if "t" in mode:
43 if "b" in mode:
44 raise ValueError("Invalid mode: %r" % (mode,))
45 else:
46 if encoding is not None:
47 raise ValueError("Argument 'encoding' not supported in binary mode")
48 if errors is not None:
49 raise ValueError("Argument 'errors' not supported in binary mode")
50 if newline is not None:
51 raise ValueError("Argument 'newline' not supported in binary mode")
52
53 file_mode = mode.replace("t", "")
54 if isinstance(filename, (str, bytes, os.PathLike, int)):
55 binary_file = cls(filename, file_mode, **kwargs)
56 elif hasattr(filename, "read") or hasattr(filename, "write"):
57 binary_file = cls(None, file_mode, fileobj=filename, **kwargs)
58 else:
59 raise TypeError("filename must be a str or bytes object, or a file")
60
61 if "t" in mode:
62 return io.TextIOWrapper(
63 binary_file, encoding, errors, newline, write_through=True
64 )
65 else:
66 return binary_file
67
68
69class CompressionError(OSError):
70 pass
71
72
73class PipeFile(io.RawIOBase):
74 """
75 Class that implements generically piping to/from a compression program
76
77 Derived classes should add the functions get_compress() and get_decompress()
78 that return the required commands. Input will be piped into stdin and the
79 (de)compressed output should be written to stdout, e.g.:
80
81 class FooFile(PipeFile):
82 def get_decompress(self):
83 return ["fooc", "--decompress", "--stdout"]
84
85 def get_compress(self):
86 return ["fooc", "--compress", "--stdout"]
87
88 """
89
90 READ = 0
91 WRITE = 1
92
93 def __init__(self, filename=None, mode="rb", *, stderr=None, fileobj=None):
94 if "t" in mode or "U" in mode:
95 raise ValueError("Invalid mode: {!r}".format(mode))
96
97 if not "b" in mode:
98 mode += "b"
99
100 if mode.startswith("r"):
101 self.mode = self.READ
102 elif mode.startswith("w"):
103 self.mode = self.WRITE
104 else:
105 raise ValueError("Invalid mode %r" % mode)
106
107 if fileobj is not None:
108 self.fileobj = fileobj
109 else:
110 self.fileobj = builtins.open(filename, mode or "rb")
111
112 if self.mode == self.READ:
113 self.p = subprocess.Popen(
114 self.get_decompress(),
115 stdin=self.fileobj,
116 stdout=subprocess.PIPE,
117 stderr=stderr,
118 close_fds=True,
119 )
120 self.pipe = self.p.stdout
121 else:
122 self.p = subprocess.Popen(
123 self.get_compress(),
124 stdin=subprocess.PIPE,
125 stdout=self.fileobj,
126 stderr=stderr,
127 close_fds=True,
128 )
129 self.pipe = self.p.stdin
130
131 self.__closed = False
132
133 def _check_process(self):
134 if self.p is None:
135 return
136
137 returncode = self.p.wait()
138 if returncode:
139 raise CompressionError("Process died with %d" % returncode)
140 self.p = None
141
142 def close(self):
143 if self.closed:
144 return
145
146 self.pipe.close()
147 if self.p is not None:
148 self._check_process()
149 self.fileobj.close()
150
151 self.__closed = True
152
153 @property
154 def closed(self):
155 return self.__closed
156
157 def fileno(self):
158 return self.pipe.fileno()
159
160 def flush(self):
161 self.pipe.flush()
162
163 def isatty(self):
164 return self.pipe.isatty()
165
166 def readable(self):
167 return self.mode == self.READ
168
169 def writable(self):
170 return self.mode == self.WRITE
171
172 def readinto(self, b):
173 if self.mode != self.READ:
174 import errno
175
176 raise OSError(
177 errno.EBADF, "read() on write-only %s object" % self.__class__.__name__
178 )
179 size = self.pipe.readinto(b)
180 if size == 0:
181 self._check_process()
182 return size
183
184 def write(self, data):
185 if self.mode != self.WRITE:
186 import errno
187
188 raise OSError(
189 errno.EBADF, "write() on read-only %s object" % self.__class__.__name__
190 )
191 data = self.pipe.write(data)
192
193 if not data:
194 self._check_process()
195
196 return data
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py
new file mode 100644
index 0000000000..2a64681c86
--- /dev/null
+++ b/bitbake/lib/bb/compress/lz4.py
@@ -0,0 +1,19 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb.compress._pipecompress
8
9
10def open(*args, **kwargs):
11 return bb.compress._pipecompress.open_wrap(LZ4File, *args, **kwargs)
12
13
14class LZ4File(bb.compress._pipecompress.PipeFile):
15 def get_compress(self):
16 return ["lz4", "-z", "-c"]
17
18 def get_decompress(self):
19 return ["lz4", "-d", "-c"]
diff --git a/bitbake/lib/bb/compress/zstd.py b/bitbake/lib/bb/compress/zstd.py
new file mode 100644
index 0000000000..cdbbe9d60f
--- /dev/null
+++ b/bitbake/lib/bb/compress/zstd.py
@@ -0,0 +1,30 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb.compress._pipecompress
8import shutil
9
10
11def open(*args, **kwargs):
12 return bb.compress._pipecompress.open_wrap(ZstdFile, *args, **kwargs)
13
14
15class ZstdFile(bb.compress._pipecompress.PipeFile):
16 def __init__(self, *args, num_threads=1, compresslevel=3, **kwargs):
17 self.num_threads = num_threads
18 self.compresslevel = compresslevel
19 super().__init__(*args, **kwargs)
20
21 def _get_zstd(self):
22 if self.num_threads == 1 or not shutil.which("pzstd"):
23 return ["zstd"]
24 return ["pzstd", "-p", "%d" % self.num_threads]
25
26 def get_compress(self):
27 return self._get_zstd() + ["-c", "-%d" % self.compresslevel]
28
29 def get_decompress(self):
30 return self._get_zstd() + ["-d", "-c"]
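Both wrappers expose the open_wrap() semantics from _pipecompress, so compressed files read and write like ordinary file objects. A short usage sketch, assuming the zstd binary is on PATH (pass num_threads > 1 to prefer pzstd when installed):

    import bb.compress.zstd

    # "wt"/"rt" modes get an io.TextIOWrapper around the pipe, per open_wrap()
    with bb.compress.zstd.open("/tmp/demo.zst", "wt", encoding="utf-8") as f:
        f.write("hello\n")

    with bb.compress.zstd.open("/tmp/demo.zst", "rt", encoding="utf-8") as f:
        print(f.read())   # hello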
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index f4ab797edf..1810bcc604 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -8,22 +8,20 @@
8# 8#
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11import enum
12import sys, os, glob, os.path, re, time 12import sys, os, glob, os.path, re, time
13import itertools 13import itertools
14import logging 14import logging
15import multiprocessing 15import multiprocessing
16import sre_constants
17import threading 16import threading
18from io import StringIO, UnsupportedOperation 17from io import StringIO, UnsupportedOperation
19from contextlib import closing 18from contextlib import closing
20from collections import defaultdict, namedtuple 19from collections import defaultdict, namedtuple
21import bb, bb.exceptions, bb.command 20import bb, bb.command
22from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build 21from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
23import queue 22import queue
24import signal 23import signal
25import prserv.serv 24import prserv.serv
26import pyinotify
27import json 25import json
28import pickle 26import pickle
29import codecs 27import codecs
@@ -50,16 +48,15 @@ class CollectionError(bb.BBHandledException):
50 Exception raised when layer configuration is incorrect 48 Exception raised when layer configuration is incorrect
51 """ 49 """
52 50
53class state:
54 initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
55 51
56 @classmethod 52class State(enum.Enum):
57 def get_name(cls, code): 53 INITIAL = 0,
58 for name in dir(cls): 54 PARSING = 1,
59 value = getattr(cls, name) 55 RUNNING = 2,
60 if type(value) == type(cls.initial) and value == code: 56 SHUTDOWN = 3,
61 return name 57 FORCE_SHUTDOWN = 4,
62 raise ValueError("Invalid status code: %s" % code) 58 STOPPED = 5,
59 ERROR = 6
63 60
64 61
65class SkippedPackage: 62class SkippedPackage:
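
The hand-rolled state class and its reflective get_name() lookup are replaced by a standard enum.Enum, where the member name is available directly. Note that the trailing commas in the new definition make each value a one-element tuple such as (0,); this is harmless since enum members are compared by identity rather than value, as this small illustration shows:

    import enum

    class State(enum.Enum):
        INITIAL = 0,   # the trailing comma makes the value the tuple (0,)
        ERROR = 6

    s = State.INITIAL
    print(s.name)               # "INITIAL", replacing the old state.get_name()
    print(s is State.INITIAL)   # True: identity comparison is unaffected
    print(s.value)              # (0,)
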
@@ -81,7 +78,7 @@ class SkippedPackage:
81 78
82 79
83class CookerFeatures(object): 80class CookerFeatures(object):
84 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3)) 81 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))
85 82
86 def __init__(self): 83 def __init__(self):
87 self._features=set() 84 self._features=set()
@@ -104,12 +101,15 @@ class CookerFeatures(object):
104 101
105class EventWriter: 102class EventWriter:
106 def __init__(self, cooker, eventfile): 103 def __init__(self, cooker, eventfile):
107 self.file_inited = None
108 self.cooker = cooker 104 self.cooker = cooker
109 self.eventfile = eventfile 105 self.eventfile = eventfile
110 self.event_queue = [] 106 self.event_queue = []
111 107
112 def write_event(self, event): 108 def write_variables(self):
109 with open(self.eventfile, "a") as f:
110 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
111
112 def send(self, event):
113 with open(self.eventfile, "a") as f: 113 with open(self.eventfile, "a") as f:
114 try: 114 try:
115 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8') 115 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
@@ -119,28 +119,6 @@ class EventWriter:
119 import traceback 119 import traceback
120 print(err, traceback.format_exc()) 120 print(err, traceback.format_exc())
121 121
122 def send(self, event):
123 if self.file_inited:
124 # we have the file, just write the event
125 self.write_event(event)
126 else:
127 # init on bb.event.BuildStarted
128 name = "%s.%s" % (event.__module__, event.__class__.__name__)
129 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
130 with open(self.eventfile, "w") as f:
131 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
132
133 self.file_inited = True
134
135 # write pending events
136 for evt in self.event_queue:
137 self.write_event(evt)
138
139 # also write the current event
140 self.write_event(event)
141 else:
142 # queue all events until the file is inited
143 self.event_queue.append(event)
144 122
145#============================================================================# 123#============================================================================#
146# BBCooker 124# BBCooker
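
Renaming write_event() to send() lets the writer plug straight into bb.event's UI-handler dispatch, which calls handler.event.send(event) on each registered handler, while the variable dump moves into an explicit write_variables() call instead of being triggered by the first BuildStarted event. A condensed sketch of the wiring, mirroring the setupEventLog() helper added later in this patch ("cooker" is assumed to be an existing BBCooker instance, the path is illustrative):

    from collections import namedtuple

    import bb.cooker
    import bb.event

    # "cooker" is an existing BBCooker instance (assumed).
    writer = bb.cooker.EventWriter(cooker, "/tmp/eventlog.json")
    writer.write_variables()   # one JSON line with the variable dump
    # bb.event invokes handler.event.send(event) on UI handlers, hence
    # the namedtuple whose single "event" field is the writer itself.
    handler_id = bb.event.register_UIHhandler(
        namedtuple('EventLogWriteHandler', ['event'])(writer))
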
@@ -150,43 +128,34 @@ class BBCooker:
150 Manages one bitbake build run 128 Manages one bitbake build run
151 """ 129 """
152 130
153 def __init__(self, featureSet=None, idleCallBackRegister=None): 131 def __init__(self, featureSet=None, server=None):
154 self.recipecaches = None 132 self.recipecaches = None
133 self.baseconfig_valid = False
134 self.parsecache_valid = False
155 self.eventlog = None 135 self.eventlog = None
156 self.skiplist = {} 136 # The skiplists, one per multiconfig
137 self.skiplist_by_mc = defaultdict(dict)
157 self.featureset = CookerFeatures() 138 self.featureset = CookerFeatures()
158 if featureSet: 139 if featureSet:
159 for f in featureSet: 140 for f in featureSet:
160 self.featureset.setFeature(f) 141 self.featureset.setFeature(f)
161 142
143 self.orig_syspath = sys.path.copy()
144 self.orig_sysmodules = [*sys.modules]
145
162 self.configuration = bb.cookerdata.CookerConfiguration() 146 self.configuration = bb.cookerdata.CookerConfiguration()
163 147
164 self.idleCallBackRegister = idleCallBackRegister 148 self.process_server = server
149 self.idleCallBackRegister = None
150 self.waitIdle = None
151 if server:
152 self.idleCallBackRegister = server.register_idle_function
153 self.waitIdle = server.wait_for_idle
165 154
166 bb.debug(1, "BBCooker starting %s" % time.time()) 155 bb.debug(1, "BBCooker starting %s" % time.time())
167 sys.stdout.flush() 156
168 157 self.configwatched = {}
169 self.configwatcher = pyinotify.WatchManager() 158 self.parsewatched = {}
170 bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
171 sys.stdout.flush()
172
173 self.configwatcher.bbseen = set()
174 self.configwatcher.bbwatchedfiles = set()
175 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
176 bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
177 sys.stdout.flush()
178 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
179 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
180 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
181 self.watcher = pyinotify.WatchManager()
182 bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
183 sys.stdout.flush()
184 self.watcher.bbseen = set()
185 self.watcher.bbwatchedfiles = set()
186 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
187
188 bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
189 sys.stdout.flush()
190 159
191 # If being called by something like tinfoil, we need to clean cached data 160 # If being called by something like tinfoil, we need to clean cached data
192 # which may now be invalid 161 # which may now be invalid
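
The constructor now receives the server process object rather than a bare idle-callback registrar, taking both register_idle_function and wait_for_idle from it, and it snapshots sys.path and sys.modules so initConfigurationData() can undo anything imported during parsing. A hypothetical stub showing the interface BBCooker(server=...) relies on (the stub itself is not part of this series):

    class StubServer:
        # Hypothetical stand-in for the bitbake server process: just
        # the two attributes the cooker reads from the server object.
        def __init__(self):
            self.idle_functions = {}

        def register_idle_function(self, function, data):
            self.idle_functions[function] = data

        def wait_for_idle(self, timeout=30):
            pass  # the real server blocks until idle handlers drain
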
@@ -197,14 +166,6 @@ class BBCooker:
197 self.hashserv = None 166 self.hashserv = None
198 self.hashservaddr = None 167 self.hashservaddr = None
199 168
200 self.inotify_modified_files = []
201
202 def _process_inotify_updates(server, cooker, abort):
203 cooker.process_inotify_updates()
204 return 1.0
205
206 self.idleCallBackRegister(_process_inotify_updates, self)
207
208 # TOSTOP must not be set or our children will hang when they output 169 # TOSTOP must not be set or our children will hang when they output
209 try: 170 try:
210 fd = sys.stdout.fileno() 171 fd = sys.stdout.fileno()
@@ -218,8 +179,8 @@ class BBCooker:
218 except UnsupportedOperation: 179 except UnsupportedOperation:
219 pass 180 pass
220 181
221 self.command = bb.command.Command(self) 182 self.command = bb.command.Command(self, self.process_server)
222 self.state = state.initial 183 self.state = State.INITIAL
223 184
224 self.parser = None 185 self.parser = None
225 186
@@ -228,108 +189,68 @@ class BBCooker:
228 signal.signal(signal.SIGHUP, self.sigterm_exception) 189 signal.signal(signal.SIGHUP, self.sigterm_exception)
229 190
230 bb.debug(1, "BBCooker startup complete %s" % time.time()) 191 bb.debug(1, "BBCooker startup complete %s" % time.time())
231 sys.stdout.flush()
232 192
233 def init_configdata(self): 193 def init_configdata(self):
234 if not hasattr(self, "data"): 194 if not hasattr(self, "data"):
235 self.initConfigurationData() 195 self.initConfigurationData()
236 bb.debug(1, "BBCooker parsed base configuration %s" % time.time()) 196 bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
237 sys.stdout.flush()
238 self.handlePRServ() 197 self.handlePRServ()
239 198
240 def process_inotify_updates(self): 199 def _baseconfig_set(self, value):
241 for n in [self.confignotifier, self.notifier]: 200 if value and not self.baseconfig_valid:
242 if n.check_events(timeout=0): 201 bb.server.process.serverlog("Base config valid")
243 # read notified events and enqeue them 202 elif not value and self.baseconfig_valid:
244 n.read_events() 203 bb.server.process.serverlog("Base config invalidated")
245 n.process_events() 204 self.baseconfig_valid = value
246 205
247 def config_notifications(self, event): 206 def _parsecache_set(self, value):
248 if event.maskname == "IN_Q_OVERFLOW": 207 if value and not self.parsecache_valid:
249 bb.warn("inotify event queue overflowed, invalidating caches.") 208 bb.server.process.serverlog("Parse cache valid")
250 self.parsecache_valid = False 209 elif not value and self.parsecache_valid:
251 self.baseconfig_valid = False 210 bb.server.process.serverlog("Parse cache invalidated")
252 bb.parse.clear_cache() 211 self.parsecache_valid = value
253 return 212
254 if not event.pathname in self.configwatcher.bbwatchedfiles: 213 def add_filewatch(self, deps, configwatcher=False):
255 return 214 if configwatcher:
256 if not event.pathname in self.inotify_modified_files: 215 watcher = self.configwatched
257 self.inotify_modified_files.append(event.pathname) 216 else:
258 self.baseconfig_valid = False 217 watcher = self.parsewatched
259
260 def notifications(self, event):
261 if event.maskname == "IN_Q_OVERFLOW":
262 bb.warn("inotify event queue overflowed, invalidating caches.")
263 self.parsecache_valid = False
264 bb.parse.clear_cache()
265 return
266 if event.pathname.endswith("bitbake-cookerdaemon.log") \
267 or event.pathname.endswith("bitbake.lock"):
268 return
269 if not event.pathname in self.inotify_modified_files:
270 self.inotify_modified_files.append(event.pathname)
271 self.parsecache_valid = False
272 218
273 def add_filewatch(self, deps, watcher=None, dirs=False):
274 if not watcher:
275 watcher = self.watcher
276 for i in deps: 219 for i in deps:
277 watcher.bbwatchedfiles.add(i[0]) 220 f = i[0]
278 if dirs: 221 mtime = i[1]
279 f = i[0] 222 watcher[f] = mtime
280 else:
281 f = os.path.dirname(i[0])
282 if f in watcher.bbseen:
283 continue
284 watcher.bbseen.add(f)
285 watchtarget = None
286 while True:
287 # We try and add watches for files that don't exist but if they did, would influence
288 # the parser. The parent directory of these files may not exist, in which case we need
289 # to watch any parent that does exist for changes.
290 try:
291 watcher.add_watch(f, self.watchmask, quiet=False)
292 if watchtarget:
293 watcher.bbwatchedfiles.add(watchtarget)
294 break
295 except pyinotify.WatchManagerError as e:
296 if 'ENOENT' in str(e):
297 watchtarget = f
298 f = os.path.dirname(f)
299 if f in watcher.bbseen:
300 break
301 watcher.bbseen.add(f)
302 continue
303 if 'ENOSPC' in str(e):
304 providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
305 providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
306 providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
307 providerlog.error("Root privilege is required to modify max_user_watches.")
308 raise
309 223
310 def sigterm_exception(self, signum, stackframe): 224 def sigterm_exception(self, signum, stackframe):
311 if signum == signal.SIGTERM: 225 if signum == signal.SIGTERM:
312 bb.warn("Cooker received SIGTERM, shutting down...") 226 bb.warn("Cooker received SIGTERM, shutting down...")
313 elif signum == signal.SIGHUP: 227 elif signum == signal.SIGHUP:
314 bb.warn("Cooker received SIGHUP, shutting down...") 228 bb.warn("Cooker received SIGHUP, shutting down...")
315 self.state = state.forceshutdown 229 self.state = State.FORCE_SHUTDOWN
230 bb.event._should_exit.set()
316 231
317 def setFeatures(self, features): 232 def setFeatures(self, features):
318 # we only accept a new feature set if we're in state initial, so we can reset without problems 233 # we only accept a new feature set if we're in state initial, so we can reset without problems
319 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]: 234 if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
320 raise Exception("Illegal state for feature set change") 235 raise Exception("Illegal state for feature set change")
321 original_featureset = list(self.featureset) 236 original_featureset = list(self.featureset)
322 for feature in features: 237 for feature in features:
323 self.featureset.setFeature(feature) 238 self.featureset.setFeature(feature)
324 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset))) 239 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
325 if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"): 240 if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
326 self.reset() 241 self.reset()
327 242
328 def initConfigurationData(self): 243 def initConfigurationData(self):
329 244 self.state = State.INITIAL
330 self.state = state.initial
331 self.caches_array = [] 245 self.caches_array = []
332 246
247 sys.path = self.orig_syspath.copy()
248 for mod in [*sys.modules]:
249 if mod not in self.orig_sysmodules:
250 del sys.modules[mod]
251
252 self.configwatched = {}
253
333 # Need to preserve BB_CONSOLELOG over resets 254 # Need to preserve BB_CONSOLELOG over resets
334 consolelog = None 255 consolelog = None
335 if hasattr(self, "data"): 256 if hasattr(self, "data"):
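
File watching drops pyinotify entirely: add_filewatch() now just records (path, mtime) pairs into configwatched or parsewatched, and staleness is detected later by re-checking those mtimes (see revalidateCaches() below). The test this reduces to is roughly the following sketch; bb.parse provides the real check_mtime helper:

    import os

    def check_mtime(f, mtime):
        # Sketch only: returns True when the file is unchanged since
        # its mtime was recorded; vanished files count as changed.
        try:
            current = os.stat(f).st_mtime
        except OSError:
            current = 0
        return current == mtime
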
@@ -338,12 +259,12 @@ class BBCooker:
338 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: 259 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
339 self.enableDataTracking() 260 self.enableDataTracking()
340 261
341 all_extra_cache_names = [] 262 caches_name_array = ['bb.cache:CoreRecipeInfo']
342 # We hardcode all known cache types in a single place, here. 263 # We hardcode all known cache types in a single place, here.
343 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: 264 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
344 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo") 265 caches_name_array.append("bb.cache_extra:HobRecipeInfo")
345 266 if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
346 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names 267 caches_name_array.append("bb.cache:SiggenRecipeInfo")
347 268
348 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! 269 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
349 # This is the entry point, no further check needed! 270 # This is the entry point, no further check needed!
@@ -359,9 +280,12 @@ class BBCooker:
359 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False) 280 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
360 self.databuilder.parseBaseConfiguration() 281 self.databuilder.parseBaseConfiguration()
361 self.data = self.databuilder.data 282 self.data = self.databuilder.data
362 self.data_hash = self.databuilder.data_hash
363 self.extraconfigdata = {} 283 self.extraconfigdata = {}
364 284
285 eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
286 if not self.configuration.writeeventlog and eventlog:
287 self.setupEventLog(eventlog)
288
365 if consolelog: 289 if consolelog:
366 self.data.setVar("BB_CONSOLELOG", consolelog) 290 self.data.setVar("BB_CONSOLELOG", consolelog)
367 291
@@ -371,31 +295,48 @@ class BBCooker:
371 self.disableDataTracking() 295 self.disableDataTracking()
372 296
373 for mc in self.databuilder.mcdata.values(): 297 for mc in self.databuilder.mcdata.values():
374 mc.renameVar("__depends", "__base_depends") 298 self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)
375 self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
376 299
377 self.baseconfig_valid = True 300 self._baseconfig_set(True)
378 self.parsecache_valid = False 301 self._parsecache_set(False)
379 302
380 def handlePRServ(self): 303 def handlePRServ(self):
381 # Setup a PR Server based on the new configuration 304 # Setup a PR Server based on the new configuration
382 try: 305 try:
383 self.prhost = prserv.serv.auto_start(self.data) 306 self.prhost = prserv.serv.auto_start(self.data)
384 except prserv.serv.PRServiceConfigError as e: 307 except prserv.serv.PRServiceConfigError as e:
385 bb.fatal("Unable to start PR Server, exitting") 308 bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")
386 309
387 if self.data.getVar("BB_HASHSERVE") == "auto": 310 if self.data.getVar("BB_HASHSERVE") == "auto":
388 # Create a new hash server bound to a unix domain socket 311 # Create a new hash server bound to a unix domain socket
389 if not self.hashserv: 312 if not self.hashserv:
390 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" 313 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
314 upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
315 if upstream:
316 try:
317 with hashserv.create_client(upstream) as client:
318 client.ping()
319 except ImportError as e:
320 bb.fatal(""""Unable to use hash equivalence server at '%s' due to missing or incorrect python module:
321%s
322Please install the needed module on the build host, or use an environment containing it (e.g a pip venv or OpenEmbedded's buildtools tarball).
323You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts."""
324 % (upstream, repr(e)))
325 except ConnectionError as e:
326 bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s"
327 % (upstream, repr(e)))
328 upstream = None
329
391 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") 330 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
392 self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False) 331 self.hashserv = hashserv.create_server(
393 self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever) 332 self.hashservaddr,
394 self.hashserv.process.start() 333 dbfile,
395 self.data.setVar("BB_HASHSERVE", self.hashservaddr) 334 sync=False,
396 self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr) 335 upstream=upstream,
397 self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr) 336 )
337 self.hashserv.serve_as_process(log_level=logging.WARNING)
398 for mc in self.databuilder.mcdata: 338 for mc in self.databuilder.mcdata:
339 self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
399 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) 340 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
400 341
401 bb.parse.init_parser(self.data) 342 bb.parse.init_parser(self.data)
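
handlePRServ() now probes an optional upstream hash equivalence server before starting the local one: a missing Python module is fatal, while a connection failure only warns and drops the upstream. In configuration terms the feature is driven by two variables; the address below is purely illustrative:

    BB_HASHSERVE = "auto"
    BB_HASHSERVE_UPSTREAM = "hashserv.example.com:8687"
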
@@ -410,6 +351,34 @@ class BBCooker:
410 if hasattr(self, "data"): 351 if hasattr(self, "data"):
411 self.data.disableTracking() 352 self.data.disableTracking()
412 353
354 def revalidateCaches(self):
355 bb.parse.clear_cache()
356
357 clean = True
358 for f in self.configwatched:
359 if not bb.parse.check_mtime(f, self.configwatched[f]):
360 bb.server.process.serverlog("Found %s changed, invalid cache" % f)
361 self._baseconfig_set(False)
362 self._parsecache_set(False)
363 clean = False
364 break
365
366 if clean:
367 for f in self.parsewatched:
368 if not bb.parse.check_mtime(f, self.parsewatched[f]):
369 bb.server.process.serverlog("Found %s changed, invalid cache" % f)
370 self._parsecache_set(False)
371 clean = False
372 break
373
374 if not clean:
375 bb.parse.BBHandler.cached_statements = {}
376
377 # If writes were made to any of the data stores, we need to recalculate the data
378 # store cache
379 if hasattr(self, "databuilder"):
380 self.databuilder.calc_datastore_hashes()
381
413 def parseConfiguration(self): 382 def parseConfiguration(self):
414 self.updateCacheSync() 383 self.updateCacheSync()
415 384
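
revalidateCaches() is the polling counterpart to the new add_filewatch(): a changed config file invalidates both the base config and the parse cache, a changed recipe file only the parse cache, and any change also drops the cached parser statements. The two-tier walk reduces to roughly this sketch (stale_paths and its arguments are illustrative names):

    def stale_paths(configwatched, parsewatched, check_mtime):
        # Config files are checked first and invalidate everything;
        # recipe files only invalidate the parse cache.
        for f, mtime in configwatched.items():
            if not check_mtime(f, mtime):
                return ("baseconfig", f)
        for f, mtime in parsewatched.items():
            if not check_mtime(f, mtime):
                return ("parsecache", f)
        return None
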
@@ -428,8 +397,24 @@ class BBCooker:
428 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) 397 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
429 398
430 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) 399 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
431 400 self.collections = {}
432 self.parsecache_valid = False 401 for mc in self.multiconfigs:
402 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
403
404 self._parsecache_set(False)
405
406 def setupEventLog(self, eventlog):
407 if self.eventlog and self.eventlog[0] != eventlog:
408 bb.event.unregister_UIHhandler(self.eventlog[1])
409 self.eventlog = None
410 if not self.eventlog or self.eventlog[0] != eventlog:
411 # we log all events to a file if so directed
412 # register the log file writer as UI Handler
413 if not os.path.exists(os.path.dirname(eventlog)):
414 bb.utils.mkdirhier(os.path.dirname(eventlog))
415 writer = EventWriter(self, eventlog)
416 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
417 self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)
433 418
434 def updateConfigOpts(self, options, environment, cmdline): 419 def updateConfigOpts(self, options, environment, cmdline):
435 self.ui_cmdline = cmdline 420 self.ui_cmdline = cmdline
@@ -450,14 +435,7 @@ class BBCooker:
450 setattr(self.configuration, o, options[o]) 435 setattr(self.configuration, o, options[o])
451 436
452 if self.configuration.writeeventlog: 437 if self.configuration.writeeventlog:
453 if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog: 438 self.setupEventLog(self.configuration.writeeventlog)
454 bb.event.unregister_UIHhandler(self.eventlog[1])
455 if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
456 # we log all events to a file if so directed
457 # register the log file writer as UI Handler
458 writer = EventWriter(self, self.configuration.writeeventlog)
459 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
460 self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
461 439
462 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel 440 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
463 bb.msg.loggerDefaultDomains = self.configuration.debug_domains 441 bb.msg.loggerDefaultDomains = self.configuration.debug_domains
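
setupEventLog() centralises what updateConfigOpts() used to do inline: a handler pointing at a different file is unregistered first, the log directory is created on demand, and the writer is kept in the eventlog tuple so write_variables() can be called at build start. Together with the BB_DEFAULT_EVENTLOG hook in initConfigurationData() above, event logging can now be enabled from configuration alone; an illustrative setting:

    BB_DEFAULT_EVENTLOG = "${TOPDIR}/eventlog.json"
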
@@ -487,37 +465,37 @@ class BBCooker:
487 # Now update all the variables not in the datastore to match 465 # Now update all the variables not in the datastore to match
488 self.configuration.env = environment 466 self.configuration.env = environment
489 467
468 self.revalidateCaches()
490 if not clean: 469 if not clean:
491 logger.debug("Base environment change, triggering reparse") 470 logger.debug("Base environment change, triggering reparse")
492 self.reset() 471 self.reset()
493 472
494 def runCommands(self, server, data, abort):
495 """
496 Run any queued asynchronous command
497 This is done by the idle handler so it runs in true context rather than
498 tied to any UI.
499 """
500
501 return self.command.runAsyncCommand()
502
503 def showVersions(self): 473 def showVersions(self):
504 474
505 (latest_versions, preferred_versions) = self.findProviders() 475 (latest_versions, preferred_versions, required) = self.findProviders()
506 476
507 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version") 477 logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
508 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================") 478 logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")
509 479
510 for p in sorted(self.recipecaches[''].pkg_pn): 480 for p in sorted(self.recipecaches[''].pkg_pn):
511 pref = preferred_versions[p] 481 preferred = preferred_versions[p]
512 latest = latest_versions[p] 482 latest = latest_versions[p]
483 requiredstr = ""
484 preferredstr = ""
485 if required[p]:
486 if preferred[0] is not None:
487 requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
488 else:
489 bb.fatal("REQUIRED_VERSION of package %s not available" % p)
490 else:
491 preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
513 492
514 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
515 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] 493 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
516 494
517 if pref == latest: 495 if preferred == latest:
518 prefstr = "" 496 preferredstr = ""
519 497
520 logger.plain("%-35s %25s %25s", p, lateststr, prefstr) 498 logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
521 499
522 def showEnvironment(self, buildfile=None, pkgs_to_build=None): 500 def showEnvironment(self, buildfile=None, pkgs_to_build=None):
523 """ 501 """
@@ -533,6 +511,8 @@ class BBCooker:
533 if not orig_tracking: 511 if not orig_tracking:
534 self.enableDataTracking() 512 self.enableDataTracking()
535 self.reset() 513 self.reset()
514 # reset() resets to the UI requested value so we have to redo this
515 self.enableDataTracking()
536 516
537 def mc_base(p): 517 def mc_base(p):
538 if p.startswith('mc:'): 518 if p.startswith('mc:'):
@@ -556,21 +536,21 @@ class BBCooker:
556 if pkgs_to_build[0] in set(ignore.split()): 536 if pkgs_to_build[0] in set(ignore.split()):
557 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) 537 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
558 538
559 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True) 539 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)
560 540
561 mc = runlist[0][0] 541 mc = runlist[0][0]
562 fn = runlist[0][3] 542 fn = runlist[0][3]
563 543
564 if fn: 544 if fn:
565 try: 545 try:
566 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 546 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
567 envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn)) 547 envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
568 except Exception as e: 548 except Exception as e:
569 parselog.exception("Unable to read %s", fn) 549 parselog.exception("Unable to read %s", fn)
570 raise 550 raise
571 else: 551 else:
572 if not mc in self.databuilder.mcdata: 552 if not mc in self.databuilder.mcdata:
573 bb.fatal('Not multiconfig named "%s" found' % mc) 553 bb.fatal('No multiconfig named "%s" found' % mc)
574 envdata = self.databuilder.mcdata[mc] 554 envdata = self.databuilder.mcdata[mc]
575 data.expandKeys(envdata) 555 data.expandKeys(envdata)
576 parse.ast.runAnonFuncs(envdata) 556 parse.ast.runAnonFuncs(envdata)
@@ -585,7 +565,7 @@ class BBCooker:
585 data.emit_env(env, envdata, True) 565 data.emit_env(env, envdata, True)
586 logger.plain(env.getvalue()) 566 logger.plain(env.getvalue())
587 567
588 # emit the metadata which isnt valid shell 568 # emit the metadata which isn't valid shell
589 for e in sorted(envdata.keys()): 569 for e in sorted(envdata.keys()):
590 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False): 570 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
591 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False)) 571 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
@@ -594,7 +574,7 @@ class BBCooker:
594 self.disableDataTracking() 574 self.disableDataTracking()
595 self.reset() 575 self.reset()
596 576
597 def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False): 577 def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
598 """ 578 """
599 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 579 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
600 """ 580 """
@@ -641,8 +621,8 @@ class BBCooker:
641 localdata = {} 621 localdata = {}
642 622
643 for mc in self.multiconfigs: 623 for mc in self.multiconfigs:
644 taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete) 624 taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
645 localdata[mc] = data.createCopy(self.databuilder.mcdata[mc]) 625 localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
646 bb.data.expandKeys(localdata[mc]) 626 bb.data.expandKeys(localdata[mc])
647 627
648 current = 0 628 current = 0
@@ -690,19 +670,18 @@ class BBCooker:
690 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 670 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
691 mcdeps |= set(taskdata[mc].get_mcdepends()) 671 mcdeps |= set(taskdata[mc].get_mcdepends())
692 new = False 672 new = False
693 for mc in self.multiconfigs: 673 for k in mcdeps:
694 for k in mcdeps: 674 if k in seen:
695 if k in seen: 675 continue
696 continue 676 l = k.split(':')
697 l = k.split(':') 677 depmc = l[2]
698 depmc = l[2] 678 if depmc not in self.multiconfigs:
699 if depmc not in self.multiconfigs: 679 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
700 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc)) 680 else:
701 else: 681 logger.debug("Adding providers for multiconfig dependency %s" % l[3])
702 logger.debug("Adding providers for multiconfig dependency %s" % l[3]) 682 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
703 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3]) 683 seen.add(k)
704 seen.add(k) 684 new = True
705 new = True
706 685
707 for mc in self.multiconfigs: 686 for mc in self.multiconfigs:
708 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 687 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
@@ -710,14 +689,14 @@ class BBCooker:
710 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) 689 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
711 return taskdata, runlist 690 return taskdata, runlist
712 691
713 def prepareTreeData(self, pkgs_to_build, task): 692 def prepareTreeData(self, pkgs_to_build, task, halt=False):
714 """ 693 """
715 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 694 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
716 """ 695 """
717 696
718 # We set abort to False here to prevent unbuildable targets raising 697 # We set halt to False here to prevent unbuildable targets raising
719 # an exception when we're just generating data 698 # an exception when we're just generating data
720 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) 699 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
721 700
722 return runlist, taskdata 701 return runlist, taskdata
723 702
@@ -731,7 +710,7 @@ class BBCooker:
731 if not task.startswith("do_"): 710 if not task.startswith("do_"):
732 task = "do_%s" % task 711 task = "do_%s" % task
733 712
734 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) 713 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
735 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 714 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
736 rq.rqdata.prepare() 715 rq.rqdata.prepare()
737 return self.buildDependTree(rq, taskdata) 716 return self.buildDependTree(rq, taskdata)
@@ -792,7 +771,9 @@ class BBCooker:
792 for dep in rq.rqdata.runtaskentries[tid].depends: 771 for dep in rq.rqdata.runtaskentries[tid].depends:
793 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep) 772 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
794 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn] 773 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
795 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep))) 774 if depmc:
775 depmc = "mc:" + depmc + ":"
776 depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
796 if taskfn not in seen_fns: 777 if taskfn not in seen_fns:
797 seen_fns.append(taskfn) 778 seen_fns.append(taskfn)
798 packages = [] 779 packages = []
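
Task dependency identifiers in the generated depgraph now carry their multiconfig prefix, so edges that cross multiconfigs are unambiguous in the dot output. Illustratively (recipe, task, and multiconfig names made up):

    busybox.do_compile           # default multiconfig, unchanged
    mc:musl:busybox.do_compile   # recipe built in the "musl" multiconfig
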
@@ -924,10 +905,11 @@ class BBCooker:
924 905
925 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) 906 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
926 907
927 with open('pn-buildlist', 'w') as f: 908 pns = depgraph["pn"].keys()
928 for pn in depgraph["pn"]: 909 if pns:
929 f.write(pn + "\n") 910 with open('pn-buildlist', 'w') as f:
930 logger.info("PN build list saved to 'pn-buildlist'") 911 f.write("%s\n" % "\n".join(sorted(pns)))
912 logger.info("PN build list saved to 'pn-buildlist'")
931 913
932 # Remove old format output files to ensure no confusion with stale data 914 # Remove old format output files to ensure no confusion with stale data
933 try: 915 try:
@@ -961,7 +943,7 @@ class BBCooker:
961 for mc in self.multiconfigs: 943 for mc in self.multiconfigs:
962 # First get list of recipes, including skipped 944 # First get list of recipes, including skipped
963 recipefns = list(self.recipecaches[mc].pkg_fn.keys()) 945 recipefns = list(self.recipecaches[mc].pkg_fn.keys())
964 recipefns.extend(self.skiplist.keys()) 946 recipefns.extend(self.skiplist_by_mc[mc].keys())
965 947
966 # Work out list of bbappends that have been applied 948 # Work out list of bbappends that have been applied
967 applied_appends = [] 949 applied_appends = []
@@ -980,13 +962,7 @@ class BBCooker:
980 '\n '.join(appends_without_recipes[mc]))) 962 '\n '.join(appends_without_recipes[mc])))
981 963
982 if msgs: 964 if msgs:
983 msg = "\n".join(msgs) 965 bb.fatal("\n".join(msgs))
984 warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
985 False) or "no"
986 if warn_only.lower() in ("1", "yes", "true"):
987 bb.warn(msg)
988 else:
989 bb.fatal(msg)
990 966
991 def handlePrefProviders(self): 967 def handlePrefProviders(self):
992 968
@@ -1056,6 +1032,11 @@ class BBCooker:
1056 if matches: 1032 if matches:
1057 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) 1033 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1058 1034
1035 def testCookerCommandEvent(self, filepattern):
1036 # Dummy command used by OEQA selftest to test tinfoil without IO
1037 matches = ["A", "B"]
1038 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1039
1059 def findProviders(self, mc=''): 1040 def findProviders(self, mc=''):
1060 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1041 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1061 1042
@@ -1063,10 +1044,16 @@ class BBCooker:
1063 if pn in self.recipecaches[mc].providers: 1044 if pn in self.recipecaches[mc].providers:
1064 filenames = self.recipecaches[mc].providers[pn] 1045 filenames = self.recipecaches[mc].providers[pn]
1065 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc]) 1046 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
1066 filename = eligible[0] 1047 if eligible is not None:
1048 filename = eligible[0]
1049 else:
1050 filename = None
1067 return None, None, None, filename 1051 return None, None, None, filename
1068 elif pn in self.recipecaches[mc].pkg_pn: 1052 elif pn in self.recipecaches[mc].pkg_pn:
1069 return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1053 (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1054 if required and preferred_file is None:
1055 return None, None, None, None
1056 return (latest, latest_f, preferred_ver, preferred_file)
1070 else: 1057 else:
1071 return None, None, None, None 1058 return None, None, None, None
1072 1059
@@ -1211,15 +1198,15 @@ class BBCooker:
1211 except bb.utils.VersionStringException as vse: 1198 except bb.utils.VersionStringException as vse:
1212 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) 1199 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1213 if not res: 1200 if not res:
1214 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver) 1201 parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1215 continue 1202 continue
1216 else: 1203 else:
1217 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec) 1204 parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1218 continue 1205 continue
1219 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec) 1206 parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1220 collection_depends[c].append(rec) 1207 collection_depends[c].append(rec)
1221 else: 1208 else:
1222 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec) 1209 parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1223 1210
1224 # Recursively work out collection priorities based on dependencies 1211 # Recursively work out collection priorities based on dependencies
1225 def calc_layer_priority(collection): 1212 def calc_layer_priority(collection):
@@ -1231,7 +1218,7 @@ class BBCooker:
1231 if depprio > max_depprio: 1218 if depprio > max_depprio:
1232 max_depprio = depprio 1219 max_depprio = depprio
1233 max_depprio += 1 1220 max_depprio += 1
1234 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio) 1221 parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
1235 collection_priorities[collection] = max_depprio 1222 collection_priorities[collection] = max_depprio
1236 1223
1237 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities 1224 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
@@ -1243,7 +1230,7 @@ class BBCooker:
1243 errors = True 1230 errors = True
1244 continue 1231 continue
1245 elif regex == "": 1232 elif regex == "":
1246 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c) 1233 parselog.debug("BBFILE_PATTERN_%s is empty" % c)
1247 cre = re.compile('^NULL$') 1234 cre = re.compile('^NULL$')
1248 errors = False 1235 errors = False
1249 else: 1236 else:
@@ -1290,8 +1277,8 @@ class BBCooker:
1290 if bf.startswith("/") or bf.startswith("../"): 1277 if bf.startswith("/") or bf.startswith("../"):
1291 bf = os.path.abspath(bf) 1278 bf = os.path.abspath(bf)
1292 1279
1293 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)} 1280 collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1294 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1281 filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1295 try: 1282 try:
1296 os.stat(bf) 1283 os.stat(bf)
1297 bf = os.path.abspath(bf) 1284 bf = os.path.abspath(bf)
@@ -1355,9 +1342,10 @@ class BBCooker:
1355 self.buildSetVars() 1342 self.buildSetVars()
1356 self.reset_mtime_caches() 1343 self.reset_mtime_caches()
1357 1344
1358 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 1345 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
1359 1346
1360 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn)) 1347 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
1348 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
1361 infos = dict(infos) 1349 infos = dict(infos)
1362 1350
1363 fn = bb.cache.realfn2virtual(fn, cls, mc) 1351 fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1383,14 +1371,16 @@ class BBCooker:
1383 self.recipecaches[mc].rundeps[fn] = defaultdict(list) 1371 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1384 self.recipecaches[mc].runrecs[fn] = defaultdict(list) 1372 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1385 1373
1374 bb.parse.siggen.setup_datacache(self.recipecaches)
1375
1386 # Invalidate task for target if force mode active 1376 # Invalidate task for target if force mode active
1387 if self.configuration.force: 1377 if self.configuration.force:
1388 logger.verbose("Invalidate task %s, %s", task, fn) 1378 logger.verbose("Invalidate task %s, %s", task, fn)
1389 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn) 1379 bb.parse.siggen.invalidate_task(task, fn)
1390 1380
1391 # Setup taskdata structure 1381 # Setup taskdata structure
1392 taskdata = {} 1382 taskdata = {}
1393 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort) 1383 taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
1394 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item) 1384 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1395 1385
1396 if quietlog: 1386 if quietlog:
@@ -1400,21 +1390,24 @@ class BBCooker:
1400 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME") 1390 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1401 if fireevents: 1391 if fireevents:
1402 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc]) 1392 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1393 if self.eventlog:
1394 self.eventlog[2].write_variables()
1395 bb.event.enable_heartbeat()
1403 1396
1404 # Execute the runqueue 1397 # Execute the runqueue
1405 runlist = [[mc, item, task, fn]] 1398 runlist = [[mc, item, task, fn]]
1406 1399
1407 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1400 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1408 1401
1409 def buildFileIdle(server, rq, abort): 1402 def buildFileIdle(server, rq, halt):
1410 1403
1411 msg = None 1404 msg = None
1412 interrupted = 0 1405 interrupted = 0
1413 if abort or self.state == state.forceshutdown: 1406 if halt or self.state == State.FORCE_SHUTDOWN:
1414 rq.finish_runqueue(True) 1407 rq.finish_runqueue(True)
1415 msg = "Forced shutdown" 1408 msg = "Forced shutdown"
1416 interrupted = 2 1409 interrupted = 2
1417 elif self.state == state.shutdown: 1410 elif self.state == State.SHUTDOWN:
1418 rq.finish_runqueue(False) 1411 rq.finish_runqueue(False)
1419 msg = "Stopped build" 1412 msg = "Stopped build"
1420 interrupted = 1 1413 interrupted = 1
@@ -1425,41 +1418,71 @@ class BBCooker:
1425 failures += len(exc.args) 1418 failures += len(exc.args)
1426 retval = False 1419 retval = False
1427 except SystemExit as exc: 1420 except SystemExit as exc:
1428 self.command.finishAsyncCommand(str(exc))
1429 if quietlog: 1421 if quietlog:
1430 bb.runqueue.logger.setLevel(rqloglevel) 1422 bb.runqueue.logger.setLevel(rqloglevel)
1431 return False 1423 return bb.server.process.idleFinish(str(exc))
1432 1424
1433 if not retval: 1425 if not retval:
1434 if fireevents: 1426 if fireevents:
1435 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc]) 1427 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1436 self.command.finishAsyncCommand(msg) 1428 bb.event.disable_heartbeat()
1437 # We trashed self.recipecaches above 1429 # We trashed self.recipecaches above
1438 self.parsecache_valid = False 1430 self._parsecache_set(False)
1439 self.configuration.limited_deps = False 1431 self.configuration.limited_deps = False
1440 bb.parse.siggen.reset(self.data) 1432 bb.parse.siggen.reset(self.data)
1441 if quietlog: 1433 if quietlog:
1442 bb.runqueue.logger.setLevel(rqloglevel) 1434 bb.runqueue.logger.setLevel(rqloglevel)
1443 return False 1435 return bb.server.process.idleFinish(msg)
1444 if retval is True: 1436 if retval is True:
1445 return True 1437 return True
1446 return retval 1438 return retval
1447 1439
1448 self.idleCallBackRegister(buildFileIdle, rq) 1440 self.idleCallBackRegister(buildFileIdle, rq)
1449 1441
1442 def getTaskSignatures(self, target, tasks):
1443 sig = []
1444 getAllTaskSignatures = False
1445
1446 if not tasks:
1447 tasks = ["do_build"]
1448 getAllTaskSignatures = True
1449
1450 for task in tasks:
1451 taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
1452 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1453 rq.rqdata.prepare()
1454
1455 for l in runlist:
1456 mc, pn, taskname, fn = l
1457
1458 taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
1459 for t in taskdep['tasks']:
1460 if t in taskdep['nostamp'] or "setscene" in t:
1461 continue
1462 tid = bb.runqueue.build_tid(mc, fn, t)
1463
1464 if t in task or getAllTaskSignatures:
1465 try:
1466 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
1467 except KeyError:
1468 sig.append(self.getTaskSignatures(target, [t])[0])
1469
1470 return sig
1471
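
getTaskSignatures() returns one [pn, taskname, unihash] triple per matching task; with no task list it instead walks every stamped, non-setscene task reachable from do_build. The result shape, with an illustrative call ("cooker" is assumed to be a live BBCooker, and the hash is made up):

    sigs = cooker.getTaskSignatures(["busybox"], ["do_compile"])
    # -> [["busybox", "do_compile", "3fd1c1d80d4bd0a0f3e8c1a2b5d6e7f8"]]
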
1450 def buildTargets(self, targets, task): 1472 def buildTargets(self, targets, task):
1451 """ 1473 """
1452 Attempt to build the targets specified 1474 Attempt to build the targets specified
1453 """ 1475 """
1454 1476
1455 def buildTargetsIdle(server, rq, abort): 1477 def buildTargetsIdle(server, rq, halt):
1456 msg = None 1478 msg = None
1457 interrupted = 0 1479 interrupted = 0
1458 if abort or self.state == state.forceshutdown: 1480 if halt or self.state == State.FORCE_SHUTDOWN:
1481 bb.event._should_exit.set()
1459 rq.finish_runqueue(True) 1482 rq.finish_runqueue(True)
1460 msg = "Forced shutdown" 1483 msg = "Forced shutdown"
1461 interrupted = 2 1484 interrupted = 2
1462 elif self.state == state.shutdown: 1485 elif self.state == State.SHUTDOWN:
1463 rq.finish_runqueue(False) 1486 rq.finish_runqueue(False)
1464 msg = "Stopped build" 1487 msg = "Stopped build"
1465 interrupted = 1 1488 interrupted = 1
@@ -1470,16 +1493,16 @@ class BBCooker:
1470 failures += len(exc.args) 1493 failures += len(exc.args)
1471 retval = False 1494 retval = False
1472 except SystemExit as exc: 1495 except SystemExit as exc:
1473 self.command.finishAsyncCommand(str(exc)) 1496 return bb.server.process.idleFinish(str(exc))
1474 return False
1475 1497
1476 if not retval: 1498 if not retval:
1477 try: 1499 try:
1478 for mc in self.multiconfigs: 1500 for mc in self.multiconfigs:
1479 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc]) 1501 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1480 finally: 1502 finally:
1481 self.command.finishAsyncCommand(msg) 1503 bb.event.disable_heartbeat()
1482 return False 1504 return bb.server.process.idleFinish(msg)
1505
1483 if retval is True: 1506 if retval is True:
1484 return True 1507 return True
1485 return retval 1508 return retval
@@ -1498,7 +1521,7 @@ class BBCooker:
1498 1521
1499 bb.event.fire(bb.event.BuildInit(packages), self.data) 1522 bb.event.fire(bb.event.BuildInit(packages), self.data)
1500 1523
1501 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort) 1524 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
1502 1525
1503 buildname = self.data.getVar("BUILDNAME", False) 1526 buildname = self.data.getVar("BUILDNAME", False)
1504 1527
@@ -1511,6 +1534,9 @@ class BBCooker:
1511 1534
1512 for mc in self.multiconfigs: 1535 for mc in self.multiconfigs:
1513 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc]) 1536 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
1537 if self.eventlog:
1538 self.eventlog[2].write_variables()
1539 bb.event.enable_heartbeat()
1514 1540
1515 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1541 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1516 if 'universe' in targets: 1542 if 'universe' in targets:
@@ -1520,7 +1546,13 @@ class BBCooker:
1520 1546
1521 1547
1522 def getAllKeysWithFlags(self, flaglist): 1548 def getAllKeysWithFlags(self, flaglist):
1549 def dummy_autorev(d):
1550 return
1551
1523 dump = {} 1552 dump = {}
1553 # Horrible but for now we need to avoid any side effects of autorev being called
1554 saved = bb.fetch2.get_autorev
1555 bb.fetch2.get_autorev = dummy_autorev
1524 for k in self.data.keys(): 1556 for k in self.data.keys():
1525 try: 1557 try:
1526 expand = True 1558 expand = True
@@ -1540,20 +1572,14 @@ class BBCooker:
1540 dump[k][d] = None 1572 dump[k][d] = None
1541 except Exception as e: 1573 except Exception as e:
1542 print(e) 1574 print(e)
1575 bb.fetch2.get_autorev = saved
1543 return dump 1576 return dump
1544 1577
1545 1578
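
getAllKeysWithFlags() temporarily swaps out bb.fetch2.get_autorev to keep the variable dump free of AUTOREV side effects, restoring it after the loop. The in-tree code restores the function manually, so an exception inside the loop would leave the stub in place; a context-manager variant (a sketch, not part of this series) makes the swap exception-safe:

    import contextlib

    import bb.fetch2

    @contextlib.contextmanager
    def autorev_disabled():
        # Exception-safe form of the save/patch/restore dance above.
        saved = bb.fetch2.get_autorev
        bb.fetch2.get_autorev = lambda d: None
        try:
            yield
        finally:
            bb.fetch2.get_autorev = saved
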
1546 def updateCacheSync(self): 1579 def updateCacheSync(self):
1547 if self.state == state.running: 1580 if self.state == State.RUNNING:
1548 return 1581 return
1549 1582
1550 # reload files for which we got notifications
1551 for p in self.inotify_modified_files:
1552 bb.parse.update_cache(p)
1553 if p in bb.parse.BBHandler.cached_statements:
1554 del bb.parse.BBHandler.cached_statements[p]
1555 self.inotify_modified_files = []
1556
1557 if not self.baseconfig_valid: 1583 if not self.baseconfig_valid:
1558 logger.debug("Reloading base configuration data") 1584 logger.debug("Reloading base configuration data")
1559 self.initConfigurationData() 1585 self.initConfigurationData()
@@ -1561,19 +1587,22 @@ class BBCooker:
1561 1587
1562 # This is called for all async commands when self.state != running 1588 # This is called for all async commands when self.state != running
1563 def updateCache(self): 1589 def updateCache(self):
1564 if self.state == state.running: 1590 if self.state == State.RUNNING:
1565 return 1591 return
1566 1592
1567 if self.state in (state.shutdown, state.forceshutdown, state.error): 1593 if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
1568 if hasattr(self.parser, 'shutdown'): 1594 if hasattr(self.parser, 'shutdown'):
1569 self.parser.shutdown(clean=False, force = True) 1595 self.parser.shutdown(clean=False)
1570 self.parser.final_cleanup() 1596 self.parser.final_cleanup()
1571 raise bb.BBHandledException() 1597 raise bb.BBHandledException()
1572 1598
1573 if self.state != state.parsing: 1599 if self.state != State.PARSING:
1574 self.updateCacheSync() 1600 self.updateCacheSync()
1575 1601
1576 if self.state != state.parsing and not self.parsecache_valid: 1602 if self.state != State.PARSING and not self.parsecache_valid:
1603 bb.server.process.serverlog("Parsing started")
1604 self.parsewatched = {}
1605
1577 bb.parse.siggen.reset(self.data) 1606 bb.parse.siggen.reset(self.data)
1578 self.parseConfiguration () 1607 self.parseConfiguration ()
1579 if CookerFeatures.SEND_SANITYEVENTS in self.featureset: 1608 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
@@ -1587,37 +1616,35 @@ class BBCooker:
1587 for dep in self.configuration.extra_assume_provided: 1616 for dep in self.configuration.extra_assume_provided:
1588 self.recipecaches[mc].ignored_dependencies.add(dep) 1617 self.recipecaches[mc].ignored_dependencies.add(dep)
1589 1618
1590 self.collections = {}
1591
1592 mcfilelist = {} 1619 mcfilelist = {}
1593 total_masked = 0 1620 total_masked = 0
1594 searchdirs = set() 1621 searchdirs = set()
1595 for mc in self.multiconfigs: 1622 for mc in self.multiconfigs:
1596 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1597 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1623 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1598 1624
1599 mcfilelist[mc] = filelist 1625 mcfilelist[mc] = filelist
1600 total_masked += masked 1626 total_masked += masked
1601 searchdirs |= set(search) 1627 searchdirs |= set(search)
1602 1628
1603 # Add inotify watches for directories searched for bb/bbappend files 1629 # Add mtimes for directories searched for bb/bbappend files
1604 for dirent in searchdirs: 1630 for dirent in searchdirs:
1605 self.add_filewatch([[dirent]], dirs=True) 1631 self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
1606 1632
1607 self.parser = CookerParser(self, mcfilelist, total_masked) 1633 self.parser = CookerParser(self, mcfilelist, total_masked)
1608 self.parsecache_valid = True 1634 self._parsecache_set(True)
1609 1635
1610 self.state = state.parsing 1636 self.state = State.PARSING
1611 1637
1612 if not self.parser.parse_next(): 1638 if not self.parser.parse_next():
1613 collectlog.debug(1, "parsing complete") 1639 bb.server.process.serverlog("Parsing completed")
1640 collectlog.debug("parsing complete")
1614 if self.parser.error: 1641 if self.parser.error:
1615 raise bb.BBHandledException() 1642 raise bb.BBHandledException()
1616 self.show_appends_with_no_recipes() 1643 self.show_appends_with_no_recipes()
1617 self.handlePrefProviders() 1644 self.handlePrefProviders()
1618 for mc in self.multiconfigs: 1645 for mc in self.multiconfigs:
1619 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data) 1646 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1620 self.state = state.running 1647 self.state = State.RUNNING
1621 1648
1622 # Send an event listing all stamps reachable after parsing 1649 # Send an event listing all stamps reachable after parsing
1623 # which the metadata may use to clean up stale data 1650 # which the metadata may use to clean up stale data
@@ -1633,7 +1660,7 @@ class BBCooker:
1633 # Return a copy, don't modify the original 1660 # Return a copy, don't modify the original
1634 pkgs_to_build = pkgs_to_build[:] 1661 pkgs_to_build = pkgs_to_build[:]
1635 1662
1636 if len(pkgs_to_build) == 0: 1663 if not pkgs_to_build:
1637 raise NothingToBuild 1664 raise NothingToBuild
1638 1665
1639 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() 1666 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
@@ -1655,7 +1682,7 @@ class BBCooker:
1655 1682
1656 if 'universe' in pkgs_to_build: 1683 if 'universe' in pkgs_to_build:
1657 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.") 1684 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1658 parselog.debug(1, "collating packages for \"universe\"") 1685 parselog.debug("collating packages for \"universe\"")
1659 pkgs_to_build.remove('universe') 1686 pkgs_to_build.remove('universe')
1660 for mc in self.multiconfigs: 1687 for mc in self.multiconfigs:
1661 for t in self.recipecaches[mc].universe_target: 1688 for t in self.recipecaches[mc].universe_target:
@@ -1680,26 +1707,36 @@ class BBCooker:
1680 def post_serve(self): 1707 def post_serve(self):
1681 self.shutdown(force=True) 1708 self.shutdown(force=True)
1682 prserv.serv.auto_shutdown() 1709 prserv.serv.auto_shutdown()
1710 if hasattr(bb.parse, "siggen"):
1711 bb.parse.siggen.exit()
1683 if self.hashserv: 1712 if self.hashserv:
1684 self.hashserv.process.terminate() 1713 self.hashserv.process.terminate()
1685 self.hashserv.process.join() 1714 self.hashserv.process.join()
1686 if hasattr(self, "data"): 1715 if hasattr(self, "data"):
1687 bb.event.fire(CookerExit(), self.data) 1716 bb.event.fire(CookerExit(), self.data)
1688 1717
1689 def shutdown(self, force = False): 1718 def shutdown(self, force=False):
1690 if force: 1719 if force:
1691 self.state = state.forceshutdown 1720 self.state = State.FORCE_SHUTDOWN
1721 bb.event._should_exit.set()
1692 else: 1722 else:
1693 self.state = state.shutdown 1723 self.state = State.SHUTDOWN
1694 1724
1695 if self.parser: 1725 if self.parser:
1696 self.parser.shutdown(clean=not force, force=force) 1726 self.parser.shutdown(clean=False)
1697 self.parser.final_cleanup() 1727 self.parser.final_cleanup()
1698 1728
1699 def finishcommand(self): 1729 def finishcommand(self):
1700 self.state = state.initial 1730 if hasattr(self.parser, 'shutdown'):
1731 self.parser.shutdown(clean=False)
1732 self.parser.final_cleanup()
1733 self.state = State.INITIAL
1734 bb.event._should_exit.clear()
1701 1735
1702 def reset(self): 1736 def reset(self):
1737 if hasattr(bb.parse, "siggen"):
1738 bb.parse.siggen.exit()
1739 self.finishcommand()
1703 self.initConfigurationData() 1740 self.initConfigurationData()
1704 self.handlePRServ() 1741 self.handlePRServ()
1705 1742
@@ -1711,9 +1748,9 @@ class BBCooker:
1711 if hasattr(self, "data"): 1748 if hasattr(self, "data"):
1712 self.databuilder.reset() 1749 self.databuilder.reset()
1713 self.data = self.databuilder.data 1750 self.data = self.databuilder.data
1714 self.parsecache_valid = False 1751 # In theory tinfoil could have modified the base data before parsing,
1715 self.baseconfig_valid = False 1752 # ideally need to track if anything did modify the datastore
1716 1753 self._parsecache_set(False)
1717 1754
1718class CookerExit(bb.event.Event): 1755class CookerExit(bb.event.Event):
1719 """ 1756 """
@@ -1728,16 +1765,16 @@ class CookerCollectFiles(object):
1728 def __init__(self, priorities, mc=''): 1765 def __init__(self, priorities, mc=''):
1729 self.mc = mc 1766 self.mc = mc
1730 self.bbappends = [] 1767 self.bbappends = []
1731 # Priorities is a list of tupples, with the second element as the pattern. 1768 # Priorities is a list of tuples, with the second element as the pattern.
1732 # We need to sort the list with the longest pattern first, and so on to 1769 # We need to sort the list with the longest pattern first, and so on to
1733 # the shortest. This allows nested layers to be properly evaluated. 1770 # the shortest. This allows nested layers to be properly evaluated.
1734 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) 1771 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
1735 1772
1736 def calc_bbfile_priority(self, filename): 1773 def calc_bbfile_priority(self, filename):
1737 for _, _, regex, pri in self.bbfile_config_priorities: 1774 for layername, _, regex, pri in self.bbfile_config_priorities:
1738 if regex.match(filename): 1775 if regex.match(filename):
1739 return pri, regex 1776 return pri, regex, layername
1740 return 0, None 1777 return 0, None, None
1741 1778
1742 def get_bbfiles(self): 1779 def get_bbfiles(self):
1743 """Get list of default .bb files by reading out the current directory""" 1780 """Get list of default .bb files by reading out the current directory"""
@@ -1756,7 +1793,7 @@ class CookerCollectFiles(object):
1756 for ignored in ('SCCS', 'CVS', '.svn'): 1793 for ignored in ('SCCS', 'CVS', '.svn'):
1757 if ignored in dirs: 1794 if ignored in dirs:
1758 dirs.remove(ignored) 1795 dirs.remove(ignored)
1759 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))] 1796 found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1760 1797
1761 return found 1798 return found
1762 1799
@@ -1764,7 +1801,7 @@ class CookerCollectFiles(object):
1764 """Collect all available .bb build files""" 1801 """Collect all available .bb build files"""
1765 masked = 0 1802 masked = 0
1766 1803
1767 collectlog.debug(1, "collecting .bb files") 1804 collectlog.debug("collecting .bb files")
1768 1805
1769 files = (config.getVar( "BBFILES") or "").split() 1806 files = (config.getVar( "BBFILES") or "").split()
1770 1807
@@ -1772,16 +1809,16 @@ class CookerCollectFiles(object):
1772 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) 1809 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
1773 config.setVar("BBFILES_PRIORITIZED", " ".join(files)) 1810 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1774 1811
1775 if not len(files): 1812 if not files:
1776 files = self.get_bbfiles() 1813 files = self.get_bbfiles()
1777 1814
1778 if not len(files): 1815 if not files:
1779 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") 1816 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1780 bb.event.fire(CookerExit(), eventdata) 1817 bb.event.fire(CookerExit(), eventdata)
1781 1818
1782 # We need to track where we look so that we can add inotify watches. There 1819 # We need to track where we look so that we can know when the cache is invalid. There
1783 # is no nice way to do this, this is horrid. We intercept the os.listdir() 1820 # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir()
1784 # (or os.scandir() for python 3.6+) calls while we run glob(). 1821 # calls while we run glob().
1785 origlistdir = os.listdir 1822 origlistdir = os.listdir
1786 if hasattr(os, 'scandir'): 1823 if hasattr(os, 'scandir'):
1787 origscandir = os.scandir 1824 origscandir = os.scandir
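[Note: the interception the comment calls horrid works roughly like this: temporarily replace os.listdir()/os.scandir() with wrappers that record each directory visited, run glob(), then restore the originals. A self-contained sketch; the spy names are invented:

    import os
    from glob import glob

    searched = set()
    origlistdir, origscandir = os.listdir, os.scandir

    def listdir_spy(d=".", *args, **kwargs):
        searched.add(os.path.abspath(d))
        return origlistdir(d, *args, **kwargs)

    def scandir_spy(d=".", *args, **kwargs):
        searched.add(os.path.abspath(d))
        return origscandir(d, *args, **kwargs)

    os.listdir, os.scandir = listdir_spy, scandir_spy
    try:
        found = glob("/tmp/**/*.bb", recursive=True)
    finally:
        os.listdir, os.scandir = origlistdir, origscandir

    # 'searched' now holds every directory glob() visited: exactly the set
    # whose mtimes must be recorded to detect when a re-glob is needed.
]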
@@ -1835,7 +1872,7 @@ class CookerCollectFiles(object):
1835 try: 1872 try:
1836 re.compile(mask) 1873 re.compile(mask)
1837 bbmasks.append(mask) 1874 bbmasks.append(mask)
1838 except sre_constants.error: 1875 except re.error:
1839 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) 1876 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1840 1877
1841 # Then validate the combined regular expressions. This should never 1878 # Then validate the combined regular expressions. This should never
@@ -1843,7 +1880,7 @@ class CookerCollectFiles(object):
1843 bbmask = "|".join(bbmasks) 1880 bbmask = "|".join(bbmasks)
1844 try: 1881 try:
1845 bbmask_compiled = re.compile(bbmask) 1882 bbmask_compiled = re.compile(bbmask)
1846 except sre_constants.error: 1883 except re.error:
1847 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask) 1884 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1848 bbmask = None 1885 bbmask = None
1849 1886
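[Note: validating each BBMASK entry before OR-joining them means one bad expression is dropped with a warning instead of invalidating the whole combined mask. A standalone sketch of the two-stage compile:

    import re

    def compile_bbmask(masks):
        good = []
        for mask in masks:
            try:
                re.compile(mask)              # validate each mask individually
                good.append(mask)
            except re.error:
                print("ignoring invalid mask: %s" % mask)
        if not good:
            return None
        return re.compile("|".join(good))     # one combined pass per filename

    bbmask = compile_bbmask([r"meta-skip/", r"(unclosed"])
    assert bbmask.search("/srv/layers/meta-skip/foo.bb")
]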
@@ -1851,7 +1888,7 @@ class CookerCollectFiles(object):
1851 bbappend = [] 1888 bbappend = []
1852 for f in newfiles: 1889 for f in newfiles:
1853 if bbmask and bbmask_compiled.search(f): 1890 if bbmask and bbmask_compiled.search(f):
1854 collectlog.debug(1, "skipping masked file %s", f) 1891 collectlog.debug("skipping masked file %s", f)
1855 masked += 1 1892 masked += 1
1856 continue 1893 continue
1857 if f.endswith('.bb'): 1894 if f.endswith('.bb'):
@@ -1859,7 +1896,7 @@ class CookerCollectFiles(object):
1859 elif f.endswith('.bbappend'): 1896 elif f.endswith('.bbappend'):
1860 bbappend.append(f) 1897 bbappend.append(f)
1861 else: 1898 else:
1862 collectlog.debug(1, "skipping %s: unknown file extension", f) 1899 collectlog.debug("skipping %s: unknown file extension", f)
1863 1900
1864 # Build a list of .bbappend files for each .bb file 1901 # Build a list of .bbappend files for each .bb file
1865 for f in bbappend: 1902 for f in bbappend:
@@ -1910,7 +1947,7 @@ class CookerCollectFiles(object):
1910 # Calculate priorities for each file 1947 # Calculate priorities for each file
1911 for p in pkgfns: 1948 for p in pkgfns:
1912 realfn, cls, mc = bb.cache.virtualfn2realfn(p) 1949 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1913 priorities[p], regex = self.calc_bbfile_priority(realfn) 1950 priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
1914 if regex in unmatched_regex: 1951 if regex in unmatched_regex:
1915 matched_regex.add(regex) 1952 matched_regex.add(regex)
1916 unmatched_regex.remove(regex) 1953 unmatched_regex.remove(regex)
@@ -1961,15 +1998,30 @@ class ParsingFailure(Exception):
1961 Exception.__init__(self, realexception, recipe) 1998 Exception.__init__(self, realexception, recipe)
1962 1999
1963class Parser(multiprocessing.Process): 2000class Parser(multiprocessing.Process):
1964 def __init__(self, jobs, results, quit, init, profile): 2001 def __init__(self, jobs, results, quit, profile):
1965 self.jobs = jobs 2002 self.jobs = jobs
1966 self.results = results 2003 self.results = results
1967 self.quit = quit 2004 self.quit = quit
1968 self.init = init
1969 multiprocessing.Process.__init__(self) 2005 multiprocessing.Process.__init__(self)
1970 self.context = bb.utils.get_context().copy() 2006 self.context = bb.utils.get_context().copy()
1971 self.handlers = bb.event.get_class_handlers().copy() 2007 self.handlers = bb.event.get_class_handlers().copy()
1972 self.profile = profile 2008 self.profile = profile
2009 self.queue_signals = False
2010 self.signal_received = []
2011 self.signal_threadlock = threading.Lock()
2012
2013 def catch_sig(self, signum, frame):
2014 if self.queue_signals:
2015 self.signal_received.append(signum)
2016 else:
2017 self.handle_sig(signum, frame)
2018
2019 def handle_sig(self, signum, frame):
2020 if signum == signal.SIGTERM:
2021 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2022 os.kill(os.getpid(), signal.SIGTERM)
2023 elif signum == signal.SIGINT:
2024 signal.default_int_handler(signum, frame)
1973 2025
1974 def run(self): 2026 def run(self):
1975 2027
@@ -1989,38 +2041,50 @@ class Parser(multiprocessing.Process):
1989 prof.dump_stats(logfile) 2041 prof.dump_stats(logfile)
1990 2042
1991 def realrun(self): 2043 def realrun(self):
1992 if self.init: 2044 # Signal handling here is hard. We must not terminate any process or thread holding the write
1993 self.init() 2045 # lock for the event stream as it will not be released, ever, and things will hang.
2046 # Python handles signals in the main thread/process but they can be raised from any thread and
2047 # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2048 # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2049 # new thread should also do so) and we defer handling but we handle with the local thread lock
2050 # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2051 # can be in the critical section.
2052 signal.signal(signal.SIGTERM, self.catch_sig)
2053 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2054 signal.signal(signal.SIGINT, self.catch_sig)
2055 bb.utils.set_process_name(multiprocessing.current_process().name)
2056 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2057 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
1994 2058
1995 pending = [] 2059 pending = []
1996 while True: 2060 havejobs = True
1997 try: 2061 try:
1998 self.quit.get_nowait() 2062 while havejobs or pending:
1999 except queue.Empty: 2063 if self.quit.is_set():
2000 pass 2064 break
2001 else:
2002 self.results.close()
2003 self.results.join_thread()
2004 break
2005 2065
2006 if pending: 2066 job = None
2007 result = pending.pop()
2008 else:
2009 try: 2067 try:
2010 job = self.jobs.pop() 2068 job = self.jobs.pop()
2011 except IndexError: 2069 except IndexError:
2012 self.results.close() 2070 havejobs = False
2013 self.results.join_thread() 2071 if job:
2014 break 2072 result = self.parse(*job)
2015 result = self.parse(*job) 2073 # Clear the siggen cache after parsing to control memory usage, its huge
2016 # Clear the siggen cache after parsing to control memory usage, its huge 2074 bb.parse.siggen.postparsing_clean_cache()
2017 bb.parse.siggen.postparsing_clean_cache() 2075 pending.append(result)
2018 try: 2076
2019 self.results.put(result, timeout=0.25) 2077 if pending:
2020 except queue.Full: 2078 try:
2021 pending.append(result) 2079 result = pending.pop()
2080 self.results.put(result, timeout=0.05)
2081 except queue.Full:
2082 pending.append(result)
2083 finally:
2084 self.results.close()
2085 self.results.join_thread()
2022 2086
2023 def parse(self, mc, cache, filename, appends): 2087 def parse(self, mc, cache, filename, appends, layername):
2024 try: 2088 try:
2025 origfilter = bb.event.LogHandler.filter 2089 origfilter = bb.event.LogHandler.filter
2026 # Record the filename we're parsing into any events generated 2090 # Record the filename we're parsing into any events generated
@@ -2034,17 +2098,16 @@ class Parser(multiprocessing.Process):
2034 bb.event.set_class_handlers(self.handlers.copy()) 2098 bb.event.set_class_handlers(self.handlers.copy())
2035 bb.event.LogHandler.filter = parse_filter 2099 bb.event.LogHandler.filter = parse_filter
2036 2100
2037 return True, mc, cache.parse(filename, appends) 2101 return True, mc, cache.parse(filename, appends, layername)
2038 except Exception as exc: 2102 except Exception as exc:
2039 tb = sys.exc_info()[2] 2103 tb = sys.exc_info()[2]
2040 exc.recipe = filename 2104 exc.recipe = filename
2041 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3)) 2105 return True, None, exc
2042 return True, exc
2043 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown 2106 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2044 # and for example a worker thread doesn't just exit on its own in response to 2107 # and for example a worker thread doesn't just exit on its own in response to
2045 # a SystemExit event for example. 2108 # a SystemExit event for example.
2046 except BaseException as exc: 2109 except BaseException as exc:
2047 return True, ParsingFailure(exc, filename) 2110 return True, None, ParsingFailure(exc, filename)
2048 finally: 2111 finally:
2049 bb.event.LogHandler.filter = origfilter 2112 bb.event.LogHandler.filter = origfilter
2050 2113
@@ -2053,7 +2116,7 @@ class CookerParser(object):
2053 self.mcfilelist = mcfilelist 2116 self.mcfilelist = mcfilelist
2054 self.cooker = cooker 2117 self.cooker = cooker
2055 self.cfgdata = cooker.data 2118 self.cfgdata = cooker.data
2056 self.cfghash = cooker.data_hash 2119 self.cfghash = cooker.databuilder.data_hash
2057 self.cfgbuilder = cooker.databuilder 2120 self.cfgbuilder = cooker.databuilder
2058 2121
2059 # Accounting statistics 2122 # Accounting statistics
@@ -2074,10 +2137,11 @@ class CookerParser(object):
2074 for mc in self.cooker.multiconfigs: 2137 for mc in self.cooker.multiconfigs:
2075 for filename in self.mcfilelist[mc]: 2138 for filename in self.mcfilelist[mc]:
2076 appends = self.cooker.collections[mc].get_file_appends(filename) 2139 appends = self.cooker.collections[mc].get_file_appends(filename)
2140 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2077 if not self.bb_caches[mc].cacheValid(filename, appends): 2141 if not self.bb_caches[mc].cacheValid(filename, appends):
2078 self.willparse.add((mc, self.bb_caches[mc], filename, appends)) 2142 self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
2079 else: 2143 else:
2080 self.fromcache.add((mc, self.bb_caches[mc], filename, appends)) 2144 self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2081 2145
2082 self.total = len(self.fromcache) + len(self.willparse) 2146 self.total = len(self.fromcache) + len(self.willparse)
2083 self.toparse = len(self.willparse) 2147 self.toparse = len(self.willparse)
@@ -2086,6 +2150,7 @@ class CookerParser(object):
2086 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or 2150 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2087 multiprocessing.cpu_count()), self.toparse) 2151 multiprocessing.cpu_count()), self.toparse)
2088 2152
2153 bb.cache.SiggenRecipeInfo.reset()
2089 self.start() 2154 self.start()
2090 self.haveshutdown = False 2155 self.haveshutdown = False
2091 self.syncthread = None 2156 self.syncthread = None
@@ -2095,15 +2160,8 @@ class CookerParser(object):
2095 self.processes = [] 2160 self.processes = []
2096 if self.toparse: 2161 if self.toparse:
2097 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) 2162 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2098 def init(): 2163
2099 signal.signal(signal.SIGTERM, signal.SIG_DFL) 2164 self.parser_quit = multiprocessing.Event()
2100 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2101 signal.signal(signal.SIGINT, signal.SIG_IGN)
2102 bb.utils.set_process_name(multiprocessing.current_process().name)
2103 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2104 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2105
2106 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2107 self.result_queue = multiprocessing.Queue() 2165 self.result_queue = multiprocessing.Queue()
2108 2166
2109 def chunkify(lst,n): 2167 def chunkify(lst,n):
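[Note: chunkify() (its body falls between the hunks shown) splits the recipes-to-parse list into one slice per parser process; a striding comprehension like the following, consistent with the call site below, balances chunk sizes to within one element:

    def chunkify(lst, n):
        # lst[i::n] stripes the work so chunk sizes differ by at most one
        return [lst[i::n] for i in range(n)]

    assert chunkify(list(range(7)), 3) == [[0, 3, 6], [1, 4], [2, 5]]
]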
@@ -2111,14 +2169,14 @@ class CookerParser(object):
2111 self.jobs = chunkify(list(self.willparse), self.num_processes) 2169 self.jobs = chunkify(list(self.willparse), self.num_processes)
2112 2170
2113 for i in range(0, self.num_processes): 2171 for i in range(0, self.num_processes):
2114 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile) 2172 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2115 parser.start() 2173 parser.start()
2116 self.process_names.append(parser.name) 2174 self.process_names.append(parser.name)
2117 self.processes.append(parser) 2175 self.processes.append(parser)
2118 2176
2119 self.results = itertools.chain(self.results, self.parse_generator()) 2177 self.results = itertools.chain(self.results, self.parse_generator())
2120 2178
2121 def shutdown(self, clean=True, force=False): 2179 def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
2122 if not self.toparse: 2180 if not self.toparse:
2123 return 2181 return
2124 if self.haveshutdown: 2182 if self.haveshutdown:
@@ -2132,9 +2190,9 @@ class CookerParser(object):
2132 self.total) 2190 self.total)
2133 2191
2134 bb.event.fire(event, self.cfgdata) 2192 bb.event.fire(event, self.cfgdata)
2135 2193 else:
2136 for process in self.processes: 2194 bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
2137 self.parser_quit.put(None) 2195 bb.error("Parsing halted due to errors, see error messages above")
2138 2196
2139 # Cleanup the queue before calling process.join(), otherwise there might be 2197
2140 # deadlocks. 2198 # deadlocks.
@@ -2144,106 +2202,152 @@ class CookerParser(object):
2144 except queue.Empty: 2202 except queue.Empty:
2145 break 2203 break
2146 2204
2147 for process in self.processes:
2148 if force:
2149 process.join(.1)
2150 process.terminate()
2151 else:
2152 process.join()
2153
2154 self.parser_quit.close()
2155 # Allow data left in the cancel queue to be discarded
2156 self.parser_quit.cancel_join_thread()
2157
2158 def sync_caches(): 2205 def sync_caches():
2159 for c in self.bb_caches.values(): 2206 for c in self.bb_caches.values():
2207 bb.cache.SiggenRecipeInfo.reset()
2160 c.sync() 2208 c.sync()
2161 2209
2162 sync = threading.Thread(target=sync_caches, name="SyncThread") 2210 self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
2163 self.syncthread = sync 2211 self.syncthread.start()
2164 sync.start() 2212
2213 self.parser_quit.set()
2214
2215 for process in self.processes:
2216 process.join(0.5)
2217
2218 for process in self.processes:
2219 if process.exitcode is None:
2220 os.kill(process.pid, signal.SIGINT)
2221
2222 for process in self.processes:
2223 process.join(0.5)
2224
2225 for process in self.processes:
2226 if process.exitcode is None:
2227 process.terminate()
2228
2229 for process in self.processes:
2230 process.join()
2231 # clean up zombies
2232 process.close()
2233
2234 bb.codeparser.parser_cache_save()
2165 bb.codeparser.parser_cache_savemerge() 2235 bb.codeparser.parser_cache_savemerge()
2236 bb.cache.SiggenRecipeInfo.reset()
2166 bb.fetch.fetcher_parse_done() 2237 bb.fetch.fetcher_parse_done()
2167 if self.cooker.configuration.profile: 2238 if self.cooker.configuration.profile:
2168 profiles = [] 2239 profiles = []
2169 for i in self.process_names: 2240 for i in self.process_names:
2170 logfile = "profile-parse-%s.log" % i 2241 logfile = "profile-parse-%s.log" % i
2171 if os.path.exists(logfile): 2242 if os.path.exists(logfile) and os.path.getsize(logfile):
2172 profiles.append(logfile) 2243 profiles.append(logfile)
2173 2244
2174 pout = "profile-parse.log.processed" 2245 if profiles:
2175 bb.utils.process_profilelog(profiles, pout = pout) 2246 pout = "profile-parse.log.processed"
2176 print("Processed parsing statistics saved to %s" % (pout)) 2247 bb.utils.process_profilelog(profiles, pout = pout)
2248 print("Processed parsing statistics saved to %s" % (pout))
2177 2249
2178 def final_cleanup(self): 2250 def final_cleanup(self):
2179 if self.syncthread: 2251 if self.syncthread:
2180 self.syncthread.join() 2252 self.syncthread.join()
2181 2253
2182 def load_cached(self): 2254 def load_cached(self):
2183 for mc, cache, filename, appends in self.fromcache: 2255 for mc, cache, filename, appends, layername in self.fromcache:
2184 cached, infos = cache.load(filename, appends) 2256 infos = cache.loadCached(filename, appends)
2185 yield not cached, mc, infos 2257 yield False, mc, infos
2186 2258
2187 def parse_generator(self): 2259 def parse_generator(self):
2188 while True: 2260 empty = False
2261 while self.processes or not empty:
2262 for process in self.processes.copy():
2263 if not process.is_alive():
2264 process.join()
2265 self.processes.remove(process)
2266
2189 if self.parsed >= self.toparse: 2267 if self.parsed >= self.toparse:
2190 break 2268 break
2191 2269
2192 try: 2270 try:
2193 result = self.result_queue.get(timeout=0.25) 2271 result = self.result_queue.get(timeout=0.25)
2194 except queue.Empty: 2272 except queue.Empty:
2195 pass 2273 empty = True
2274 yield None, None, None
2196 else: 2275 else:
2197 value = result[1] 2276 empty = False
2198 if isinstance(value, BaseException): 2277 yield result
2199 raise value 2278
2200 else: 2279 if not (self.parsed >= self.toparse):
2201 yield result 2280 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2281
2202 2282
2203 def parse_next(self): 2283 def parse_next(self):
2204 result = [] 2284 result = []
2205 parsed = None 2285 parsed = None
2206 try: 2286 try:
2207 parsed, mc, result = next(self.results) 2287 parsed, mc, result = next(self.results)
2288 if isinstance(result, BaseException):
2289 # Turn exceptions back into exceptions
2290 raise result
2291 if parsed is None:
2292 # Timeout, loop back through the main loop
2293 return True
2294
2208 except StopIteration: 2295 except StopIteration:
2209 self.shutdown() 2296 self.shutdown()
2210 return False 2297 return False
2211 except bb.BBHandledException as exc: 2298 except bb.BBHandledException as exc:
2212 self.error += 1 2299 self.error += 1
2213 logger.error('Failed to parse recipe: %s' % exc.recipe) 2300 logger.debug('Failed to parse recipe: %s' % exc.recipe)
2214 self.shutdown(clean=False, force=True) 2301 self.shutdown(clean=False)
2215 return False 2302 return False
2216 except ParsingFailure as exc: 2303 except ParsingFailure as exc:
2217 self.error += 1 2304 self.error += 1
2218 logger.error('Unable to parse %s: %s' % 2305
2219 (exc.recipe, bb.exceptions.to_string(exc.realexception))) 2306 exc_desc = str(exc)
2220 self.shutdown(clean=False, force=True) 2307 if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
2308 exc_desc = 'Exited with "%d"' % exc.code
2309
2310 logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
2311 self.shutdown(clean=False)
2221 return False 2312 return False
2222 except bb.parse.ParseError as exc: 2313 except bb.parse.ParseError as exc:
2223 self.error += 1 2314 self.error += 1
2224 logger.error(str(exc)) 2315 logger.error(str(exc))
2225 self.shutdown(clean=False, force=True) 2316 self.shutdown(clean=False, eventmsg=str(exc))
2226 return False 2317 return False
2227 except bb.data_smart.ExpansionError as exc: 2318 except bb.data_smart.ExpansionError as exc:
2319 def skip_frames(f, fn_prefix):
2320 while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
2321 f = f.tb_next
2322 return f
2323
2228 self.error += 1 2324 self.error += 1
2229 bbdir = os.path.dirname(__file__) + os.sep 2325 bbdir = os.path.dirname(__file__) + os.sep
2230 etype, value, _ = sys.exc_info() 2326 etype, value, tb = sys.exc_info()
2231 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) 2327
2328 # Remove any frames where the code comes from bitbake. This
2329 # prevents deep (and pretty useless) backtraces for expansion error
2330 tb = skip_frames(tb, bbdir)
2331 cur = tb
2332 while cur:
2333 cur.tb_next = skip_frames(cur.tb_next, bbdir)
2334 cur = cur.tb_next
2335
2232 logger.error('ExpansionError during parsing %s', value.recipe, 2336 logger.error('ExpansionError during parsing %s', value.recipe,
2233 exc_info=(etype, value, tb)) 2337 exc_info=(etype, value, tb))
2234 self.shutdown(clean=False, force=True) 2338 self.shutdown(clean=False)
2235 return False 2339 return False
2236 except Exception as exc: 2340 except Exception as exc:
2237 self.error += 1 2341 self.error += 1
2238 etype, value, tb = sys.exc_info() 2342 _, value, _ = sys.exc_info()
2239 if hasattr(value, "recipe"): 2343 if hasattr(value, "recipe"):
2240 logger.error('Unable to parse %s' % value.recipe, 2344 logger.error('Unable to parse %s' % value.recipe,
2241 exc_info=(etype, value, exc.traceback)) 2345 exc_info=sys.exc_info())
2242 else: 2346 else:
2243 # Most likely, an exception occurred during raising an exception 2347 # Most likely, an exception occurred during raising an exception
2244 import traceback 2348 import traceback
2245 logger.error('Exception during parse: %s' % traceback.format_exc()) 2349 logger.error('Exception during parse: %s' % traceback.format_exc())
2246 self.shutdown(clean=False, force=True) 2350 self.shutdown(clean=False)
2247 return False 2351 return False
2248 2352
2249 self.current += 1 2353 self.current += 1
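[Note: the new ExpansionError handler rewrites the traceback in place rather than formatting a filtered copy: skip_frames() unlinks every run of frames whose code lives under the bitbake tree (tb_next is writable since Python 3.7), leaving only the user's recipe frames. As a standalone helper:

    def skip_frames(tb, fn_prefix):
        # Drop leading frames whose source file lives under fn_prefix.
        while tb and tb.tb_frame.f_code.co_filename.startswith(fn_prefix):
            tb = tb.tb_next
        return tb

    def trim_internal_frames(tb, fn_prefix):
        # Remove every run of internal frames, not just the leading one.
        tb = skip_frames(tb, fn_prefix)
        cur = tb
        while cur:
            cur.tb_next = skip_frames(cur.tb_next, fn_prefix)
            cur = cur.tb_next
        return tb

    # Typical use: etype, value, tb = sys.exc_info()
    #              tb = trim_internal_frames(tb, "/path/to/bitbake/lib/bb/")
    #              logger.error(..., exc_info=(etype, value, tb))
]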
@@ -2259,17 +2363,19 @@ class CookerParser(object):
2259 for virtualfn, info_array in result: 2363 for virtualfn, info_array in result:
2260 if info_array[0].skipped: 2364 if info_array[0].skipped:
2261 self.skipped += 1 2365 self.skipped += 1
2262 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0]) 2366 self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
2263 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], 2367 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2264 parsed=parsed, watcher = self.cooker.add_filewatch) 2368 parsed=parsed, watcher = self.cooker.add_filewatch)
2265 return True 2369 return True
2266 2370
2267 def reparse(self, filename): 2371 def reparse(self, filename):
2372 bb.cache.SiggenRecipeInfo.reset()
2268 to_reparse = set() 2373 to_reparse = set()
2269 for mc in self.cooker.multiconfigs: 2374 for mc in self.cooker.multiconfigs:
2270 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename))) 2375 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2376 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
2271 2377
2272 for mc, filename, appends in to_reparse: 2378 for mc, filename, appends, layername in to_reparse:
2273 infos = self.bb_caches[mc].parse(filename, appends) 2379 infos = self.bb_caches[mc].parse(filename, appends, layername)
2274 for vfn, info_array in infos: 2380 for vfn, info_array in infos:
2275 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array) 2381 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 1c1e008c6b..65c153a5bb 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -1,3 +1,4 @@
1
1# 2#
2# Copyright (C) 2003, 2004 Chris Larson 3# Copyright (C) 2003, 2004 Chris Larson
3# Copyright (C) 2003, 2004 Phil Blundell 4# Copyright (C) 2003, 2004 Phil Blundell
@@ -57,7 +58,7 @@ class ConfigParameters(object):
57 58
58 def updateToServer(self, server, environment): 59 def updateToServer(self, server, environment):
59 options = {} 60 options = {}
60 for o in ["abort", "force", "invalidate_stamp", 61 for o in ["halt", "force", "invalidate_stamp",
61 "dry_run", "dump_signatures", 62 "dry_run", "dump_signatures",
62 "extra_assume_provided", "profile", 63 "extra_assume_provided", "profile",
63 "prefile", "postfile", "server_timeout", 64 "prefile", "postfile", "server_timeout",
@@ -86,7 +87,7 @@ class ConfigParameters(object):
86 action['msg'] = "Only one target can be used with the --environment option." 87 action['msg'] = "Only one target can be used with the --environment option."
87 elif self.options.buildfile and len(self.options.pkgs_to_build) > 0: 88 elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
88 action['msg'] = "No target should be used with the --environment and --buildfile options." 89 action['msg'] = "No target should be used with the --environment and --buildfile options."
89 elif len(self.options.pkgs_to_build) > 0: 90 elif self.options.pkgs_to_build:
90 action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build] 91 action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
91 else: 92 else:
92 action['action'] = ["showEnvironment", self.options.buildfile] 93 action['action'] = ["showEnvironment", self.options.buildfile]
@@ -124,7 +125,7 @@ class CookerConfiguration(object):
124 self.prefile = [] 125 self.prefile = []
125 self.postfile = [] 126 self.postfile = []
126 self.cmd = None 127 self.cmd = None
127 self.abort = True 128 self.halt = True
128 self.force = False 129 self.force = False
129 self.profile = False 130 self.profile = False
130 self.nosetscene = False 131 self.nosetscene = False
@@ -160,12 +161,7 @@ def catch_parse_error(func):
160 def wrapped(fn, *args): 161 def wrapped(fn, *args):
161 try: 162 try:
162 return func(fn, *args) 163 return func(fn, *args)
163 except IOError as exc: 164 except Exception as exc:
164 import traceback
165 parselog.critical(traceback.format_exc())
166 parselog.critical("Unable to parse %s: %s" % (fn, exc))
167 raise bb.BBHandledException()
168 except bb.data_smart.ExpansionError as exc:
169 import traceback 165 import traceback
170 166
171 bbdir = os.path.dirname(__file__) + os.sep 167 bbdir = os.path.dirname(__file__) + os.sep
@@ -177,14 +173,11 @@ def catch_parse_error(func):
177 break 173 break
178 parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb)) 174 parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
179 raise bb.BBHandledException() 175 raise bb.BBHandledException()
180 except bb.parse.ParseError as exc:
181 parselog.critical(str(exc))
182 raise bb.BBHandledException()
183 return wrapped 176 return wrapped
184 177
185@catch_parse_error 178@catch_parse_error
186def parse_config_file(fn, data, include=True): 179def parse_config_file(fn, data, include=True):
187 return bb.parse.handle(fn, data, include) 180 return bb.parse.handle(fn, data, include, baseconfig=True)
188 181
189@catch_parse_error 182@catch_parse_error
190def _inherit(bbclass, data): 183def _inherit(bbclass, data):
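[Note: the collapsed catch_parse_error decorator now funnels every exception type through one path: log a critical message with a bitbake-frame-trimmed traceback, then raise BBHandledException so callers know the error was already reported. A simplified stand-in, where HandledException is illustrative:

    import functools
    import sys
    import traceback

    class HandledException(Exception):
        """Stand-in for bb.BBHandledException: error already reported."""

    def catch_parse_error(func):
        @functools.wraps(func)
        def wrapped(fn, *args):
            try:
                return func(fn, *args)
            except Exception:
                print("Unable to parse %s" % fn, file=sys.stderr)
                traceback.print_exc()
                raise HandledException()
        return wrapped

    @catch_parse_error
    def parse_config_file(fn, data):
        raise IOError("no such file: %s" % fn)
]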
@@ -210,7 +203,7 @@ def findConfigFile(configfile, data):
210 203
211# 204#
212# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working 205# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working
213# up to /. If that fails, we search for a conf/bitbake.conf in BBPATH. 206# up to /. If that fails, bitbake falls back to cwd.
214# 207#
215 208
216def findTopdir(): 209def findTopdir():
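[Note: findTopdir() embodies the search order in the comment: walk from cwd upwards looking for conf/bblayers.conf, and fall back to cwd itself when nothing is found. A sketch of the upward walk; find_upwards is illustrative, not the bitbake helper:

    import os

    def find_upwards(start, relpath):
        # Walk from 'start' up to / looking for relpath (e.g. conf/bblayers.conf).
        d = os.path.abspath(start)
        while True:
            candidate = os.path.join(d, relpath)
            if os.path.exists(candidate):
                return candidate
            parent = os.path.dirname(d)
            if parent == d:        # reached the filesystem root
                return None
            d = parent

    topfile = find_upwards(os.getcwd(), os.path.join("conf", "bblayers.conf"))
    topdir = os.path.dirname(os.path.dirname(topfile)) if topfile else os.getcwd()
]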
@@ -223,11 +216,8 @@ def findTopdir():
223 layerconf = findConfigFile("bblayers.conf", d) 216 layerconf = findConfigFile("bblayers.conf", d)
224 if layerconf: 217 if layerconf:
225 return os.path.dirname(os.path.dirname(layerconf)) 218 return os.path.dirname(os.path.dirname(layerconf))
226 if bbpath: 219
227 bitbakeconf = bb.utils.which(bbpath, "conf/bitbake.conf") 220 return os.path.abspath(os.getcwd())
228 if bitbakeconf:
229 return os.path.dirname(os.path.dirname(bitbakeconf))
230 return None
231 221
232class CookerDataBuilder(object): 222class CookerDataBuilder(object):
233 223
@@ -250,10 +240,14 @@ class CookerDataBuilder(object):
250 self.savedenv = bb.data.init() 240 self.savedenv = bb.data.init()
251 for k in cookercfg.env: 241 for k in cookercfg.env:
252 self.savedenv.setVar(k, cookercfg.env[k]) 242 self.savedenv.setVar(k, cookercfg.env[k])
243 if k in bb.data_smart.bitbake_renamed_vars:
244 bb.error('Shell environment variable %s has been renamed to %s' % (k, bb.data_smart.bitbake_renamed_vars[k]))
245 bb.fatal("Exiting to allow environment variables to be corrected")
253 246
254 filtered_keys = bb.utils.approved_variables() 247 filtered_keys = bb.utils.approved_variables()
255 bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys) 248 bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
256 self.basedata.setVar("BB_ORIGENV", self.savedenv) 249 self.basedata.setVar("BB_ORIGENV", self.savedenv)
250 self.basedata.setVar("__bbclasstype", "global")
257 251
258 if worker: 252 if worker:
259 self.basedata.setVar("BB_WORKERCONTEXT", "1") 253 self.basedata.setVar("BB_WORKERCONTEXT", "1")
@@ -261,15 +255,22 @@ class CookerDataBuilder(object):
261 self.data = self.basedata 255 self.data = self.basedata
262 self.mcdata = {} 256 self.mcdata = {}
263 257
264 def parseBaseConfiguration(self): 258 def calc_datastore_hashes(self):
265 data_hash = hashlib.sha256() 259 data_hash = hashlib.sha256()
260 data_hash.update(self.data.get_hash().encode('utf-8'))
261 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
262 for config in multiconfig:
263 data_hash.update(self.mcdata[config].get_hash().encode('utf-8'))
264 self.data_hash = data_hash.hexdigest()
265
266 def parseBaseConfiguration(self, worker=False):
267 mcdata = {}
266 try: 268 try:
267 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) 269 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
268 270
269 if self.data.getVar("BB_WORKERCONTEXT", False) is None: 271 servercontext = self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker
270 bb.fetch.fetcher_init(self.data) 272 bb.fetch.fetcher_init(self.data, servercontext)
271 bb.parse.init_parser(self.data) 273 bb.parse.init_parser(self.data)
272 bb.codeparser.parser_cache_init(self.data)
273 274
274 bb.event.fire(bb.event.ConfigParsed(), self.data) 275 bb.event.fire(bb.event.ConfigParsed(), self.data)
275 276
@@ -286,44 +287,66 @@ class CookerDataBuilder(object):
286 bb.event.fire(bb.event.ConfigParsed(), self.data) 287 bb.event.fire(bb.event.ConfigParsed(), self.data)
287 288
288 bb.parse.init_parser(self.data) 289 bb.parse.init_parser(self.data)
289 data_hash.update(self.data.get_hash().encode('utf-8')) 290 mcdata[''] = self.data
290 self.mcdata[''] = self.data
291 291
292 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() 292 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
293 for config in multiconfig: 293 for config in multiconfig:
294 mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) 294 if config[0].isdigit():
295 bb.event.fire(bb.event.ConfigParsed(), mcdata) 295 bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
296 self.mcdata[config] = mcdata 296 parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
297 data_hash.update(mcdata.get_hash().encode('utf-8')) 297 bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
298 mcdata[config] = parsed_mcdata
298 if multiconfig: 299 if multiconfig:
299 bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data) 300 bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
300 301
301 self.data_hash = data_hash.hexdigest()
302 except (SyntaxError, bb.BBHandledException):
303 raise bb.BBHandledException()
304 except bb.data_smart.ExpansionError as e: 302 except bb.data_smart.ExpansionError as e:
305 logger.error(str(e)) 303 logger.error(str(e))
306 raise bb.BBHandledException() 304 raise bb.BBHandledException()
307 except Exception: 305
308 logger.exception("Error parsing configuration files") 306 bb.codeparser.update_module_dependencies(self.data)
307
308 # Handle obsolete variable names
309 d = self.data
310 renamedvars = d.getVarFlags('BB_RENAMED_VARIABLES') or {}
311 renamedvars.update(bb.data_smart.bitbake_renamed_vars)
312 issues = False
313 for v in renamedvars:
314 if d.getVar(v) != None or d.hasOverrides(v):
315 issues = True
316 loginfo = {}
317 history = d.varhistory.get_variable_refs(v)
318 for h in history:
319 for line in history[h]:
320 loginfo = {'file' : h, 'line' : line}
321 bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
322 if not history:
323 bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
324 if issues:
309 raise bb.BBHandledException() 325 raise bb.BBHandledException()
310 326
327 for mc in mcdata:
328 mcdata[mc].renameVar("__depends", "__base_depends")
329 mcdata[mc].setVar("__bbclasstype", "recipe")
330
311 # Create a copy so we can reset at a later date when UIs disconnect 331 # Create a copy so we can reset at a later date when UIs disconnect
312 self.origdata = self.data 332 self.mcorigdata = mcdata
313 self.data = bb.data.createCopy(self.origdata) 333 for mc in mcdata:
314 self.mcdata[''] = self.data 334 self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
335 self.data = self.mcdata['']
336 self.calc_datastore_hashes()
315 337
316 def reset(self): 338 def reset(self):
317 # We may not have run parseBaseConfiguration() yet 339 # We may not have run parseBaseConfiguration() yet
318 if not hasattr(self, 'origdata'): 340 if not hasattr(self, 'mcorigdata'):
319 return 341 return
320 self.data = bb.data.createCopy(self.origdata) 342 for mc in self.mcorigdata:
321 self.mcdata[''] = self.data 343 self.mcdata[mc] = bb.data.createCopy(self.mcorigdata[mc])
344 self.data = self.mcdata['']
322 345
323 def _findLayerConf(self, data): 346 def _findLayerConf(self, data):
324 return findConfigFile("bblayers.conf", data) 347 return findConfigFile("bblayers.conf", data)
325 348
326 def parseConfigurationFiles(self, prefiles, postfiles, mc = "default"): 349 def parseConfigurationFiles(self, prefiles, postfiles, mc = ""):
327 data = bb.data.createCopy(self.basedata) 350 data = bb.data.createCopy(self.basedata)
328 data.setVar("BB_CURRENT_MC", mc) 351 data.setVar("BB_CURRENT_MC", mc)
329 352
@@ -333,15 +356,23 @@ class CookerDataBuilder(object):
333 356
334 layerconf = self._findLayerConf(data) 357 layerconf = self._findLayerConf(data)
335 if layerconf: 358 if layerconf:
336 parselog.debug(2, "Found bblayers.conf (%s)", layerconf) 359 parselog.debug2("Found bblayers.conf (%s)", layerconf)
337 # By definition bblayers.conf is in conf/ of TOPDIR. 360 # By definition bblayers.conf is in conf/ of TOPDIR.
338 # We may have been called with cwd somewhere else so reset TOPDIR 361 # We may have been called with cwd somewhere else so reset TOPDIR
339 data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) 362 data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
340 data = parse_config_file(layerconf, data) 363 data = parse_config_file(layerconf, data)
341 364
365 if not data.getVar("BB_CACHEDIR"):
366 data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
367
368 bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
369
342 layers = (data.getVar('BBLAYERS') or "").split() 370 layers = (data.getVar('BBLAYERS') or "").split()
343 broken_layers = [] 371 broken_layers = []
344 372
373 if not layers:
374 bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition")
375
345 data = bb.data.createCopy(data) 376 data = bb.data.createCopy(data)
346 approved = bb.utils.approved_variables() 377 approved = bb.utils.approved_variables()
347 378
@@ -357,8 +388,10 @@ class CookerDataBuilder(object):
357 parselog.critical("Please check BBLAYERS in %s" % (layerconf)) 388 parselog.critical("Please check BBLAYERS in %s" % (layerconf))
358 raise bb.BBHandledException() 389 raise bb.BBHandledException()
359 390
391 layerseries = None
392 compat_entries = {}
360 for layer in layers: 393 for layer in layers:
361 parselog.debug(2, "Adding layer %s", layer) 394 parselog.debug2("Adding layer %s", layer)
362 if 'HOME' in approved and '~' in layer: 395 if 'HOME' in approved and '~' in layer:
363 layer = os.path.expanduser(layer) 396 layer = os.path.expanduser(layer)
364 if layer.endswith('/'): 397 if layer.endswith('/'):
@@ -369,8 +402,27 @@ class CookerDataBuilder(object):
369 data.expandVarref('LAYERDIR') 402 data.expandVarref('LAYERDIR')
370 data.expandVarref('LAYERDIR_RE') 403 data.expandVarref('LAYERDIR_RE')
371 404
405 # Sadly we can't have nice things.
406 # Some layers think they're going to be 'clever' and copy the values from
407 # another layer, e.g. using ${LAYERSERIES_COMPAT_core}. The whole point of
408 # this mechanism is to make it clear which releases a layer supports and
409 # show when a layer master branch is bitrotting and is unmaintained.
410 # We therefore avoid people doing this here.
411 collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
412 for c in collections:
413 compat_entry = data.getVar("LAYERSERIES_COMPAT_%s" % c)
414 if compat_entry:
415 compat_entries[c] = set(compat_entry.split())
416 data.delVar("LAYERSERIES_COMPAT_%s" % c)
417 if not layerseries:
418 layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
419 if layerseries:
420 data.delVar("LAYERSERIES_CORENAMES")
421
372 data.delVar('LAYERDIR_RE') 422 data.delVar('LAYERDIR_RE')
373 data.delVar('LAYERDIR') 423 data.delVar('LAYERDIR')
424 for c in compat_entries:
425 data.setVar("LAYERSERIES_COMPAT_%s" % c, " ".join(sorted(compat_entries[c])))
374 426
375 bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split() 427 bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split()
376 collections = (data.getVar('BBFILE_COLLECTIONS') or "").split() 428 collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
@@ -389,26 +441,38 @@ class CookerDataBuilder(object):
389 if invalid: 441 if invalid:
390 bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid)) 442 bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
391 443
392 layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
393 collections_tmp = collections[:] 444 collections_tmp = collections[:]
394 for c in collections: 445 for c in collections:
395 collections_tmp.remove(c) 446 collections_tmp.remove(c)
396 if c in collections_tmp: 447 if c in collections_tmp:
397 bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c) 448 bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
398 compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split()) 449
450 compat = set()
451 if c in compat_entries:
452 compat = compat_entries[c]
453 if compat and not layerseries:
454 bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
399 if compat and not (compat & layerseries): 455 if compat and not (compat & layerseries):
400 bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)" 456 bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
401 % (c, " ".join(layerseries), " ".join(compat))) 457 % (c, " ".join(layerseries), " ".join(compat)))
402 elif not compat and not data.getVar("BB_WORKERCONTEXT"): 458 elif not compat and not data.getVar("BB_WORKERCONTEXT"):
403 bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c)) 459 bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c))
404 460
461 data.setVar("LAYERSERIES_CORENAMES", " ".join(sorted(layerseries)))
462
405 if not data.getVar("BBPATH"): 463 if not data.getVar("BBPATH"):
406 msg = "The BBPATH variable is not set" 464 msg = "The BBPATH variable is not set"
407 if not layerconf: 465 if not layerconf:
408 msg += (" and bitbake did not find a conf/bblayers.conf file in" 466 msg += (" and bitbake did not find a conf/bblayers.conf file in"
409 " the expected location.\nMaybe you accidentally" 467 " the expected location.\nMaybe you accidentally"
410 " invoked bitbake from the wrong directory?") 468 " invoked bitbake from the wrong directory?")
411 raise SystemExit(msg) 469 bb.fatal(msg)
470
471 if not data.getVar("TOPDIR"):
472 data.setVar("TOPDIR", os.path.abspath(os.getcwd()))
473 if not data.getVar("BB_CACHEDIR"):
474 data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
475 bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
412 476
413 data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) 477 data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
414 478
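[Note: the compatibility test itself is a plain set intersection: a layer passes if it declares at least one series that the core layer also names. With illustrative series names:

    layerseries = {"nanbield", "scarthgap"}   # from LAYERSERIES_CORENAMES
    compat = {"kirkstone", "scarthgap"}       # this layer's LAYERSERIES_COMPAT_*

    if compat and not (compat & layerseries):
        raise SystemExit("layer is not compatible with the core layer series")
    # scarthgap is in both sets here, so the layer is accepted.
]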
@@ -421,7 +485,7 @@ class CookerDataBuilder(object):
421 for bbclass in bbclasses: 485 for bbclass in bbclasses:
422 data = _inherit(bbclass, data) 486 data = _inherit(bbclass, data)
423 487
424 # Nomally we only register event handlers at the end of parsing .bb files 488 # Normally we only register event handlers at the end of parsing .bb files
425 # We register any handlers we've found so far here... 489 # We register any handlers we've found so far here...
426 for var in data.getVar('__BBHANDLERS', False) or []: 490 for var in data.getVar('__BBHANDLERS', False) or []:
427 handlerfn = data.getVarFlag(var, "filename", False) 491 handlerfn = data.getVarFlag(var, "filename", False)
@@ -435,3 +499,54 @@ class CookerDataBuilder(object):
435 499
436 return data 500 return data
437 501
502 @staticmethod
503 def _parse_recipe(bb_data, bbfile, appends, mc, layername):
504 bb_data.setVar("__BBMULTICONFIG", mc)
505 bb_data.setVar("FILE_LAYERNAME", layername)
506
507 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
508 bb.parse.cached_mtime_noerror(bbfile_loc)
509
510 if appends:
511 bb_data.setVar('__BBAPPEND', " ".join(appends))
512
513 return bb.parse.handle(bbfile, bb_data)
514
515 def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None, layername=None):
516 """
517 Load and parse one .bb build file
518 Return the data and whether parsing resulted in the file being skipped
519 """
520
521 if virtonly:
522 (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile)
523 bb_data = self.mcdata[mc].createCopy()
524 bb_data.setVar("__ONLYFINALISE", virtual or "default")
525 return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
526
527 if mc is not None:
528 bb_data = self.mcdata[mc].createCopy()
529 return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
530
531 bb_data = self.data.createCopy()
532 datastores = self._parse_recipe(bb_data, bbfile, appends, '', layername)
533
534 for mc in self.mcdata:
535 if not mc:
536 continue
537 bb_data = self.mcdata[mc].createCopy()
538 newstores = self._parse_recipe(bb_data, bbfile, appends, mc, layername)
539 for ns in newstores:
540 datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
541
542 return datastores
543
544 def parseRecipe(self, virtualfn, appends, layername):
545 """
546 Return a complete set of data for fn.
547 To do this, we need to parse the file.
548 """
549 logger.debug("Parsing %s (full)" % virtualfn)
550 (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn)
551 datastores = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername)
552 return datastores[virtual]
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
index c187fcfc6c..7689404436 100644
--- a/bitbake/lib/bb/daemonize.py
+++ b/bitbake/lib/bb/daemonize.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -74,26 +76,26 @@ def createDaemon(function, logfile):
74 with open('/dev/null', 'r') as si: 76 with open('/dev/null', 'r') as si:
75 os.dup2(si.fileno(), sys.stdin.fileno()) 77 os.dup2(si.fileno(), sys.stdin.fileno())
76 78
77 try: 79 with open(logfile, 'a+') as so:
78 so = open(logfile, 'a+') 80 try:
79 os.dup2(so.fileno(), sys.stdout.fileno()) 81 os.dup2(so.fileno(), sys.stdout.fileno())
80 os.dup2(so.fileno(), sys.stderr.fileno()) 82 os.dup2(so.fileno(), sys.stderr.fileno())
81 except io.UnsupportedOperation: 83 except io.UnsupportedOperation:
82 sys.stdout = open(logfile, 'a+') 84 sys.stdout = so
83 85
84 # Have stdout and stderr be the same so log output matches chronologically 86 # Have stdout and stderr be the same so log output matches chronologically
85 # and there aren't two seperate buffers 87 # and there aren't two separate buffers
86 sys.stderr = sys.stdout 88 sys.stderr = sys.stdout
87 89
88 try: 90 try:
89 function() 91 function()
90 except Exception as e: 92 except Exception as e:
91 traceback.print_exc() 93 traceback.print_exc()
92 finally: 94 finally:
93 bb.event.print_ui_queue() 95 bb.event.print_ui_queue()
94 # os._exit() doesn't flush open files like os.exit() does. Manually flush 96 # os._exit() doesn't flush open files like os.exit() does. Manually flush
95 # stdout and stderr so that any logging output will be seen, particularly 97 # stdout and stderr so that any logging output will be seen, particularly
96 # exception tracebacks. 98 # exception tracebacks.
97 sys.stdout.flush() 99 sys.stdout.flush()
98 sys.stderr.flush() 100 sys.stderr.flush()
99 os._exit(0) 101 os._exit(0)
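[Note: the daemonize change keeps the log file object open via a context manager while fds 1 and 2 are dup2()'d onto it; sharing one file for stdout and stderr keeps interleaved output chronological. The core redirection, reduced to a sketch; redirect_stdio is illustrative:

    import os
    import sys

    def redirect_stdio(logfile):
        # stdin from /dev/null; stdout/stderr both onto the log's descriptor
        with open("/dev/null", "r") as si:
            os.dup2(si.fileno(), sys.stdin.fileno())
        so = open(logfile, "a+")
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(so.fileno(), sys.stderr.fileno())
        sys.stderr = sys.stdout   # one Python-level buffer for both streams
]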
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 97022853ca..f672a84451 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -4,14 +4,16 @@ BitBake 'Data' implementations
4Functions for interacting with the data structure used by the 4Functions for interacting with the data structure used by the
5BitBake build tools. 5BitBake build tools.
6 6
7The expandKeys and update_data are the most expensive 7expandKeys and datastore iteration are the most expensive
8operations. At night the cookie monster came by and 8operations. Updating overrides is now "on the fly" but still based
9on the idea of the cookie monster introduced by zecke:
10"At night the cookie monster came by and
9suggested 'give me cookies on setting the variables and 11suggested 'give me cookies on setting the variables and
10things will work out'. Taking this suggestion into account 12things will work out'. Taking this suggestion into account
11applying the skills from the not yet passed 'Entwurf und 13applying the skills from the not yet passed 'Entwurf und
12Analyse von Algorithmen' lecture and the cookie 14Analyse von Algorithmen' lecture and the cookie
13monster seems to be right. We will track setVar more carefully 15monster seems to be right. We will track setVar more carefully
14to have faster update_data and expandKeys operations. 16to have faster datastore operations."
15 17
16This is a trade-off between speed and memory again but 18This is a trade-off between speed and memory again but
17the speed is more critical here. 19the speed is more critical here.
@@ -26,11 +28,6 @@ the speed is more critical here.
26 28
27import sys, os, re 29import sys, os, re
28import hashlib 30import hashlib
29if sys.argv[0][-5:] == "pydoc":
30 path = os.path.dirname(os.path.dirname(sys.argv[1]))
31else:
32 path = os.path.dirname(os.path.dirname(sys.argv[0]))
33sys.path.insert(0, path)
34from itertools import groupby 31from itertools import groupby
35 32
36from bb import data_smart 33from bb import data_smart
@@ -70,10 +67,6 @@ def keys(d):
70 """Return a list of keys in d""" 67 """Return a list of keys in d"""
71 return d.keys() 68 return d.keys()
72 69
73
74__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
75__expand_python_regexp__ = re.compile(r"\${@.+?}")
76
77def expand(s, d, varname = None): 70def expand(s, d, varname = None):
78 """Variable expansion using the data store""" 71 """Variable expansion using the data store"""
79 return d.expand(s, varname) 72 return d.expand(s, varname)
@@ -121,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
121 if d.getVarFlag(var, 'python', False) and func: 114 if d.getVarFlag(var, 'python', False) and func:
122 return False 115 return False
123 116
124 export = d.getVarFlag(var, "export", False) 117 export = bb.utils.to_boolean(d.getVarFlag(var, "export"))
125 unexport = d.getVarFlag(var, "unexport", False) 118 unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport"))
126 if not all and not export and not unexport and not func: 119 if not all and not export and not unexport and not func:
127 return False 120 return False
128 121
@@ -195,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
195 188
196def exported_keys(d): 189def exported_keys(d):
197 return (key for key in d.keys() if not key.startswith('__') and 190 return (key for key in d.keys() if not key.startswith('__') and
198 d.getVarFlag(key, 'export', False) and 191 bb.utils.to_boolean(d.getVarFlag(key, 'export')) and
199 not d.getVarFlag(key, 'unexport', False)) 192 not bb.utils.to_boolean(d.getVarFlag(key, 'unexport')))
200 193
201def exported_vars(d): 194def exported_vars(d):
202 k = list(exported_keys(d)) 195 k = list(exported_keys(d))
@@ -226,7 +219,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
226 deps = newdeps 219 deps = newdeps
227 seen |= deps 220 seen |= deps
228 newdeps = set() 221 newdeps = set()
229 for dep in deps: 222 for dep in sorted(deps):
230 if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False): 223 if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
231 emit_var(dep, o, d, False) and o.write('\n') 224 emit_var(dep, o, d, False) and o.write('\n')
232 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep)) 225 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
@@ -268,65 +261,72 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
268 newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split()) 261 newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
269 newdeps -= seen 262 newdeps -= seen
270 263
271def update_data(d): 264def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparsedata):
272 """Performs final steps upon the datastore, including application of overrides""" 265 def handle_contains(value, contains, exclusions, d):
273 d.finalize(parent = True) 266 newvalue = []
267 if value:
268 newvalue.append(str(value))
269 for k in sorted(contains):
270 if k in exclusions or k in ignored_vars:
271 continue
272 l = (d.getVar(k) or "").split()
273 for item in sorted(contains[k]):
274 for word in item.split():
275 if not word in l:
276 newvalue.append("\n%s{%s} = Unset" % (k, item))
277 break
278 else:
279 newvalue.append("\n%s{%s} = Set" % (k, item))
280 return "".join(newvalue)
281
282 def handle_remove(value, deps, removes, d):
283 for r in sorted(removes):
284 r2 = d.expandWithRefs(r, None)
285 value += "\n_remove of %s" % r
286 deps |= r2.references
287 deps = deps | (keys & r2.execs)
288 value = handle_contains(value, r2.contains, exclusions, d)
289 return value
274 290
275def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
276 deps = set() 291 deps = set()
277 try: 292 try:
293 if key in mod_funcs:
294 exclusions = set()
295 moddep = bb.codeparser.modulecode_deps[key]
296 value = handle_contains(moddep[4], moddep[3], exclusions, d)
297 return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
298
278 if key[-1] == ']': 299 if key[-1] == ']':
279 vf = key[:-1].split('[') 300 vf = key[:-1].split('[')
301 if vf[1] == "vardepvalueexclude":
302 return deps, ""
280 value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True) 303 value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True)
281 deps |= parser.references 304 deps |= parser.references
282 deps = deps | (keys & parser.execs) 305 deps = deps | (keys & parser.execs)
283 return deps, value 306 deps -= ignored_vars
307 return frozenset(deps), value
284 varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {} 308 varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
285 vardeps = varflags.get("vardeps") 309 vardeps = varflags.get("vardeps")
286 310 exclusions = varflags.get("vardepsexclude", "").split()
287 def handle_contains(value, contains, d):
288 newvalue = ""
289 for k in sorted(contains):
290 l = (d.getVar(k) or "").split()
291 for item in sorted(contains[k]):
292 for word in item.split():
293 if not word in l:
294 newvalue += "\n%s{%s} = Unset" % (k, item)
295 break
296 else:
297 newvalue += "\n%s{%s} = Set" % (k, item)
298 if not newvalue:
299 return value
300 if not value:
301 return newvalue
302 return value + newvalue
303
304 def handle_remove(value, deps, removes, d):
305 for r in sorted(removes):
306 r2 = d.expandWithRefs(r, None)
307 value += "\n_remove of %s" % r
308 deps |= r2.references
309 deps = deps | (keys & r2.execs)
310 return value
311 311
312 if "vardepvalue" in varflags: 312 if "vardepvalue" in varflags:
313 value = varflags.get("vardepvalue") 313 value = varflags.get("vardepvalue")
314 elif varflags.get("func"): 314 elif varflags.get("func"):
315 if varflags.get("python"): 315 if varflags.get("python"):
316 value = d.getVarFlag(key, "_content", False) 316 value = codeparsedata.getVarFlag(key, "_content", False)
317 parser = bb.codeparser.PythonParser(key, logger) 317 parser = bb.codeparser.PythonParser(key, logger)
318 parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno")) 318 parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
319 deps = deps | parser.references 319 deps = deps | parser.references
320 deps = deps | (keys & parser.execs) 320 deps = deps | (keys & parser.execs)
321 value = handle_contains(value, parser.contains, d) 321 value = handle_contains(value, parser.contains, exclusions, d)
322 else: 322 else:
323 value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True) 323 value, parsedvar = codeparsedata.getVarFlag(key, "_content", False, retparser=True)
324 parser = bb.codeparser.ShellParser(key, logger) 324 parser = bb.codeparser.ShellParser(key, logger)
325 parser.parse_shell(parsedvar.value) 325 parser.parse_shell(parsedvar.value)
326 deps = deps | shelldeps 326 deps = deps | shelldeps
327 deps = deps | parsedvar.references 327 deps = deps | parsedvar.references
328 deps = deps | (keys & parser.execs) | (keys & parsedvar.execs) 328 deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
329 value = handle_contains(value, parsedvar.contains, d) 329 value = handle_contains(value, parsedvar.contains, exclusions, d)
330 if hasattr(parsedvar, "removes"): 330 if hasattr(parsedvar, "removes"):
331 value = handle_remove(value, deps, parsedvar.removes, d) 331 value = handle_remove(value, deps, parsedvar.removes, d)
332 if vardeps is None: 332 if vardeps is None:
@@ -341,7 +341,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
341 value, parser = d.getVarFlag(key, "_content", False, retparser=True) 341 value, parser = d.getVarFlag(key, "_content", False, retparser=True)
342 deps |= parser.references 342 deps |= parser.references
343 deps = deps | (keys & parser.execs) 343 deps = deps | (keys & parser.execs)
344 value = handle_contains(value, parser.contains, d) 344 value = handle_contains(value, parser.contains, exclusions, d)
345 if hasattr(parser, "removes"): 345 if hasattr(parser, "removes"):
346 value = handle_remove(value, deps, parser.removes, d) 346 value = handle_remove(value, deps, parser.removes, d)
347 347
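
For reference, the flags consulted above come from recipe metadata; a hypothetical datastore session (bitbake's lib/ on sys.path, all names made up) showing how vardepvalue pins the value used for hashing while vardepsexclude drops a reference, matching the exclusions handling added in this hunk:

    import bb.data

    d = bb.data.init()
    d.setVar("DEPLOYDIR", "/tmp/deploy")
    d.setVar("do_deploy", "cp image ${DEPLOYDIR}")
    # DEPLOYDIR no longer contributes to do_deploy's signature:
    d.setVarFlag("do_deploy", "vardepsexclude", "DEPLOYDIR")
    # A timestamp whose hashed value is pinned to the empty string:
    d.setVar("DATETIME", "20240101120000")
    d.setVarFlag("DATETIME", "vardepvalue", "")
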
@@ -361,43 +361,50 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
361 deps |= set(varfdeps) 361 deps |= set(varfdeps)
362 362
363 deps |= set((vardeps or "").split()) 363 deps |= set((vardeps or "").split())
364 deps -= set(varflags.get("vardepsexclude", "").split()) 364 deps -= set(exclusions)
365 deps -= ignored_vars
365 except bb.parse.SkipRecipe: 366 except bb.parse.SkipRecipe:
366 raise 367 raise
367 except Exception as e: 368 except Exception as e:
368 bb.warn("Exception during build_dependencies for %s" % key) 369 bb.warn("Exception during build_dependencies for %s" % key)
369 raise 370 raise
370 return deps, value 371 return frozenset(deps), value
371 #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs))) 372 #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
372 #d.setVarFlag(key, "vardeps", deps) 373 #d.setVarFlag(key, "vardeps", deps)
373 374
374def generate_dependencies(d, whitelist): 375def generate_dependencies(d, ignored_vars):
375 376
376 keys = set(key for key in d if not key.startswith("__")) 377 mod_funcs = set(bb.codeparser.modulecode_deps.keys())
377 shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False)) 378 keys = set(key for key in d if not key.startswith("__")) | mod_funcs
379 shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport")))
378 varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS') 380 varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
379 381
382 codeparserd = d.createCopy()
383 for forced in (d.getVar('BB_HASH_CODEPARSER_VALS') or "").split():
384 key, value = forced.split("=", 1)
385 codeparserd.setVar(key, value)
386
380 deps = {} 387 deps = {}
381 values = {} 388 values = {}
382 389
383 tasklist = d.getVar('__BBTASKS', False) or [] 390 tasklist = d.getVar('__BBTASKS', False) or []
384 for task in tasklist: 391 for task in tasklist:
385 deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d) 392 deps[task], values[task] = build_dependencies(task, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
386 newdeps = deps[task] 393 newdeps = deps[task]
387 seen = set() 394 seen = set()
388 while newdeps: 395 while newdeps:
389 nextdeps = newdeps - whitelist 396 nextdeps = newdeps
390 seen |= nextdeps 397 seen |= nextdeps
391 newdeps = set() 398 newdeps = set()
392 for dep in nextdeps: 399 for dep in nextdeps:
393 if dep not in deps: 400 if dep not in deps:
394 deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d) 401 deps[dep], values[dep] = build_dependencies(dep, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
395 newdeps |= deps[dep] 402 newdeps |= deps[dep]
396 newdeps -= seen 403 newdeps -= seen
397 #print "For %s: %s" % (task, str(deps[task])) 404 #print "For %s: %s" % (task, str(deps[task]))
398 return tasklist, deps, values 405 return tasklist, deps, values
399 406
400def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn): 407def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
401 taskdeps = {} 408 taskdeps = {}
402 basehash = {} 409 basehash = {}
403 410
@@ -406,9 +413,10 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
406 413
407 if data is None: 414 if data is None:
408 bb.error("Task %s from %s seems to be empty?!" % (task, fn)) 415 bb.error("Task %s from %s seems to be empty?!" % (task, fn))
409 data = '' 416 data = []
417 else:
418 data = [data]
410 419
411 gendeps[task] -= whitelist
412 newdeps = gendeps[task] 420 newdeps = gendeps[task]
413 seen = set() 421 seen = set()
414 while newdeps: 422 while newdeps:
@@ -416,27 +424,24 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
416 seen |= nextdeps 424 seen |= nextdeps
417 newdeps = set() 425 newdeps = set()
418 for dep in nextdeps: 426 for dep in nextdeps:
419 if dep in whitelist:
420 continue
421 gendeps[dep] -= whitelist
422 newdeps |= gendeps[dep] 427 newdeps |= gendeps[dep]
423 newdeps -= seen 428 newdeps -= seen
424 429
425 alldeps = sorted(seen) 430 alldeps = sorted(seen)
426 for dep in alldeps: 431 for dep in alldeps:
427 data = data + dep 432 data.append(dep)
428 var = lookupcache[dep] 433 var = lookupcache[dep]
429 if var is not None: 434 if var is not None:
430 data = data + str(var) 435 data.append(str(var))
431 k = fn + ":" + task 436 k = fn + ":" + task
432 basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest() 437 basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest()
433 taskdeps[task] = alldeps 438 taskdeps[task] = frozenset(seen)
434 439
435 return taskdeps, basehash 440 return taskdeps, basehash
436 441
437def inherits_class(klass, d): 442def inherits_class(klass, d):
438 val = d.getVar('__inherit_cache', False) or [] 443 val = d.getVar('__inherit_cache', False) or []
439 needle = os.path.join('classes', '%s.bbclass' % klass) 444 needle = '/%s.bbclass' % klass
440 for v in val: 445 for v in val:
441 if v.endswith(needle): 446 if v.endswith(needle):
442 return True 447 return True
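
The generate_dependency_hash() rework above replaces repeated string concatenation with a list joined once before hashing; a standalone sketch of the same pattern, with made-up inputs:

    import hashlib

    def basehash(task_value, deps, lookupcache):
        data = [task_value]
        for dep in sorted(deps):           # deterministic ordering, as above
            data.append(dep)
            value = lookupcache.get(dep)
            if value is not None:
                data.append(str(value))
        return hashlib.sha256("".join(data).encode("utf-8")).hexdigest()

    print(basehash("do_compile() { make; }",
                   {"CC", "CFLAGS"},
                   {"CC": "gcc", "CFLAGS": "-O2"}))
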
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 2328c334ac..8e7dd98384 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -16,8 +16,11 @@ BitBake build tools.
16# 16#
17# Based on functions from the base bb module, Copyright 2003 Holger Schurig 17# Based on functions from the base bb module, Copyright 2003 Holger Schurig
18 18
19import copy, re, sys, traceback 19import builtins
20from collections import MutableMapping 20import copy
21import re
22import sys
23from collections.abc import MutableMapping
21import logging 24import logging
22import hashlib 25import hashlib
23import bb, bb.codeparser 26import bb, bb.codeparser
@@ -26,13 +29,25 @@ from bb.COW import COWDictBase
26 29
27logger = logging.getLogger("BitBake.Data") 30logger = logging.getLogger("BitBake.Data")
28 31
29__setvar_keyword__ = ["_append", "_prepend", "_remove"] 32__setvar_keyword__ = [":append", ":prepend", ":remove"]
30__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$') 33__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
31__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}") 34__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")
32__expand_python_regexp__ = re.compile(r"\${@.+?}") 35__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
33__whitespace_split__ = re.compile(r'(\s)') 36__whitespace_split__ = re.compile(r'(\s)')
34__override_regexp__ = re.compile(r'[a-z0-9]+') 37__override_regexp__ = re.compile(r'[a-z0-9]+')
35 38
39bitbake_renamed_vars = {
40 "BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH",
41 "BB_ENV_EXTRAWHITE": "BB_ENV_PASSTHROUGH_ADDITIONS",
42 "BB_HASHBASE_WHITELIST": "BB_BASEHASH_IGNORE_VARS",
43 "BB_HASHCONFIG_WHITELIST": "BB_HASHCONFIG_IGNORE_VARS",
44 "BB_HASHTASK_WHITELIST": "BB_TASKHASH_IGNORE_TASKS",
45 "BB_SETSCENE_ENFORCE_WHITELIST": "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
46 "MULTI_PROVIDER_WHITELIST": "BB_MULTI_PROVIDER_ALLOWED",
47 "BB_STAMP_WHITELIST": "is a deprecated variable and support has been removed",
48 "BB_STAMP_POLICY": "is a deprecated variable and support has been removed",
49}
50
36def infer_caller_details(loginfo, parent = False, varval = True): 51def infer_caller_details(loginfo, parent = False, varval = True):
37 """Save the caller the trouble of specifying everything.""" 52 """Save the caller the trouble of specifying everything."""
38 # Save effort. 53 # Save effort.
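
The reworked patterns above are the heart of the override-syntax switch and can be exercised standalone (example variable names are hypothetical):

    import re

    setvar_regexp = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
    m = setvar_regexp.match("SRC_URI:append:qemux86")
    print(m.group("base"), m.group("keyword"), m.group("add"))
    # -> SRC_URI :append qemux86

    # ':' is now legal inside ${...} references too:
    expand_var_regexp = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")
    print(expand_var_regexp.findall("${FOO:remove} and ${BAR}"))
    # -> ['${FOO:remove}', '${BAR}']
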
@@ -80,68 +95,79 @@ def infer_caller_details(loginfo, parent = False, varval = True):
80 loginfo['func'] = func 95 loginfo['func'] = func
81 96
82class VariableParse: 97class VariableParse:
83 def __init__(self, varname, d, val = None): 98 def __init__(self, varname, d, unexpanded_value = None, val = None):
84 self.varname = varname 99 self.varname = varname
85 self.d = d 100 self.d = d
86 self.value = val 101 self.value = val
102 self.unexpanded_value = unexpanded_value
87 103
88 self.references = set() 104 self.references = set()
89 self.execs = set() 105 self.execs = set()
90 self.contains = {} 106 self.contains = {}
91 107
92 def var_sub(self, match): 108 def var_sub(self, match):
93 key = match.group()[2:-1] 109 key = match.group()[2:-1]
94 if self.varname and key: 110 if self.varname and key:
95 if self.varname == key: 111 if self.varname == key:
96 raise Exception("variable %s references itself!" % self.varname) 112 raise Exception("variable %s references itself!" % self.varname)
97 var = self.d.getVarFlag(key, "_content") 113 var = self.d.getVarFlag(key, "_content")
98 self.references.add(key) 114 self.references.add(key)
99 if var is not None: 115 if var is not None:
100 return var 116 return var
101 else: 117 else:
102 return match.group() 118 return match.group()
103 119
104 def python_sub(self, match): 120 def python_sub(self, match):
105 if isinstance(match, str): 121 if isinstance(match, str):
106 code = match 122 code = match
107 else: 123 else:
108 code = match.group()[3:-1] 124 code = match.group()[3:-1]
109 125
110 if self.varname: 126 # Do not run code that contains one or more unexpanded variables;
111 varname = 'Var <%s>' % self.varname 127 # instead return the code with the characters we removed put back
112 else: 128 if __expand_var_regexp__.findall(code):
113 varname = '<expansion>' 129 return "${@" + code + "}"
114 codeobj = compile(code.strip(), varname, "eval")
115
116 parser = bb.codeparser.PythonParser(self.varname, logger)
117 parser.parse_python(code)
118 if self.varname:
119 vardeps = self.d.getVarFlag(self.varname, "vardeps")
120 if vardeps is None:
121 parser.log.flush()
122 else:
123 parser.log.flush()
124 self.references |= parser.references
125 self.execs |= parser.execs
126 130
127 for k in parser.contains: 131 if self.varname:
128 if k not in self.contains: 132 varname = 'Var <%s>' % self.varname
129 self.contains[k] = parser.contains[k].copy() 133 else:
130 else: 134 varname = '<expansion>'
131 self.contains[k].update(parser.contains[k]) 135 codeobj = compile(code.strip(), varname, "eval")
132 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) 136
133 return str(value) 137 parser = bb.codeparser.PythonParser(self.varname, logger)
138 parser.parse_python(code)
139 if self.varname:
140 vardeps = self.d.getVarFlag(self.varname, "vardeps")
141 if vardeps is None:
142 parser.log.flush()
143 else:
144 parser.log.flush()
145 self.references |= parser.references
146 self.execs |= parser.execs
134 147
148 for k in parser.contains:
149 if k not in self.contains:
150 self.contains[k] = parser.contains[k].copy()
151 else:
152 self.contains[k].update(parser.contains[k])
153 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
154 return str(value)
135 155
136class DataContext(dict): 156class DataContext(dict):
157 excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe'])
158
137 def __init__(self, metadata, **kwargs): 159 def __init__(self, metadata, **kwargs):
138 self.metadata = metadata 160 self.metadata = metadata
139 dict.__init__(self, **kwargs) 161 dict.__init__(self, **kwargs)
140 self['d'] = metadata 162 self['d'] = metadata
163 self.context = set(bb.utils.get_context())
141 164
142 def __missing__(self, key): 165 def __missing__(self, key):
166 if key in self.excluded or key in self.context:
167 raise KeyError(key)
168
143 value = self.metadata.getVar(key) 169 value = self.metadata.getVar(key)
144 if value is None or self.metadata.getVarFlag(key, 'func', False): 170 if value is None:
145 raise KeyError(key) 171 raise KeyError(key)
146 else: 172 else:
147 return value 173 return value
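
The guard added to python_sub() means a ${@...} block is no longer evaluated while its body still contains an unexpanded ${VAR}; the stripped delimiters are reassembled so a later expansion pass can retry. In isolation (the inline snippet is hypothetical):

    import re

    expand_var_regexp = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")

    code = "d.getVar('${PN}-extra')"    # body of a ${@...} expression
    if expand_var_regexp.findall(code):
        # Defer: put back the "${@" and "}" the expansion engine stripped.
        print("${@" + code + "}")       # -> ${@d.getVar('${PN}-extra')}
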
@@ -151,6 +177,7 @@ class ExpansionError(Exception):
151 self.expression = expression 177 self.expression = expression
152 self.variablename = varname 178 self.variablename = varname
153 self.exception = exception 179 self.exception = exception
180 self.varlist = [varname or expression or ""]
154 if varname: 181 if varname:
155 if expression: 182 if expression:
156 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception) 183 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
@@ -160,8 +187,14 @@ class ExpansionError(Exception):
160 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception) 187 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
161 Exception.__init__(self, self.msg) 188 Exception.__init__(self, self.msg)
162 self.args = (varname, expression, exception) 189 self.args = (varname, expression, exception)
190
191 def addVar(self, varname):
192 if varname:
193 self.varlist.append(varname)
194
163 def __str__(self): 195 def __str__(self):
164 return self.msg 196 chain = "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist)
197 return self.msg + chain
165 198
166class IncludeHistory(object): 199class IncludeHistory(object):
167 def __init__(self, parent = None, filename = '[TOP LEVEL]'): 200 def __init__(self, parent = None, filename = '[TOP LEVEL]'):
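
The varlist/addVar() additions let every frame of a nested expansion record itself before re-raising; a toy reproduction of the chain building (class trimmed to the relevant parts, names hypothetical):

    class ExpansionError(Exception):
        def __init__(self, varname, expression, exception):
            self.varlist = [varname or expression or ""]
            self.msg = "Failure expanding variable %s: %s" % (varname, exception)
            Exception.__init__(self, self.msg)

        def addVar(self, varname):
            if varname:
                self.varlist.append(varname)

        def __str__(self):
            chain = "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist)
            return self.msg + chain

    err = ExpansionError("BAD_VAR", "${@1/0}", ZeroDivisionError("division by zero"))
    err.addVar("FOO")          # appended by the frame expanding FOO
    err.addVar("do_install")   # ...and by the task that referenced FOO
    print(err)
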
@@ -239,12 +272,9 @@ class VariableHistory(object):
239 return 272 return
240 if 'op' not in loginfo or not loginfo['op']: 273 if 'op' not in loginfo or not loginfo['op']:
241 loginfo['op'] = 'set' 274 loginfo['op'] = 'set'
242 if 'detail' in loginfo:
243 loginfo['detail'] = str(loginfo['detail'])
244 if 'variable' not in loginfo or 'file' not in loginfo: 275 if 'variable' not in loginfo or 'file' not in loginfo:
245 raise ValueError("record() missing variable or file.") 276 raise ValueError("record() missing variable or file.")
246 var = loginfo['variable'] 277 var = loginfo['variable']
247
248 if var not in self.variables: 278 if var not in self.variables:
249 self.variables[var] = [] 279 self.variables[var] = []
250 if not isinstance(self.variables[var], list): 280 if not isinstance(self.variables[var], list):
@@ -277,7 +307,7 @@ class VariableHistory(object):
277 for (r, override) in d.overridedata[var]: 307 for (r, override) in d.overridedata[var]:
278 for event in self.variable(r): 308 for event in self.variable(r):
279 loginfo = event.copy() 309 loginfo = event.copy()
280 if 'flag' in loginfo and not loginfo['flag'].startswith("_"): 310 if 'flag' in loginfo and not loginfo['flag'].startswith(("_", ":")):
281 continue 311 continue
282 loginfo['variable'] = var 312 loginfo['variable'] = var
283 loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op']) 313 loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
@@ -303,7 +333,8 @@ class VariableHistory(object):
303 flag = '[%s] ' % (event['flag']) 333 flag = '[%s] ' % (event['flag'])
304 else: 334 else:
305 flag = '' 335 flag = ''
306 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail']))) 336 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % \
337 (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', str(event['detail']))))
307 if len(history) > 1: 338 if len(history) > 1:
308 o.write("# pre-expansion value:\n") 339 o.write("# pre-expansion value:\n")
309 o.write('# "%s"\n' % (commentVal)) 340 o.write('# "%s"\n' % (commentVal))
@@ -329,6 +360,16 @@ class VariableHistory(object):
329 lines.append(line) 360 lines.append(line)
330 return lines 361 return lines
331 362
363 def get_variable_refs(self, var):
364 """Return a dict of file/line references"""
365 var_history = self.variable(var)
366 refs = {}
367 for event in var_history:
368 if event['file'] not in refs:
369 refs[event['file']] = []
370 refs[event['file']].append(event['line'])
371 return refs
372
332 def get_variable_items_files(self, var): 373 def get_variable_items_files(self, var):
333 """ 374 """
334 Use variable history to map items added to a list variable and 375 Use variable history to map items added to a list variable and
@@ -342,12 +383,12 @@ class VariableHistory(object):
342 for event in history: 383 for event in history:
343 if 'flag' in event: 384 if 'flag' in event:
344 continue 385 continue
345 if event['op'] == '_remove': 386 if event['op'] == ':remove':
346 continue 387 continue
347 if isset and event['op'] == 'set?': 388 if isset and event['op'] == 'set?':
348 continue 389 continue
349 isset = True 390 isset = True
350 items = d.expand(event['detail']).split() 391 items = d.expand(str(event['detail'])).split()
351 for item in items: 392 for item in items:
352 # This is a little crude but is belt-and-braces to avoid us 393 # This is a little crude but is belt-and-braces to avoid us
353 # having to handle every possible operation type specifically 394 # having to handle every possible operation type specifically
@@ -363,6 +404,23 @@ class VariableHistory(object):
363 else: 404 else:
364 self.variables[var] = [] 405 self.variables[var] = []
365 406
407def _print_rename_error(var, loginfo, renamedvars, fullvar=None):
408 info = ""
409 if "file" in loginfo:
410 info = " file: %s" % loginfo["file"]
411 if "line" in loginfo:
412 info += " line: %s" % loginfo["line"]
413 if fullvar and fullvar != var:
414 info += " referenced as: %s" % fullvar
415 if info:
416 info = " (%s)" % info.strip()
417 renameinfo = renamedvars[var]
418 if " " in renameinfo:
419 # A space signals a string to display instead of a rename
420 bb.erroronce('Variable %s %s%s' % (var, renameinfo, info))
421 else:
422 bb.erroronce('Variable %s has been renamed to %s%s' % (var, renameinfo, info))
423
366class DataSmart(MutableMapping): 424class DataSmart(MutableMapping):
367 def __init__(self): 425 def __init__(self):
368 self.dict = {} 426 self.dict = {}
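
Beyond the built-in table, layers can extend the rename map through the BB_RENAMED_VARIABLES hook wired into setVarFlag() further down; a sketch of the resulting behaviour (variable names hypothetical, bitbake's lib/ on sys.path):

    import bb.data

    d = bb.data.init()
    d.setVarFlag("BB_RENAMED_VARIABLES", "MY_OLD_VAR", "MY_NEW_VAR")
    # Any later use of the old name is reported once and flagged as a
    # parse failure via _FAILPARSINGERRORHANDLED:
    d.setVar("MY_OLD_VAR", "x")
    # -> ERROR: Variable MY_OLD_VAR has been renamed to MY_NEW_VAR
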
@@ -370,6 +428,8 @@ class DataSmart(MutableMapping):
370 self.inchistory = IncludeHistory() 428 self.inchistory = IncludeHistory()
371 self.varhistory = VariableHistory(self) 429 self.varhistory = VariableHistory(self)
372 self._tracking = False 430 self._tracking = False
431 self._var_renames = {}
432 self._var_renames.update(bitbake_renamed_vars)
373 433
374 self.expand_cache = {} 434 self.expand_cache = {}
375 435
@@ -391,9 +451,9 @@ class DataSmart(MutableMapping):
391 def expandWithRefs(self, s, varname): 451 def expandWithRefs(self, s, varname):
392 452
393 if not isinstance(s, str): # sanity check 453 if not isinstance(s, str): # sanity check
394 return VariableParse(varname, self, s) 454 return VariableParse(varname, self, s, s)
395 455
396 varparse = VariableParse(varname, self) 456 varparse = VariableParse(varname, self, s)
397 457
398 while s.find('${') != -1: 458 while s.find('${') != -1:
399 olds = s 459 olds = s
@@ -403,14 +463,17 @@ class DataSmart(MutableMapping):
403 s = __expand_python_regexp__.sub(varparse.python_sub, s) 463 s = __expand_python_regexp__.sub(varparse.python_sub, s)
404 except SyntaxError as e: 464 except SyntaxError as e:
405 # Likely unmatched brackets, just don't expand the expression 465 # Likely unmatched brackets, just don't expand the expression
406 if e.msg != "EOL while scanning string literal": 466 if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
407 raise 467 raise
408 if s == olds: 468 if s == olds:
409 break 469 break
410 except ExpansionError: 470 except ExpansionError as e:
471 e.addVar(varname)
411 raise 472 raise
412 except bb.parse.SkipRecipe: 473 except bb.parse.SkipRecipe:
413 raise 474 raise
475 except bb.BBHandledException:
476 raise
414 except Exception as exc: 477 except Exception as exc:
415 tb = sys.exc_info()[2] 478 tb = sys.exc_info()[2]
416 raise ExpansionError(varname, s, exc).with_traceback(tb) from exc 479 raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
@@ -422,24 +485,19 @@ class DataSmart(MutableMapping):
422 def expand(self, s, varname = None): 485 def expand(self, s, varname = None):
423 return self.expandWithRefs(s, varname).value 486 return self.expandWithRefs(s, varname).value
424 487
425 def finalize(self, parent = False):
426 return
427
428 def internal_finalize(self, parent = False):
429 """Performs final steps upon the datastore, including application of overrides"""
430 self.overrides = None
431
432 def need_overrides(self): 488 def need_overrides(self):
433 if self.overrides is not None: 489 if self.overrides is not None:
434 return 490 return
435 if self.inoverride: 491 if self.inoverride:
436 return 492 return
493 override_stack = []
437 for count in range(5): 494 for count in range(5):
438 self.inoverride = True 495 self.inoverride = True
439 # Can end up here recursively so setup dummy values 496 # Can end up here recursively so setup dummy values
440 self.overrides = [] 497 self.overrides = []
441 self.overridesset = set() 498 self.overridesset = set()
442 self.overrides = (self.getVar("OVERRIDES") or "").split(":") or [] 499 self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
500 override_stack.append(self.overrides)
443 self.overridesset = set(self.overrides) 501 self.overridesset = set(self.overrides)
444 self.inoverride = False 502 self.inoverride = False
445 self.expand_cache = {} 503 self.expand_cache = {}
@@ -449,7 +507,7 @@ class DataSmart(MutableMapping):
449 self.overrides = newoverrides 507 self.overrides = newoverrides
450 self.overridesset = set(self.overrides) 508 self.overridesset = set(self.overrides)
451 else: 509 else:
452 bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.") 510 bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work. The list of failing override expansions: %s" % "\n".join(str(s) for s in overrride_stack))
453 511
454 def initVar(self, var): 512 def initVar(self, var):
455 self.expand_cache = {} 513 self.expand_cache = {}
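
need_overrides() has to iterate because OVERRIDES can itself reference variables that are overridden; expansion is repeated until two passes agree, with the same five-pass cap as above. The control flow in isolation (inputs made up):

    def expand_to_fixpoint(expand_once, limit=5):
        stack = []
        previous = None
        for _ in range(limit):
            current = expand_once()      # re-split OVERRIDES each pass
            stack.append(current)
            if current == previous:
                return current
            previous = current
        raise RuntimeError("unstable override expansion: %s" % stack)

    values = iter([["a"], ["a", "b"], ["a", "b"]])   # stabilises on pass 3
    print(expand_to_fixpoint(lambda: next(values)))  # -> ['a', 'b']
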
@@ -460,27 +518,44 @@ class DataSmart(MutableMapping):
460 dest = self.dict 518 dest = self.dict
461 while dest: 519 while dest:
462 if var in dest: 520 if var in dest:
463 return dest[var], self.overridedata.get(var, None) 521 return dest[var]
464 522
465 if "_data" not in dest: 523 if "_data" not in dest:
466 break 524 break
467 dest = dest["_data"] 525 dest = dest["_data"]
468 return None, self.overridedata.get(var, None) 526 return None
469 527
470 def _makeShadowCopy(self, var): 528 def _makeShadowCopy(self, var):
471 if var in self.dict: 529 if var in self.dict:
472 return 530 return
473 531
474 local_var, _ = self._findVar(var) 532 local_var = self._findVar(var)
475 533
476 if local_var: 534 if local_var:
477 self.dict[var] = copy.copy(local_var) 535 self.dict[var] = copy.copy(local_var)
478 else: 536 else:
479 self.initVar(var) 537 self.initVar(var)
480 538
539 def hasOverrides(self, var):
540 return var in self.overridedata
481 541
482 def setVar(self, var, value, **loginfo): 542 def setVar(self, var, value, **loginfo):
483 #print("var=" + str(var) + " val=" + str(value)) 543 #print("var=" + str(var) + " val=" + str(value))
544
545 if not var.startswith("__anon_") and ("_append" in var or "_prepend" in var or "_remove" in var):
546 info = "%s" % var
547 if "file" in loginfo:
548 info += " file: %s" % loginfo["file"]
549 if "line" in loginfo:
550 info += " line: %s" % loginfo["line"]
551 bb.fatal("Variable %s contains an operation using the old override syntax. Please convert this layer/metadata before attempting to use with a newer bitbake." % info)
552
553 shortvar = var.split(":", 1)[0]
554 if shortvar in self._var_renames:
555 _print_rename_error(shortvar, loginfo, self._var_renames, fullvar=var)
556 # Mark that we have seen a renamed variable
557 self.setVar("_FAILPARSINGERRORHANDLED", True)
558
484 self.expand_cache = {} 559 self.expand_cache = {}
485 parsing=False 560 parsing=False
486 if 'parsing' in loginfo: 561 if 'parsing' in loginfo:
@@ -505,12 +580,10 @@ class DataSmart(MutableMapping):
505 else: 580 else:
506 loginfo['op'] = keyword 581 loginfo['op'] = keyword
507 self.varhistory.record(**loginfo) 582 self.varhistory.record(**loginfo)
508 # todo make sure keyword is not __doc__ or __module__
509 # pay the cookie monster 583 # pay the cookie monster
510 584
511 # more cookies for the cookie monster 585 # more cookies for the cookie monster
512 if '_' in var: 586 self._setvar_update_overrides(base, **loginfo)
513 self._setvar_update_overrides(base, **loginfo)
514 587
515 if base in self.overridevars: 588 if base in self.overridevars:
516 self._setvar_update_overridevars(var, value) 589 self._setvar_update_overridevars(var, value)
@@ -520,27 +593,27 @@ class DataSmart(MutableMapping):
520 self._makeShadowCopy(var) 593 self._makeShadowCopy(var)
521 594
522 if not parsing: 595 if not parsing:
523 if "_append" in self.dict[var]: 596 if ":append" in self.dict[var]:
524 del self.dict[var]["_append"] 597 del self.dict[var][":append"]
525 if "_prepend" in self.dict[var]: 598 if ":prepend" in self.dict[var]:
526 del self.dict[var]["_prepend"] 599 del self.dict[var][":prepend"]
527 if "_remove" in self.dict[var]: 600 if ":remove" in self.dict[var]:
528 del self.dict[var]["_remove"] 601 del self.dict[var][":remove"]
529 if var in self.overridedata: 602 if var in self.overridedata:
530 active = [] 603 active = []
531 self.need_overrides() 604 self.need_overrides()
532 for (r, o) in self.overridedata[var]: 605 for (r, o) in self.overridedata[var]:
533 if o in self.overridesset: 606 if o in self.overridesset:
534 active.append(r) 607 active.append(r)
535 elif "_" in o: 608 elif ":" in o:
536 if set(o.split("_")).issubset(self.overridesset): 609 if set(o.split(":")).issubset(self.overridesset):
537 active.append(r) 610 active.append(r)
538 for a in active: 611 for a in active:
539 self.delVar(a) 612 self.delVar(a)
540 del self.overridedata[var] 613 del self.overridedata[var]
541 614
542 # more cookies for the cookie monster 615 # more cookies for the cookie monster
543 if '_' in var: 616 if ':' in var:
544 self._setvar_update_overrides(var, **loginfo) 617 self._setvar_update_overrides(var, **loginfo)
545 618
546 # setting var 619 # setting var
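
Taken together, the ':' changes in setVar()/getVarFlag() give the familiar override behaviour under the new syntax; a hypothetical end-to-end session (bitbake's lib/ on sys.path):

    import bb.data

    d = bb.data.init()
    d.setVar("OVERRIDES", "arm:qemuarm")
    d.setVar("CFLAGS", "-O2")
    d.setVar("CFLAGS:append:arm", " -marm")   # active: 'arm' is in OVERRIDES
    d.setVar("CFLAGS:append:mips", " -EB")    # inactive override, ignored
    print(d.getVar("CFLAGS"))                 # -> -O2 -marm
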
@@ -562,12 +635,13 @@ class DataSmart(MutableMapping):
562 nextnew.update(vardata.references) 635 nextnew.update(vardata.references)
563 nextnew.update(vardata.contains.keys()) 636 nextnew.update(vardata.contains.keys())
564 new = nextnew 637 new = nextnew
565 self.internal_finalize(True) 638 self.overrides = None
639 self.expand_cache = {}
566 640
567 def _setvar_update_overrides(self, var, **loginfo): 641 def _setvar_update_overrides(self, var, **loginfo):
568 # aka pay the cookie monster 642 # aka pay the cookie monster
569 override = var[var.rfind('_')+1:] 643 override = var[var.rfind(':')+1:]
570 shortvar = var[:var.rfind('_')] 644 shortvar = var[:var.rfind(':')]
571 while override and __override_regexp__.match(override): 645 while override and __override_regexp__.match(override):
572 if shortvar not in self.overridedata: 646 if shortvar not in self.overridedata:
573 self.overridedata[shortvar] = [] 647 self.overridedata[shortvar] = []
@@ -576,9 +650,9 @@ class DataSmart(MutableMapping):
576 self.overridedata[shortvar] = list(self.overridedata[shortvar]) 650 self.overridedata[shortvar] = list(self.overridedata[shortvar])
577 self.overridedata[shortvar].append([var, override]) 651 self.overridedata[shortvar].append([var, override])
578 override = None 652 override = None
579 if "_" in shortvar: 653 if ":" in shortvar:
580 override = var[shortvar.rfind('_')+1:] 654 override = var[shortvar.rfind(':')+1:]
581 shortvar = var[:shortvar.rfind('_')] 655 shortvar = var[:shortvar.rfind(':')]
582 if len(shortvar) == 0: 656 if len(shortvar) == 0:
583 override = None 657 override = None
584 658
@@ -602,10 +676,11 @@ class DataSmart(MutableMapping):
602 self.varhistory.record(**loginfo) 676 self.varhistory.record(**loginfo)
603 self.setVar(newkey, val, ignore=True, parsing=True) 677 self.setVar(newkey, val, ignore=True, parsing=True)
604 678
605 for i in (__setvar_keyword__): 679 srcflags = self.getVarFlags(key, False, True) or {}
606 src = self.getVarFlag(key, i, False) 680 for i in srcflags:
607 if src is None: 681 if i not in (__setvar_keyword__):
608 continue 682 continue
683 src = srcflags[i]
609 684
610 dest = self.getVarFlag(newkey, i, False) or [] 685 dest = self.getVarFlag(newkey, i, False) or []
611 dest.extend(src) 686 dest.extend(src)
@@ -617,7 +692,7 @@ class DataSmart(MutableMapping):
617 self.overridedata[newkey].append([v.replace(key, newkey), o]) 692 self.overridedata[newkey].append([v.replace(key, newkey), o])
618 self.renameVar(v, v.replace(key, newkey)) 693 self.renameVar(v, v.replace(key, newkey))
619 694
620 if '_' in newkey and val is None: 695 if ':' in newkey and val is None:
621 self._setvar_update_overrides(newkey, **loginfo) 696 self._setvar_update_overrides(newkey, **loginfo)
622 697
623 loginfo['variable'] = key 698 loginfo['variable'] = key
@@ -629,12 +704,12 @@ class DataSmart(MutableMapping):
629 def appendVar(self, var, value, **loginfo): 704 def appendVar(self, var, value, **loginfo):
630 loginfo['op'] = 'append' 705 loginfo['op'] = 'append'
631 self.varhistory.record(**loginfo) 706 self.varhistory.record(**loginfo)
632 self.setVar(var + "_append", value, ignore=True, parsing=True) 707 self.setVar(var + ":append", value, ignore=True, parsing=True)
633 708
634 def prependVar(self, var, value, **loginfo): 709 def prependVar(self, var, value, **loginfo):
635 loginfo['op'] = 'prepend' 710 loginfo['op'] = 'prepend'
636 self.varhistory.record(**loginfo) 711 self.varhistory.record(**loginfo)
637 self.setVar(var + "_prepend", value, ignore=True, parsing=True) 712 self.setVar(var + ":prepend", value, ignore=True, parsing=True)
638 713
639 def delVar(self, var, **loginfo): 714 def delVar(self, var, **loginfo):
640 self.expand_cache = {} 715 self.expand_cache = {}
@@ -645,10 +720,10 @@ class DataSmart(MutableMapping):
645 self.dict[var] = {} 720 self.dict[var] = {}
646 if var in self.overridedata: 721 if var in self.overridedata:
647 del self.overridedata[var] 722 del self.overridedata[var]
648 if '_' in var: 723 if ':' in var:
649 override = var[var.rfind('_')+1:] 724 override = var[var.rfind(':')+1:]
650 shortvar = var[:var.rfind('_')] 725 shortvar = var[:var.rfind(':')]
651 while override and override.islower(): 726 while override and __override_regexp__.match(override):
652 try: 727 try:
653 if shortvar in self.overridedata: 728 if shortvar in self.overridedata:
654 # Force CoW by recreating the list first 729 # Force CoW by recreating the list first
@@ -657,15 +732,23 @@ class DataSmart(MutableMapping):
657 except ValueError as e: 732 except ValueError as e:
658 pass 733 pass
659 override = None 734 override = None
660 if "_" in shortvar: 735 if ":" in shortvar:
661 override = var[shortvar.rfind('_')+1:] 736 override = var[shortvar.rfind(':')+1:]
662 shortvar = var[:shortvar.rfind('_')] 737 shortvar = var[:shortvar.rfind(':')]
663 if len(shortvar) == 0: 738 if len(shortvar) == 0:
664 override = None 739 override = None
665 740
666 def setVarFlag(self, var, flag, value, **loginfo): 741 def setVarFlag(self, var, flag, value, **loginfo):
667 self.expand_cache = {} 742 self.expand_cache = {}
668 743
744 if var == "BB_RENAMED_VARIABLES":
745 self._var_renames[flag] = value
746
747 if var in self._var_renames:
748 _print_rename_error(var, loginfo, self._var_renames)
749 # Mark that we have seen a renamed variable
750 self.setVar("_FAILPARSINGERRORHANDLED", True)
751
669 if 'op' not in loginfo: 752 if 'op' not in loginfo:
670 loginfo['op'] = "set" 753 loginfo['op'] = "set"
671 loginfo['flag'] = flag 754 loginfo['flag'] = flag
@@ -674,7 +757,7 @@ class DataSmart(MutableMapping):
674 self._makeShadowCopy(var) 757 self._makeShadowCopy(var)
675 self.dict[var][flag] = value 758 self.dict[var][flag] = value
676 759
677 if flag == "_defaultval" and '_' in var: 760 if flag == "_defaultval" and ':' in var:
678 self._setvar_update_overrides(var, **loginfo) 761 self._setvar_update_overrides(var, **loginfo)
679 if flag == "_defaultval" and var in self.overridevars: 762 if flag == "_defaultval" and var in self.overridevars:
680 self._setvar_update_overridevars(var, value) 763 self._setvar_update_overridevars(var, value)
@@ -695,22 +778,27 @@ class DataSmart(MutableMapping):
695 return None 778 return None
696 cachename = var + "[" + flag + "]" 779 cachename = var + "[" + flag + "]"
697 780
781 if not expand and retparser and cachename in self.expand_cache:
782 return self.expand_cache[cachename].unexpanded_value, self.expand_cache[cachename]
783
698 if expand and cachename in self.expand_cache: 784 if expand and cachename in self.expand_cache:
699 return self.expand_cache[cachename].value 785 return self.expand_cache[cachename].value
700 786
701 local_var, overridedata = self._findVar(var) 787 local_var = self._findVar(var)
702 value = None 788 value = None
703 removes = set() 789 removes = set()
704 if flag == "_content" and overridedata is not None and not parsing: 790 if flag == "_content" and not parsing:
791 overridedata = self.overridedata.get(var, None)
792 if flag == "_content" and not parsing and overridedata is not None:
705 match = False 793 match = False
706 active = {} 794 active = {}
707 self.need_overrides() 795 self.need_overrides()
708 for (r, o) in overridedata: 796 for (r, o) in overridedata:
709 # What about double overrides both with "_" in the name? 797 # FIXME What about double overrides both with "_" in the name?
710 if o in self.overridesset: 798 if o in self.overridesset:
711 active[o] = r 799 active[o] = r
712 elif "_" in o: 800 elif ":" in o:
713 if set(o.split("_")).issubset(self.overridesset): 801 if set(o.split(":")).issubset(self.overridesset):
714 active[o] = r 802 active[o] = r
715 803
716 mod = True 804 mod = True
@@ -718,10 +806,10 @@ class DataSmart(MutableMapping):
718 mod = False 806 mod = False
719 for o in self.overrides: 807 for o in self.overrides:
720 for a in active.copy(): 808 for a in active.copy():
721 if a.endswith("_" + o): 809 if a.endswith(":" + o):
722 t = active[a] 810 t = active[a]
723 del active[a] 811 del active[a]
724 active[a.replace("_" + o, "")] = t 812 active[a.replace(":" + o, "")] = t
725 mod = True 813 mod = True
726 elif a == o: 814 elif a == o:
727 match = active[a] 815 match = active[a]
@@ -738,33 +826,35 @@ class DataSmart(MutableMapping):
738 value = copy.copy(local_var[flag]) 826 value = copy.copy(local_var[flag])
739 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault: 827 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
740 value = copy.copy(local_var["_defaultval"]) 828 value = copy.copy(local_var["_defaultval"])
829 elif "_defaultval_flag_"+flag in local_var and not noweakdefault:
830 value = copy.copy(local_var["_defaultval_flag_"+flag])
741 831
742 832
743 if flag == "_content" and local_var is not None and "_append" in local_var and not parsing: 833 if flag == "_content" and local_var is not None and ":append" in local_var and not parsing:
744 if not value:
745 value = ""
746 self.need_overrides() 834 self.need_overrides()
747 for (r, o) in local_var["_append"]: 835 for (r, o) in local_var[":append"]:
748 match = True 836 match = True
749 if o: 837 if o:
750 for o2 in o.split("_"): 838 for o2 in o.split(":"):
751 if not o2 in self.overrides: 839 if not o2 in self.overrides:
752 match = False 840 match = False
753 if match: 841 if match:
842 if value is None:
843 value = ""
754 value = value + r 844 value = value + r
755 845
756 if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing: 846 if flag == "_content" and local_var is not None and ":prepend" in local_var and not parsing:
757 if not value:
758 value = ""
759 self.need_overrides() 847 self.need_overrides()
760 for (r, o) in local_var["_prepend"]: 848 for (r, o) in local_var[":prepend"]:
761 849
762 match = True 850 match = True
763 if o: 851 if o:
764 for o2 in o.split("_"): 852 for o2 in o.split(":"):
765 if not o2 in self.overrides: 853 if not o2 in self.overrides:
766 match = False 854 match = False
767 if match: 855 if match:
856 if value is None:
857 value = ""
768 value = r + value 858 value = r + value
769 859
770 parser = None 860 parser = None
@@ -773,12 +863,12 @@ class DataSmart(MutableMapping):
773 if expand: 863 if expand:
774 value = parser.value 864 value = parser.value
775 865
776 if value and flag == "_content" and local_var is not None and "_remove" in local_var and not parsing: 866 if value and flag == "_content" and local_var is not None and ":remove" in local_var and not parsing:
777 self.need_overrides() 867 self.need_overrides()
778 for (r, o) in local_var["_remove"]: 868 for (r, o) in local_var[":remove"]:
779 match = True 869 match = True
780 if o: 870 if o:
781 for o2 in o.split("_"): 871 for o2 in o.split(":"):
782 if not o2 in self.overrides: 872 if not o2 in self.overrides:
783 match = False 873 match = False
784 if match: 874 if match:
@@ -791,7 +881,7 @@ class DataSmart(MutableMapping):
791 expanded_removes[r] = self.expand(r).split() 881 expanded_removes[r] = self.expand(r).split()
792 882
793 parser.removes = set() 883 parser.removes = set()
794 val = "" 884 val = []
795 for v in __whitespace_split__.split(parser.value): 885 for v in __whitespace_split__.split(parser.value):
796 skip = False 886 skip = False
797 for r in removes: 887 for r in removes:
@@ -800,8 +890,8 @@ class DataSmart(MutableMapping):
800 skip = True 890 skip = True
801 if skip: 891 if skip:
802 continue 892 continue
803 val = val + v 893 val.append(v)
804 parser.value = val 894 parser.value = "".join(val)
805 if expand: 895 if expand:
806 value = parser.value 896 value = parser.value
807 897
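
The :remove pass above now accumulates surviving tokens in a list; because __whitespace_split__ captures its separators, dropping a token leaves the surrounding whitespace behind and join() reproduces the spacing of everything that remains. Standalone:

    import re

    whitespace_split = re.compile(r'(\s)')
    value = "-O2 -g -Wall"
    removes = {"-g"}

    val = []
    for v in whitespace_split.split(value):   # tokens and separators alike
        if v in removes:
            continue
        val.append(v)
    print(repr("".join(val)))                 # -> '-O2  -Wall'
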
@@ -816,7 +906,7 @@ class DataSmart(MutableMapping):
816 def delVarFlag(self, var, flag, **loginfo): 906 def delVarFlag(self, var, flag, **loginfo):
817 self.expand_cache = {} 907 self.expand_cache = {}
818 908
819 local_var, _ = self._findVar(var) 909 local_var = self._findVar(var)
820 if not local_var: 910 if not local_var:
821 return 911 return
822 if not var in self.dict: 912 if not var in self.dict:
@@ -829,6 +919,8 @@ class DataSmart(MutableMapping):
829 self.varhistory.record(**loginfo) 919 self.varhistory.record(**loginfo)
830 920
831 del self.dict[var][flag] 921 del self.dict[var][flag]
922 if ("_defaultval_flag_" + flag) in self.dict[var]:
923 del self.dict[var]["_defaultval_flag_" + flag]
832 924
833 def appendVarFlag(self, var, flag, value, **loginfo): 925 def appendVarFlag(self, var, flag, value, **loginfo):
834 loginfo['op'] = 'append' 926 loginfo['op'] = 'append'
@@ -859,21 +951,26 @@ class DataSmart(MutableMapping):
859 self.dict[var][i] = flags[i] 951 self.dict[var][i] = flags[i]
860 952
861 def getVarFlags(self, var, expand = False, internalflags=False): 953 def getVarFlags(self, var, expand = False, internalflags=False):
862 local_var, _ = self._findVar(var) 954 local_var = self._findVar(var)
863 flags = {} 955 flags = {}
864 956
865 if local_var: 957 if local_var:
866 for i in local_var: 958 for i, val in local_var.items():
867 if i.startswith("_") and not internalflags: 959 if i.startswith("_defaultval_flag_") and not internalflags:
960 i = i[len("_defaultval_flag_"):]
961 if i not in local_var:
962 flags[i] = val
963 elif i.startswith(("_", ":")) and not internalflags:
868 continue 964 continue
869 flags[i] = local_var[i] 965 else:
966 flags[i] = val
967
870 if expand and i in expand: 968 if expand and i in expand:
871 flags[i] = self.expand(flags[i], var + "[" + i + "]") 969 flags[i] = self.expand(flags[i], var + "[" + i + "]")
872 if len(flags) == 0: 970 if len(flags) == 0:
873 return None 971 return None
874 return flags 972 return flags
875 973
876
877 def delVarFlags(self, var, **loginfo): 974 def delVarFlags(self, var, **loginfo):
878 self.expand_cache = {} 975 self.expand_cache = {}
879 if not var in self.dict: 976 if not var in self.dict:
@@ -905,6 +1002,7 @@ class DataSmart(MutableMapping):
905 data.inchistory = self.inchistory.copy() 1002 data.inchistory = self.inchistory.copy()
906 1003
907 data._tracking = self._tracking 1004 data._tracking = self._tracking
1005 data._var_renames = self._var_renames
908 1006
909 data.overrides = None 1007 data.overrides = None
910 data.overridevars = copy.copy(self.overridevars) 1008 data.overridevars = copy.copy(self.overridevars)
@@ -927,7 +1025,7 @@ class DataSmart(MutableMapping):
927 value = self.getVar(variable, False) 1025 value = self.getVar(variable, False)
928 for key in keys: 1026 for key in keys:
929 referrervalue = self.getVar(key, False) 1027 referrervalue = self.getVar(key, False)
930 if referrervalue and ref in referrervalue: 1028 if referrervalue and isinstance(referrervalue, str) and ref in referrervalue:
931 self.setVar(key, referrervalue.replace(ref, value)) 1029 self.setVar(key, referrervalue.replace(ref, value))
932 1030
933 def localkeys(self): 1031 def localkeys(self):
@@ -962,8 +1060,8 @@ class DataSmart(MutableMapping):
962 for (r, o) in self.overridedata[var]: 1060 for (r, o) in self.overridedata[var]:
963 if o in self.overridesset: 1061 if o in self.overridesset:
964 overrides.add(var) 1062 overrides.add(var)
965 elif "_" in o: 1063 elif ":" in o:
966 if set(o.split("_")).issubset(self.overridesset): 1064 if set(o.split(":")).issubset(self.overridesset):
967 overrides.add(var) 1065 overrides.add(var)
968 1066
969 for k in keylist(self.dict): 1067 for k in keylist(self.dict):
@@ -993,10 +1091,10 @@ class DataSmart(MutableMapping):
993 d = self.createCopy() 1091 d = self.createCopy()
994 bb.data.expandKeys(d) 1092 bb.data.expandKeys(d)
995 1093
996 config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split()) 1094 config_ignore_vars = set((d.getVar("BB_HASHCONFIG_IGNORE_VARS") or "").split())
997 keys = set(key for key in iter(d) if not key.startswith("__")) 1095 keys = set(key for key in iter(d) if not key.startswith("__"))
998 for key in keys: 1096 for key in keys:
999 if key in config_whitelist: 1097 if key in config_ignore_vars:
1000 continue 1098 continue
1001 1099
1002 value = d.getVar(key, False) or "" 1100 value = d.getVar(key, False) or ""
@@ -1022,5 +1120,10 @@ class DataSmart(MutableMapping):
1022 value = d.getVar(i, False) or "" 1120 value = d.getVar(i, False) or ""
1023 data.update({i:value}) 1121 data.update({i:value})
1024 1122
1123 moddeps = bb.codeparser.modulecode_deps
1124 for dep in sorted(moddeps):
1125 # Ignore visitor code, sort sets
1126 data.update({'moddep[%s]' % dep : [sorted(moddeps[dep][0]), sorted(moddeps[dep][1]), sorted(moddeps[dep][2]), sorted(moddeps[dep][3]), moddeps[dep][4]]})
1127
1025 data_str = str([(k, data[k]) for k in sorted(data.keys())]) 1128 data_str = str([(k, data[k]) for k in sorted(data.keys())])
1026 return hashlib.sha256(data_str.encode("utf-8")).hexdigest() 1129 return hashlib.sha256(data_str.encode("utf-8")).hexdigest()
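
The config-hash addition above folds bb.codeparser's module dependency table into the hashed data with its sets pre-sorted, so repr() is stable across processes; the same determinism trick in miniature (the moddep entry is hypothetical):

    import hashlib

    moddeps = {"oe.utils.example": ({"BAR", "FOO"}, {"os.path.join"}, set(), {}, "")}

    data = {}
    for dep in sorted(moddeps):
        refs, execs, var_execs, contains, value = moddeps[dep]
        data['moddep[%s]' % dep] = [sorted(refs), sorted(execs),
                                    sorted(var_execs), sorted(contains), value]

    data_str = str([(k, data[k]) for k in sorted(data)])
    print(hashlib.sha256(data_str.encode("utf-8")).hexdigest())
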
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 23e1f3187b..b29f0a5568 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -19,7 +19,6 @@ import sys
19import threading 19import threading
20import traceback 20import traceback
21 21
22import bb.exceptions
23import bb.utils 22import bb.utils
24 23
25# This is the pid for which we should generate the event. This is set when 24# This is the pid for which we should generate the event. This is set when
@@ -40,7 +39,7 @@ class HeartbeatEvent(Event):
40 """Triggered at regular time intervals of 10 seconds. Other events can fire much more often 39 """Triggered at regular time intervals of 10 seconds. Other events can fire much more often
41 (runQueueTaskStarted when there are many short tasks) or not at all for long periods 40 (runQueueTaskStarted when there are many short tasks) or not at all for long periods
42 of time (again runQueueTaskStarted, when there is just one long-running task), so this 41 of time (again runQueueTaskStarted, when there is just one long-running task), so this
43 event is more suitable for doing some task-independent work occassionally.""" 42 event is more suitable for doing some task-independent work occasionally."""
44 def __init__(self, time): 43 def __init__(self, time):
45 Event.__init__(self) 44 Event.__init__(self)
46 self.time = time 45 self.time = time
@@ -68,29 +67,39 @@ _catchall_handlers = {}
68_eventfilter = None 67_eventfilter = None
69_uiready = False 68_uiready = False
70_thread_lock = threading.Lock() 69_thread_lock = threading.Lock()
71_thread_lock_enabled = False 70_heartbeat_enabled = False
72 71_should_exit = threading.Event()
73if hasattr(__builtins__, '__setitem__'):
74 builtins = __builtins__
75else:
76 builtins = __builtins__.__dict__
77 72
78def enable_threadlock(): 73def enable_threadlock():
79 global _thread_lock_enabled 74 # Always needed now
80 _thread_lock_enabled = True 75 return
81 76
82def disable_threadlock(): 77def disable_threadlock():
83 global _thread_lock_enabled 78 # Always needed now
84 _thread_lock_enabled = False 79 return
80
81def enable_heartbeat():
82 global _heartbeat_enabled
83 _heartbeat_enabled = True
84
85def disable_heartbeat():
86 global _heartbeat_enabled
87 _heartbeat_enabled = False
88
89#
90# In long-running code, this function should be called periodically
91# to check if we should exit due to an interruption (e.g. Ctrl+C from the UI)
92#
93def check_for_interrupts(d):
94 global _should_exit
95 if _should_exit.is_set():
96 bb.warn("Exiting due to interrupt.")
97 raise bb.BBHandledException()
85 98
86def execute_handler(name, handler, event, d): 99def execute_handler(name, handler, event, d):
87 event.data = d 100 event.data = d
88 addedd = False
89 if 'd' not in builtins:
90 builtins['d'] = d
91 addedd = True
92 try: 101 try:
93 ret = handler(event) 102 ret = handler(event, d)
94 except (bb.parse.SkipRecipe, bb.BBHandledException): 103 except (bb.parse.SkipRecipe, bb.BBHandledException):
95 raise 104 raise
96 except Exception: 105 except Exception:
@@ -104,8 +113,7 @@ def execute_handler(name, handler, event, d):
104 raise 113 raise
105 finally: 114 finally:
106 del event.data 115 del event.data
107 if addedd: 116
108 del builtins['d']
109 117
110def fire_class_handlers(event, d): 118def fire_class_handlers(event, d):
111 if isinstance(event, logging.LogRecord): 119 if isinstance(event, logging.LogRecord):
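
With the builtins['d'] injection gone, every event handler, whether a function or compiled from a string, now receives the datastore as an explicit second argument. A hypothetical handler in the new style:

    import bb.event

    def buildstarted_handler(e, d):
        # 'd' is a real parameter now, not a magic builtin
        print("Build started; TMPDIR = %s" % d.getVar("TMPDIR"))

    bb.event.register("buildstarted_handler", buildstarted_handler,
                      mask=["bb.event.BuildStarted"])
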
@@ -118,7 +126,7 @@ def fire_class_handlers(event, d):
118 if _eventfilter: 126 if _eventfilter:
119 if not _eventfilter(name, handler, event, d): 127 if not _eventfilter(name, handler, event, d):
120 continue 128 continue
121 if d and not name in (d.getVar("__BBHANDLERS_MC") or []): 129 if d is not None and not name in (d.getVar("__BBHANDLERS_MC") or set()):
122 continue 130 continue
123 execute_handler(name, handler, event, d) 131 execute_handler(name, handler, event, d)
124 132
@@ -132,8 +140,14 @@ def print_ui_queue():
132 if not _uiready: 140 if not _uiready:
133 from bb.msg import BBLogFormatter 141 from bb.msg import BBLogFormatter
134 # Flush any existing buffered content 142 # Flush any existing buffered content
135 sys.stdout.flush() 143 try:
136 sys.stderr.flush() 144 sys.stdout.flush()
145 except:
146 pass
147 try:
148 sys.stderr.flush()
149 except:
150 pass
137 stdout = logging.StreamHandler(sys.stdout) 151 stdout = logging.StreamHandler(sys.stdout)
138 stderr = logging.StreamHandler(sys.stderr) 152 stderr = logging.StreamHandler(sys.stderr)
139 formatter = BBLogFormatter("%(levelname)s: %(message)s") 153 formatter = BBLogFormatter("%(levelname)s: %(message)s")
@@ -174,36 +188,38 @@ def print_ui_queue():
174 188
175def fire_ui_handlers(event, d): 189def fire_ui_handlers(event, d):
176 global _thread_lock 190 global _thread_lock
177 global _thread_lock_enabled
178 191
179 if not _uiready: 192 if not _uiready:
180 # No UI handlers registered yet, queue up the messages 193 # No UI handlers registered yet, queue up the messages
181 ui_queue.append(event) 194 ui_queue.append(event)
182 return 195 return
183 196
184 if _thread_lock_enabled: 197 with bb.utils.lock_timeout_nocheck(_thread_lock) as lock:
185 _thread_lock.acquire() 198 if not lock:
186 199 # If we can't get the lock, we may be recursively called, queue and return
187 errors = [] 200 ui_queue.append(event)
188 for h in _ui_handlers: 201 return
189 #print "Sending event %s" % event
190 try:
191 if not _ui_logfilters[h].filter(event):
192 continue
193 # We use pickle here since it better handles object instances
194 # which xmlrpc's marshaller does not. Events *must* be serializable
195 # by pickle.
196 if hasattr(_ui_handlers[h].event, "sendpickle"):
197 _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
198 else:
199 _ui_handlers[h].event.send(event)
200 except:
201 errors.append(h)
202 for h in errors:
203 del _ui_handlers[h]
204 202
205 if _thread_lock_enabled: 203 errors = []
206 _thread_lock.release() 204 for h in _ui_handlers:
205 #print "Sending event %s" % event
206 try:
207 if not _ui_logfilters[h].filter(event):
208 continue
209 # We use pickle here since it better handles object instances
210 # which xmlrpc's marshaller does not. Events *must* be serializable
211 # by pickle.
212 if hasattr(_ui_handlers[h].event, "sendpickle"):
213 _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
214 else:
215 _ui_handlers[h].event.send(event)
216 except:
217 errors.append(h)
218 for h in errors:
219 del _ui_handlers[h]
220
221 while ui_queue:
222 fire_ui_handlers(ui_queue.pop(), d)
207 223
208def fire(event, d): 224def fire(event, d):
209 """Fire off an Event""" 225 """Fire off an Event"""
@@ -232,26 +248,31 @@ noop = lambda _: None
232def register(name, handler, mask=None, filename=None, lineno=None, data=None): 248def register(name, handler, mask=None, filename=None, lineno=None, data=None):
233 """Register an Event handler""" 249 """Register an Event handler"""
234 250
235 if data and data.getVar("BB_CURRENT_MC"): 251 if data is not None and data.getVar("BB_CURRENT_MC"):
236 mc = data.getVar("BB_CURRENT_MC") 252 mc = data.getVar("BB_CURRENT_MC")
237 name = '%s%s' % (mc.replace('-', '_'), name) 253 name = '%s%s' % (mc.replace('-', '_'), name)
238 254
239 # already registered 255 # already registered
240 if name in _handlers: 256 if name in _handlers:
257 if data is not None:
258 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
259 bbhands_mc.add(name)
260 data.setVar("__BBHANDLERS_MC", bbhands_mc)
241 return AlreadyRegistered 261 return AlreadyRegistered
242 262
243 if handler is not None: 263 if handler is not None:
244 # handle string containing python code 264 # handle string containing python code
245 if isinstance(handler, str): 265 if isinstance(handler, str):
246 tmp = "def %s(e):\n%s" % (name, handler) 266 tmp = "def %s(e, d):\n%s" % (name, handler)
267 # Inject empty lines to make code match lineno in filename
268 if lineno is not None:
269 tmp = "\n" * (lineno-1) + tmp
247 try: 270 try:
248 code = bb.methodpool.compile_cache(tmp) 271 code = bb.methodpool.compile_cache(tmp)
249 if not code: 272 if not code:
250 if filename is None: 273 if filename is None:
251 filename = "%s(e)" % name 274 filename = "%s(e, d)" % name
252 code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST) 275 code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
253 if lineno is not None:
254 ast.increment_lineno(code, lineno-1)
255 code = compile(code, filename, "exec") 276 code = compile(code, filename, "exec")
256 bb.methodpool.compile_cache_add(tmp, code) 277 bb.methodpool.compile_cache_add(tmp, code)
257 except SyntaxError: 278 except SyntaxError:
@@ -274,16 +295,16 @@ def register(name, handler, mask=None, filename=None, lineno=None, data=None):
274 _event_handler_map[m] = {} 295 _event_handler_map[m] = {}
275 _event_handler_map[m][name] = True 296 _event_handler_map[m][name] = True
276 297
277 if data: 298 if data is not None:
278 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) 299 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
279 bbhands_mc.append(name) 300 bbhands_mc.add(name)
280 data.setVar("__BBHANDLERS_MC", bbhands_mc) 301 data.setVar("__BBHANDLERS_MC", bbhands_mc)
281 302
282 return Registered 303 return Registered
283 304
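
The blank-line padding in register() replaces the earlier ast.increment_lineno() approach: compiling the padded source makes tracebacks point at the handler's real location in the metadata file. A standalone sketch of the trick, with hypothetical values:

    # Hypothetical values standing in for a handler defined in a .bbclass file.
    name, filename, lineno = "my_handler", "example.bbclass", 42
    body = "    print(e, d)\n"

    tmp = "def %s(e, d):\n%s" % (name, body)
    # Pad with blank lines so errors report the original file's line numbers.
    tmp = "\n" * (lineno - 1) + tmp
    code = compile(tmp, filename, "exec")

    scope = {}
    exec(code, scope)
    scope[name]("some-event", "datastore")   # prints: some-event datastore
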
284def remove(name, handler, data=None): 305def remove(name, handler, data=None):
285 """Remove an Event handler""" 306 """Remove an Event handler"""
286 if data: 307 if data is not None:
287 if data.getVar("BB_CURRENT_MC"): 308 if data.getVar("BB_CURRENT_MC"):
288 mc = data.getVar("BB_CURRENT_MC") 309 mc = data.getVar("BB_CURRENT_MC")
289 name = '%s%s' % (mc.replace('-', '_'), name) 310 name = '%s%s' % (mc.replace('-', '_'), name)
@@ -295,8 +316,8 @@ def remove(name, handler, data=None):
295 if name in _event_handler_map[event]: 316 if name in _event_handler_map[event]:
296 _event_handler_map[event].pop(name) 317 _event_handler_map[event].pop(name)
297 318
298 if data: 319 if data is not None:
299 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) 320 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
300 if name in bbhands_mc: 321 if name in bbhands_mc:
301 bbhands_mc.remove(name) 322 bbhands_mc.remove(name)
302 data.setVar("__BBHANDLERS_MC", bbhands_mc) 323 data.setVar("__BBHANDLERS_MC", bbhands_mc)
@@ -313,21 +334,23 @@ def set_eventfilter(func):
313 _eventfilter = func 334 _eventfilter = func
314 335
315def register_UIHhandler(handler, mainui=False): 336def register_UIHhandler(handler, mainui=False):
316 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 337 with bb.utils.lock_timeout(_thread_lock):
317 _ui_handlers[_ui_handler_seq] = handler 338 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
318 level, debug_domains = bb.msg.constructLogOptions() 339 _ui_handlers[_ui_handler_seq] = handler
319 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains) 340 level, debug_domains = bb.msg.constructLogOptions()
320 if mainui: 341 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
321 global _uiready 342 if mainui:
322 _uiready = _ui_handler_seq 343 global _uiready
323 return _ui_handler_seq 344 _uiready = _ui_handler_seq
345 return _ui_handler_seq
324 346
325def unregister_UIHhandler(handlerNum, mainui=False): 347def unregister_UIHhandler(handlerNum, mainui=False):
326 if mainui: 348 if mainui:
327 global _uiready 349 global _uiready
328 _uiready = False 350 _uiready = False
329 if handlerNum in _ui_handlers: 351 with bb.utils.lock_timeout(_thread_lock):
330 del _ui_handlers[handlerNum] 352 if handlerNum in _ui_handlers:
353 del _ui_handlers[handlerNum]
331 return 354 return
332 355
333def get_uihandler(): 356def get_uihandler():
@@ -408,6 +431,16 @@ class RecipeEvent(Event):
408 self.fn = fn 431 self.fn = fn
409 Event.__init__(self) 432 Event.__init__(self)
410 433
434class RecipePreDeferredInherits(RecipeEvent):
435 """
436 Called before deferred inherits are processed, so code can (for example) snoop on class extensions
437 Limitations: it will not see inherits of inherited classes, and the data is unexpanded
438 """
439 def __init__(self, fn, inherits):
440 self.fn = fn
441 self.inherits = inherits
442 Event.__init__(self)
443
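
A hedged sketch of a consumer for the new event above; in a real layer such a function would be wired up via the metadata's addhandler mechanism, and the exact shape of e.inherits is assumed iterable here:

    import bb
    import bb.event

    def inherit_snooper(e, d):
        # Assumes e.inherits is iterable; its exact shape is set by the caller.
        if isinstance(e, bb.event.RecipePreDeferredInherits):
            bb.note("%s defers inherits: %s" % (e.fn, ", ".join(map(str, e.inherits))))
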
411class RecipePreFinalise(RecipeEvent): 444class RecipePreFinalise(RecipeEvent):
412 """ Recipe Parsing Complete but not yet finalised""" 445 """ Recipe Parsing Complete but not yet finalised"""
413 446
@@ -482,7 +515,7 @@ class BuildCompleted(BuildBase, OperationCompleted):
482 BuildBase.__init__(self, n, p, failures) 515 BuildBase.__init__(self, n, p, failures)
483 516
484class DiskFull(Event): 517class DiskFull(Event):
485 """Disk full case build aborted""" 518 """Disk full case build halted"""
486 def __init__(self, dev, type, freespace, mountpoint): 519 def __init__(self, dev, type, freespace, mountpoint):
487 Event.__init__(self) 520 Event.__init__(self)
488 self._dev = dev 521 self._dev = dev
@@ -666,6 +699,17 @@ class ReachableStamps(Event):
666 Event.__init__(self) 699 Event.__init__(self)
667 self.stamps = stamps 700 self.stamps = stamps
668 701
702class StaleSetSceneTasks(Event):
703 """
704 An event listing setscene tasks which are 'stale' and will
705 be rerun. The metadata may use this to clean up stale data.
706 tasks is a mapping of task names to their matching stale stamps.
707 """
708
709 def __init__(self, tasks):
710 Event.__init__(self)
711 self.tasks = tasks
712
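
A hedged sketch of a handler reacting to the event above, assuming (per the docstring) that e.tasks maps task identifiers to stale stamp paths:

    import bb
    import bb.event

    def stale_setscene_report(e, d):
        if isinstance(e, bb.event.StaleSetSceneTasks):
            for task, stamp in e.tasks.items():
                bb.note("setscene task %s is stale (stamp: %s)" % (task, stamp))
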
669class FilesMatchingFound(Event): 713class FilesMatchingFound(Event):
670 """ 714 """
671 Event when a list of files matching the supplied pattern has 715 Event when a list of files matching the supplied pattern has
@@ -732,13 +776,7 @@ class LogHandler(logging.Handler):
732 776
733 def emit(self, record): 777 def emit(self, record):
734 if record.exc_info: 778 if record.exc_info:
735 etype, value, tb = record.exc_info 779 record.bb_exc_formatted = traceback.format_exception(*record.exc_info)
736 if hasattr(tb, 'tb_next'):
737 tb = list(bb.exceptions.extract_traceback(tb, context=3))
738 # Need to turn the value into something the logging system can pickle
739 record.bb_exc_info = (etype, value, tb)
740 record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
741 value = str(value)
742 record.exc_info = None 780 record.exc_info = None
743 fire(record, None) 781 fire(record, None)
744 782
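
The simplified emit() above works because formatted tracebacks are plain strings, which survive the pickling that fire() relies on, while raw traceback objects do not. A standalone illustration:

    import pickle
    import sys
    import traceback

    try:
        1 / 0
    except ZeroDivisionError:
        exc_info = sys.exc_info()

    formatted = traceback.format_exception(*exc_info)
    pickle.dumps(formatted)        # fine: a list of plain strings
    try:
        pickle.dumps(exc_info[2])  # the raw traceback object
    except TypeError as err:
        print(err)                 # cannot pickle 'traceback' object
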
@@ -749,7 +787,7 @@ class LogHandler(logging.Handler):
749class MetadataEvent(Event): 787class MetadataEvent(Event):
750 """ 788 """
751 Generic event targeted at OE-Core classes 789
752 to report information during asynchrous execution 790 to report information during asynchronous execution
753 """ 791 """
754 def __init__(self, eventtype, eventdata): 792 def __init__(self, eventtype, eventdata):
755 Event.__init__(self) 793 Event.__init__(self)
@@ -830,3 +868,19 @@ class FindSigInfoResult(Event):
830 def __init__(self, result): 868 def __init__(self, result):
831 Event.__init__(self) 869 Event.__init__(self)
832 self.result = result 870 self.result = result
871
872class GetTaskSignatureResult(Event):
873 """
874 Event to return results from GetTaskSignatures command
875 """
876 def __init__(self, sig):
877 Event.__init__(self)
878 self.sig = sig
879
880class ParseError(Event):
881 """
883 Event to indicate that parsing failed
883 """
884 def __init__(self, msg):
885 super().__init__()
886 self._msg = msg
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
deleted file mode 100644
index ecbad59970..0000000000
--- a/bitbake/lib/bb/exceptions.py
+++ /dev/null
@@ -1,94 +0,0 @@
1#
2# SPDX-License-Identifier: GPL-2.0-only
3#
4
5import inspect
6import traceback
7import bb.namedtuple_with_abc
8from collections import namedtuple
9
10
11class TracebackEntry(namedtuple.abc):
12 """Pickleable representation of a traceback entry"""
13 _fields = 'filename lineno function args code_context index'
14 _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
15
16 def format(self, formatter=None):
17 if not self.code_context:
18 return self._header.format(self) + '\n'
19
20 formatted = [self._header.format(self) + ':\n']
21
22 for lineindex, line in enumerate(self.code_context):
23 if formatter:
24 line = formatter(line)
25
26 if lineindex == self.index:
27 formatted.append(' >%s' % line)
28 else:
29 formatted.append(' %s' % line)
30 return formatted
31
32 def __str__(self):
33 return ''.join(self.format())
34
35def _get_frame_args(frame):
36 """Get the formatted arguments and class (if available) for a frame"""
37 arginfo = inspect.getargvalues(frame)
38
39 try:
40 if not arginfo.args:
41 return '', None
42 # There have been reports from the field of python 2.6 which doesn't
43 # return a namedtuple here but simply a tuple so fallback gracefully if
44 # args isn't present.
45 except AttributeError:
46 return '', None
47
48 firstarg = arginfo.args[0]
49 if firstarg == 'self':
50 self = arginfo.locals['self']
51 cls = self.__class__.__name__
52
53 arginfo.args.pop(0)
54 del arginfo.locals['self']
55 else:
56 cls = None
57
58 formatted = inspect.formatargvalues(*arginfo)
59 return formatted, cls
60
61def extract_traceback(tb, context=1):
62 frames = inspect.getinnerframes(tb, context)
63 for frame, filename, lineno, function, code_context, index in frames:
64 formatted_args, cls = _get_frame_args(frame)
65 if cls:
66 function = '%s.%s' % (cls, function)
67 yield TracebackEntry(filename, lineno, function, formatted_args,
68 code_context, index)
69
70def format_extracted(extracted, formatter=None, limit=None):
71 if limit:
72 extracted = extracted[-limit:]
73
74 formatted = []
75 for tracebackinfo in extracted:
76 formatted.extend(tracebackinfo.format(formatter))
77 return formatted
78
79
80def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
81 formatted = ['Traceback (most recent call last):\n']
82
83 if hasattr(tb, 'tb_next'):
84 tb = extract_traceback(tb, context)
85
86 formatted.extend(format_extracted(tb, formatter, limit))
87 formatted.extend(traceback.format_exception_only(etype, value))
88 return formatted
89
90def to_string(exc):
91 if isinstance(exc, SystemExit):
92 if not isinstance(exc.code, str):
93 return 'Exited with "%d"' % exc.code
94 return str(exc)
diff --git a/bitbake/lib/bb/fetch2/README b/bitbake/lib/bb/fetch2/README
new file mode 100644
index 0000000000..67b787ef47
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/README
@@ -0,0 +1,57 @@
1There are expectations of users of the fetcher code. This file attempts to document
2some of the constraints that are present. Some are obvious, some are less so. It is
3documented in the context of how OE uses it, but the API calls are generic.
4
5a) network access for sources is only expected to happen in the do_fetch task.
6 This is not enforced or tested but is required so that we can:
7
8 i) audit the sources used (i.e. for license/manifest reasons)
9 ii) support offline builds with a suitable cache
10 iii) allow work to continue even with downtime upstream
11 iv) allow for changes upstream in incompatible ways
12 v) allow rebuilding of the software in X years' time
13
14b) network access is not expected in the do_unpack task.
15
16c) you can take DL_DIR and use it as a mirror for offline builds.
17
18d) access to the network is only made when explicitly configured in recipes
19 (e.g. use of AUTOREV, or use of git tags which change revision).
20
21e) fetcher output is deterministic (i.e. if you fetch configuration XXX now it
22 will match exactly in a future clean build with a new DL_DIR).
23 One specific pain point example is git tags. They can be replaced and can
24 change, so the git fetcher has to resolve them over the network. We use git
25 revisions where possible to avoid this and ensure determinism.
26
27f) network access is expected to work with the standard linux proxy variables
28 so that access behind firewalls works (the fetcher sets these in the
29 environment but only in the do_fetch tasks).
30
31g) access during parsing has to be minimal; a "git ls-remote" for an AUTOREV
32 git recipe might be ok, but you can't expect to check out a git tree.
33
34h) we need to provide revision information during parsing such that a version
35 for the recipe can be constructed.
36
37i) versions are expected to be able to increase in a way which sorts, allowing
38 package feeds to operate (a PR server is required for git revisions to sort).
39
40j) an API to query for possible version upgrades of a url is highly desirable,
41 as it allows our automated upgrade code to function (it is implied that this
42 always has network access).
43
44k) Where fixes or changes to behaviour in the fetcher are made, we ask that
45 test cases are added (run with "bitbake-selftest bb.tests.fetch"). We do
46 have fairly extensive test coverage of the fetcher, as it is the only way
47 to track all of its corner cases; sadly, it still doesn't give complete
48 coverage.
49
50l) If using tools during parse time, they will have to be in ASSUME_PROVIDED
51 in OE's context as we can't build git-native, then parse a recipe and use
52 git ls-remote.
53
54Not all fetchers support all features; autorev is optional and doesn't make
55sense for some. Upgrade detection means different things in different contexts
56too.
57
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 19169d780f..0ad987c596 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -23,17 +23,18 @@ import collections
23import subprocess 23import subprocess
24import pickle 24import pickle
25import errno 25import errno
26import bb.persist_data, bb.utils 26import bb.utils
27import bb.checksum 27import bb.checksum
28import bb.process 28import bb.process
29import bb.event 29import bb.event
30 30
31__version__ = "2" 31__version__ = "2"
32_checksum_cache = bb.checksum.FileChecksumCache() 32_checksum_cache = bb.checksum.FileChecksumCache()
33_revisions_cache = bb.checksum.RevisionsCache()
33 34
34logger = logging.getLogger("BitBake.Fetcher") 35logger = logging.getLogger("BitBake.Fetcher")
35 36
36CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ] 37CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ]
37SHOWN_CHECKSUM_LIST = ["sha256"] 38SHOWN_CHECKSUM_LIST = ["sha256"]
38 39
39class BBFetchException(Exception): 40class BBFetchException(Exception):
@@ -113,7 +114,7 @@ class MissingParameterError(BBFetchException):
113 self.args = (missing, url) 114 self.args = (missing, url)
114 115
115class ParameterError(BBFetchException): 116class ParameterError(BBFetchException):
116 """Exception raised when a url cannot be proccessed due to invalid parameters.""" 117 """Exception raised when a url cannot be processed due to invalid parameters."""
117 def __init__(self, message, url): 118 def __init__(self, message, url):
118 msg = "URL: '%s' has invalid parameters. %s" % (url, message) 119 msg = "URL: '%s' has invalid parameters. %s" % (url, message)
119 self.url = url 120 self.url = url
@@ -182,7 +183,7 @@ class URI(object):
182 Some notes about relative URIs: while it's specified that 183 Some notes about relative URIs: while it's specified that
183 a URI beginning with <scheme>:// should either be directly 184 a URI beginning with <scheme>:// should either be directly
184 followed by a hostname or a /, the old URI handling of the 185 followed by a hostname or a /, the old URI handling of the
185 fetch2 library did not comform to this. Therefore, this URI 186 fetch2 library did not conform to this. Therefore, this URI
186 class has some kludges to make sure that URIs are parsed in 187 class has some kludges to make sure that URIs are parsed in
186 a way conforming to bitbake's current usage. This URI class 187
188 supports the following: 189 supports the following:
@@ -199,7 +200,7 @@ class URI(object):
199 file://hostname/absolute/path.diff (would be IETF compliant) 200 file://hostname/absolute/path.diff (would be IETF compliant)
200 201
201 Note that the last case only applies to a list of 202 Note that the last case only applies to a list of
202 "whitelisted" schemes (currently only file://), that requires 203 explicitly allowed schemes (currently only file://), that requires
203 its URIs to not have a network location. 204 its URIs to not have a network location.
204 """ 205 """
205 206
@@ -237,7 +238,7 @@ class URI(object):
237 # to RFC compliant URL format. E.g.: 238 # to RFC compliant URL format. E.g.:
238 # file://foo.diff -> file:foo.diff 239 # file://foo.diff -> file:foo.diff
239 if urlp.scheme in self._netloc_forbidden: 240 if urlp.scheme in self._netloc_forbidden:
240 uri = re.sub("(?<=:)//(?!/)", "", uri, 1) 241 uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
241 reparse = 1 242 reparse = 1
242 243
243 if reparse: 244 if reparse:
@@ -290,12 +291,12 @@ class URI(object):
290 291
291 def _param_str_split(self, string, elmdelim, kvdelim="="): 292 def _param_str_split(self, string, elmdelim, kvdelim="="):
292 ret = collections.OrderedDict() 293 ret = collections.OrderedDict()
293 for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]: 294 for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
294 ret[k] = v 295 ret[k] = v
295 return ret 296 return ret
296 297
297 def _param_str_join(self, dict_, elmdelim, kvdelim="="): 298 def _param_str_join(self, dict_, elmdelim, kvdelim="="):
298 return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) 299 return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])
299 300
300 @property 301 @property
301 def hostport(self): 302 def hostport(self):
@@ -352,6 +353,14 @@ def decodeurl(url):
352 user, password, parameters). 353 user, password, parameters).
353 """ 354 """
354 355
356 uri = URI(url)
357 path = uri.path if uri.path else "/"
358 return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params
359
360def decodemirrorurl(url):
361 """Decodes a mirror URL into the tokens (scheme, network location, path,
362 user, password, parameters).
363 """
355 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) 364 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
356 if not m: 365 if not m:
357 raise MalformedUrl(url) 366 raise MalformedUrl(url)
@@ -370,6 +379,9 @@ def decodeurl(url):
370 elif type.lower() == 'file': 379 elif type.lower() == 'file':
371 host = "" 380 host = ""
372 path = location 381 path = location
382 if user:
383 path = user + '@' + path
384 user = ""
373 else: 385 else:
374 host = location 386 host = location
375 path = "/" 387 path = "/"
@@ -388,7 +400,7 @@ def decodeurl(url):
388 if s: 400 if s:
389 if not '=' in s: 401 if not '=' in s:
390 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) 402 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
391 s1, s2 = s.split('=') 403 s1, s2 = s.split('=', 1)
392 p[s1] = s2 404 p[s1] = s2
393 405
394 return type, host, urllib.parse.unquote(path), user, pswd, p 406 return type, host, urllib.parse.unquote(path), user, pswd, p
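
decodeurl() now parses through the URI class, while decodemirrorurl() keeps the old regex parser, which tolerates mirror patterns (e.g. the wildcard specs used in PREMIRRORS/MIRRORS entries) that are not valid URIs. A hedged usage sketch:

    from bb.fetch2 import decodeurl, decodemirrorurl

    # Both decoders return (scheme, host, path, user, password, params).
    url = "https://example.com/downloads/src-1.0.tar.gz;name=default"
    print(decodeurl(url))        # URI-class based parsing, for well-formed URLs
    print(decodemirrorurl(url))  # regex based, tolerant of mirror wildcards
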
@@ -402,34 +414,37 @@ def encodeurl(decoded):
402 414
403 if not type: 415 if not type:
404 raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) 416 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
405 url = '%s://' % type 417 uri = URI()
418 uri.scheme = type
406 if user and type != "file": 419 if user and type != "file":
407 url += "%s" % user 420 uri.username = user
408 if pswd: 421 if pswd:
409 url += ":%s" % pswd 422 uri.password = pswd
410 url += "@"
411 if host and type != "file": 423 if host and type != "file":
412 url += "%s" % host 424 uri.hostname = host
413 if path: 425 if path:
414 # Standardise path to ensure comparisons work 426 # Standardise path to ensure comparisons work
415 while '//' in path: 427 while '//' in path:
416 path = path.replace("//", "/") 428 path = path.replace("//", "/")
417 url += "%s" % urllib.parse.quote(path) 429 uri.path = path
430 if type == "file":
431 # Use old not IETF compliant style
432 uri.relative = False
418 if p: 433 if p:
419 for parm in p: 434 uri.params = p
420 url += ";%s=%s" % (parm, p[parm])
421 435
422 return url 436 return str(uri)
423 437
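
encodeurl() is the inverse operation, so a decoded tuple can be edited and re-encoded, which is what uri_replace() below does. A hedged round-trip sketch (the output shown is approximate):

    from bb.fetch2 import decodeurl, encodeurl

    tokens = list(decodeurl("http://example.com/foo.patch;striplevel=1"))
    tokens[1] = "mirror.example.org"   # swap the host before re-encoding
    print(encodeurl(tokens))           # roughly: http://mirror.example.org/foo.patch;striplevel=1
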
424def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): 438def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
425 if not ud.url or not uri_find or not uri_replace: 439 if not ud.url or not uri_find or not uri_replace:
426 logger.error("uri_replace: passed an undefined value, not replacing") 440 logger.error("uri_replace: passed an undefined value, not replacing")
427 return None 441 return None
428 uri_decoded = list(decodeurl(ud.url)) 442 uri_decoded = list(decodemirrorurl(ud.url))
429 uri_find_decoded = list(decodeurl(uri_find)) 443 uri_find_decoded = list(decodemirrorurl(uri_find))
430 uri_replace_decoded = list(decodeurl(uri_replace)) 444 uri_replace_decoded = list(decodemirrorurl(uri_replace))
431 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) 445 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
432 result_decoded = ['', '', '', '', '', {}] 446 result_decoded = ['', '', '', '', '', {}]
447 # 0 - type, 1 - host, 2 - path, 3 - user, 4 - pswd, 5 - params
433 for loc, i in enumerate(uri_find_decoded): 448 for loc, i in enumerate(uri_find_decoded):
434 result_decoded[loc] = uri_decoded[loc] 449 result_decoded[loc] = uri_decoded[loc]
435 regexp = i 450 regexp = i
@@ -449,6 +464,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
449 for l in replacements: 464 for l in replacements:
450 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) 465 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
451 result_decoded[loc][k] = uri_replace_decoded[loc][k] 466 result_decoded[loc][k] = uri_replace_decoded[loc][k]
467 elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
468 # User/password in the replacement is just a straight replacement
469 result_decoded[loc] = uri_replace_decoded[loc]
452 elif (re.match(regexp, uri_decoded[loc])): 470 elif (re.match(regexp, uri_decoded[loc])):
453 if not uri_replace_decoded[loc]: 471 if not uri_replace_decoded[loc]:
454 result_decoded[loc] = "" 472 result_decoded[loc] = ""
@@ -456,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
456 for k in replacements: 474 for k in replacements:
457 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) 475 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
458 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) 476 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
459 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1) 477 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1)
460 if loc == 2: 478 if loc == 2:
461 # Handle path manipulations 479 # Handle path manipulations
462 basename = None 480 basename = None
@@ -465,10 +483,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
465 basename = os.path.basename(mirrortarball) 483 basename = os.path.basename(mirrortarball)
466 # Kill parameters, they make no sense for mirror tarballs 484 # Kill parameters, they make no sense for mirror tarballs
467 uri_decoded[5] = {} 485 uri_decoded[5] = {}
486 uri_find_decoded[5] = {}
468 elif ud.localpath and ud.method.supports_checksum(ud): 487 elif ud.localpath and ud.method.supports_checksum(ud):
469 basename = os.path.basename(ud.localpath) 488 basename = os.path.basename(ud.localpath)
470 if basename and not result_decoded[loc].endswith(basename): 489 if basename:
471 result_decoded[loc] = os.path.join(result_decoded[loc], basename) 490 uri_basename = os.path.basename(uri_decoded[loc])
491 # Prefix with a slash as a sentinel in case
492 # result_decoded[loc] does not contain one.
493 path = "/" + result_decoded[loc]
494 if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
495 result_decoded[loc] = path[1:-len(uri_basename)] + basename
496 elif not path.endswith("/" + basename):
497 result_decoded[loc] = os.path.join(path[1:], basename)
472 else: 498 else:
473 return None 499 return None
474 result = encodeurl(result_decoded) 500 result = encodeurl(result_decoded)
@@ -481,18 +507,23 @@ methods = []
481urldata_cache = {} 507urldata_cache = {}
482saved_headrevs = {} 508saved_headrevs = {}
483 509
484def fetcher_init(d): 510def fetcher_init(d, servercontext=True):
485 """ 511 """
486 Called to initialize the fetchers once the configuration data is known. 512 Called to initialize the fetchers once the configuration data is known.
487 Calls before this must not hit the cache. 513 Calls before this must not hit the cache.
488 """ 514 """
489 515
490 revs = bb.persist_data.persist('BB_URI_HEADREVS', d) 516 _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
517 _revisions_cache.init_cache(d.getVar("BB_CACHEDIR"))
518
519 if not servercontext:
520 return
521
491 try: 522 try:
492 # fetcher_init is called multiple times, so make sure we only save the 523 # fetcher_init is called multiple times, so make sure we only save the
493 # revs the first time it is called. 524 # revs the first time it is called.
494 if not bb.fetch2.saved_headrevs: 525 if not bb.fetch2.saved_headrevs:
495 bb.fetch2.saved_headrevs = dict(revs) 526 bb.fetch2.saved_headrevs = _revisions_cache.get_revs()
496 except: 527 except:
497 pass 528 pass
498 529
@@ -502,11 +533,10 @@ def fetcher_init(d):
502 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) 533 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
503 elif srcrev_policy == "clear": 534 elif srcrev_policy == "clear":
504 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) 535 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
505 revs.clear() 536 _revisions_cache.clear_cache()
506 else: 537 else:
507 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) 538 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
508 539
509 _checksum_cache.init_cache(d)
510 540
511 for m in methods: 541 for m in methods:
512 if hasattr(m, "init"): 542 if hasattr(m, "init"):
@@ -514,9 +544,11 @@ def fetcher_init(d):
514 544
515def fetcher_parse_save(): 545def fetcher_parse_save():
516 _checksum_cache.save_extras() 546 _checksum_cache.save_extras()
547 _revisions_cache.save_extras()
517 548
518def fetcher_parse_done(): 549def fetcher_parse_done():
519 _checksum_cache.save_merge() 550 _checksum_cache.save_merge()
551 _revisions_cache.save_merge()
520 552
521def fetcher_compare_revisions(d): 553def fetcher_compare_revisions(d):
522 """ 554 """
@@ -524,7 +556,7 @@ def fetcher_compare_revisions(d):
524 when bitbake was started and return true if they have changed. 556 when bitbake was started and return true if they have changed.
525 """ 557 """
526 558
527 headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) 559 headrevs = _revisions_cache.get_revs()
528 return headrevs != bb.fetch2.saved_headrevs 560 return headrevs != bb.fetch2.saved_headrevs
529 561
530def mirror_from_string(data): 562def mirror_from_string(data):
@@ -534,7 +566,7 @@ def mirror_from_string(data):
534 bb.warn('Invalid mirror data %s, should have paired members.' % data) 566 bb.warn('Invalid mirror data %s, should have paired members.' % data)
535 return list(zip(*[iter(mirrors)]*2)) 567 return list(zip(*[iter(mirrors)]*2))
536 568
537def verify_checksum(ud, d, precomputed={}): 569def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
538 """ 570 """
539 verify the MD5 and SHA256 checksum for downloaded src 571 verify the MD5 and SHA256 checksum for downloaded src
540 572
@@ -548,20 +580,25 @@ def verify_checksum(ud, d, precomputed={}):
548 file against those in the recipe each time, rather than only after 580 file against those in the recipe each time, rather than only after
549 downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. 581 downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
550 """ 582 """
551
552 if ud.ignore_checksums or not ud.method.supports_checksum(ud): 583 if ud.ignore_checksums or not ud.method.supports_checksum(ud):
553 return {} 584 return {}
554 585
586 if localpath is None:
587 localpath = ud.localpath
588
555 def compute_checksum_info(checksum_id): 589 def compute_checksum_info(checksum_id):
556 checksum_name = getattr(ud, "%s_name" % checksum_id) 590 checksum_name = getattr(ud, "%s_name" % checksum_id)
557 591
558 if checksum_id in precomputed: 592 if checksum_id in precomputed:
559 checksum_data = precomputed[checksum_id] 593 checksum_data = precomputed[checksum_id]
560 else: 594 else:
561 checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath) 595 checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath)
562 596
563 checksum_expected = getattr(ud, "%s_expected" % checksum_id) 597 checksum_expected = getattr(ud, "%s_expected" % checksum_id)
564 598
599 if checksum_expected == '':
600 checksum_expected = None
601
565 return { 602 return {
566 "id": checksum_id, 603 "id": checksum_id,
567 "name": checksum_name, 604 "name": checksum_name,
@@ -581,17 +618,13 @@ def verify_checksum(ud, d, precomputed={}):
581 checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] 618 checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
582 619
583 # If no checksum has been provided 620 # If no checksum has been provided
584 if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): 621 if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
585 messages = [] 622 messages = []
586 strict = d.getVar("BB_STRICT_CHECKSUM") or "0" 623 strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
587 624
588 # If strict checking enabled and neither sum defined, raise error 625 # If strict checking enabled and neither sum defined, raise error
589 if strict == "1": 626 if strict == "1":
590 messages.append("No checksum specified for '%s', please add at " \ 627 raise NoChecksumError("\n".join(checksum_lines))
591 "least one to the recipe:" % ud.localpath)
592 messages.extend(checksum_lines)
593 logger.error("\n".join(messages))
594 raise NoChecksumError("Missing SRC_URI checksum", ud.url)
595 628
596 bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) 629 bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
597 630
@@ -612,8 +645,8 @@ def verify_checksum(ud, d, precomputed={}):
612 645
613 for ci in checksum_infos: 646 for ci in checksum_infos:
614 if ci["expected"] and ci["expected"] != ci["data"]: 647 if ci["expected"] and ci["expected"] != ci["data"]:
615 messages.append("File: '%s' has %s checksum %s when %s was " \ 648 messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
616 "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"])) 649 "expected" % (localpath, ci["id"], ci["data"], ci["expected"]))
617 bad_checksum = ci["data"] 650 bad_checksum = ci["data"]
618 651
619 if bad_checksum: 652 if bad_checksum:
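
For reference, the checksum helpers resolved above via getattr(bb.utils, "%s_file" % checksum_id) compute a digest over the downloaded file. A minimal sketch of the equivalent computation (illustrative only, not bb.utils' implementation):

    import hashlib

    def sha256_file(path):
        # Stream the file so large downloads don't need to fit in memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(65536), b""):
                h.update(chunk)
        return h.hexdigest()
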
@@ -731,13 +764,16 @@ def subprocess_setup():
731 # SIGPIPE errors are known issues with gzip/bash 764 # SIGPIPE errors are known issues with gzip/bash
732 signal.signal(signal.SIGPIPE, signal.SIG_DFL) 765 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
733 766
734def get_autorev(d): 767def mark_recipe_nocache(d):
735 # only not cache src rev in autorev case
736 if d.getVar('BB_SRCREV_POLICY') != "cache": 768 if d.getVar('BB_SRCREV_POLICY') != "cache":
737 d.setVar('BB_DONT_CACHE', '1') 769 d.setVar('BB_DONT_CACHE', '1')
770
771def get_autorev(d):
772 mark_recipe_nocache(d)
773 d.setVar("__BBAUTOREV_SEEN", True)
738 return "AUTOINC" 774 return "AUTOINC"
739 775
740def get_srcrev(d, method_name='sortable_revision'): 776def _get_srcrev(d, method_name='sortable_revision'):
741 """ 777 """
742 Return the revision string, usually for use in the version string (PV) of the current package 778 Return the revision string, usually for use in the version string (PV) of the current package
743 Most packages usually only have one SCM so we just pass on the call. 779 Most packages usually only have one SCM so we just pass on the call.
@@ -751,23 +787,34 @@ def get_srcrev(d, method_name='sortable_revision'):
751 that fetcher provides a method with the given name and the same signature as sortable_revision. 787 that fetcher provides a method with the given name and the same signature as sortable_revision.
752 """ 788 """
753 789
790 d.setVar("__BBSRCREV_SEEN", "1")
791 recursion = d.getVar("__BBINSRCREV")
792 if recursion:
793 raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
794 d.setVar("__BBINSRCREV", True)
795
754 scms = [] 796 scms = []
797 revs = []
755 fetcher = Fetch(d.getVar('SRC_URI').split(), d) 798 fetcher = Fetch(d.getVar('SRC_URI').split(), d)
756 urldata = fetcher.ud 799 urldata = fetcher.ud
757 for u in urldata: 800 for u in urldata:
758 if urldata[u].method.supports_srcrev(): 801 if urldata[u].method.supports_srcrev():
759 scms.append(u) 802 scms.append(u)
760 803
761 if len(scms) == 0: 804 if not scms:
762 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") 805 d.delVar("__BBINSRCREV")
806 return "", revs
763 807
764 if len(scms) == 1 and len(urldata[scms[0]].names) == 1: 808
765 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) 809 if len(scms) == 1:
810 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name)
811 revs.append(rev)
766 if len(rev) > 10: 812 if len(rev) > 10:
767 rev = rev[:10] 813 rev = rev[:10]
814 d.delVar("__BBINSRCREV")
768 if autoinc: 815 if autoinc:
769 return "AUTOINC+" + rev 816 return "AUTOINC+" + rev, revs
770 return rev 817 return rev, revs
771 818
772 # 819 #
773 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT 820
@@ -781,12 +828,12 @@ def get_srcrev(d, method_name='sortable_revision'):
781 seenautoinc = False 828 seenautoinc = False
782 for scm in scms: 829 for scm in scms:
783 ud = urldata[scm] 830 ud = urldata[scm]
784 for name in ud.names: 831 autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name)
785 autoinc, rev = getattr(ud.method, method_name)(ud, d, name) 832 revs.append(rev)
786 seenautoinc = seenautoinc or autoinc 833 seenautoinc = seenautoinc or autoinc
787 if len(rev) > 10: 834 if len(rev) > 10:
788 rev = rev[:10] 835 rev = rev[:10]
789 name_to_rev[name] = rev 836 name_to_rev[ud.name] = rev
790 # Replace names by revisions in the SRCREV_FORMAT string. The approach used 837 # Replace names by revisions in the SRCREV_FORMAT string. The approach used
791 # here can handle names being prefixes of other names and names appearing 838 # here can handle names being prefixes of other names and names appearing
792 # as substrings in revisions (in which case the name should not be 839 # as substrings in revisions (in which case the name should not be
@@ -799,12 +846,71 @@ def get_srcrev(d, method_name='sortable_revision'):
799 if seenautoinc: 846 if seenautoinc:
800 format = "AUTOINC+" + format 847 format = "AUTOINC+" + format
801 848
802 return format 849 d.delVar("__BBINSRCREV")
850 return format, revs
851
852def get_hashvalue(d, method_name='sortable_revision'):
853 pkgv, revs = _get_srcrev(d, method_name=method_name)
854 return " ".join(revs)
855
856def get_pkgv_string(d, method_name='sortable_revision'):
857 pkgv, revs = _get_srcrev(d, method_name=method_name)
858 return pkgv
859
860def get_srcrev(d, method_name='sortable_revision'):
861 pkgv, revs = _get_srcrev(d, method_name=method_name)
862 if not pkgv:
863 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
864 return pkgv
803 865
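
The recursion guard added to _get_srcrev() is a simple datastore flag that detects re-entry through variable expansion. A self-contained sketch of the pattern (the Datastore class here is a hypothetical stand-in for the BitBake datastore):

    class Datastore:
        def __init__(self):
            self._vars = {}
        def getVar(self, k):
            return self._vars.get(k)
        def setVar(self, k, v):
            self._vars[k] = v
        def delVar(self, k):
            self._vars.pop(k, None)

    def guarded_srcrev(d):
        if d.getVar("__BBINSRCREV"):
            raise RuntimeError("recursive reference in fetcher variables")
        d.setVar("__BBINSRCREV", True)
        try:
            return "deadbeef"   # stand-in for the real SCM revision lookup
        finally:
            d.delVar("__BBINSRCREV")
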
804def localpath(url, d): 866def localpath(url, d):
805 fetcher = bb.fetch2.Fetch([url], d) 867 fetcher = bb.fetch2.Fetch([url], d)
806 return fetcher.localpath(url) 868 return fetcher.localpath(url)
807 869
870# Need to export PATH as the binary could be in metadata paths
871# rather than host-provided ones.
872# Also include some other variables.
873FETCH_EXPORT_VARS = ['HOME', 'PATH',
874 'HTTP_PROXY', 'http_proxy',
875 'HTTPS_PROXY', 'https_proxy',
876 'FTP_PROXY', 'ftp_proxy',
877 'FTPS_PROXY', 'ftps_proxy',
878 'NO_PROXY', 'no_proxy',
879 'ALL_PROXY', 'all_proxy',
880 'GIT_PROXY_COMMAND',
881 'GIT_SSH',
882 'GIT_SSH_COMMAND',
883 'GIT_SSL_CAINFO',
884 'GIT_SMART_HTTP',
885 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
886 'SOCKS5_USER', 'SOCKS5_PASSWD',
887 'DBUS_SESSION_BUS_ADDRESS',
888 'P4CONFIG',
889 'SSL_CERT_FILE',
890 'NODE_EXTRA_CA_CERTS',
891 'AWS_PROFILE',
892 'AWS_ACCESS_KEY_ID',
893 'AWS_SECRET_ACCESS_KEY',
894 'AWS_ROLE_ARN',
895 'AWS_WEB_IDENTITY_TOKEN_FILE',
896 'AWS_DEFAULT_REGION',
897 'AWS_SESSION_TOKEN',
898 'GIT_CACHE_PATH',
899 'REMOTE_CONTAINERS_IPC',
900 'GITHUB_TOKEN',
901 'SSL_CERT_DIR']
902
903def get_fetcher_environment(d):
904 newenv = {}
905 origenv = d.getVar("BB_ORIGENV")
906 for name in bb.fetch2.FETCH_EXPORT_VARS:
907 value = d.getVar(name)
908 if not value and origenv:
909 value = origenv.getVar(name)
910 if value:
911 newenv[name] = value
912 return newenv
913
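
get_fetcher_environment() returns a plain dict, so callers outside runfetchcmd() can reuse it when spawning their own tools. A hedged usage sketch (run_with_fetch_env is illustrative, not BitBake API):

    import os
    import subprocess

    def run_with_fetch_env(cmd, fetch_env):
        # fetch_env would come from get_fetcher_environment(d) in a real build.
        env = dict(os.environ)
        env.update(fetch_env)
        return subprocess.run(cmd, shell=True, env=env, check=True)

    run_with_fetch_env('echo "$GIT_SSH_COMMAND"',
                       {"GIT_SSH_COMMAND": "ssh -o BatchMode=yes"})
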
808def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): 914def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
809 """ 915 """
810 Run cmd returning the command output 916 Run cmd returning the command output
@@ -813,25 +919,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
813 Optionally remove the files/directories listed in cleanup upon failure 919 Optionally remove the files/directories listed in cleanup upon failure
814 """ 920 """
815 921
816 # Need to export PATH as binary could be in metadata paths 922 exportvars = FETCH_EXPORT_VARS
817 # rather than host provided
818 # Also include some other variables.
819 # FIXME: Should really include all export varaiables?
820 exportvars = ['HOME', 'PATH',
821 'HTTP_PROXY', 'http_proxy',
822 'HTTPS_PROXY', 'https_proxy',
823 'FTP_PROXY', 'ftp_proxy',
824 'FTPS_PROXY', 'ftps_proxy',
825 'NO_PROXY', 'no_proxy',
826 'ALL_PROXY', 'all_proxy',
827 'GIT_PROXY_COMMAND',
828 'GIT_SSH',
829 'GIT_SSL_CAINFO',
830 'GIT_SMART_HTTP',
831 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
832 'SOCKS5_USER', 'SOCKS5_PASSWD',
833 'DBUS_SESSION_BUS_ADDRESS',
834 'P4CONFIG']
835 923
836 if not cleanup: 924 if not cleanup:
837 cleanup = [] 925 cleanup = []
@@ -868,14 +956,17 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
868 (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir) 956 (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
869 success = True 957 success = True
870 except bb.process.NotFoundError as e: 958 except bb.process.NotFoundError as e:
871 error_message = "Fetch command %s" % (e.command) 959 error_message = "Fetch command %s not found" % (e.command)
872 except bb.process.ExecutionError as e: 960 except bb.process.ExecutionError as e:
873 if e.stdout: 961 if e.stdout:
874 output = "output:\n%s\n%s" % (e.stdout, e.stderr) 962 output = "output:\n%s\n%s" % (e.stdout, e.stderr)
875 elif e.stderr: 963 elif e.stderr:
876 output = "output:\n%s" % e.stderr 964 output = "output:\n%s" % e.stderr
877 else: 965 else:
878 output = "no output" 966 if log:
967 output = "see logfile for output"
968 else:
969 output = "no output"
879 error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output) 970 error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
880 except bb.process.CmdError as e: 971 except bb.process.CmdError as e:
881 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) 972 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
@@ -937,6 +1028,7 @@ def build_mirroruris(origud, mirrors, ld):
937 1028
938 try: 1029 try:
939 newud = FetchData(newuri, ld) 1030 newud = FetchData(newuri, ld)
1031 newud.ignore_checksums = True
940 newud.setup_localpath(ld) 1032 newud.setup_localpath(ld)
941 except bb.fetch2.BBFetchException as e: 1033 except bb.fetch2.BBFetchException as e:
942 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) 1034 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
@@ -1000,6 +1092,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1000 # If that tarball is a local file:// we need to provide a symlink to it 1092 # If that tarball is a local file:// we need to provide a symlink to it
1001 dldir = ld.getVar("DL_DIR") 1093 dldir = ld.getVar("DL_DIR")
1002 1094
1095 if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")):
1096 ld = ld.createCopy()
1097 ld.setVar("BB_NO_NETWORK", "1")
1098
1003 if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): 1099 if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
1004 # Create donestamp in old format to avoid triggering a re-download 1100 # Create donestamp in old format to avoid triggering a re-download
1005 if ud.donestamp: 1101 if ud.donestamp:
@@ -1021,7 +1117,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1021 origud.method.build_mirror_data(origud, ld) 1117 origud.method.build_mirror_data(origud, ld)
1022 return origud.localpath 1118 return origud.localpath
1023 # Otherwise the result is a local file:// and we symlink to it 1119 # Otherwise the result is a local file:// and we symlink to it
1024 ensure_symlink(ud.localpath, origud.localpath) 1120 # This may also be a link to a shallow archive
1121 # When using shallow mode, add a symlink to the original fullshallow
1122 # path to ensure a valid symlink even in the `PREMIRRORS` case
1123 origud.method.update_mirror_links(ud, origud)
1025 update_stamp(origud, ld) 1124 update_stamp(origud, ld)
1026 return ud.localpath 1125 return ud.localpath
1027 1126
@@ -1046,7 +1145,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1046 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) 1145 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
1047 logger.debug(str(e)) 1146 logger.debug(str(e))
1048 try: 1147 try:
1049 ud.method.clean(ud, ld) 1148 if ud.method.cleanup_upon_failure():
1149 ud.method.clean(ud, ld)
1050 except UnboundLocalError: 1150 except UnboundLocalError:
1051 pass 1151 pass
1052 return False 1152 return False
@@ -1054,23 +1154,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1054 if ud.lockfile and ud.lockfile != origud.lockfile: 1154 if ud.lockfile and ud.lockfile != origud.lockfile:
1055 bb.utils.unlockfile(lf) 1155 bb.utils.unlockfile(lf)
1056 1156
1057
1058def ensure_symlink(target, link_name):
1059 if not os.path.exists(link_name):
1060 if os.path.islink(link_name):
1061 # Broken symbolic link
1062 os.unlink(link_name)
1063
1064 # In case this is executing without any file locks held (as is
1065 # the case for file:// URLs), two tasks may end up here at the
1066 # same time, in which case we do not want the second task to
1067 # fail when the link has already been created by the first task.
1068 try:
1069 os.symlink(target, link_name)
1070 except FileExistsError:
1071 pass
1072
1073
1074def try_mirrors(fetch, d, origud, mirrors, check = False): 1157def try_mirrors(fetch, d, origud, mirrors, check = False):
1075 """ 1158 """
1076 Try to use a mirrored version of the sources. 1159 Try to use a mirrored version of the sources.
@@ -1099,7 +1182,7 @@ def trusted_network(d, url):
1099 if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")): 1182 if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
1100 return True 1183 return True
1101 1184
1102 pkgname = d.expand(d.getVar('PN', False)) 1185 pkgname = d.getVar('PN')
1103 trusted_hosts = None 1186 trusted_hosts = None
1104 if pkgname: 1187 if pkgname:
1105 trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) 1188 trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
@@ -1140,11 +1223,11 @@ def srcrev_internal_helper(ud, d, name):
1140 pn = d.getVar("PN") 1223 pn = d.getVar("PN")
1141 attempts = [] 1224 attempts = []
1142 if name != '' and pn: 1225 if name != '' and pn:
1143 attempts.append("SRCREV_%s_pn-%s" % (name, pn)) 1226 attempts.append("SRCREV_%s:pn-%s" % (name, pn))
1144 if name != '': 1227 if name != '':
1145 attempts.append("SRCREV_%s" % name) 1228 attempts.append("SRCREV_%s" % name)
1146 if pn: 1229 if pn:
1147 attempts.append("SRCREV_pn-%s" % pn) 1230 attempts.append("SRCREV:pn-%s" % pn)
1148 attempts.append("SRCREV") 1231 attempts.append("SRCREV")
1149 1232
1150 for a in attempts: 1233 for a in attempts:
@@ -1152,23 +1235,21 @@ def srcrev_internal_helper(ud, d, name):
1152 if srcrev and srcrev != "INVALID": 1235 if srcrev and srcrev != "INVALID":
1153 break 1236 break
1154 1237
1155 if 'rev' in ud.parm and 'tag' in ud.parm: 1238 if 'rev' in ud.parm:
1156 raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url)) 1239 parmrev = ud.parm['rev']
1157
1158 if 'rev' in ud.parm or 'tag' in ud.parm:
1159 if 'rev' in ud.parm:
1160 parmrev = ud.parm['rev']
1161 else:
1162 parmrev = ud.parm['tag']
1163 if srcrev == "INVALID" or not srcrev: 1240 if srcrev == "INVALID" or not srcrev:
1164 return parmrev 1241 return parmrev
1165 if srcrev != parmrev: 1242 if srcrev != parmrev:
1166 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev)) 1243 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
1167 return parmrev 1244 return parmrev
1168 1245
1246 if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev):
1247 return ud.parm['tag']
1248
1169 if srcrev == "INVALID" or not srcrev: 1249 if srcrev == "INVALID" or not srcrev:
1170 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) 1250 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
1171 if srcrev == "AUTOINC": 1251 if srcrev == "AUTOINC":
1252 d.setVar("__BBAUTOREV_ACTED_UPON", True)
1172 srcrev = ud.method.latest_revision(ud, d, name) 1253 srcrev = ud.method.latest_revision(ud, d, name)
1173 1254
1174 return srcrev 1255 return srcrev
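
The reworked precedence above boils down to: an explicit ;rev= must agree with any SRCREV, while ;tag= is only a fallback when SRCREV is unset or INVALID. A standalone sketch of that logic:

    def pick_rev(srcrev, parm):
        if 'rev' in parm:
            if not srcrev or srcrev == "INVALID" or srcrev == parm['rev']:
                return parm['rev']
            raise ValueError("conflicting revisions: %s vs %s" % (srcrev, parm['rev']))
        if 'tag' in parm and (not srcrev or srcrev == "INVALID"):
            return parm['tag']
        return srcrev

    assert pick_rev("INVALID", {'rev': 'abc123'}) == 'abc123'
    assert pick_rev("", {'tag': 'v1.0'}) == 'v1.0'
    assert pick_rev("abc123", {'tag': 'v1.0'}) == 'abc123'
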
@@ -1180,23 +1261,21 @@ def get_checksum_file_list(d):
1180 SRC_URI as a space-separated string 1261 SRC_URI as a space-separated string
1181 """ 1262 """
1182 fetch = Fetch([], d, cache = False, localonly = True) 1263 fetch = Fetch([], d, cache = False, localonly = True)
1183
1184 dl_dir = d.getVar('DL_DIR')
1185 filelist = [] 1264 filelist = []
1186 for u in fetch.urls: 1265 for u in fetch.urls:
1187 ud = fetch.ud[u] 1266 ud = fetch.ud[u]
1188
1189 if ud and isinstance(ud.method, local.Local): 1267 if ud and isinstance(ud.method, local.Local):
1190 paths = ud.method.localpaths(ud, d) 1268 found = False
1269 paths = ud.method.localfile_searchpaths(ud, d)
1191 for f in paths: 1270 for f in paths:
1192 pth = ud.decodedurl 1271 pth = ud.path
1193 if f.startswith(dl_dir): 1272 if os.path.exists(f):
1194 # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else 1273 found = True
1195 if os.path.exists(f):
1196 bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
1197 else:
1198 bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
1199 filelist.append(f + ":" + str(os.path.exists(f))) 1274 filelist.append(f + ":" + str(os.path.exists(f)))
1275 if not found:
1276 bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
1277 "\nThe following paths were searched:"
1278 "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))
1200 1279
1201 return " ".join(filelist) 1280 return " ".join(filelist)
1202 1281
@@ -1234,28 +1313,28 @@ class FetchData(object):
1234 self.setup = False 1313 self.setup = False
1235 1314
1236 def configure_checksum(checksum_id): 1315 def configure_checksum(checksum_id):
1316 checksum_plain_name = "%ssum" % checksum_id
1237 if "name" in self.parm: 1317 if "name" in self.parm:
1238 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id) 1318 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
1239 else: 1319 else:
1240 checksum_name = "%ssum" % checksum_id 1320 checksum_name = checksum_plain_name
1241
1242 setattr(self, "%s_name" % checksum_id, checksum_name)
1243 1321
1244 if checksum_name in self.parm: 1322 if checksum_name in self.parm:
1245 checksum_expected = self.parm[checksum_name] 1323 checksum_expected = self.parm[checksum_name]
1246 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: 1324 elif checksum_plain_name in self.parm:
1325 checksum_expected = self.parm[checksum_plain_name]
1326 checksum_name = checksum_plain_name
1327 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]:
1247 checksum_expected = None 1328 checksum_expected = None
1248 else: 1329 else:
1249 checksum_expected = d.getVarFlag("SRC_URI", checksum_name) 1330 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
1250 1331
1332 setattr(self, "%s_name" % checksum_id, checksum_name)
1251 setattr(self, "%s_expected" % checksum_id, checksum_expected) 1333 setattr(self, "%s_expected" % checksum_id, checksum_expected)
1252 1334
1253 for checksum_id in CHECKSUM_LIST: 1335 self.name = self.parm.get("name",'default')
1254 configure_checksum(checksum_id) 1336 if "," in self.name:
1255 1337 raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url)
1256 self.ignore_checksums = False
1257
1258 self.names = self.parm.get("name",'default').split(',')
1259 1338
1260 self.method = None 1339 self.method = None
1261 for m in methods: 1340 for m in methods:
@@ -1276,6 +1355,11 @@ class FetchData(object):
1276 if hasattr(self.method, "urldata_init"): 1355 if hasattr(self.method, "urldata_init"):
1277 self.method.urldata_init(self, d) 1356 self.method.urldata_init(self, d)
1278 1357
1358 for checksum_id in CHECKSUM_LIST:
1359 configure_checksum(checksum_id)
1360
1361 self.ignore_checksums = False
1362
1279 if "localpath" in self.parm: 1363 if "localpath" in self.parm:
1280 # if user sets localpath for file, use it instead. 1364 # if user sets localpath for file, use it instead.
1281 self.localpath = self.parm["localpath"] 1365 self.localpath = self.parm["localpath"]
@@ -1302,13 +1386,7 @@ class FetchData(object):
1302 self.lockfile = basepath + '.lock' 1386 self.lockfile = basepath + '.lock'
1303 1387
1304 def setup_revisions(self, d): 1388 def setup_revisions(self, d):
1305 self.revisions = {} 1389 self.revision = srcrev_internal_helper(self, d, self.name)
1306 for name in self.names:
1307 self.revisions[name] = srcrev_internal_helper(self, d, name)
1308
1309 # add compatibility code for non name specified case
1310 if len(self.names) == 1:
1311 self.revision = self.revisions[self.names[0]]
1312 1390
1313 def setup_localpath(self, d): 1391 def setup_localpath(self, d):
1314 if not self.localpath: 1392 if not self.localpath:
@@ -1355,6 +1433,9 @@ class FetchMethod(object):
1355 Is localpath something that can be represented by a checksum? 1433 Is localpath something that can be represented by a checksum?
1356 """ 1434 """
1357 1435
1436 # We cannot compute checksums for None
1437 if urldata.localpath is None:
1438 return False
1358 # We cannot compute checksums for directories 1439 # We cannot compute checksums for directories
1359 if os.path.isdir(urldata.localpath): 1440 if os.path.isdir(urldata.localpath):
1360 return False 1441 return False
@@ -1367,6 +1448,12 @@ class FetchMethod(object):
1367 """ 1448 """
1368 return False 1449 return False
1369 1450
1451 def cleanup_upon_failure(self):
1452 """
1453 When a fetch fails, should clean() be called?
1454 """
1455 return True
1456
1370 def verify_donestamp(self, ud, d): 1457 def verify_donestamp(self, ud, d):
1371 """ 1458 """
1372 Verify the donestamp file 1459 Verify the donestamp file
@@ -1427,37 +1514,40 @@ class FetchMethod(object):
1427 (file, urldata.parm.get('unpack'))) 1514 (file, urldata.parm.get('unpack')))
1428 1515
1429 base, ext = os.path.splitext(file) 1516 base, ext = os.path.splitext(file)
1430 if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']: 1517 if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']:
1431 efile = os.path.join(rootdir, os.path.basename(base)) 1518 efile = os.path.join(rootdir, os.path.basename(base))
1432 else: 1519 else:
1433 efile = file 1520 efile = file
1434 cmd = None 1521 cmd = None
1435 1522
1436 if unpack: 1523 if unpack:
1524 tar_cmd = 'tar --extract --no-same-owner'
1525 if 'striplevel' in urldata.parm:
1526 tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
1437 if file.endswith('.tar'): 1527 if file.endswith('.tar'):
1438 cmd = 'tar x --no-same-owner -f %s' % file 1528 cmd = '%s -f %s' % (tar_cmd, file)
1439 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): 1529 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1440 cmd = 'tar xz --no-same-owner -f %s' % file 1530 cmd = '%s -z -f %s' % (tar_cmd, file)
1441 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): 1531 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1442 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file 1532 cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
1443 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): 1533 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1444 cmd = 'gzip -dc %s > %s' % (file, efile) 1534 cmd = 'gzip -dc %s > %s' % (file, efile)
1445 elif file.endswith('.bz2'): 1535 elif file.endswith('.bz2'):
1446 cmd = 'bzip2 -dc %s > %s' % (file, efile) 1536 cmd = 'bzip2 -dc %s > %s' % (file, efile)
1447 elif file.endswith('.txz') or file.endswith('.tar.xz'): 1537 elif file.endswith('.txz') or file.endswith('.tar.xz'):
1448 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file 1538 cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
1449 elif file.endswith('.xz'): 1539 elif file.endswith('.xz'):
1450 cmd = 'xz -dc %s > %s' % (file, efile) 1540 cmd = 'xz -dc %s > %s' % (file, efile)
1451 elif file.endswith('.tar.lz'): 1541 elif file.endswith('.tar.lz'):
1452 cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file 1542 cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
1453 elif file.endswith('.lz'): 1543 elif file.endswith('.lz'):
1454 cmd = 'lzip -dc %s > %s' % (file, efile) 1544 cmd = 'lzip -dc %s > %s' % (file, efile)
1455 elif file.endswith('.tar.7z'): 1545 elif file.endswith('.tar.7z'):
1456 cmd = '7z x -so %s | tar x --no-same-owner -f -' % file 1546 cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
1457 elif file.endswith('.7z'): 1547 elif file.endswith('.7z'):
1458 cmd = '7za x -y %s 1>/dev/null' % file 1548 cmd = '7za x -y %s 1>/dev/null' % file
1459 elif file.endswith('.tzst') or file.endswith('.tar.zst'): 1549 elif file.endswith('.tzst') or file.endswith('.tar.zst'):
1460 cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file 1550 cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
1461 elif file.endswith('.zst'): 1551 elif file.endswith('.zst'):
1462 cmd = 'zstd --decompress --stdout %s > %s' % (file, efile) 1552 cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
1463 elif file.endswith('.zip') or file.endswith('.jar'): 1553 elif file.endswith('.zip') or file.endswith('.jar'):
@@ -1483,14 +1573,14 @@ class FetchMethod(object):
1483 datafile = None 1573 datafile = None
1484 if output: 1574 if output:
1485 for line in output.decode().splitlines(): 1575 for line in output.decode().splitlines():
1486 if line.startswith('data.tar.'): 1576 if line.startswith('data.tar.') or line == 'data.tar':
1487 datafile = line 1577 datafile = line
1488 break 1578 break
1489 else: 1579 else:
1490 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) 1580 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url)
1491 else: 1581 else:
1492 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) 1582 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
1493 cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile) 1583 cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
1494 1584
1495 # If 'subdir' param exists, create a dir and use it as destination for unpack cmd 1585 # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
1496 if 'subdir' in urldata.parm: 1586 if 'subdir' in urldata.parm:
@@ -1506,6 +1596,7 @@ class FetchMethod(object):
1506 unpackdir = rootdir 1596 unpackdir = rootdir
1507 1597
1508 if not unpack or not cmd: 1598 if not unpack or not cmd:
1599 urldata.unpack_tracer.unpack("file-copy", unpackdir)
1509 # If file == dest, then avoid any copies, as we already put the file into dest! 1600 # If file == dest, then avoid any copies, as we already put the file into dest!
1510 dest = os.path.join(unpackdir, os.path.basename(file)) 1601 dest = os.path.join(unpackdir, os.path.basename(file))
1511 if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)): 1602 if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1519,7 +1610,9 @@ class FetchMethod(object):
1519 if urlpath.find("/") != -1: 1610 if urlpath.find("/") != -1:
1520 destdir = urlpath.rsplit("/", 1)[0] + '/' 1611 destdir = urlpath.rsplit("/", 1)[0] + '/'
1521 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) 1612 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
1522 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) 1613 cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
1614 else:
1615 urldata.unpack_tracer.unpack("archive-extract", unpackdir)
1523 1616
1524 if not cmd: 1617 if not cmd:
1525 return 1618 return
@@ -1546,6 +1639,28 @@ class FetchMethod(object):
1546 """ 1639 """
1547 bb.utils.remove(urldata.localpath) 1640 bb.utils.remove(urldata.localpath)
1548 1641
1642 def ensure_symlink(self, target, link_name):
1643 if not os.path.exists(link_name):
1644 dirname = os.path.dirname(link_name)
1645 bb.utils.mkdirhier(dirname)
1646 if os.path.islink(link_name):
1647 # Broken symbolic link
1648 os.unlink(link_name)
1649
1650 # In case this is executing without any file locks held (as is
1651 # the case for file:// URLs), two tasks may end up here at the
1652 # same time, in which case we do not want the second task to
1653 # fail when the link has already been created by the first task.
1654 try:
1655 os.symlink(target, link_name)
1656 except FileExistsError:
1657 pass
1658
1659 def update_mirror_links(self, ud, origud):
1660 # For local file:// results, create a symlink to them
1661 # This may also be a link to a shallow archive
1662 self.ensure_symlink(ud.localpath, origud.localpath)
1663
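The helper above deliberately tolerates two tasks racing to create the same link. A minimal standalone sketch of the same pattern, assuming plain os-level calls in place of bb.utils.mkdirhier:

import os

def ensure_symlink(target, link_name):
    dirname = os.path.dirname(link_name)
    if dirname:
        # Tolerate concurrent directory creation by other tasks
        os.makedirs(dirname, exist_ok=True)
    # os.path.exists() follows links, so a dangling symlink reports False
    if os.path.islink(link_name) and not os.path.exists(link_name):
        os.unlink(link_name)  # remove the broken link so it can be recreated
    try:
        os.symlink(target, link_name)
    except FileExistsError:
        pass  # another task won the race; the link is already in place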
1549 def try_premirror(self, urldata, d): 1664 def try_premirror(self, urldata, d):
1550 """ 1665 """
1551 Should premirrors be used? 1666 Should premirrors be used?
@@ -1573,13 +1688,13 @@ class FetchMethod(object):
1573 if not hasattr(self, "_latest_revision"): 1688 if not hasattr(self, "_latest_revision"):
1574 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) 1689 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
1575 1690
1576 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1577 key = self.generate_revision_key(ud, d, name) 1691 key = self.generate_revision_key(ud, d, name)
1578 try: 1692
1579 return revs[key] 1693 rev = _revisions_cache.get_rev(key)
1580 except KeyError: 1694 if rev is None:
1581 revs[key] = rev = self._latest_revision(ud, d, name) 1695 rev = self._latest_revision(ud, d, name)
1582 return rev 1696 _revisions_cache.set_rev(key, rev)
1697 return rev
1583 1698
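The rewrite replaces the bb.persist_data store with a _revisions_cache object exposing get_rev()/set_rev(). A minimal in-memory stand-in (hypothetical, for illustration only) showing the contract the caller above relies on, where a miss returns None so the revision is re-resolved and stored:

class RevisionsCache:
    """Hypothetical stand-in for _revisions_cache; not the BitBake class."""
    def __init__(self):
        self._revs = {}

    def get_rev(self, key):
        # Return None on a miss so the caller knows to resolve the revision
        return self._revs.get(key)

    def set_rev(self, key, rev):
        self._revs[key] = rev

_revisions_cache = RevisionsCache()

def latest_revision(key, resolve):
    rev = _revisions_cache.get_rev(key)
    if rev is None:
        rev = resolve()  # e.g. ask the remote for the current branch head
        _revisions_cache.set_rev(key, rev)
    return rev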
1584 def sortable_revision(self, ud, d, name): 1699 def sortable_revision(self, ud, d, name):
1585 latest_rev = self._build_revision(ud, d, name) 1700 latest_rev = self._build_revision(ud, d, name)
@@ -1611,12 +1726,61 @@ class FetchMethod(object):
1611 """ 1726 """
1612 return [] 1727 return []
1613 1728
1729
1730class DummyUnpackTracer(object):
1731 """
1732 Abstract API definition for a class that traces unpacked source files back
1733 to their respective upstream SRC_URI entries, for software composition
1734 analysis, license compliance and detailed SBOM generation purposes.
1735 Users may load their own unpack tracer class (instead of the dummy
1736 one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
1737 """
1738 def start(self, unpackdir, urldata_dict, d):
1739 """
1740 Start tracing the core Fetch.unpack process, using an index to map
1741 unpacked files to each SRC_URI entry.
1742 This method is called by Fetch.unpack and it may receive nested calls by
1743 gitsm and npmsw fetchers, which expand SRC_URI entries by adding implicit
1744 URLs and by recursively calling Fetch.unpack from new (nested) Fetch
1745 instances.
1746 """
1747 return
1748 def start_url(self, url):
1749 """Start tracing url unpack process.
1750 This method is called by Fetch.unpack before the fetcher-specific unpack
1751 method starts, and it may receive nested calls by gitsm and npmsw
1752 fetchers.
1753 """
1754 return
1755 def unpack(self, unpack_type, destdir):
1756 """
1757 Set unpack_type and destdir for current url.
1758 This method is called by the fetcher-specific unpack method after url
1759 tracing started.
1760 """
1761 return
1762 def finish_url(self, url):
1763 """Finish tracing url unpack process and update the file index.
1764 This method is called by Fetch.unpack after the fetcher-specific unpack
1765 method finished its job, and it may receive nested calls by gitsm
1766 and npmsw fetchers.
1767 """
1768 return
1769 def complete(self):
1770 """
1771 Finish tracing the Fetch.unpack process, and check if all nested
1772 Fetch.unpack calls (if any) have been completed; if so, save collected
1773 metadata.
1774 """
1775 return
1776
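As a concrete illustration of this API, here is a minimal (hypothetical) tracer that records each unpack and prints a summary; it could be enabled with BB_UNPACK_TRACER_CLASS = "mytracer.LoggingUnpackTracer", assuming a mytracer.py module importable by BitBake. This is a sketch of the hook contract, not a real software-composition tool.

class LoggingUnpackTracer:
    """Hypothetical tracer implementing the DummyUnpackTracer hooks."""
    def start(self, unpackdir, urldata_dict, d):
        self.unpackdir = unpackdir
        self.records = []
        self.current_url = None

    def start_url(self, url):
        self.current_url = url

    def unpack(self, unpack_type, destdir):
        # Called by the fetcher-specific unpack method
        self.records.append((self.current_url, unpack_type, destdir))

    def finish_url(self, url):
        self.current_url = None

    def complete(self):
        for url, unpack_type, destdir in self.records:
            print("%s unpacked (%s) into %s" % (url, unpack_type, destdir))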
1777
1614class Fetch(object): 1778class Fetch(object):
1615 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): 1779 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
1616 if localonly and cache: 1780 if localonly and cache:
1617 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") 1781 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1618 1782
1619 if len(urls) == 0: 1783 if not urls:
1620 urls = d.getVar("SRC_URI").split() 1784 urls = d.getVar("SRC_URI").split()
1621 self.urls = urls 1785 self.urls = urls
1622 self.d = d 1786 self.d = d
@@ -1631,10 +1795,30 @@ class Fetch(object):
1631 if key in urldata_cache: 1795 if key in urldata_cache:
1632 self.ud = urldata_cache[key] 1796 self.ud = urldata_cache[key]
1633 1797
1798 # the unpack_tracer object needs to be made available to possible nested
1799 # Fetch instances (when those are created by gitsm and npmsw fetchers)
1800 # so we set it as a global variable
1801 global unpack_tracer
1802 try:
1803 unpack_tracer
1804 except NameError:
1805 class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
1806 if class_path:
1807 # use user-defined unpack tracer class
1808 import importlib
1809 module_name, _, class_name = class_path.rpartition(".")
1810 module = importlib.import_module(module_name)
1811 class_ = getattr(module, class_name)
1812 unpack_tracer = class_()
1813 else:
1814 # fall back to the dummy/abstract class
1815 unpack_tracer = DummyUnpackTracer()
1816
1634 for url in urls: 1817 for url in urls:
1635 if url not in self.ud: 1818 if url not in self.ud:
1636 try: 1819 try:
1637 self.ud[url] = FetchData(url, d, localonly) 1820 self.ud[url] = FetchData(url, d, localonly)
1821 self.ud[url].unpack_tracer = unpack_tracer
1638 except NonLocalMethod: 1822 except NonLocalMethod:
1639 if localonly: 1823 if localonly:
1640 self.ud[url] = None 1824 self.ud[url] = None
@@ -1648,7 +1832,7 @@ class Fetch(object):
1648 self.ud[url] = FetchData(url, self.d) 1832 self.ud[url] = FetchData(url, self.d)
1649 1833
1650 self.ud[url].setup_localpath(self.d) 1834 self.ud[url].setup_localpath(self.d)
1651 return self.d.expand(self.ud[url].localpath) 1835 return self.ud[url].localpath
1652 1836
1653 def localpaths(self): 1837 def localpaths(self):
1654 """ 1838 """
@@ -1673,6 +1857,7 @@ class Fetch(object):
1673 network = self.d.getVar("BB_NO_NETWORK") 1857 network = self.d.getVar("BB_NO_NETWORK")
1674 premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY")) 1858 premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
1675 1859
1860 checksum_missing_messages = []
1676 for u in urls: 1861 for u in urls:
1677 ud = self.ud[u] 1862 ud = self.ud[u]
1678 ud.setup_localpath(self.d) 1863 ud.setup_localpath(self.d)
@@ -1684,7 +1869,6 @@ class Fetch(object):
1684 1869
1685 try: 1870 try:
1686 self.d.setVar("BB_NO_NETWORK", network) 1871 self.d.setVar("BB_NO_NETWORK", network)
1687
1688 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): 1872 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
1689 done = True 1873 done = True
1690 elif m.try_premirror(ud, self.d): 1874 elif m.try_premirror(ud, self.d):
@@ -1701,23 +1885,28 @@ class Fetch(object):
1701 logger.debug(str(e)) 1885 logger.debug(str(e))
1702 done = False 1886 done = False
1703 1887
1888 d = self.d
1704 if premirroronly: 1889 if premirroronly:
1705 self.d.setVar("BB_NO_NETWORK", "1") 1890 # Only disable the network in a copy
1891 d = bb.data.createCopy(self.d)
1892 d.setVar("BB_NO_NETWORK", "1")
1706 1893
1707 firsterr = None 1894 firsterr = None
1708 verified_stamp = m.verify_donestamp(ud, self.d) 1895 verified_stamp = False
1709 if not done and (not verified_stamp or m.need_update(ud, self.d)): 1896 if done:
1897 verified_stamp = m.verify_donestamp(ud, d)
1898 if not done and (not verified_stamp or m.need_update(ud, d)):
1710 try: 1899 try:
1711 if not trusted_network(self.d, ud.url): 1900 if not trusted_network(d, ud.url):
1712 raise UntrustedUrl(ud.url) 1901 raise UntrustedUrl(ud.url)
1713 logger.debug("Trying Upstream") 1902 logger.debug("Trying Upstream")
1714 m.download(ud, self.d) 1903 m.download(ud, d)
1715 if hasattr(m, "build_mirror_data"): 1904 if hasattr(m, "build_mirror_data"):
1716 m.build_mirror_data(ud, self.d) 1905 m.build_mirror_data(ud, d)
1717 done = True 1906 done = True
1718 # early checksum verify, so that if checksum mismatched, 1907 # early checksum verify, so that if checksum mismatched,
1719 # fetcher still have chance to fetch from mirror 1908 # fetcher still have chance to fetch from mirror
1720 m.update_donestamp(ud, self.d) 1909 m.update_donestamp(ud, d)
1721 1910
1722 except bb.fetch2.NetworkAccess: 1911 except bb.fetch2.NetworkAccess:
1723 raise 1912 raise
@@ -1735,18 +1924,18 @@ class Fetch(object):
1735 logger.debug(str(e)) 1924 logger.debug(str(e))
1736 firsterr = e 1925 firsterr = e
1737 # Remove any incomplete fetch 1926 # Remove any incomplete fetch
1738 if not verified_stamp: 1927 if not verified_stamp and m.cleanup_upon_failure():
1739 m.clean(ud, self.d) 1928 m.clean(ud, d)
1740 logger.debug("Trying MIRRORS") 1929 logger.debug("Trying MIRRORS")
1741 mirrors = mirror_from_string(self.d.getVar('MIRRORS')) 1930 mirrors = mirror_from_string(d.getVar('MIRRORS'))
1742 done = m.try_mirrors(self, ud, self.d, mirrors) 1931 done = m.try_mirrors(self, ud, d, mirrors)
1743 1932
1744 if not done or not m.done(ud, self.d): 1933 if not done or not m.done(ud, d):
1745 if firsterr: 1934 if firsterr:
1746 logger.error(str(firsterr)) 1935 logger.error(str(firsterr))
1747 raise FetchError("Unable to fetch URL from any source.", u) 1936 raise FetchError("Unable to fetch URL from any source.", u)
1748 1937
1749 m.update_donestamp(ud, self.d) 1938 m.update_donestamp(ud, d)
1750 1939
1751 except IOError as e: 1940 except IOError as e:
1752 if e.errno in [errno.ESTALE]: 1941 if e.errno in [errno.ESTALE]:
@@ -1754,17 +1943,28 @@ class Fetch(object):
1754 raise ChecksumError("Stale Error Detected") 1943 raise ChecksumError("Stale Error Detected")
1755 1944
1756 except BBFetchException as e: 1945 except BBFetchException as e:
1757 if isinstance(e, ChecksumError): 1946 if isinstance(e, NoChecksumError):
1947 (message, _) = e.args
1948 checksum_missing_messages.append(message)
1949 continue
1950 elif isinstance(e, ChecksumError):
1758 logger.error("Checksum failure fetching %s" % u) 1951 logger.error("Checksum failure fetching %s" % u)
1759 raise 1952 raise
1760 1953
1761 finally: 1954 finally:
1762 if ud.lockfile: 1955 if ud.lockfile:
1763 bb.utils.unlockfile(lf) 1956 bb.utils.unlockfile(lf)
1957 if checksum_missing_messages:
1958 logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages))
1959 raise BBFetchException("There was some missing checksums in the recipe")
1764 1960
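On the recipe side, the fix for each reported message is to add the missing checksum to the SRC_URI entry, e.g. (sketch; the hash value is a placeholder for the one the fetcher prints):

SRC_URI = "https://example.com/foo-1.0.tar.gz"
SRC_URI[sha256sum] = "<sha256 reported by the fetcher>"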
1765 def checkstatus(self, urls=None): 1961 def checkstatus(self, urls=None):
1766 """ 1962 """
1767 Check all urls exist upstream 1963 Check all URLs exist upstream.
1964
1965 Returns None if the URLs exist, raises FetchError if the check wasn't
1966 successful but there wasn't an error (such as file not found), and
1967 raises other exceptions in error cases.
1768 """ 1968 """
1769 1969
1770 if not urls: 1970 if not urls:
@@ -1787,7 +1987,7 @@ class Fetch(object):
1787 ret = m.try_mirrors(self, ud, self.d, mirrors, True) 1987 ret = m.try_mirrors(self, ud, self.d, mirrors, True)
1788 1988
1789 if not ret: 1989 if not ret:
1790 raise FetchError("URL %s doesn't work" % u, u) 1990 raise FetchError("URL doesn't work", u)
1791 1991
1792 def unpack(self, root, urls=None): 1992 def unpack(self, root, urls=None):
1793 """ 1993 """
@@ -1797,6 +1997,8 @@ class Fetch(object):
1797 if not urls: 1997 if not urls:
1798 urls = self.urls 1998 urls = self.urls
1799 1999
2000 unpack_tracer.start(root, self.ud, self.d)
2001
1800 for u in urls: 2002 for u in urls:
1801 ud = self.ud[u] 2003 ud = self.ud[u]
1802 ud.setup_localpath(self.d) 2004 ud.setup_localpath(self.d)
@@ -1804,11 +2006,15 @@ class Fetch(object):
1804 if ud.lockfile: 2006 if ud.lockfile:
1805 lf = bb.utils.lockfile(ud.lockfile) 2007 lf = bb.utils.lockfile(ud.lockfile)
1806 2008
2009 unpack_tracer.start_url(u)
1807 ud.method.unpack(ud, root, self.d) 2010 ud.method.unpack(ud, root, self.d)
2011 unpack_tracer.finish_url(u)
1808 2012
1809 if ud.lockfile: 2013 if ud.lockfile:
1810 bb.utils.unlockfile(lf) 2014 bb.utils.unlockfile(lf)
1811 2015
2016 unpack_tracer.complete()
2017
1812 def clean(self, urls=None): 2018 def clean(self, urls=None):
1813 """ 2019 """
1814 Clean files that the fetcher gets or places 2020 Clean files that the fetcher gets or places
@@ -1908,6 +2114,10 @@ from . import repo
1908from . import clearcase 2114from . import clearcase
1909from . import npm 2115from . import npm
1910from . import npmsw 2116from . import npmsw
2117from . import az
2118from . import crate
2119from . import gcp
2120from . import gomod
1911 2121
1912methods.append(local.Local()) 2122methods.append(local.Local())
1913methods.append(wget.Wget()) 2123methods.append(wget.Wget())
@@ -1927,3 +2137,8 @@ methods.append(repo.Repo())
1927methods.append(clearcase.ClearCase()) 2137methods.append(clearcase.ClearCase())
1928methods.append(npm.Npm()) 2138methods.append(npm.Npm())
1929methods.append(npmsw.NpmShrinkWrap()) 2139methods.append(npmsw.NpmShrinkWrap())
2140methods.append(az.Az())
2141methods.append(crate.Crate())
2142methods.append(gcp.GCP())
2143methods.append(gomod.GoMod())
2144methods.append(gomod.GoModGit())
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py
new file mode 100644
index 0000000000..1d3664f213
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/az.py
@@ -0,0 +1,98 @@
1"""
2BitBake 'Fetch' Azure Storage implementation
3
4"""
5
6# Copyright (C) 2021 Alejandro Hernandez Samaniego
7#
8# Based on bb.fetch2.wget:
9# Copyright (C) 2003, 2004 Chris Larson
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13# Based on functions from the base bb module, Copyright 2003 Holger Schurig
14
15import shlex
16import os
17import bb
18from bb.fetch2 import FetchError
19from bb.fetch2 import logger
20from bb.fetch2.wget import Wget
21
22
23class Az(Wget):
24
25 def supports(self, ud, d):
26 """
27 Check to see if a given url can be fetched from Azure Storage
28 """
29 return ud.type in ['az']
30
31
32 def checkstatus(self, fetch, ud, d, try_again=True):
33
34 # checkstatus discards parameters either way, so we need to do this before adding the SAS
35 ud.url = ud.url.replace('az://','https://').split(';')[0]
36
37 az_sas = d.getVar('AZ_SAS')
38 if az_sas and az_sas not in ud.url:
39 if not az_sas.startswith('?'):
40 raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.")
41 ud.url += az_sas
42
43 return Wget.checkstatus(self, fetch, ud, d, try_again)
44
45 # Override download method, include retries
46 def download(self, ud, d, retries=3):
47 """Fetch urls"""
48
49 # If we're reaching the account transaction limit we might be refused a connection,
50 # retrying allows us to avoid false negatives since the limit changes over time
51 fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5'
52
53 # We need to provide a localpath to avoid wget using the SAS
54 # ud.localfile either has the downloadfilename or ud.path
55 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
56 bb.utils.mkdirhier(os.path.dirname(localpath))
57 fetchcmd += " -O %s" % shlex.quote(localpath)
58
59
60 if ud.user and ud.pswd:
61 fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
62
63 # Check if a Shared Access Signature was given and use it
64 az_sas = d.getVar('AZ_SAS')
65
66 if az_sas:
67 if not az_sas.startswith('?'):
68 raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.")
69 azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
70 else:
71 azuri = '%s%s%s' % ('https://', ud.host, ud.path)
72
73 dldir = d.getVar("DL_DIR")
74 if os.path.exists(ud.localpath):
75 # file exists, but we didn't complete it... trying again.
76 fetchcmd += " -c -P %s '%s'" % (dldir, azuri)
77 else:
78 fetchcmd += " -P %s '%s'" % (dldir, azuri)
79
80 try:
81 self._runwget(ud, d, fetchcmd, False)
82 except FetchError as e:
83 # Azure sometimes fails the handshake when using wget after some stress, producing a
84 # FetchError from the fetcher; if the artifact exists, retrying should succeed
85 if retries > 0 and 'Unable to establish SSL connection' in str(e):
86 logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries)
87 self.download(ud, d, retries - 1)
88
89 # Sanity check since wget can pretend it succeeded when it didn't
90 # Also, this used to happen if sourceforge sent us to the mirror page
91 if not os.path.exists(ud.localpath):
92 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri)
93
94 if os.path.getsize(ud.localpath) == 0:
95 os.remove(ud.localpath)
96 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri)
97
98 return True
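A sketch of how a recipe might use this fetcher; the storage account, container, and SAS token are placeholders. Per the code above, az:// is rewritten to https:// and AZ_SAS, which must begin with '?', is appended to the fetch URI:

SRC_URI = "az://<account>.blob.core.windows.net/<container>/foo-1.0.tar.gz"
SRC_URI[sha256sum] = "<checksum>"
AZ_SAS = "?sv=2022-11-02&ss=b&sig=<signature>"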
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
index 1a9c863769..17500daf95 100644
--- a/bitbake/lib/bb/fetch2/clearcase.py
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -108,7 +108,7 @@ class ClearCase(FetchMethod):
108 ud.module.replace("/", "."), 108 ud.module.replace("/", "."),
109 ud.label.replace("/", ".")) 109 ud.label.replace("/", "."))
110 110
111 ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) 111 ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
112 ud.csname = "%s-config-spec" % (ud.identifier) 112 ud.csname = "%s-config-spec" % (ud.identifier)
113 ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type) 113 ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
114 ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) 114 ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
@@ -130,8 +130,6 @@ class ClearCase(FetchMethod):
130 self.debug("configspecfile = %s" % ud.configspecfile) 130 self.debug("configspecfile = %s" % ud.configspecfile)
131 self.debug("localfile = %s" % ud.localfile) 131 self.debug("localfile = %s" % ud.localfile)
132 132
133 ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
134
135 def _build_ccase_command(self, ud, command): 133 def _build_ccase_command(self, ud, command):
136 """ 134 """
137 Build up a commandline based on ud 135 Build up a commandline based on ud
@@ -196,7 +194,7 @@ class ClearCase(FetchMethod):
196 194
197 def need_update(self, ud, d): 195 def need_update(self, ud, d):
198 if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec): 196 if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
199 ud.identifier += "-%s" % d.getVar("DATETIME",d, True) 197 ud.identifier += "-%s" % d.getVar("DATETIME")
200 return True 198 return True
201 if os.path.exists(ud.localpath): 199 if os.path.exists(ud.localpath):
202 return False 200 return False
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
new file mode 100644
index 0000000000..e611736f06
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -0,0 +1,150 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for crates.io
5"""
6
7# Copyright (C) 2016 Doug Goldstein
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11# Based on functions from the base bb module, Copyright 2003 Holger Schurig
12
13import hashlib
14import json
15import os
16import subprocess
17import bb
18from bb.fetch2 import logger, subprocess_setup, UnpackError
19from bb.fetch2.wget import Wget
20
21
22class Crate(Wget):
23
24 """Class to fetch crates via wget"""
25
26 def _cargo_bitbake_path(self, rootdir):
27 return os.path.join(rootdir, "cargo_home", "bitbake")
28
29 def supports(self, ud, d):
30 """
31 Check to see if a given url is for this fetcher
32 """
33 return ud.type in ['crate']
34
35 def recommends_checksum(self, urldata):
36 return True
37
38 def urldata_init(self, ud, d):
39 """
40 Sets up to download the respective crate from crates.io
41 """
42
43 if ud.type == 'crate':
44 self._crate_urldata_init(ud, d)
45
46 super(Crate, self).urldata_init(ud, d)
47
48 def _crate_urldata_init(self, ud, d):
49 """
50 Sets up the download for a crate
51 """
52
53 # URL syntax is: crate://HOST/NAME/VERSION
54 # break the URL apart by /
55 parts = ud.url.split('/')
56 if len(parts) < 5:
57 raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url)
58
59 # version is expected to be the last token
60 # but ignore possible url parameters which will be used
61 # by the top fetcher class
62 version = parts[-1].split(";")[0]
63 # second to last field is name
64 name = parts[-2]
65 # host (this is to allow custom crate registries to be specified)
66 host = '/'.join(parts[2:-2])
67
68 # if using upstream just fix it up nicely
69 if host == 'crates.io':
70 host = 'crates.io/api/v1/crates'
71
72 ud.url = "https://%s/%s/%s/download" % (host, name, version)
73 ud.versionsurl = "https://%s/%s/versions" % (host, name)
74 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
75 if 'name' not in ud.parm:
76 ud.parm['name'] = '%s-%s' % (name, version)
77
78 logger.debug2("Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
79
80 def unpack(self, ud, rootdir, d):
81 """
82 Uses the crate to build the necessary paths for cargo to utilize it
83 """
84 if ud.type == 'crate':
85 return self._crate_unpack(ud, rootdir, d)
86 else:
87 super(Crate, self).unpack(ud, rootdir, d)
88
89 def _crate_unpack(self, ud, rootdir, d):
90 """
91 Unpacks a crate
92 """
93 thefile = ud.localpath
94
95 # possible metadata we need to write out
96 metadata = {}
97
98 # change to the rootdir to unpack but save the old working dir
99 save_cwd = os.getcwd()
100 os.chdir(rootdir)
101
102 bp = d.getVar('BP')
103 if bp == ud.parm.get('name'):
104 cmd = "tar -xz --no-same-owner -f %s" % thefile
105 ud.unpack_tracer.unpack("crate-extract", rootdir)
106 else:
107 cargo_bitbake = self._cargo_bitbake_path(rootdir)
108 ud.unpack_tracer.unpack("cargo-extract", cargo_bitbake)
109
110 cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake)
111
112 # ensure we've got these paths made
113 bb.utils.mkdirhier(cargo_bitbake)
114
115 # generate metadata necessary
116 with open(thefile, 'rb') as f:
117 # get the SHA256 of the original tarball
118 tarhash = hashlib.sha256(f.read()).hexdigest()
119
120 metadata['files'] = {}
121 metadata['package'] = tarhash
122
123 path = d.getVar('PATH')
124 if path:
125 cmd = "PATH=\"%s\" %s" % (path, cmd)
126 bb.note("Unpacking %s to %s/" % (thefile, os.getcwd()))
127
128 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
129
130 os.chdir(save_cwd)
131
132 if ret != 0:
133 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
134
135 # if we have metadata to write out...
136 if len(metadata) > 0:
137 cratepath = os.path.splitext(os.path.basename(thefile))[0]
138 bbpath = self._cargo_bitbake_path(rootdir)
139 mdfile = '.cargo-checksum.json'
140 mdpath = os.path.join(bbpath, cratepath, mdfile)
141 with open(mdpath, "w") as f:
142 json.dump(metadata, f)
143
144 def latest_versionstring(self, ud, d):
145 from functools import cmp_to_key
146 json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d))
147 versions = [(0, i["num"], "") for i in json_data["versions"]]
148 versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp))
149
150 return (versions[-1][1], "")
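A sketch of recipe usage matching the crate://HOST/NAME/VERSION syntax parsed above (crate name and version are illustrative). The checksum key uses the name parameter, which defaults to NAME-VERSION:

SRC_URI += "crate://crates.io/glob/0.2.11"
SRC_URI[glob-0.2.11.sha256sum] = "<checksum>"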
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
new file mode 100644
index 0000000000..86546d40bf
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -0,0 +1,102 @@
1"""
2BitBake 'Fetch' implementation for Google Cloud Platform Storage.
3
4Class for fetching files from Google Cloud Storage using the
5Google Cloud Storage Python Client. The GCS Python Client must
6be correctly installed, configured and authenticated prior to use.
7Additionally, gsutil must also be installed.
8
9"""
10
11# Copyright (C) 2023, Snap Inc.
12#
13# Based in part on bb.fetch2.s3:
14# Copyright (C) 2017 Andre McCurdy
15#
16# SPDX-License-Identifier: GPL-2.0-only
17#
18# Based on functions from the base bb module, Copyright 2003 Holger Schurig
19
20import os
21import bb
22import urllib.parse, urllib.error
23from bb.fetch2 import FetchMethod
24from bb.fetch2 import FetchError
25from bb.fetch2 import logger
26
27class GCP(FetchMethod):
28 """
29 Class to fetch urls via GCP's Python API.
30 """
31 def __init__(self):
32 self.gcp_client = None
33
34 def supports(self, ud, d):
35 """
36 Check to see if a given url can be fetched with GCP.
37 """
38 return ud.type in ['gs']
39
40 def recommends_checksum(self, urldata):
41 return True
42
43 def urldata_init(self, ud, d):
44 if 'downloadfilename' in ud.parm:
45 ud.basename = ud.parm['downloadfilename']
46 else:
47 ud.basename = os.path.basename(ud.path)
48
49 ud.localfile = ud.basename
50
51 def get_gcp_client(self):
52 from google.cloud import storage
53 self.gcp_client = storage.Client(project=None)
54
55 def download(self, ud, d):
56 """
57 Fetch urls using the GCP API.
58 Assumes localpath was called first.
59 """
60 from google.api_core.exceptions import NotFound
61 logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
62 if self.gcp_client is None:
63 self.get_gcp_client()
64
65 bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
66
67 # Path sometimes has leading slash, so strip it
68 path = ud.path.lstrip("/")
69 blob = self.gcp_client.bucket(ud.host).blob(path)
70 try:
71 blob.download_to_filename(ud.localpath)
72 except NotFound:
73 raise FetchError("The GCP API threw a NotFound exception")
74
75 # Additional sanity checks copied from the wget class (although there
76 # are no known issues which mean these are required, treat the GCP API
77 # tool with a little healthy suspicion).
78 if not os.path.exists(ud.localpath):
79 raise FetchError(f"The GCP API returned success for gs://{ud.host}{ud.path} but {ud.localpath} doesn't exist?!")
80
81 if os.path.getsize(ud.localpath) == 0:
82 os.remove(ud.localpath)
83 raise FetchError(f"The downloaded file for gs://{ud.host}{ud.path} resulted in a zero size file?! Deleting and failing since this isn't right.")
84
85 return True
86
87 def checkstatus(self, fetch, ud, d):
88 """
89 Check the status of a URL.
90 """
91 logger.debug2(f"Checking status of gs://{ud.host}{ud.path}")
92 if self.gcp_client is None:
93 self.get_gcp_client()
94
95 bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
96
97 # Path sometimes has leading slash, so strip it
98 path = ud.path.lstrip("/")
99 if not self.gcp_client.bucket(ud.host).blob(path).exists():
100 raise FetchError(f"The GCP API reported that gs://{ud.host}{ud.path} does not exist")
101 else:
102 return True
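A sketch of recipe usage under the assumptions stated in the module docstring (the google-cloud-storage Python client is installed, configured, and authenticated on the host); bucket and object names are placeholders:

SRC_URI = "gs://<bucket>/path/to/foo-1.0.tar.gz"
SRC_URI[sha256sum] = "<checksum>"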
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index e3ba80a3f5..14ec45a3f6 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -9,15 +9,6 @@ Supported SRC_URI options are:
9- branch 9- branch
10 The git branch to retrieve from. The default is "master" 10 The git branch to retrieve from. The default is "master"
11 11
12 This option also supports multiple branch fetching, with branches
13 separated by commas. In multiple branches case, the name option
14 must have the same number of names to match the branches, which is
15 used to specify the SRC_REV for the branch
16 e.g:
17 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
18 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
19 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
20
21- tag 12- tag
22 The git tag to retrieve. The default is "master" 13 The git tag to retrieve. The default is "master"
23 14
@@ -44,13 +35,27 @@ Supported SRC_URI options are:
44 35
45- nobranch 36- nobranch
46 Don't check the SHA validation for branch. set this option for the recipe 37 Don't check the SHA validation for branch. set this option for the recipe
47 referring to commit which is valid in tag instead of branch. 38 referring to commit which is valid in any namespace (branch, tag, ...)
39 instead of branch.
48 The default is "0", set nobranch=1 if needed. 40 The default is "0", set nobranch=1 if needed.
49 41
42- subpath
43 Limit the checkout to a specific subpath of the tree.
44 By default, checkout the whole tree, set subpath=<path> if needed
45
46- destsuffix
47 The name of the path in which to place the checkout.
48 By default, the path is git/, set destsuffix=<suffix> if needed
49
50- usehead 50- usehead
51 For local git:// urls to use the current branch HEAD as the revision for use with 51 For local git:// urls to use the current branch HEAD as the revision for use with
52 AUTOREV. Implies nobranch. 52 AUTOREV. Implies nobranch.
53 53
54- lfs
55 Enable the checkout to use LFS for large files. This will download all LFS files
56 in the download step, as the unpack step does not have network access.
57 The default is "1", set lfs=0 to skip.
58
54""" 59"""
55 60
56# Copyright (C) 2005 Richard Purdie 61# Copyright (C) 2005 Richard Purdie
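Pulling the documented options together, a hedged example of a single-branch URL using the new subpath, destsuffix and lfs options (host and paths are hypothetical):

SRC_URI = "git://git.example.com/foo.git;protocol=https;branch=main;subpath=docs;destsuffix=foo-docs/;lfs=0"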
@@ -64,15 +69,22 @@ import fnmatch
64import os 69import os
65import re 70import re
66import shlex 71import shlex
72import shutil
67import subprocess 73import subprocess
68import tempfile 74import tempfile
75import urllib
69import bb 76import bb
70import bb.progress 77import bb.progress
78from contextlib import contextmanager
71from bb.fetch2 import FetchMethod 79from bb.fetch2 import FetchMethod
72from bb.fetch2 import runfetchcmd 80from bb.fetch2 import runfetchcmd
73from bb.fetch2 import logger 81from bb.fetch2 import logger
82from bb.fetch2 import trusted_network
74 83
75 84
85sha1_re = re.compile(r'^[0-9a-f]{40}$')
86slash_re = re.compile(r"/+")
87
76class GitProgressHandler(bb.progress.LineFilterProgressHandler): 88class GitProgressHandler(bb.progress.LineFilterProgressHandler):
77 """Extract progress information from git output""" 89 """Extract progress information from git output"""
78 def __init__(self, d): 90 def __init__(self, d):
@@ -130,6 +142,9 @@ class Git(FetchMethod):
130 def supports_checksum(self, urldata): 142 def supports_checksum(self, urldata):
131 return False 143 return False
132 144
145 def cleanup_upon_failure(self):
146 return False
147
133 def urldata_init(self, ud, d): 148 def urldata_init(self, ud, d):
134 """ 149 """
135 init git specific variable within url data 150 init git specific variable within url data
@@ -141,6 +156,11 @@ class Git(FetchMethod):
141 ud.proto = 'file' 156 ud.proto = 'file'
142 else: 157 else:
143 ud.proto = "git" 158 ud.proto = "git"
159 if ud.host == "github.com" and ud.proto == "git":
160 # github stopped supporting git protocol
161 # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
162 ud.proto = "https"
163 bb.warn("URL: %s uses git protocol which is no longer supported by github. Please change to ;protocol=https in the url." % ud.url)
144 164
145 if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'): 165 if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
146 raise bb.fetch2.ParameterError("Invalid protocol type", ud.url) 166 raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
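In practice the rewrite above means a legacy URL like the first line below still works but logs a warning; the second form is what recipes should use (repository path hypothetical):

SRC_URI = "git://github.com/example/foo.git;branch=main"                  # rewritten to https, warns
SRC_URI = "git://github.com/example/foo.git;protocol=https;branch=main"  # preferred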
@@ -162,18 +182,25 @@ class Git(FetchMethod):
162 ud.bareclone = ud.parm.get("bareclone","0") == "1" 182 ud.bareclone = ud.parm.get("bareclone","0") == "1"
163 if ud.bareclone: 183 if ud.bareclone:
164 ud.nocheckout = 1 184 ud.nocheckout = 1
165
166 ud.unresolvedrev = {}
167 branches = ud.parm.get("branch", "master").split(',')
168 if len(branches) != len(ud.names):
169 raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
170 185
171 ud.cloneflags = "-s -n" 186 ud.unresolvedrev = ""
187 ud.branch = ud.parm.get("branch", "")
188 if not ud.branch and not ud.nobranch:
189 raise bb.fetch2.ParameterError("The url does not set any branch parameter or set nobranch=1.", ud.url)
190
191 ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1"
192
193 ud.cloneflags = "-n"
194 if not ud.noshared:
195 ud.cloneflags += " -s"
172 if ud.bareclone: 196 if ud.bareclone:
173 ud.cloneflags += " --mirror" 197 ud.cloneflags += " --mirror"
174 198
199 ud.shallow_skip_fast = False
175 ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1" 200 ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
176 ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split() 201 ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()
202 if 'tag' in ud.parm:
203 ud.shallow_extra_refs.append("refs/tags/" + ud.parm['tag'])
177 204
178 depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH") 205 depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
179 if depth_default is not None: 206 if depth_default is not None:
@@ -190,32 +217,27 @@ class Git(FetchMethod):
190 217
191 revs_default = d.getVar("BB_GIT_SHALLOW_REVS") 218 revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
192 ud.shallow_revs = [] 219 ud.shallow_revs = []
193 ud.branches = {} 220
194 for pos, name in enumerate(ud.names): 221 ud.unresolvedrev = ud.branch
195 branch = branches[pos] 222
196 ud.branches[name] = branch 223 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % ud.name)
197 ud.unresolvedrev[name] = branch 224 if shallow_depth is not None:
198 225 try:
199 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name) 226 shallow_depth = int(shallow_depth or 0)
200 if shallow_depth is not None: 227 except ValueError:
201 try: 228 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
202 shallow_depth = int(shallow_depth or 0) 229 else:
203 except ValueError: 230 if shallow_depth < 0:
204 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 231 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
205 else: 232 ud.shallow_depths[ud.name] = shallow_depth
206 if shallow_depth < 0: 233
207 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 234 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % ud.name)
208 ud.shallow_depths[name] = shallow_depth 235 if revs is not None:
209 236 ud.shallow_revs.extend(revs.split())
210 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name) 237 elif revs_default is not None:
211 if revs is not None: 238 ud.shallow_revs.extend(revs_default.split())
212 ud.shallow_revs.extend(revs.split()) 239
213 elif revs_default is not None: 240 if ud.shallow and not ud.shallow_revs and ud.shallow_depths[ud.name] == 0:
214 ud.shallow_revs.extend(revs_default.split())
215
216 if (ud.shallow and
217 not ud.shallow_revs and
218 all(ud.shallow_depths[n] == 0 for n in ud.names)):
219 # Shallow disabled for this URL 241 # Shallow disabled for this URL
220 ud.shallow = False 242 ud.shallow = False
221 243
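A sketch of the configuration these lookups consume, e.g. in local.conf; since a later check rejects combining --depth with --shallow-exclude, set only one of DEPTH or REVS (values and the per-name suffix "foo" are placeholders):

BB_GIT_SHALLOW = "1"
BB_GIT_SHALLOW_DEPTH = "1"
# or, per SRC_URI name (here a hypothetical name "foo"):
# BB_GIT_SHALLOW_DEPTH_foo = "2"
# BB_GIT_SHALLOW_REVS = "<commit whose history to exclude>"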
@@ -224,10 +246,9 @@ class Git(FetchMethod):
224 # rev of this repository. This will get resolved into a revision 246 # rev of this repository. This will get resolved into a revision
225 # later. If an actual revision happens to have also been provided 247 # later. If an actual revision happens to have also been provided
226 # then this setting will be overridden. 248 # then this setting will be overridden.
227 for name in ud.names: 249 ud.unresolvedrev = 'HEAD'
228 ud.unresolvedrev[name] = 'HEAD'
229 250
230 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0" 251 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin"
231 252
232 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" 253 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
233 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable 254 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -235,24 +256,22 @@ class Git(FetchMethod):
235 256
236 ud.setup_revisions(d) 257 ud.setup_revisions(d)
237 258
238 for name in ud.names: 259 # Ensure any revision that doesn't look like a SHA-1 is translated into one
239 # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one 260 if not sha1_re.match(ud.revision or ''):
240 if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]): 261 if ud.revision:
241 if ud.revisions[name]: 262 ud.unresolvedrev = ud.revision
242 ud.unresolvedrev[name] = ud.revisions[name] 263 ud.revision = self.latest_revision(ud, d, ud.name)
243 ud.revisions[name] = self.latest_revision(ud, d, name)
244 264
245 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_')) 265 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
246 if gitsrcname.startswith('.'): 266 if gitsrcname.startswith('.'):
247 gitsrcname = gitsrcname[1:] 267 gitsrcname = gitsrcname[1:]
248 268
249 # for rebaseable git repo, it is necessary to keep mirror tar ball 269 # For a rebaseable git repo, it is necessary to keep a mirror tar ball
250 # per revision, so that even the revision disappears from the 270 # per revision, so that even if the revision disappears from the
251 # upstream repo in the future, the mirror will remain intact and still 271 # upstream repo in the future, the mirror will remain intact and still
252 # contains the revision 272 # contain the revision
253 if ud.rebaseable: 273 if ud.rebaseable:
254 for name in ud.names: 274 gitsrcname = gitsrcname + '_' + ud.revision
255 gitsrcname = gitsrcname + '_' + ud.revisions[name]
256 275
257 dl_dir = d.getVar("DL_DIR") 276 dl_dir = d.getVar("DL_DIR")
258 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") 277 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
@@ -270,15 +289,14 @@ class Git(FetchMethod):
270 if ud.shallow_revs: 289 if ud.shallow_revs:
271 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) 290 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))
272 291
273 for name, revision in sorted(ud.revisions.items()): 292 tarballname = "%s_%s" % (tarballname, ud.revision[:7])
274 tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7]) 293 depth = ud.shallow_depths[ud.name]
275 depth = ud.shallow_depths[name] 294 if depth:
276 if depth: 295 tarballname = "%s-%s" % (tarballname, depth)
277 tarballname = "%s-%s" % (tarballname, depth)
278 296
279 shallow_refs = [] 297 shallow_refs = []
280 if not ud.nobranch: 298 if not ud.nobranch:
281 shallow_refs.extend(ud.branches.values()) 299 shallow_refs.append(ud.branch)
282 if ud.shallow_extra_refs: 300 if ud.shallow_extra_refs:
283 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) 301 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
284 if shallow_refs: 302 if shallow_refs:
@@ -293,16 +311,29 @@ class Git(FetchMethod):
293 return ud.clonedir 311 return ud.clonedir
294 312
295 def need_update(self, ud, d): 313 def need_update(self, ud, d):
296 return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud) 314 return self.clonedir_need_update(ud, d) \
315 or self.shallow_tarball_need_update(ud) \
316 or self.tarball_need_update(ud) \
317 or self.lfs_need_update(ud, d)
297 318
298 def clonedir_need_update(self, ud, d): 319 def clonedir_need_update(self, ud, d):
299 if not os.path.exists(ud.clonedir): 320 if not os.path.exists(ud.clonedir):
300 return True 321 return True
301 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): 322 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d):
302 return True 323 return True
303 for name in ud.names: 324 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
304 if not self._contains_ref(ud, d, name, ud.clonedir): 325 return True
305 return True 326 return False
327
328 def lfs_need_update(self, ud, d):
329 if not self._need_lfs(ud):
330 return False
331
332 if self.clonedir_need_update(ud, d):
333 return True
334
335 if not self._lfs_objects_downloaded(ud, d, ud.clonedir):
336 return True
306 return False 337 return False
307 338
308 def clonedir_need_shallow_revs(self, ud, d): 339 def clonedir_need_shallow_revs(self, ud, d):
@@ -319,11 +350,28 @@ class Git(FetchMethod):
319 def tarball_need_update(self, ud): 350 def tarball_need_update(self, ud):
320 return ud.write_tarballs and not os.path.exists(ud.fullmirror) 351 return ud.write_tarballs and not os.path.exists(ud.fullmirror)
321 352
353 def update_mirror_links(self, ud, origud):
354 super().update_mirror_links(ud, origud)
355 # When using shallow mode, add a symlink to the original fullshallow
356 # path to ensure a valid symlink even in the `PREMIRRORS` case
357 if ud.shallow and not os.path.exists(origud.fullshallow):
358 self.ensure_symlink(ud.localpath, origud.fullshallow)
359
322 def try_premirror(self, ud, d): 360 def try_premirror(self, ud, d):
323 # If we don't do this, updating an existing checkout with only premirrors 361 # If we don't do this, updating an existing checkout with only premirrors
324 # is not possible 362 # is not possible
325 if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")): 363 if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
326 return True 364 return True
365 # If the url is not in trusted network, that is, BB_NO_NETWORK is set to 0
366 # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then
367 # we need to try premirrors first as using upstream is destined to fail.
368 if not trusted_network(d, ud.url):
369 return True
370 # The following check ensures incremental fetches in downloads: the premirror
371 # might be old and not contain the new rev required, which would cause a total
372 # removal and a new clone. So if we can reach the network, we prefer upstream
373 # over the premirror, even though the premirror might contain
374 # the new rev.
327 if os.path.exists(ud.clonedir): 375 if os.path.exists(ud.clonedir):
328 return False 376 return False
329 return True 377 return True
@@ -337,21 +385,76 @@ class Git(FetchMethod):
337 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d): 385 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
338 ud.localpath = ud.fullshallow 386 ud.localpath = ud.fullshallow
339 return 387 return
340 elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir): 388 elif os.path.exists(ud.fullmirror) and self.need_update(ud, d):
341 bb.utils.mkdirhier(ud.clonedir) 389 if not os.path.exists(ud.clonedir):
342 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir) 390 bb.utils.mkdirhier(ud.clonedir)
343 391 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
392 else:
393 tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
394 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
395 output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
396 if 'mirror' in output:
397 runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir)
398 runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir)
399 fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd)
400 runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
344 repourl = self._get_repo_url(ud) 401 repourl = self._get_repo_url(ud)
345 402
403 needs_clone = False
404 if os.path.exists(ud.clonedir):
405 # The directory may exist, but not be the top level of a bare git
406 # repository in which case it needs to be deleted and re-cloned.
407 try:
408 # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel
409 output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir)
410 toplevel = output.rstrip()
411
412 if not bb.utils.path_is_descendant(toplevel, ud.clonedir):
413 logger.warning("Top level directory '%s' is not a descendant of '%s'. Re-cloning", toplevel, ud.clonedir)
414 needs_clone = True
415 except bb.fetch2.FetchError as e:
416 logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e)
417 needs_clone = True
418 except FileNotFoundError as e:
419 logger.warning("%s", e)
420 needs_clone = True
421
422 if needs_clone:
423 shutil.rmtree(ud.clonedir)
424 else:
425 needs_clone = True
426
346 # If the repo still doesn't exist, fall back to cloning it 427 # If the repo still doesn't exist, fall back to cloning it
347 if not os.path.exists(ud.clonedir): 428 if needs_clone:
348 # We do this since git will use a "-l" option automatically for local urls where possible 429 # We do this since git will use a "-l" option automatically for local urls where possible,
430 # but it doesn't work when git/objects is a symlink; it only works when it is a directory.
349 if repourl.startswith("file://"): 431 if repourl.startswith("file://"):
350 repourl = repourl[7:] 432 repourl_path = repourl[7:]
433 objects = os.path.join(repourl_path, 'objects')
434 if os.path.isdir(objects) and not os.path.islink(objects):
435 repourl = repourl_path
351 clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir) 436 clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
352 if ud.proto.lower() != 'file': 437 if ud.proto.lower() != 'file':
353 bb.fetch2.check_network_access(d, clone_cmd, ud.url) 438 bb.fetch2.check_network_access(d, clone_cmd, ud.url)
354 progresshandler = GitProgressHandler(d) 439 progresshandler = GitProgressHandler(d)
440
441 # Try creating a fast initial shallow clone
442 # Enabling ud.shallow_skip_fast will skip this
443 # If the Git error "Server does not allow request for unadvertised object"
444 # occurs, shallow_skip_fast is enabled automatically.
445 # This may happen if the Git server does not allow the request
446 # or if the Git client has issues with this functionality.
447 if ud.shallow and not ud.shallow_skip_fast:
448 try:
449 self.clone_shallow_with_tarball(ud, d)
450 # When the shallow clone has succeeded, use the shallow tarball
451 ud.localpath = ud.fullshallow
452 return
453 except:
454 logger.warning("Creating fast initial shallow clone failed, try initial regular clone now.")
455
456 # When skipping the fast initial shallow clone, or when it failed:
457 # Try again with an initial regular clone
355 runfetchcmd(clone_cmd, d, log=progresshandler) 458 runfetchcmd(clone_cmd, d, log=progresshandler)
356 459
357 # Update the checkout if needed 460 # Update the checkout if needed
@@ -361,7 +464,11 @@ class Git(FetchMethod):
361 runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir) 464 runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
362 465
363 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir) 466 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
364 fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl)) 467
468 if ud.nobranch:
469 fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
470 else:
471 fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
365 if ud.proto.lower() != 'file': 472 if ud.proto.lower() != 'file':
366 bb.fetch2.check_network_access(d, fetch_cmd, ud.url) 473 bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
367 progresshandler = GitProgressHandler(d) 474 progresshandler = GitProgressHandler(d)
@@ -375,138 +482,206 @@ class Git(FetchMethod):
375 if exc.errno != errno.ENOENT: 482 if exc.errno != errno.ENOENT:
376 raise 483 raise
377 484
378 for name in ud.names: 485 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
379 if not self._contains_ref(ud, d, name, ud.clonedir): 486 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revision, ud.branch))
380 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
381 487
382 if ud.shallow and ud.write_shallow_tarballs: 488 if ud.shallow and ud.write_shallow_tarballs:
383 missing_rev = self.clonedir_need_shallow_revs(ud, d) 489 missing_rev = self.clonedir_need_shallow_revs(ud, d)
384 if missing_rev: 490 if missing_rev:
385 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) 491 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
386 492
387 if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud): 493 if self.lfs_need_update(ud, d):
388 # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching 494 self.lfs_fetch(ud, d, ud.clonedir, ud.revision)
389 # of all LFS blobs needed at the the srcrev. 495
390 # 496 def lfs_fetch(self, ud, d, clonedir, revision, fetchall=False, progresshandler=None):
391 # It would be nice to just do this inline here by running 'git-lfs fetch' 497 """Helper method for fetching Git LFS data"""
392 # on the bare clonedir, but that operation requires a working copy on some 498 try:
393 # releases of Git LFS. 499 if self._need_lfs(ud) and self._contains_lfs(ud, d, clonedir) and len(revision):
394 tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) 500 self._ensure_git_lfs(d, ud)
395 try: 501
396 # Do the checkout. This implicitly involves a Git LFS fetch. 502 # Using a worktree at the revision because .lfsconfig may exist
397 self.unpack(ud, tmpdir, d) 503 worktree_add_cmd = "%s worktree add wt %s" % (ud.basecmd, revision)
398 504 runfetchcmd(worktree_add_cmd, d, log=progresshandler, workdir=clonedir)
399 # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into 505 lfs_fetch_cmd = "%s lfs fetch %s" % (ud.basecmd, "--all" if fetchall else "")
400 # the bare clonedir. 506 runfetchcmd(lfs_fetch_cmd, d, log=progresshandler, workdir=(clonedir + "/wt"))
401 # 507 worktree_rem_cmd = "%s worktree remove -f wt" % ud.basecmd
402 # As this procedure is invoked repeatedly on incremental fetches as 508 runfetchcmd(worktree_rem_cmd, d, log=progresshandler, workdir=clonedir)
403 # a recipe's SRCREV is bumped throughout its lifetime, this will 509 except:
404 # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs 510 logger.warning("Fetching LFS did not succeed.")
405 # corresponding to all the blobs reachable from the different revs 511
406 # fetched across time. 512 @contextmanager
407 # 513 def create_atomic(self, filename):
408 # Only do this if the unpack resulted in a .git/lfs directory being 514 """Create as a temp file and move atomically into position to avoid races"""
409 # created; this only happens if at least one blob needed to be 515 fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
410 # downloaded. 516 try:
411 if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")): 517 yield tfile
412 runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir) 518 umask = os.umask(0o666)
413 finally: 519 os.umask(umask)
414 bb.utils.remove(tmpdir, recurse=True) 520 os.chmod(tfile, (0o666 & ~umask))
521 os.rename(tfile, filename)
522 finally:
523 os.close(fd)
415 524
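A minimal usage sketch of the create_atomic() pattern above: write into a temp file created in the destination directory (so rename stays on one filesystem), then rename() into place so readers never observe a half-written tarball. The destination path here is illustrative, and the simplified version omits the umask/chmod handling of the original.

import os
import tempfile
from contextlib import contextmanager

@contextmanager
def create_atomic(filename):
    fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
    try:
        yield tfile
        os.rename(tfile, filename)  # atomic on POSIX within one filesystem
    finally:
        os.close(fd)

with create_atomic("/tmp/mirror.tar.gz") as tfile:
    with open(tfile, "wb") as f:
        f.write(b"payload")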
416 def build_mirror_data(self, ud, d): 525 def build_mirror_data(self, ud, d):
417 if ud.shallow and ud.write_shallow_tarballs: 526 if ud.shallow and ud.write_shallow_tarballs:
418 if not os.path.exists(ud.fullshallow): 527 if not os.path.exists(ud.fullshallow):
419 if os.path.islink(ud.fullshallow): 528 if os.path.islink(ud.fullshallow):
420 os.unlink(ud.fullshallow) 529 os.unlink(ud.fullshallow)
421 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) 530 self.clone_shallow_with_tarball(ud, d)
422 shallowclone = os.path.join(tempdir, 'git')
423 try:
424 self.clone_shallow_local(ud, shallowclone, d)
425
426 logger.info("Creating tarball of git repository")
427 runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
428 runfetchcmd("touch %s.done" % ud.fullshallow, d)
429 finally:
430 bb.utils.remove(tempdir, recurse=True)
431 elif ud.write_tarballs and not os.path.exists(ud.fullmirror): 531 elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
432 if os.path.islink(ud.fullmirror): 532 if os.path.islink(ud.fullmirror):
433 os.unlink(ud.fullmirror) 533 os.unlink(ud.fullmirror)
434 534
435 logger.info("Creating tarball of git repository") 535 logger.info("Creating tarball of git repository")
436 runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir) 536 with self.create_atomic(ud.fullmirror) as tfile:
537 mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
538 quiet=True, workdir=ud.clonedir)
539 runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
540 % (tfile, mtime), d, workdir=ud.clonedir)
437 runfetchcmd("touch %s.done" % ud.fullmirror, d) 541 runfetchcmd("touch %s.done" % ud.fullmirror, d)
438 542
543 def clone_shallow_with_tarball(self, ud, d):
544 ret = False
545 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
546 shallowclone = os.path.join(tempdir, 'git')
547 try:
548 try:
549 self.clone_shallow_local(ud, shallowclone, d)
550 except:
551 logger.warning("Fast shallow clone failed, try to skip fast mode now.")
552 bb.utils.remove(tempdir, recurse=True)
553 os.mkdir(tempdir)
554 ud.shallow_skip_fast = True
555 self.clone_shallow_local(ud, shallowclone, d)
556 logger.info("Creating tarball of git repository")
557 with self.create_atomic(ud.fullshallow) as tfile:
558 runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
559 runfetchcmd("touch %s.done" % ud.fullshallow, d)
560 ret = True
561 finally:
562 bb.utils.remove(tempdir, recurse=True)
563
564 return ret
565
439 def clone_shallow_local(self, ud, dest, d): 566 def clone_shallow_local(self, ud, dest, d):
440 """Clone the repo and make it shallow. 567 """
568 Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default):
569 - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev
570 - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev
571 """
441 572
442 The upstream url of the new clone isn't set at this time, as it'll be 573 progresshandler = GitProgressHandler(d)
443 set correctly when unpacked.""" 574 repourl = self._get_repo_url(ud)
444 runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d) 575 bb.utils.mkdirhier(dest)
576 init_cmd = "%s init -q" % ud.basecmd
577 if ud.bareclone:
578 init_cmd += " --bare"
579 runfetchcmd(init_cmd, d, workdir=dest)
580 # Use repourl when creating a fast initial shallow clone
581 # Prefer already existing full bare clones if available
582 if not ud.shallow_skip_fast and not os.path.exists(ud.clonedir):
583 remote = shlex.quote(repourl)
584 else:
585 remote = ud.clonedir
586 runfetchcmd("%s remote add origin %s" % (ud.basecmd, remote), d, workdir=dest)
445 587
446 to_parse, shallow_branches = [], [] 588 # Check the histories which should be excluded
447 for name in ud.names: 589 shallow_exclude = ''
448 revision = ud.revisions[name] 590 for revision in ud.shallow_revs:
449 depth = ud.shallow_depths[name] 591 shallow_exclude += " --shallow-exclude=%s" % revision
450 if depth:
451 to_parse.append('%s~%d^{}' % (revision, depth - 1))
452 592
453 # For nobranch, we need a ref, otherwise the commits will be 593 revision = ud.revision
454 # removed, and for non-nobranch, we truncate the branch to our 594 depth = ud.shallow_depths[ud.name]
455 # srcrev, to avoid keeping unnecessary history beyond that.
456 branch = ud.branches[name]
457 if ud.nobranch:
458 ref = "refs/shallow/%s" % name
459 elif ud.bareclone:
460 ref = "refs/heads/%s" % branch
461 else:
462 ref = "refs/remotes/origin/%s" % branch
463 595
464 shallow_branches.append(ref) 596 # The --depth and --shallow-exclude can't be used together
465 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) 597 if depth and shallow_exclude:
598 raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.")
466 599
467 # Map srcrev+depths to revisions 600 # For nobranch, we need a ref, otherwise the commits will be
468 parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest) 601 # removed, and for non-nobranch, we truncate the branch to our
602 # srcrev, to avoid keeping unnecessary history beyond that.
603 branch = ud.branch
604 if ud.nobranch:
605 ref = "refs/shallow/%s" % ud.name
606 elif ud.bareclone:
607 ref = "refs/heads/%s" % branch
608 else:
609 ref = "refs/remotes/origin/%s" % branch
610
611 fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision)
612 if depth:
613 fetch_cmd += " --depth %s" % depth
614
615 if shallow_exclude:
616 fetch_cmd += shallow_exclude
469 617
470 # Resolve specified revisions 618 # Advertise the revision for lower version git such as 2.25.1:
471 parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest) 619 # error: Server does not allow request for unadvertised object.
472 shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines() 620 # The ud.clonedir is a local temporary dir, will be removed when
621 # fetch is done, so we can do anything on it.
622 adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision)
623 if ud.shallow_skip_fast:
624 runfetchcmd(adv_cmd, d, workdir=ud.clonedir)
625
626 runfetchcmd(fetch_cmd, d, workdir=dest)
627 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
628 # Fetch Git LFS data
629 self.lfs_fetch(ud, d, dest, ud.revision)
473 630
474 # Apply extra ref wildcards 631 # Apply extra ref wildcards
475 all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd, 632 all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \
476 d, workdir=dest).splitlines() 633 d, workdir=dest).splitlines()
634 all_refs = []
635 for line in all_refs_remote:
636 all_refs.append(line.split()[-1])
637 extra_refs = []
477 for r in ud.shallow_extra_refs: 638 for r in ud.shallow_extra_refs:
478 if not ud.bareclone: 639 if not ud.bareclone:
479 r = r.replace('refs/heads/', 'refs/remotes/origin/') 640 r = r.replace('refs/heads/', 'refs/remotes/origin/')
480 641
481 if '*' in r: 642 if '*' in r:
482 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) 643 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
483 shallow_branches.extend(matches) 644 extra_refs.extend(matches)
484 else: 645 else:
485 shallow_branches.append(r) 646 extra_refs.append(r)
647
648 for ref in extra_refs:
649 ref_fetch = ref.replace('refs/heads/', '').replace('refs/remotes/origin/', '').replace('refs/tags/', '')
650 runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest)
651 revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest)
652 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
486 653
487 # Make the repository shallow 654 # The url is local ud.clonedir, set it to upstream one
488 shallow_cmd = [self.make_shallow_path, '-s'] 655 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)
489 for b in shallow_branches:
490 shallow_cmd.append('-r')
491 shallow_cmd.append(b)
492 shallow_cmd.extend(shallow_revisions)
493 runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
494 656
495 def unpack(self, ud, destdir, d): 657 def unpack(self, ud, destdir, d):
496 """ unpack the downloaded src to destdir""" 658 """ unpack the downloaded src to destdir"""
497 659
498 subdir = ud.parm.get("subpath", "") 660 subdir = ud.parm.get("subdir")
499 if subdir != "": 661 subpath = ud.parm.get("subpath")
500 readpathspec = ":%s" % subdir 662 readpathspec = ""
501 def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/')) 663 def_destsuffix = (d.getVar("BB_GIT_DEFAULT_DESTSUFFIX") or "git") + "/"
502 else: 664
503 readpathspec = "" 665 if subpath:
504 def_destsuffix = "git/" 666 readpathspec = ":%s" % subpath
667 def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/'))
668
669 if subdir:
670 # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
671 if os.path.isabs(subdir):
672 if not os.path.realpath(subdir).startswith(os.path.realpath(destdir)):
673 raise bb.fetch2.UnpackError("subdir argument isn't a subdirectory of unpack root %s" % destdir, ud.url)
674 destdir = subdir
675 else:
676 destdir = os.path.join(destdir, subdir)
677 def_destsuffix = ""
505 678
506 destsuffix = ud.parm.get("destsuffix", def_destsuffix) 679 destsuffix = ud.parm.get("destsuffix", def_destsuffix)
507 destdir = ud.destdir = os.path.join(destdir, destsuffix) 680 destdir = ud.destdir = os.path.join(destdir, destsuffix)
508 if os.path.exists(destdir): 681 if os.path.exists(destdir):
509 bb.utils.prunedir(destdir) 682 bb.utils.prunedir(destdir)
683 if not ud.bareclone:
684 ud.unpack_tracer.unpack("git", destdir)
510 685
511 need_lfs = self._need_lfs(ud) 686 need_lfs = self._need_lfs(ud)
512 687
@@ -516,13 +691,12 @@ class Git(FetchMethod):
         source_found = False
         source_error = []

-        if not source_found:
-            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
-            if clonedir_is_up_to_date:
-                runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
-                source_found = True
-            else:
-                source_error.append("clone directory not available or not up to date: " + ud.clonedir)
+        clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
+        if clonedir_is_up_to_date:
+            runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+            source_found = True
+        else:
+            source_error.append("clone directory not available or not up to date: " + ud.clonedir)

         if not source_found:
             if ud.shallow:
@@ -538,28 +712,43 @@ class Git(FetchMethod):
         if not source_found:
             raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)

+        # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag
+        # matches the revision
+        if 'tag' in ud.parm and sha1_re.match(ud.revision):
+            output = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.parm['tag']), d, workdir=destdir)
+            output = output.strip()
+            if output != ud.revision:
+                # It is possible ud.revision is the revision on an annotated tag which won't match the output of rev-list
+                # If it resolves to the same thing there isn't a problem.
+                output2 = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
+                output2 = output2.strip()
+                if output != output2:
+                    raise bb.fetch2.FetchError("The revision the git tag '%s' resolved to didn't match the SRCREV in use (%s vs %s)" % (ud.parm['tag'], output, ud.revision), ud.url)
+
         repourl = self._get_repo_url(ud)
         runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)

         if self._contains_lfs(ud, d, destdir):
-            if need_lfs and not self._find_git_lfs(d):
-                raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
-            elif not need_lfs:
+            if not need_lfs:
                 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
+            else:
+                self._ensure_git_lfs(d, ud)
+
+                runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)

         if not ud.nocheckout:
-            if subdir != "":
-                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
-                            workdir=destdir)
+            if subpath:
+                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revision, readpathspec), d,
+                            workdir=destdir)
                 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
             elif not ud.nobranch:
-                branchname = ud.branches[ud.names[0]]
+                branchname = ud.branch
                 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
-                            ud.revisions[ud.names[0]]), d, workdir=destdir)
+                            ud.revision), d, workdir=destdir)
                 runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                             branchname), d, workdir=destdir)
             else:
-                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)
+                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revision), d, workdir=destdir)

         return True

@@ -573,8 +762,13 @@ class Git(FetchMethod):
         clonedir = os.path.realpath(ud.localpath)
         to_remove.append(clonedir)

+        # Remove shallow mirror tarball
+        if ud.shallow:
+            to_remove.append(ud.fullshallow)
+            to_remove.append(ud.fullshallow + ".done")
+
         for r in to_remove:
-            if os.path.exists(r):
+            if os.path.exists(r) or os.path.islink(r):
                 bb.note('Removing %s' % r)
                 bb.utils.remove(r, True)

@@ -585,10 +779,10 @@ class Git(FetchMethod):
         cmd = ""
         if ud.nobranch:
             cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
-                ud.basecmd, ud.revisions[name])
+                ud.basecmd, ud.revision)
         else:
             cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
-                ud.basecmd, ud.revisions[name], ud.branches[name])
+                ud.basecmd, ud.revision, ud.branch)
         try:
             output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
         except bb.fetch2.FetchError:
@@ -597,6 +791,37 @@ class Git(FetchMethod):
             raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
         return output.split()[0] != "0"

+    def _lfs_objects_downloaded(self, ud, d, wd):
+        """
+        Verifies whether the LFS objects for requested revisions have already been downloaded
+        """
+        # Bail out early if this repository doesn't use LFS
+        if not self._contains_lfs(ud, d, wd):
+            return True
+
+        self._ensure_git_lfs(d, ud)
+
+        # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file
+        # existence.
+        # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
+        cmd = "%s lfs ls-files -l %s" \
+                % (ud.basecmd, ud.revision)
+        output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
+        # Do not do any further matching if no objects are managed by LFS
+        if not output:
+            return True
+
+        # Match all lines beginning with the hexadecimal OID
+        oid_regex = re.compile("^(([a-fA-F0-9]{2})([a-fA-F0-9]{2})[A-Fa-f0-9]+)")
+        for line in output.split("\n"):
+            oid = re.search(oid_regex, line)
+            if not oid:
+                bb.warn("git lfs ls-files output '%s' did not match expected format." % line)
+            if not os.path.exists(os.path.join(wd, "lfs", "objects", oid.group(2), oid.group(3), oid.group(1))):
+                return False
+
+        return True
+
     def _need_lfs(self, ud):
         return ud.parm.get("lfs", "1") == "1"

@@ -604,20 +829,8 @@ class Git(FetchMethod):
604 """ 829 """
605 Check if the repository has 'lfs' (large file) content 830 Check if the repository has 'lfs' (large file) content
606 """ 831 """
607
608 if not ud.nobranch:
609 branchname = ud.branches[ud.names[0]]
610 else:
611 branchname = "master"
612
613 # The bare clonedir doesn't use the remote names; it has the branch immediately.
614 if wd == ud.clonedir:
615 refname = ud.branches[ud.names[0]]
616 else:
617 refname = "origin/%s" % ud.branches[ud.names[0]]
618
619 cmd = "%s grep lfs %s:.gitattributes | wc -l" % ( 832 cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
620 ud.basecmd, refname) 833 ud.basecmd, ud.revision)
621 834
622 try: 835 try:
623 output = runfetchcmd(cmd, d, quiet=True, workdir=wd) 836 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
@@ -627,12 +840,14 @@ class Git(FetchMethod):
             pass
         return False

-    def _find_git_lfs(self, d):
+    def _ensure_git_lfs(self, d, ud):
         """
-        Return True if git-lfs can be found, False otherwise.
+        Ensures that git-lfs is available, raising a FetchError if it isn't.
         """
-        import shutil
-        return shutil.which("git-lfs", path=d.getVar('PATH')) is not None
+        if shutil.which("git-lfs", path=d.getVar('PATH')) is None:
+            raise bb.fetch2.FetchError(
+                "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 "
+                "to ignore it)" % self._get_repo_url(ud))

     def _get_repo_url(self, ud):
         """
@@ -640,22 +855,21 @@ class Git(FetchMethod):
640 """ 855 """
641 # Note that we do not support passwords directly in the git urls. There are several 856 # Note that we do not support passwords directly in the git urls. There are several
642 # reasons. SRC_URI can be written out to things like buildhistory and people don't 857 # reasons. SRC_URI can be written out to things like buildhistory and people don't
643 # want to leak passwords like that. Its also all too easy to share metadata without 858 # want to leak passwords like that. Its also all too easy to share metadata without
644 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as 859 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as
645 # alternatives so we will not take patches adding password support here. 860 # alternatives so we will not take patches adding password support here.
646 if ud.user: 861 if ud.user:
647 username = ud.user + '@' 862 username = ud.user + '@'
648 else: 863 else:
649 username = "" 864 username = ""
650 return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path) 865 return "%s://%s%s%s" % (ud.proto, username, ud.host, urllib.parse.quote(ud.path))
651 866
652 def _revision_key(self, ud, d, name): 867 def _revision_key(self, ud, d, name):
653 """ 868 """
654 Return a unique key for the url 869 Return a unique key for the url
655 """ 870 """
656 # Collapse adjacent slashes 871 # Collapse adjacent slashes
657 slash_re = re.compile(r"/+") 872 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev
658 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name]
659 873
660 def _lsremote(self, ud, d, search): 874 def _lsremote(self, ud, d, search):
661 """ 875 """
@@ -687,21 +901,27 @@ class Git(FetchMethod):
687 """ 901 """
688 Compute the HEAD revision for the url 902 Compute the HEAD revision for the url
689 """ 903 """
904 if not d.getVar("__BBSRCREV_SEEN"):
905 raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev, ud.host+ud.path))
906
907 # Ensure we mark as not cached
908 bb.fetch2.mark_recipe_nocache(d)
909
690 output = self._lsremote(ud, d, "") 910 output = self._lsremote(ud, d, "")
691 # Tags of the form ^{} may not work, need to fallback to other form 911 # Tags of the form ^{} may not work, need to fallback to other form
692 if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: 912 if ud.unresolvedrev[:5] == "refs/" or ud.usehead:
693 head = ud.unresolvedrev[name] 913 head = ud.unresolvedrev
694 tag = ud.unresolvedrev[name] 914 tag = ud.unresolvedrev
695 else: 915 else:
696 head = "refs/heads/%s" % ud.unresolvedrev[name] 916 head = "refs/heads/%s" % ud.unresolvedrev
697 tag = "refs/tags/%s" % ud.unresolvedrev[name] 917 tag = "refs/tags/%s" % ud.unresolvedrev
698 for s in [head, tag + "^{}", tag]: 918 for s in [head, tag + "^{}", tag]:
699 for l in output.strip().split('\n'): 919 for l in output.strip().split('\n'):
700 sha1, ref = l.split() 920 sha1, ref = l.split()
701 if s == ref: 921 if s == ref:
702 return sha1 922 return sha1
703 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ 923 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
704 (ud.unresolvedrev[name], ud.host+ud.path)) 924 (ud.unresolvedrev, ud.host+ud.path))
705 925
706 def latest_versionstring(self, ud, d): 926 def latest_versionstring(self, ud, d):
707 """ 927 """
@@ -711,60 +931,63 @@ class Git(FetchMethod):
711 """ 931 """
712 pupver = ('', '') 932 pupver = ('', '')
713 933
714 tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
715 try: 934 try:
716 output = self._lsremote(ud, d, "refs/tags/*") 935 output = self._lsremote(ud, d, "refs/tags/*")
717 except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e: 936 except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
718 bb.note("Could not list remote: %s" % str(e)) 937 bb.note("Could not list remote: %s" % str(e))
719 return pupver 938 return pupver
720 939
940 rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
941 pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
942 nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
943
721 verstring = "" 944 verstring = ""
722 revision = ""
723 for line in output.split("\n"): 945 for line in output.split("\n"):
724 if not line: 946 if not line:
725 break 947 break
726 948
727 tag_head = line.split("/")[-1] 949 m = rev_tag_re.match(line)
950 if not m:
951 continue
952
953 (revision, tag) = m.groups()
954
728 # Ignore non-released branches 955 # Ignore non-released branches
729 m = re.search(r"(alpha|beta|rc|final)+", tag_head) 956 if nonrel_re.search(tag):
730 if m:
731 continue 957 continue
732 958
733 # search for version in the line 959 # search for version in the line
734 tag = tagregex.search(tag_head) 960 m = pver_re.search(tag)
735 if tag is None: 961 if not m:
736 continue 962 continue
737 963
738 tag = tag.group('pver') 964 pver = m.group('pver').replace("_", ".")
739 tag = tag.replace("_", ".")
740 965
741 if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0: 966 if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0:
742 continue 967 continue
743 968
744 verstring = tag 969 verstring = pver
745 revision = line.split()[0]
746 pupver = (verstring, revision) 970 pupver = (verstring, revision)
747 971
748 return pupver 972 return pupver
749 973
750 def _build_revision(self, ud, d, name): 974 def _build_revision(self, ud, d, name):
751 return ud.revisions[name] 975 return ud.revision
752 976
753 def gitpkgv_revision(self, ud, d, name): 977 def gitpkgv_revision(self, ud, d, name):
754 """ 978 """
755 Return a sortable revision number by counting commits in the history 979 Return a sortable revision number by counting commits in the history
756 Based on gitpkgv.bblass in meta-openembedded 980 Based on gitpkgv.bblass in meta-openembedded
757 """ 981 """
758 rev = self._build_revision(ud, d, name) 982 rev = ud.revision
759 localpath = ud.localpath 983 localpath = ud.localpath
760 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) 984 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
761 if not os.path.exists(localpath): 985 if not os.path.exists(localpath):
762 commits = None 986 commits = None
763 else: 987 else:
764 if not os.path.exists(rev_file) or not os.path.getsize(rev_file): 988 if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
765 from pipes import quote
766 commits = bb.fetch2.runfetchcmd( 989 commits = bb.fetch2.runfetchcmd(
767 "git rev-list %s -- | wc -l" % quote(rev), 990 "git rev-list %s -- | wc -l" % shlex.quote(rev),
768 d, quiet=True).strip().lstrip('0') 991 d, quiet=True).strip().lstrip('0')
769 if commits: 992 if commits:
770 open(rev_file, "w").write("%d\n" % int(commits)) 993 open(rev_file, "w").write("%d\n" % int(commits))
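
Note on the git.py hunks above: the create_atomic() helper is the standard write-to-a-temp-file-then-rename pattern. Because the temporary file is created in the same directory as the final name, os.rename() stays on one filesystem and is atomic on POSIX, so concurrent readers of a mirror tarball never observe a partially written file. A minimal standalone sketch of the same idea (the atomic_write name is illustrative, not part of BitBake's API):

    import os
    import tempfile
    from contextlib import contextmanager

    @contextmanager
    def atomic_write(filename):
        # mkstemp in the destination directory keeps the final rename on
        # one filesystem, which is what makes os.rename() atomic on POSIX.
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(filename))
        try:
            yield tmp
            os.rename(tmp, filename)  # readers see old or new file, never a partial one
        finally:
            os.close(fd)

    with atomic_write("/tmp/mirror.tar.gz") as tmp:
        with open(tmp, "w") as f:
            f.write("payload")
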
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index a4527bf364..5869e1b99b 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -62,36 +62,35 @@ class GitSM(Git):
             return modules

         # Collect the defined submodules, and their attributes
-        for name in ud.names:
-            try:
-                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir)
-            except:
-                # No submodules to update
-                continue
-
-            for m, md in parse_gitmodules(gitmodules).items():
-                try:
-                    module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir)
-                except:
-                    # If the command fails, we don't have a valid file to check. If it doesn't
-                    # fail -- it still might be a failure, see next check...
-                    module_hash = ""
-
-                if not module_hash:
-                    logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
-                    continue
-
-                submodules.append(m)
-                paths[m] = md['path']
-                revision[m] = ud.revisions[name]
-                uris[m] = md['url']
-                subrevision[m] = module_hash.split()[2]
-
-                # Convert relative to absolute uri based on parent uri
-                if uris[m].startswith('..'):
-                    newud = copy.copy(ud)
-                    newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
-                    uris[m] = Git._get_repo_url(self, newud)
+        try:
+            gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=workdir)
+        except:
+            # No submodules to update
+            gitmodules = ""
+
+        for m, md in parse_gitmodules(gitmodules).items():
+            try:
+                module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revision, md['path']), d, quiet=True, workdir=workdir)
+            except:
+                # If the command fails, we don't have a valid file to check. If it doesn't
+                # fail -- it still might be a failure, see next check...
+                module_hash = ""
+
+            if not module_hash:
+                logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
+                continue
+
+            submodules.append(m)
+            paths[m] = md['path']
+            revision[m] = ud.revision
+            uris[m] = md['url']
+            subrevision[m] = module_hash.split()[2]
+
+            # Convert relative to absolute uri based on parent uri
+            if uris[m].startswith('..') or uris[m].startswith('./'):
+                newud = copy.copy(ud)
+                newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
+                uris[m] = Git._get_repo_url(self, newud)

         for module in submodules:
             # Translate the module url into a SRC_URI
@@ -115,10 +114,21 @@ class GitSM(Git):
                 # This has to be a file reference
                 proto = "file"
                 url = "gitsm://" + uris[module]
+            if url.endswith("{}{}".format(ud.host, ud.path)):
+                raise bb.fetch2.FetchError("Submodule refers to the parent repository. This will cause deadlock situation in current version of Bitbake." \
+                                           "Consider using git fetcher instead.")

             url += ';protocol=%s' % proto
             url += ";name=%s" % module
             url += ";subpath=%s" % module
+            url += ";nobranch=1"
+            url += ";lfs=%s" % ("1" if self._need_lfs(ud) else "0")
+            # Note that adding "user=" here to give credentials to the
+            # submodule is not supported. Since using SRC_URI to give git://
+            # URL a password is not supported, one have to use one of the
+            # recommended way (eg. ~/.netrc or SSH config) which does specify
+            # the user (See comment in git.py).
+            # So, we will not take patches adding "user=" support here.

             ld = d.createCopy()
             # Not necessary to set SRC_URI, since we're passing the URI to
@@ -136,20 +146,26 @@ class GitSM(Git):

         return submodules != []

+    def call_process_submodules(self, ud, d, extra_check, subfunc):
+        # If we're using a shallow mirror tarball it needs to be
+        # unpacked temporarily so that we can examine the .gitmodules file
+        # Unpack even when ud.clonedir is not available,
+        # which may occur during a fast shallow clone
+        unpack = extra_check or not os.path.exists(ud.clonedir)
+        if ud.shallow and os.path.exists(ud.fullshallow) and unpack:
+            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+            try:
+                runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+                self.process_submodules(ud, tmpdir, subfunc, d)
+            finally:
+                shutil.rmtree(tmpdir)
+        else:
+            self.process_submodules(ud, ud.clonedir, subfunc, d)
+
     def need_update(self, ud, d):
         if Git.need_update(self, ud, d):
             return True

-        try:
-            # Check for the nugget dropped by the download operation
-            known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
-                            (ud.basecmd), d, workdir=ud.clonedir)
-
-            if ud.revisions[ud.names[0]] in known_srcrevs.split():
-                return False
-        except bb.fetch2.FetchError:
-            pass
-
         need_update_list = []
         def need_update_submodule(ud, url, module, modpath, workdir, d):
             url += ";bareclone=1;nobranch=1"
@@ -163,22 +179,9 @@ class GitSM(Git):
                 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
                 need_update_result = True

-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
-            self.process_submodules(ud, tmpdir, need_update_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
-            if len(need_update_list) == 0:
-                # We already have the required commits of all submodules. Drop
-                # a nugget so we don't need to check again.
-                runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                        (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
-
-        if len(need_update_list) > 0:
+        self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
+
+        if need_update_list:
             logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
             return True

@@ -199,19 +202,7 @@ class GitSM(Git):
                 raise

         Git.download(self, ud, d)
-
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
-            self.process_submodules(ud, tmpdir, download_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, download_submodule, d)
-            # Drop a nugget for the srcrev we've fetched (used by need_update)
-            runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                    (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
+        self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)

     def unpack(self, ud, destdir, d):
         def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -225,6 +216,10 @@ class GitSM(Git):

             try:
                 newfetch = Fetch([url], d, cache=False)
+                # modpath is needed by unpack tracer to calculate submodule
+                # checkout dir
+                new_ud = newfetch.ud[url]
+                new_ud.modpath = modpath
                 newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
             except Exception as e:
                 logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
@@ -250,13 +245,27 @@ class GitSM(Git):
         ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)

         if not ud.bareclone and ret:
-            # All submodules should already be downloaded and configured in the tree. This simply sets
-            # up the configuration and checks out the files. The main project config should remain
-            # unmodified, and no download from the internet should occur.
-            runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+            cmdprefix = ""
+            # Avoid LFS smudging (replacing the LFS pointers with the actual content) when LFS shouldn't be used but git-lfs is installed.
+            if not self._need_lfs(ud):
+                cmdprefix = "GIT_LFS_SKIP_SMUDGE=1 "
+            runfetchcmd("%s%s submodule update --recursive --no-fetch" % (cmdprefix, ud.basecmd), d, quiet=True, workdir=ud.destdir)
+    def clean(self, ud, d):
+        def clean_submodule(ud, url, module, modpath, workdir, d):
+            url += ";bareclone=1;nobranch=1"
+            try:
+                newfetch = Fetch([url], d, cache=False)
+                newfetch.clean()
+            except Exception as e:
+                logger.warning('gitsm: submodule clean failed: %s %s' % (type(e).__name__, str(e)))
+
+        self.call_process_submodules(ud, d, True, clean_submodule)
+
+        # Clean top git dir
+        Git.clean(self, ud, d)

     def implicit_urldata(self, ud, d):
-        import shutil, subprocess, tempfile
+        import subprocess

         urldata = []
         def add_submodule(ud, url, module, modpath, workdir, d):
@@ -264,14 +273,6 @@ class GitSM(Git):
             newfetch = Fetch([url], d, cache=False)
             urldata.extend(newfetch.expanded_urldata())

-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
-            self.process_submodules(ud, tmpdir, add_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, add_submodule, d)
+        self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)

         return urldata
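
Note on the gitsm.py changes above: three near-identical copies of the unpack-the-shallow-tarball-then-walk-submodules logic collapse into the single call_process_submodules() helper, and the old bitbake.srcrev "nugget" caching is dropped. A reduced sketch of the pattern under these assumptions (with_repo and visit are illustrative names, not BitBake API):

    import shutil
    import subprocess
    import tempfile

    def with_repo(shallow_tarball, clonedir, must_unpack, visit):
        # Unpack the shallow mirror tarball into a throwaway directory when
        # the bare clone can't be used directly, then run the same visitor
        # callback over whichever working directory is available.
        if shallow_tarball and must_unpack:
            tmpdir = tempfile.mkdtemp()
            try:
                subprocess.check_call(["tar", "-xzf", shallow_tarball, "-C", tmpdir])
                visit(tmpdir)
            finally:
                shutil.rmtree(tmpdir)
        else:
            visit(clonedir)
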
diff --git a/bitbake/lib/bb/fetch2/gomod.py b/bitbake/lib/bb/fetch2/gomod.py
new file mode 100644
index 0000000000..53c1d8d115
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gomod.py
@@ -0,0 +1,273 @@
1"""
2BitBake 'Fetch' implementation for Go modules
3
4The gomod/gomodgit fetchers are used to download Go modules to the module cache
5from a module proxy or directly from a version control repository.
6
7Example SRC_URI:
8
9SRC_URI += "gomod://golang.org/x/net;version=v0.9.0;sha256sum=..."
10SRC_URI += "gomodgit://golang.org/x/net;version=v0.9.0;repo=go.googlesource.com/net;srcrev=..."
11
12Required SRC_URI parameters:
13
14- version
15 The version of the module.
16
17Optional SRC_URI parameters:
18
19- mod
20 Fetch and unpack the go.mod file only instead of the complete module.
21 The go command may need to download go.mod files for many different modules
22 when computing the build list, and go.mod files are much smaller than
23 module zip files.
24 The default is "0", set mod=1 for the go.mod file only.
25
26- sha256sum
27 The checksum of the module zip file, or the go.mod file in case of fetching
28 only the go.mod file. Alternatively, set the SRC_URI varible flag for
29 "module@version.sha256sum".
30
31- protocol
32 The method used when fetching directly from a version control repository.
33 The default is "https" for git.
34
35- repo
36 The URL when fetching directly from a version control repository. Required
37 when the URL is different from the module path.
38
39- srcrev
40 The revision identifier used when fetching directly from a version control
41 repository. Alternatively, set the SRCREV varible for "module@version".
42
43- subdir
44 The module subdirectory when fetching directly from a version control
45 repository. Required when the module is not located in the root of the
46 repository.
47
48Related variables:
49
50- GO_MOD_PROXY
51 The module proxy used by the fetcher.
52
53- GO_MOD_CACHE_DIR
54 The directory where the module cache is located.
55 This must match the exported GOMODCACHE variable for the go command to find
56 the downloaded modules.
57
58See the Go modules reference, https://go.dev/ref/mod, for more information
59about the module cache, module proxies and version control systems.
60"""
61
62import hashlib
63import os
64import re
65import shutil
66import subprocess
67import zipfile
68
69import bb
70from bb.fetch2 import FetchError
71from bb.fetch2 import MissingParameterError
72from bb.fetch2 import runfetchcmd
73from bb.fetch2 import subprocess_setup
74from bb.fetch2.git import Git
75from bb.fetch2.wget import Wget
76
77
78def escape(path):
79 """Escape capital letters using exclamation points."""
80 return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)
81
82
83class GoMod(Wget):
84 """Class to fetch Go modules from a Go module proxy via wget"""
85
86 def supports(self, ud, d):
87 """Check to see if a given URL is for this fetcher."""
88 return ud.type == 'gomod'
89
90 def urldata_init(self, ud, d):
91 """Set up to download the module from the module proxy.
92
93 Set up to download the module zip file to the module cache directory
94 and unpack the go.mod file (unless downloading only the go.mod file):
95
96 cache/download/<module>/@v/<version>.zip: The module zip file.
97 cache/download/<module>/@v/<version>.mod: The go.mod file.
98 """
99
100 proxy = d.getVar('GO_MOD_PROXY') or 'proxy.golang.org'
101 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
102
103 if 'version' not in ud.parm:
104 raise MissingParameterError('version', ud.url)
105
106 module = ud.host
107 if ud.path != '/':
108 module += ud.path
109 ud.parm['module'] = module
110 version = ud.parm['version']
111
112 # Set URL and filename for wget download
113 if ud.parm.get('mod', '0') == '1':
114 ext = '.mod'
115 else:
116 ext = '.zip'
117 path = escape(f"{module}/@v/{version}{ext}")
118 ud.url = bb.fetch2.encodeurl(
119 ('https', proxy, '/' + path, None, None, None))
120 ud.parm['downloadfilename'] = f"{module.replace('/', '.')}@{version}{ext}"
121
122 # Set name for checksum verification
123 ud.parm['name'] = f"{module}@{version}"
124
125 # Set path for unpack
126 ud.parm['unpackpath'] = os.path.join(moddir, 'cache/download', path)
127
128 super().urldata_init(ud, d)
129
130 def unpack(self, ud, rootdir, d):
131 """Unpack the module in the module cache."""
132
133 # Unpack the module zip file or go.mod file
134 unpackpath = os.path.join(rootdir, ud.parm['unpackpath'])
135 unpackdir = os.path.dirname(unpackpath)
136 bb.utils.mkdirhier(unpackdir)
137 ud.unpack_tracer.unpack("file-copy", unpackdir)
138 cmd = f"cp {ud.localpath} {unpackpath}"
139 path = d.getVar('PATH')
140 if path:
141 cmd = f"PATH={path} {cmd}"
142 name = os.path.basename(unpackpath)
143 bb.note(f"Unpacking {name} to {unpackdir}/")
144 subprocess.check_call(cmd, shell=True, preexec_fn=subprocess_setup)
145
146 if name.endswith('.zip'):
147 # Unpack the go.mod file from the zip file
148 module = ud.parm['module']
149 name = name.rsplit('.', 1)[0] + '.mod'
150 bb.note(f"Unpacking {name} to {unpackdir}/")
151 with zipfile.ZipFile(ud.localpath) as zf:
152 with open(os.path.join(unpackdir, name), mode='wb') as mf:
153 try:
154 f = module + '@' + ud.parm['version'] + '/go.mod'
155 shutil.copyfileobj(zf.open(f), mf)
156 except KeyError:
157 # If the module does not have a go.mod file, synthesize
158 # one containing only a module statement.
159 mf.write(f'module {module}\n'.encode())
160
161
162class GoModGit(Git):
163 """Class to fetch Go modules directly from a git repository"""
164
165 def supports(self, ud, d):
166 """Check to see if a given URL is for this fetcher."""
167 return ud.type == 'gomodgit'
168
169 def urldata_init(self, ud, d):
170 """Set up to download the module from the git repository.
171
172 Set up to download the git repository to the module cache directory and
173 unpack the module zip file and the go.mod file:
174
175 cache/vcs/<hash>: The bare git repository.
176 cache/download/<module>/@v/<version>.zip: The module zip file.
177 cache/download/<module>/@v/<version>.mod: The go.mod file.
178 """
179
180 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
181
182 if 'version' not in ud.parm:
183 raise MissingParameterError('version', ud.url)
184
185 module = ud.host
186 if ud.path != '/':
187 module += ud.path
188 ud.parm['module'] = module
189
190 # Set host, path and srcrev for git download
191 if 'repo' in ud.parm:
192 repo = ud.parm['repo']
193 idx = repo.find('/')
194 if idx != -1:
195 ud.host = repo[:idx]
196 ud.path = repo[idx:]
197 else:
198 ud.host = repo
199 ud.path = ''
200 if 'protocol' not in ud.parm:
201 ud.parm['protocol'] = 'https'
202 ud.name = f"{module}@{ud.parm['version']}"
203 srcrev = d.getVar('SRCREV_' + ud.name)
204 if srcrev:
205 if 'srcrev' not in ud.parm:
206 ud.parm['srcrev'] = srcrev
207 else:
208 if 'srcrev' in ud.parm:
209 d.setVar('SRCREV_' + ud.name, ud.parm['srcrev'])
210 if 'branch' not in ud.parm:
211 ud.parm['nobranch'] = '1'
212
213 # Set subpath, subdir and bareclone for git unpack
214 if 'subdir' in ud.parm:
215 ud.parm['subpath'] = ud.parm['subdir']
216 key = f"git3:{ud.parm['protocol']}://{ud.host}{ud.path}".encode()
217 ud.parm['key'] = key
218 ud.parm['subdir'] = os.path.join(moddir, 'cache/vcs',
219 hashlib.sha256(key).hexdigest())
220 ud.parm['bareclone'] = '1'
221
222 super().urldata_init(ud, d)
223
224 def unpack(self, ud, rootdir, d):
225 """Unpack the module in the module cache."""
226
227 # Unpack the bare git repository
228 super().unpack(ud, rootdir, d)
229
230 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
231
232 # Create the info file
233 module = ud.parm['module']
234 repodir = os.path.join(rootdir, ud.parm['subdir'])
235 with open(repodir + '.info', 'wb') as f:
236 f.write(ud.parm['key'])
237
238 # Unpack the go.mod file from the repository
239 unpackdir = os.path.join(rootdir, moddir, 'cache/download',
240 escape(module), '@v')
241 bb.utils.mkdirhier(unpackdir)
242 srcrev = ud.parm['srcrev']
243 version = ud.parm['version']
244 escaped_version = escape(version)
245 cmd = f"git ls-tree -r --name-only '{srcrev}'"
246 if 'subpath' in ud.parm:
247 cmd += f" '{ud.parm['subpath']}'"
248 files = runfetchcmd(cmd, d, workdir=repodir).split()
249 name = escaped_version + '.mod'
250 bb.note(f"Unpacking {name} to {unpackdir}/")
251 with open(os.path.join(unpackdir, name), mode='wb') as mf:
252 f = 'go.mod'
253 if 'subpath' in ud.parm:
254 f = os.path.join(ud.parm['subpath'], f)
255 if f in files:
256 cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
257 subprocess.check_call(cmd, stdout=mf, cwd=repodir,
258 preexec_fn=subprocess_setup)
259 else:
260 # If the module does not have a go.mod file, synthesize one
261 # containing only a module statement.
262 mf.write(f'module {module}\n'.encode())
263
264 # Synthesize the module zip file from the repository
265 name = escaped_version + '.zip'
266 bb.note(f"Unpacking {name} to {unpackdir}/")
267 with zipfile.ZipFile(os.path.join(unpackdir, name), mode='w') as zf:
268 prefix = module + '@' + version + '/'
269 for f in files:
270 cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
271 data = subprocess.check_output(cmd, cwd=repodir,
272 preexec_fn=subprocess_setup)
273 zf.writestr(prefix + f, data)
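
Note on the new gomod.py above: the escape() helper implements the Go module proxy's case-encoding rule, where every capital letter in a module path is replaced by '!' plus its lowercase form so paths stay unambiguous on case-insensitive filesystems. Replaying the function exactly as added above:

    import re

    def escape(path):
        """Escape capital letters using exclamation points."""
        return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)

    # 'github.com/BurntSushi/toml' -> 'github.com/!burnt!sushi/toml'
    print(escape("github.com/BurntSushi/toml"))
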
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 063e13008a..cbff8c490c 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -242,6 +242,7 @@ class Hg(FetchMethod):
         revflag = "-r %s" % ud.revision
         subdir = ud.parm.get("destsuffix", ud.module)
         codir = "%s/%s" % (destdir, subdir)
+        ud.unpack_tracer.unpack("hg", codir)

         scmdata = ud.parm.get("scmdata", "")
         if scmdata != "nokeep":
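
Note: the one-line hg.py change belongs to the same series that added ud.unpack_tracer.unpack(...) calls to the git, npm and gomod fetchers above. The tracer is an observer attached to the url data; a do-nothing stand-in showing the expected shape (illustrative only, not BitBake's actual tracer class):

    class DummyUnpackTracer:
        """Minimal observer: records which fetcher unpacked into which directory."""
        def unpack(self, method, destdir):
            print("unpacked via %s into %s" % (method, destdir))
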
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index e7d1c8c58f..fda56a564e 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -29,11 +29,10 @@ class Local(FetchMethod):

     def urldata_init(self, ud, d):
         # We don't set localfile as for this fetcher the file is already local!
-        ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
-        ud.basename = os.path.basename(ud.decodedurl)
-        ud.basepath = ud.decodedurl
+        ud.basename = os.path.basename(ud.path)
+        ud.basepath = ud.path
         ud.needdonestamp = False
-        if "*" in ud.decodedurl:
+        if "*" in ud.path:
             raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url)
         return

@@ -41,28 +40,24 @@ class Local(FetchMethod):
41 """ 40 """
42 Return the local filename of a given url assuming a successful fetch. 41 Return the local filename of a given url assuming a successful fetch.
43 """ 42 """
44 return self.localpaths(urldata, d)[-1] 43 return self.localfile_searchpaths(urldata, d)[-1]
45 44
46 def localpaths(self, urldata, d): 45 def localfile_searchpaths(self, urldata, d):
47 """ 46 """
48 Return the local filename of a given url assuming a successful fetch. 47 Return the local filename of a given url assuming a successful fetch.
49 """ 48 """
50 searched = [] 49 searched = []
51 path = urldata.decodedurl 50 path = urldata.path
52 newpath = path 51 newpath = path
53 if path[0] == "/": 52 if path[0] == "/":
53 logger.debug2("Using absolute %s" % (path))
54 return [path] 54 return [path]
55 filespath = d.getVar('FILESPATH') 55 filespath = d.getVar('FILESPATH')
56 if filespath: 56 if filespath:
57 logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) 57 logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
58 newpath, hist = bb.utils.which(filespath, path, history=True) 58 newpath, hist = bb.utils.which(filespath, path, history=True)
59 logger.debug2("Using %s for %s" % (newpath, path))
59 searched.extend(hist) 60 searched.extend(hist)
60 if not os.path.exists(newpath):
61 dldirfile = os.path.join(d.getVar("DL_DIR"), path)
62 logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
63 bb.utils.mkdirhier(os.path.dirname(dldirfile))
64 searched.append(dldirfile)
65 return searched
66 return searched 61 return searched
67 62
68 def need_update(self, ud, d): 63 def need_update(self, ud, d):
@@ -78,9 +73,7 @@ class Local(FetchMethod):
             filespath = d.getVar('FILESPATH')
             if filespath:
                 locations = filespath.split(":")
-            locations.append(d.getVar("DL_DIR"))
-
-            msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
+            msg = "Unable to find file " + urldata.url + " anywhere to download to " + urldata.localpath + ". The paths that were searched were:\n    " + "\n    ".join(locations)
             raise FetchError(msg)

         return True
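
Note on the local.py changes above: a relative file:// path must now resolve somewhere in FILESPATH; the old fallback of materialising a path under DL_DIR is gone, and the error message lists exactly the locations that were tried. A reduced model of the lookup (the helper name is illustrative, not the BitBake API):

    import os

    def find_in_searchpath(filespath, relpath):
        """Return the first existing match of relpath in the colon-separated
        filespath, plus every location that was tried."""
        searched = []
        for base in filespath.split(":"):
            candidate = os.path.join(base, relpath)
            searched.append(candidate)
            if os.path.exists(candidate):
                return candidate, searched
        return None, searched
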
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 47898509ff..e469d66768 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -42,19 +42,27 @@ from bb.utils import is_semver

 def npm_package(package):
     """Convert the npm package name to remove unsupported character"""
-    # Scoped package names (with the @) use the same naming convention
-    # as the 'npm pack' command.
-    if package.startswith("@"):
-        return re.sub("/", "-", package[1:])
-    return package
+    # For scoped package names ('@user/package') the '/' is replaced by a '-'.
+    # This is similar to what 'npm pack' does, but 'npm pack' also strips the
+    # leading '@', which can lead to ambiguous package names.
+    name = re.sub("/", "-", package)
+    name = name.lower()
+    name = re.sub(r"[^\-a-z0-9@]", "", name)
+    name = name.strip("-")
+    return name
+

 def npm_filename(package, version):
     """Get the filename of a npm package"""
     return npm_package(package) + "-" + version + ".tgz"

-def npm_localfile(package, version):
+def npm_localfile(package, version=None):
     """Get the local filename of a npm package"""
-    return os.path.join("npm2", npm_filename(package, version))
+    if version is not None:
+        filename = npm_filename(package, version)
+    else:
+        filename = package
+    return os.path.join("npm2", filename)

 def npm_integrity(integrity):
     """
@@ -69,41 +77,67 @@ def npm_unpack(tarball, destdir, d):
     bb.utils.mkdirhier(destdir)
     cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball)
     cmd += " --no-same-owner"
+    cmd += " --delay-directory-restore"
     cmd += " --strip-components=1"
     runfetchcmd(cmd, d, workdir=destdir)
+    runfetchcmd("chmod -R +X '%s'" % (destdir), d, quiet=True, workdir=destdir)

 class NpmEnvironment(object):
     """
     Using a npm config file seems more reliable than using cli arguments.
     This class allows to create a controlled environment for npm commands.
     """
-    def __init__(self, d, configs=None):
+    def __init__(self, d, configs=[], npmrc=None):
         self.d = d
-        self.configs = configs
+
+        self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1)
+
+        hn = self._home_npmrc(d)
+        if hn is not None:
+            with open(hn, 'r') as hnf:
+                self.user_config.write(hnf.read())
+
+        for key, value in configs:
+            self.user_config.write("%s=%s\n" % (key, value))
+
+        if npmrc:
+            self.global_config_name = npmrc
+        else:
+            self.global_config_name = "/dev/null"
+
+    def __del__(self):
+        if self.user_config:
+            self.user_config.close()
+
+    def _home_npmrc(self, d):
+        """Function to return user's HOME .npmrc file (or None if it doesn't exist)"""
+        home_npmrc_file = os.path.join(os.environ.get("HOME"), ".npmrc")
+        if d.getVar("BB_USE_HOME_NPMRC") == "1" and os.path.exists(home_npmrc_file):
+            bb.warn(f"BB_USE_HOME_NPMRC flag set and valid .npmrc detected - "\
+                    f"npm fetcher will use {home_npmrc_file}")
+            return home_npmrc_file
+        return None

     def run(self, cmd, args=None, configs=None, workdir=None):
         """Run npm command in a controlled environment"""
         with tempfile.TemporaryDirectory() as tmpdir:
             d = bb.data.createCopy(self.d)
+            d.setVar("PATH", d.getVar("PATH")) # PATH might contain $HOME - evaluate it before patching
             d.setVar("HOME", tmpdir)

-            cfgfile = os.path.join(tmpdir, "npmrc")
-
             if not workdir:
                 workdir = tmpdir

             def _run(cmd):
-                cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd
-                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd
+                cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config.name) + cmd
+                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd
                 return runfetchcmd(cmd, d, workdir=workdir)

-            if self.configs:
-                for key, value in self.configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
-
             if configs:
+                bb.warn("Use of configs argument of NpmEnvironment.run() function"
+                        " is deprecated. Please use args argument instead.")
                 for key, value in configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+                    cmd += " --%s=%s" % (key, shlex.quote(value))

             if args:
                 for key, value in args:
@@ -142,12 +176,12 @@ class Npm(FetchMethod):
             raise ParameterError("Invalid 'version' parameter", ud.url)

         # Extract the 'registry' part of the url
-        ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0])
+        ud.registry = re.sub(r"^npm://", "https://", ud.url.split(";")[0])

         # Using the 'downloadfilename' parameter as local filename
         # or the npm package name.
         if "downloadfilename" in ud.parm:
-            ud.localfile = d.expand(ud.parm["downloadfilename"])
+            ud.localfile = npm_localfile(ud.parm["downloadfilename"])
         else:
             ud.localfile = npm_localfile(ud.package, ud.version)

@@ -165,14 +199,14 @@ class Npm(FetchMethod):

     def _resolve_proxy_url(self, ud, d):
         def _npm_view():
-            configs = []
-            configs.append(("json", "true"))
-            configs.append(("registry", ud.registry))
+            args = []
+            args.append(("json", "true"))
+            args.append(("registry", ud.registry))
             pkgver = shlex.quote(ud.package + "@" + ud.version)
             cmd = ud.basecmd + " view %s" % pkgver
             env = NpmEnvironment(d)
             check_network_access(d, cmd, ud.registry)
-            view_string = env.run(cmd, configs=configs)
+            view_string = env.run(cmd, args=args)

             if not view_string:
                 raise FetchError("Unavailable package %s" % pkgver, ud.url)
@@ -280,6 +314,7 @@ class Npm(FetchMethod):
         destsuffix = ud.parm.get("destsuffix", "npm")
         destdir = os.path.join(rootdir, destsuffix)
         npm_unpack(ud.localpath, destdir, d)
+        ud.unpack_tracer.unpack("npm", destdir)

     def clean(self, ud, d):
         """Clean any existing full or partial download"""
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index 0c3511d8ab..2f9599ee9e 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -24,34 +24,39 @@ import bb
24from bb.fetch2 import Fetch 24from bb.fetch2 import Fetch
25from bb.fetch2 import FetchMethod 25from bb.fetch2 import FetchMethod
26from bb.fetch2 import ParameterError 26from bb.fetch2 import ParameterError
27from bb.fetch2 import runfetchcmd
27from bb.fetch2 import URI 28from bb.fetch2 import URI
28from bb.fetch2.npm import npm_integrity 29from bb.fetch2.npm import npm_integrity
29from bb.fetch2.npm import npm_localfile 30from bb.fetch2.npm import npm_localfile
30from bb.fetch2.npm import npm_unpack 31from bb.fetch2.npm import npm_unpack
31from bb.utils import is_semver 32from bb.utils import is_semver
33from bb.utils import lockfile
34from bb.utils import unlockfile
32 35
33def foreach_dependencies(shrinkwrap, callback=None, dev=False): 36def foreach_dependencies(shrinkwrap, callback=None, dev=False):
34 """ 37 """
35 Run a callback for each dependency of a shrinkwrap file. 38 Run a callback for each dependency of a shrinkwrap file.
36 The callback uses the format: 39 The callback uses the format:
37 callback(name, params, deptree) 40 callback(name, data, location)
38 with: 41 with:
39 name = the package name (string) 42 name = the package name (string)
40 params = the package parameters (dictionary) 43 data = the package data (dictionary)
41 deptree = the package dependency tree (array of strings) 44 location = the location of the package (string)
42 """ 45 """
43 def _walk_deps(deps, deptree): 46 packages = shrinkwrap.get("packages")
44 for name in deps: 47 if not packages:
45 subtree = [*deptree, name] 48 raise FetchError("Invalid shrinkwrap file format")
46 _walk_deps(deps[name].get("dependencies", {}), subtree) 49
47 if callback is not None: 50 for location, data in packages.items():
48 if deps[name].get("dev", False) and not dev: 51 # Skip empty main and local link target packages
49 continue 52 if not location.startswith('node_modules/'):
50 elif deps[name].get("bundled", False): 53 continue
51 continue 54 elif not dev and data.get("dev", False):
52 callback(name, deps[name], subtree) 55 continue
53 56 elif data.get("inBundle", False):
54 _walk_deps(shrinkwrap.get("dependencies", {}), []) 57 continue
58 name = location.split('node_modules/')[-1]
59 callback(name, data, location)
55 60
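To make the new traversal concrete: a lockfileVersion 2/3 shrinkwrap carries a flat "packages" map keyed by install location instead of the old nested "dependencies" tree, and the filtering above drops the root package, link targets, and dev/bundled dependencies. A minimal sketch under those assumptions (the shrinkwrap content is illustrative):

    import json

    shrinkwrap = json.loads("""{
        "packages": {
            "": {"name": "example-app"},
            "node_modules/foo": {"version": "1.0.0",
                                 "resolved": "https://registry.example.com/foo/-/foo-1.0.0.tgz",
                                 "integrity": "sha512-AAAA"},
            "node_modules/bar": {"version": "2.0.0", "dev": true}
        }
    }""")

    for location, data in shrinkwrap["packages"].items():
        if not location.startswith("node_modules/"):
            continue  # skip the root package and local link targets
        if data.get("dev", False) or data.get("inBundle", False):
            continue  # skip dev and bundled dependencies
        name = location.split("node_modules/")[-1]
        print(name, data.get("version"))
    # -> foo 1.0.0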
56class NpmShrinkWrap(FetchMethod): 61class NpmShrinkWrap(FetchMethod):
57 """Class to fetch all package from a shrinkwrap file""" 62 """Class to fetch all package from a shrinkwrap file"""
@@ -72,19 +77,28 @@ class NpmShrinkWrap(FetchMethod):
72 # Resolve the dependencies 77 # Resolve the dependencies
73 ud.deps = [] 78 ud.deps = []
74 79
75 def _resolve_dependency(name, params, deptree): 80 def _resolve_dependency(name, params, destsuffix):
76 url = None 81 url = None
77 localpath = None 82 localpath = None
78 extrapaths = [] 83 extrapaths = []
79 destsubdirs = [os.path.join("node_modules", dep) for dep in deptree] 84 unpack = True
80 destsuffix = os.path.join(*destsubdirs)
81 85
82 integrity = params.get("integrity", None) 86 integrity = params.get("integrity")
83 resolved = params.get("resolved", None) 87 resolved = params.get("resolved")
84 version = params.get("version", None) 88 version = params.get("version")
89 link = params.get("link", False)
90
91 # Handle link sources
92 if link:
93 localpath = resolved
94 unpack = False
85 95
86 # Handle registry sources 96 # Handle registry sources
87 if is_semver(version) and resolved and integrity: 97 elif version and is_semver(version) and integrity:
98 # Handle duplicate dependencies without url
99 if not resolved:
100 return
101
88 localfile = npm_localfile(name, version) 102 localfile = npm_localfile(name, version)
89 103
90 uri = URI(resolved) 104 uri = URI(resolved)
@@ -108,10 +122,10 @@ class NpmShrinkWrap(FetchMethod):
108 extrapaths.append(resolvefile) 122 extrapaths.append(resolvefile)
109 123
110 # Handle http tarball sources 124 # Handle http tarball sources
111 elif version.startswith("http") and integrity: 125 elif resolved.startswith("http") and integrity:
112 localfile = os.path.join("npm2", os.path.basename(version)) 126 localfile = npm_localfile(os.path.basename(resolved))
113 127
114 uri = URI(version) 128 uri = URI(resolved)
115 uri.params["downloadfilename"] = localfile 129 uri.params["downloadfilename"] = localfile
116 130
117 checksum_name, checksum_expected = npm_integrity(integrity) 131 checksum_name, checksum_expected = npm_integrity(integrity)
@@ -121,8 +135,12 @@ class NpmShrinkWrap(FetchMethod):
121 135
122 localpath = os.path.join(d.getVar("DL_DIR"), localfile) 136 localpath = os.path.join(d.getVar("DL_DIR"), localfile)
123 137
138 # Handle local tarball sources
139 elif resolved.startswith("file"):
140 localpath = resolved[5:]
141
124 # Handle git sources 142 # Handle git sources
125 elif version.startswith("git"): 143 elif resolved.startswith("git"):
126 regex = re.compile(r""" 144 regex = re.compile(r"""
127 ^ 145 ^
128 git\+ 146 git\+
@@ -134,29 +152,31 @@ class NpmShrinkWrap(FetchMethod):
134 $ 152 $
135 """, re.VERBOSE) 153 """, re.VERBOSE)
136 154
137 match = regex.match(version) 155 match = regex.match(resolved)
138
139 if not match: 156 if not match:
140 raise ParameterError("Invalid git url: %s" % version, ud.url) 157 raise ParameterError("Invalid git url: %s" % resolved, ud.url)
141 158
142 groups = match.groupdict() 159 groups = match.groupdict()
143 160
144 uri = URI("git://" + str(groups["url"])) 161 uri = URI("git://" + str(groups["url"]))
145 uri.params["protocol"] = str(groups["protocol"]) 162 uri.params["protocol"] = str(groups["protocol"])
146 uri.params["rev"] = str(groups["rev"]) 163 uri.params["rev"] = str(groups["rev"])
164 uri.params["nobranch"] = "1"
147 uri.params["destsuffix"] = destsuffix 165 uri.params["destsuffix"] = destsuffix
148 166
149 url = str(uri) 167 url = str(uri)
150 168
151 # local tarball sources and local link sources are unsupported
152 else: 169 else:
153 raise ParameterError("Unsupported dependency: %s" % name, ud.url) 170 raise ParameterError("Unsupported dependency: %s" % name, ud.url)
154 171
172 # name is needed by unpack tracer for module mapping
155 ud.deps.append({ 173 ud.deps.append({
174 "name": name,
156 "url": url, 175 "url": url,
157 "localpath": localpath, 176 "localpath": localpath,
158 "extrapaths": extrapaths, 177 "extrapaths": extrapaths,
159 "destsuffix": destsuffix, 178 "destsuffix": destsuffix,
179 "unpack": unpack,
160 }) 180 })
161 181
162 try: 182 try:
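Both the registry and the http tarball branches verify downloads through npm_integrity(). A hedged sketch of what that helper is expected to return, assuming the usual SRI "algorithm-base64digest" form:

    import base64

    def sri_to_checksum(integrity):
        # "sha512-<base64>" -> ("sha512sum", "<hex digest>")
        algorithm, value = integrity.split("-", maxsplit=1)
        return "%ssum" % algorithm, base64.b64decode(value).hex()

    name, expected = sri_to_checksum("sha512-" + base64.b64encode(b"\x01\x02\x03").decode())
    print(name, expected)  # -> sha512sum 010203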
@@ -177,17 +197,23 @@ class NpmShrinkWrap(FetchMethod):
177 # This fetcher resolves multiple URIs from a shrinkwrap file and then 197 # This fetcher resolves multiple URIs from a shrinkwrap file and then
178 # forwards it to a proxy fetcher. The management of the donestamp file, 198 # forwards it to a proxy fetcher. The management of the donestamp file,
179 # the lockfile and the checksums are forwarded to the proxy fetcher. 199 # the lockfile and the checksums are forwarded to the proxy fetcher.
180 ud.proxy = Fetch([dep["url"] for dep in ud.deps], data) 200 shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
201 if shrinkwrap_urls:
202 ud.proxy = Fetch(shrinkwrap_urls, data)
181 ud.needdonestamp = False 203 ud.needdonestamp = False
182 204
183 @staticmethod 205 @staticmethod
184 def _foreach_proxy_method(ud, handle): 206 def _foreach_proxy_method(ud, handle):
185 returns = [] 207 returns = []
186 for proxy_url in ud.proxy.urls: 208 # Check if there are dependencies before trying to fetch them
187 proxy_ud = ud.proxy.ud[proxy_url] 209 if len(ud.deps) > 0:
188 proxy_d = ud.proxy.d 210 for proxy_url in ud.proxy.urls:
189 proxy_ud.setup_localpath(proxy_d) 211 proxy_ud = ud.proxy.ud[proxy_url]
190 returns.append(handle(proxy_ud.method, proxy_ud, proxy_d)) 212 proxy_d = ud.proxy.d
213 proxy_ud.setup_localpath(proxy_d)
214 lf = lockfile(proxy_ud.lockfile)
215 returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
216 unlockfile(lf)
191 return returns 217 return returns
192 218
193 def verify_donestamp(self, ud, d): 219 def verify_donestamp(self, ud, d):
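The locking added to _foreach_proxy_method() uses bb.utils' cooperative file locks so two consumers never fetch the same artifact concurrently. A minimal sketch of the pattern (lock path illustrative; the diff above unlocks after the handler returns rather than using try/finally):

    from bb.utils import lockfile, unlockfile

    lf = lockfile("/tmp/dl/foo-1.0.0.tgz.lock")  # blocks until the lock is held
    try:
        pass  # fetch or unpack the artifact while holding the lock
    finally:
        unlockfile(lf)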
@@ -220,10 +246,11 @@ class NpmShrinkWrap(FetchMethod):
220 246
221 def unpack(self, ud, rootdir, d): 247 def unpack(self, ud, rootdir, d):
222 """Unpack the downloaded dependencies""" 248 """Unpack the downloaded dependencies"""
223 destdir = d.getVar("S") 249 destdir = rootdir
224 destsuffix = ud.parm.get("destsuffix") 250 destsuffix = ud.parm.get("destsuffix")
225 if destsuffix: 251 if destsuffix:
226 destdir = os.path.join(rootdir, destsuffix) 252 destdir = os.path.join(rootdir, destsuffix)
253 ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)
227 254
228 bb.utils.mkdirhier(destdir) 255 bb.utils.mkdirhier(destdir)
229 bb.utils.copyfile(ud.shrinkwrap_file, 256 bb.utils.copyfile(ud.shrinkwrap_file,
@@ -237,7 +264,16 @@ class NpmShrinkWrap(FetchMethod):
237 264
238 for dep in manual: 265 for dep in manual:
239 depdestdir = os.path.join(destdir, dep["destsuffix"]) 266 depdestdir = os.path.join(destdir, dep["destsuffix"])
240 npm_unpack(dep["localpath"], depdestdir, d) 267 if dep["url"]:
268 npm_unpack(dep["localpath"], depdestdir, d)
269 else:
270 depsrcdir = os.path.join(destdir, dep["localpath"])
271 if dep["unpack"]:
272 npm_unpack(depsrcdir, depdestdir, d)
273 else:
274 bb.utils.mkdirhier(depdestdir)
275 cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
276 runfetchcmd(cmd, d, workdir=depdestdir)
241 277
242 def clean(self, ud, d): 278 def clean(self, ud, d):
243 """Clean any existing full or partial download""" 279 """Clean any existing full or partial download"""
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index d9ce44390c..495ac8a30a 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4""" 6"""
@@ -9,6 +11,7 @@ Based on the svn "Fetch" implementation.
9 11
10import logging 12import logging
11import os 13import os
14import re
12import bb 15import bb
13from bb.fetch2 import FetchMethod 16from bb.fetch2 import FetchMethod
14from bb.fetch2 import FetchError 17from bb.fetch2 import FetchError
@@ -36,6 +39,7 @@ class Osc(FetchMethod):
36 # Create paths to osc checkouts 39 # Create paths to osc checkouts
37 oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc") 40 oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc")
38 relpath = self._strip_leading_slashes(ud.path) 41 relpath = self._strip_leading_slashes(ud.path)
42 ud.oscdir = oscdir
39 ud.pkgdir = os.path.join(oscdir, ud.host) 43 ud.pkgdir = os.path.join(oscdir, ud.host)
40 ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) 44 ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
41 45
@@ -43,13 +47,13 @@ class Osc(FetchMethod):
43 ud.revision = ud.parm['rev'] 47 ud.revision = ud.parm['rev']
44 else: 48 else:
45 pv = d.getVar("PV", False) 49 pv = d.getVar("PV", False)
46 rev = bb.fetch2.srcrev_internal_helper(ud, d) 50 rev = bb.fetch2.srcrev_internal_helper(ud, d, '')
47 if rev: 51 if rev:
48 ud.revision = rev 52 ud.revision = rev
49 else: 53 else:
50 ud.revision = "" 54 ud.revision = ""
51 55
52 ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision)) 56 ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), relpath.replace('/', '.'), ud.revision))
53 57
54 def _buildosccommand(self, ud, d, command): 58 def _buildosccommand(self, ud, d, command):
55 """ 59 """
@@ -59,26 +63,49 @@ class Osc(FetchMethod):
59 63
60 basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc" 64 basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc"
61 65
62 proto = ud.parm.get('protocol', 'ocs') 66 proto = ud.parm.get('protocol', 'https')
63 67
64 options = [] 68 options = []
65 69
66 config = "-c %s" % self.generate_config(ud, d) 70 config = "-c %s" % self.generate_config(ud, d)
67 71
68 if ud.revision: 72 if getattr(ud, 'revision', ''):
69 options.append("-r %s" % ud.revision) 73 options.append("-r %s" % ud.revision)
70 74
71 coroot = self._strip_leading_slashes(ud.path) 75 coroot = self._strip_leading_slashes(ud.path)
72 76
73 if command == "fetch": 77 if command == "fetch":
74 osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) 78 osccmd = "%s %s -A %s://%s co %s/%s %s" % (basecmd, config, proto, ud.host, coroot, ud.module, " ".join(options))
75 elif command == "update": 79 elif command == "update":
76 osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) 80 osccmd = "%s %s -A %s://%s up %s" % (basecmd, config, proto, ud.host, " ".join(options))
81 elif command == "api_source":
82 osccmd = "%s %s -A %s://%s api source/%s/%s" % (basecmd, config, proto, ud.host, coroot, ud.module)
77 else: 83 else:
78 raise FetchError("Invalid osc command %s" % command, ud.url) 84 raise FetchError("Invalid osc command %s" % command, ud.url)
79 85
80 return osccmd 86 return osccmd
81 87
88 def _latest_revision(self, ud, d, name):
89 """
90 Fetch latest revision for the given package
91 """
92 api_source_cmd = self._buildosccommand(ud, d, "api_source")
93
94 output = runfetchcmd(api_source_cmd, d)
95 match = re.match(r'<directory ?.* rev="(\d+)".*>', output)
96 if match is None:
97 raise FetchError("Unable to parse osc response", ud.url)
98 return match.groups()[0]
99
100 def _revision_key(self, ud, d, name):
101 """
102 Return a unique key for the url
103 """
104 # Collapse adjacent slashes
105 slash_re = re.compile(r"/+")
106 rev = getattr(ud, 'revision', "latest")
107 return "osc:%s%s.%s.%s" % (ud.host, slash_re.sub(".", ud.path), name, rev)
108
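_latest_revision() above scrapes the rev attribute from the <directory> element that the OBS "api source" call returns. A standalone check with an illustrative response:

    import re

    output = '<directory name="example" rev="42" vrev="3" srcmd5="0123abcd"/>'
    match = re.match(r'<directory ?.* rev="(\d+)".*>', output)
    if match is None:
        raise RuntimeError("Unable to parse osc response")
    print(match.groups()[0])  # -> 42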
82 def download(self, ud, d): 109 def download(self, ud, d):
83 """ 110 """
84 Fetch url 111 Fetch url
@@ -86,7 +113,7 @@ class Osc(FetchMethod):
86 113
87 logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'") 114 logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
88 115
89 if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK): 116 if os.access(ud.moddir, os.R_OK):
90 oscupdatecmd = self._buildosccommand(ud, d, "update") 117 oscupdatecmd = self._buildosccommand(ud, d, "update")
91 logger.info("Update "+ ud.url) 118 logger.info("Update "+ ud.url)
92 # update sources there 119 # update sources there
@@ -114,20 +141,23 @@ class Osc(FetchMethod):
114 Generate a .oscrc to be used for this run. 141 Generate a .oscrc to be used for this run.
115 """ 142 """
116 143
117 config_path = os.path.join(d.getVar('OSCDIR'), "oscrc") 144 config_path = os.path.join(ud.oscdir, "oscrc")
145 if not os.path.exists(ud.oscdir):
146 bb.utils.mkdirhier(ud.oscdir)
147
118 if (os.path.exists(config_path)): 148 if (os.path.exists(config_path)):
119 os.remove(config_path) 149 os.remove(config_path)
120 150
121 f = open(config_path, 'w') 151 f = open(config_path, 'w')
152 proto = ud.parm.get('protocol', 'https')
122 f.write("[general]\n") 153 f.write("[general]\n")
123 f.write("apisrv = %s\n" % ud.host) 154 f.write("apiurl = %s://%s\n" % (proto, ud.host))
124 f.write("scheme = http\n")
125 f.write("su-wrapper = su -c\n") 155 f.write("su-wrapper = su -c\n")
126 f.write("build-root = %s\n" % d.getVar('WORKDIR')) 156 f.write("build-root = %s\n" % d.getVar('WORKDIR'))
127 f.write("urllist = %s\n" % d.getVar("OSCURLLIST")) 157 f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
128 f.write("extra-pkgs = gzip\n") 158 f.write("extra-pkgs = gzip\n")
129 f.write("\n") 159 f.write("\n")
130 f.write("[%s]\n" % ud.host) 160 f.write("[%s://%s]\n" % (proto, ud.host))
131 f.write("user = %s\n" % ud.parm["user"]) 161 f.write("user = %s\n" % ud.parm["user"])
132 f.write("pass = %s\n" % ud.parm["pswd"]) 162 f.write("pass = %s\n" % ud.parm["pswd"])
133 f.close() 163 f.close()
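With protocol defaulting to https, generate_config() now writes an oscrc along these lines (host, credentials, and paths are illustrative; urllist comes from OSCURLLIST):

    [general]
    apiurl = https://api.example.org
    su-wrapper = su -c
    build-root = /path/to/workdir
    urllist = http://mirror.example.org/%(project)s/%(repository)s/
    extra-pkgs = gzip

    [https://api.example.org]
    user = builder
    pass = secret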
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index e2a41a4a12..3b6fa4b1ec 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -134,7 +134,7 @@ class Perforce(FetchMethod):
134 134
135 ud.setup_revisions(d) 135 ud.setup_revisions(d)
136 136
137 ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision)) 137 ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
138 138
139 def _buildp4command(self, ud, d, command, depot_filename=None): 139 def _buildp4command(self, ud, d, command, depot_filename=None):
140 """ 140 """
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py
index ffca73c8e4..22c0538139 100644
--- a/bitbake/lib/bb/fetch2/s3.py
+++ b/bitbake/lib/bb/fetch2/s3.py
@@ -18,10 +18,47 @@ The aws tool must be correctly installed and configured prior to use.
18import os 18import os
19import bb 19import bb
20import urllib.request, urllib.parse, urllib.error 20import urllib.request, urllib.parse, urllib.error
21import re
21from bb.fetch2 import FetchMethod 22from bb.fetch2 import FetchMethod
22from bb.fetch2 import FetchError 23from bb.fetch2 import FetchError
23from bb.fetch2 import runfetchcmd 24from bb.fetch2 import runfetchcmd
24 25
26def convertToBytes(value, unit):
27 value = float(value)
28 if (unit == "KiB"):
29 value = value*1024.0;
30 elif (unit == "MiB"):
31 value = value*1024.0*1024.0;
32 elif (unit == "GiB"):
33 value = value*1024.0*1024.0*1024.0;
34 return value
35
36class S3ProgressHandler(bb.progress.LineFilterProgressHandler):
37 """
38 Extract progress information from s3 cp output, e.g.:
39 Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining
40 """
41 def __init__(self, d):
42 super(S3ProgressHandler, self).__init__(d)
43 # Send an initial progress event so the bar gets shown
44 self._fire_progress(0)
45
46 def writeline(self, line):
47 percs = re.findall(r'^Completed (\d+.{0,1}\d*) (\w+)\/(\d+.{0,1}\d*) (\w+) (\(.+\)) with\s+', line)
48 if percs:
49 completed = (percs[-1][0])
50 completedUnit = (percs[-1][1])
51 total = (percs[-1][2])
52 totalUnit = (percs[-1][3])
53 completed = convertToBytes(completed, completedUnit)
54 total = convertToBytes(total, totalUnit)
55 progress = (completed/total)*100.0
56 rate = percs[-1][4]
57 self.update(progress, rate)
58 return False
59 return True
60
61
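A quick standalone check of the progress parsing above, using the sample line from the S3ProgressHandler docstring:

    import re

    line = "Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining"
    percs = re.findall(r'^Completed (\d+.{0,1}\d*) (\w+)\/(\d+.{0,1}\d*) (\w+) (\(.+\)) with\s+', line)
    completed = float(percs[-1][0]) * 1024.0   # KiB -> bytes
    total = float(percs[-1][2]) * 1024.0 ** 3  # GiB -> bytes
    print("%.4f%% %s" % (completed / total * 100.0, percs[-1][4]))
    # -> 0.0001% (12.0 MiB/s)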
25class S3(FetchMethod): 62class S3(FetchMethod):
26 """Class to fetch urls via 'aws s3'""" 63 """Class to fetch urls via 'aws s3'"""
27 64
@@ -40,7 +77,7 @@ class S3(FetchMethod):
40 else: 77 else:
41 ud.basename = os.path.basename(ud.path) 78 ud.basename = os.path.basename(ud.path)
42 79
43 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 80 ud.localfile = ud.basename
44 81
45 ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3" 82 ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"
46 83
@@ -52,7 +89,9 @@ class S3(FetchMethod):
52 89
53 cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath) 90 cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
54 bb.fetch2.check_network_access(d, cmd, ud.url) 91 bb.fetch2.check_network_access(d, cmd, ud.url)
55 runfetchcmd(cmd, d) 92
93 progresshandler = S3ProgressHandler(d)
94 runfetchcmd(cmd, d, False, log=progresshandler)
56 95
57 # Additional sanity checks copied from the wget class (although there 96 # Additional sanity checks copied from the wget class (although there
58 # are no known issues which mean these are required, treat the aws cli 97 # are no known issues which mean these are required, treat the aws cli
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index f87f292e5d..bee71a0d0d 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -77,7 +77,7 @@ class SFTP(FetchMethod):
77 else: 77 else:
78 ud.basename = os.path.basename(ud.path) 78 ud.basename = os.path.basename(ud.path)
79 79
80 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 80 ud.localfile = ud.basename
81 81
82 def download(self, ud, d): 82 def download(self, ud, d):
83 """Fetch urls""" 83 """Fetch urls"""
@@ -103,7 +103,7 @@ class SFTP(FetchMethod):
103 if path[:3] == '/~/': 103 if path[:3] == '/~/':
104 path = path[3:] 104 path = path[3:]
105 105
106 remote = '%s%s:%s' % (user, urlo.hostname, path) 106 remote = '"%s%s:%s"' % (user, urlo.hostname, path)
107 107
108 cmd = '%s %s %s %s' % (basecmd, port, remote, lpath) 108 cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
109 109
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 2c8557e1f8..2a0f2cb44b 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -32,6 +32,7 @@ IETF secsh internet draft:
32 32
33import re, os 33import re, os
34from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd 34from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd
35import urllib
35 36
36 37
37__pattern__ = re.compile(r''' 38__pattern__ = re.compile(r'''
@@ -40,9 +41,9 @@ __pattern__ = re.compile(r'''
40 ( # Optional username/password block 41 ( # Optional username/password block
41 (?P<user>\S+) # username 42 (?P<user>\S+) # username
42 (:(?P<pass>\S+))? # colon followed by the password (optional) 43 (:(?P<pass>\S+))? # colon followed by the password (optional)
43 )?
44 (?P<cparam>(;[^;]+)*)? # connection parameters block (optional) 44 (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
45 @ 45 @
46 )?
46 (?P<host>\S+?) # non-greedy match of the host 47 (?P<host>\S+?) # non-greedy match of the host
47 (:(?P<port>[0-9]+))? # colon followed by the port (optional) 48 (:(?P<port>[0-9]+))? # colon followed by the port (optional)
48 / 49 /
@@ -70,9 +71,9 @@ class SSH(FetchMethod):
70 "git:// prefix with protocol=ssh", urldata.url) 71 "git:// prefix with protocol=ssh", urldata.url)
71 m = __pattern__.match(urldata.url) 72 m = __pattern__.match(urldata.url)
72 path = m.group('path') 73 path = m.group('path')
74 path = urllib.parse.unquote(path)
73 host = m.group('host') 75 host = m.group('host')
74 urldata.localpath = os.path.join(d.getVar('DL_DIR'), 76 urldata.localfile = os.path.basename(os.path.normpath(path))
75 os.path.basename(os.path.normpath(path)))
76 77
77 def download(self, urldata, d): 78 def download(self, urldata, d):
78 dldir = d.getVar('DL_DIR') 79 dldir = d.getVar('DL_DIR')
@@ -96,6 +97,11 @@ class SSH(FetchMethod):
96 fr += '@%s' % host 97 fr += '@%s' % host
97 else: 98 else:
98 fr = host 99 fr = host
100
101 if path[0] != '~':
102 path = '/%s' % path
103 path = urllib.parse.unquote(path)
104
99 fr += ':%s' % path 105 fr += ':%s' % path
100 106
101 cmd = 'scp -B -r %s %s %s/' % ( 107 cmd = 'scp -B -r %s %s %s/' % (
@@ -108,3 +114,41 @@ class SSH(FetchMethod):
108 114
109 runfetchcmd(cmd, d) 115 runfetchcmd(cmd, d)
110 116
117 def checkstatus(self, fetch, urldata, d):
118 """
119 Check the status of the url
120 """
121 m = __pattern__.match(urldata.url)
122 path = m.group('path')
123 host = m.group('host')
124 port = m.group('port')
125 user = m.group('user')
126 password = m.group('pass')
127
128 if port:
129 portarg = '-P %s' % port
130 else:
131 portarg = ''
132
133 if user:
134 fr = user
135 if password:
136 fr += ':%s' % password
137 fr += '@%s' % host
138 else:
139 fr = host
140
141 if path[0] != '~':
142 path = '/%s' % path
143 path = urllib.parse.unquote(path)
144
145 cmd = 'ssh -o BatchMode=true %s %s [ -f %s ]' % (
146 portarg,
147 fr,
148 path
149 )
150
151 check_network_access(d, cmd, urldata.url)
152 runfetchcmd(cmd, d)
153
154 return True
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 8856ef1c62..0852108e7d 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -57,7 +57,12 @@ class Svn(FetchMethod):
57 if 'rev' in ud.parm: 57 if 'rev' in ud.parm:
58 ud.revision = ud.parm['rev'] 58 ud.revision = ud.parm['rev']
59 59
60 ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision)) 60 # Whether to use the @REV peg-revision syntax in the svn command or not
61 ud.pegrevision = True
62 if 'nopegrevision' in ud.parm:
63 ud.pegrevision = False
64
65 ud.localfile = d.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ["0", "1"][ud.pegrevision]))
61 66
62 def _buildsvncommand(self, ud, d, command): 67 def _buildsvncommand(self, ud, d, command):
63 """ 68 """
@@ -86,7 +91,7 @@ class Svn(FetchMethod):
86 if command == "info": 91 if command == "info":
87 svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) 92 svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
88 elif command == "log1": 93 elif command == "log1":
89 svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) 94 svncmd = "%s log --limit 1 --quiet %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
90 else: 95 else:
91 suffix = "" 96 suffix = ""
92 97
@@ -98,7 +103,8 @@ class Svn(FetchMethod):
98 103
99 if ud.revision: 104 if ud.revision:
100 options.append("-r %s" % ud.revision) 105 options.append("-r %s" % ud.revision)
101 suffix = "@%s" % (ud.revision) 106 if ud.pegrevision:
107 suffix = "@%s" % (ud.revision)
102 108
103 if command == "fetch": 109 if command == "fetch":
104 transportuser = ud.parm.get("transportuser", "") 110 transportuser = ud.parm.get("transportuser", "")
@@ -204,3 +210,6 @@ class Svn(FetchMethod):
204 210
205 def _build_revision(self, ud, d): 211 def _build_revision(self, ud, d):
206 return ud.revision 212 return ud.revision
213
214 def supports_checksum(self, urldata):
215 return False
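One subtlety in the svn.py change: the localfile name indexes a two-element list with ud.pegrevision, so tarballs fetched with and without the @REV peg syntax cannot collide in DL_DIR. The trick in isolation:

    pegrevision = True
    peg_flag = ["0", "1"][pegrevision]  # a bool indexes a list as 0 or 1
    print("module_host_path_1234_%s.tar.gz" % peg_flag)
    # -> module_host_path_1234_1.tar.gz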
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 6d82f3af07..7e43d3bc97 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -26,7 +26,6 @@ from bb.fetch2 import FetchMethod
26from bb.fetch2 import FetchError 26from bb.fetch2 import FetchError
27from bb.fetch2 import logger 27from bb.fetch2 import logger
28from bb.fetch2 import runfetchcmd 28from bb.fetch2 import runfetchcmd
29from bb.utils import export_proxies
30from bs4 import BeautifulSoup 29from bs4 import BeautifulSoup
31from bs4 import SoupStrainer 30from bs4 import SoupStrainer
32 31
@@ -52,18 +51,19 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
52 51
53 52
54class Wget(FetchMethod): 53class Wget(FetchMethod):
54 """Class to fetch urls via 'wget'"""
55 55
56 # CDNs like CloudFlare may do a 'browser integrity test' which can fail 56 def check_certs(self, d):
57 # with the standard wget/urllib User-Agent, so pretend to be a modern 57 """
58 # browser. 58 Should certificates be checked?
59 user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0" 59 """
60 return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"
60 61
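BB_CHECK_SSL_CERTS defaults to verification being enabled; only an explicit "0" disables it. The check_certs() logic in isolation:

    def check_certs(value):
        # value stands in for d.getVar("BB_CHECK_SSL_CERTS")
        return (value or "1") != "0"

    print(check_certs(None))  # -> True: unset means verify certificates
    print(check_certs("0"))   # -> False: the only way to disable checking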
61 """Class to fetch urls via 'wget'"""
62 def supports(self, ud, d): 62 def supports(self, ud, d):
63 """ 63 """
64 Check to see if a given url can be fetched with wget. 64 Check to see if a given url can be fetched with wget.
65 """ 65 """
66 return ud.type in ['http', 'https', 'ftp'] 66 return ud.type in ['http', 'https', 'ftp', 'ftps']
67 67
68 def recommends_checksum(self, urldata): 68 def recommends_checksum(self, urldata):
69 return True 69 return True
@@ -78,11 +78,17 @@ class Wget(FetchMethod):
78 else: 78 else:
79 ud.basename = os.path.basename(ud.path) 79 ud.basename = os.path.basename(ud.path)
80 80
81 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 81 ud.localfile = ud.basename
82 if not ud.localfile: 82 if not ud.localfile:
83 ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) 83 ud.localfile = ud.host + ud.path.replace("/", ".")
84 84
85 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" 85 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget --tries=2 --timeout=100"
86
87 if ud.type == 'ftp' or ud.type == 'ftps':
88 self.basecmd += " --passive-ftp"
89
90 if not self.check_certs(d):
91 self.basecmd += " --no-check-certificate"
86 92
87 def _runwget(self, ud, d, command, quiet, workdir=None): 93 def _runwget(self, ud, d, command, quiet, workdir=None):
88 94
@@ -90,39 +96,53 @@ class Wget(FetchMethod):
90 96
91 logger.debug2("Fetching %s using command '%s'" % (ud.url, command)) 97 logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
92 bb.fetch2.check_network_access(d, command, ud.url) 98 bb.fetch2.check_network_access(d, command, ud.url)
93 runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir) 99 runfetchcmd(command + ' --progress=dot --verbose', d, quiet, log=progresshandler, workdir=workdir)
94 100
95 def download(self, ud, d): 101 def download(self, ud, d):
96 """Fetch urls""" 102 """Fetch urls"""
97 103
98 fetchcmd = self.basecmd 104 fetchcmd = self.basecmd
99 105
100 if 'downloadfilename' in ud.parm: 106 dldir = os.path.realpath(d.getVar("DL_DIR"))
101 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) 107 localpath = os.path.join(dldir, ud.localfile) + ".tmp"
102 bb.utils.mkdirhier(os.path.dirname(localpath)) 108 bb.utils.mkdirhier(os.path.dirname(localpath))
103 fetchcmd += " -O %s" % shlex.quote(localpath) 109 fetchcmd += " --output-document=%s" % shlex.quote(localpath)
104 110
105 if ud.user and ud.pswd: 111 if ud.user and ud.pswd:
106 fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd) 112 fetchcmd += " --auth-no-challenge"
113 if ud.parm.get("redirectauth", "1") == "1":
114 # An undocumented feature of wget is that if the
115 # username/password are specified on the URI, wget will only
116 # send the Authorization header to the first host and not to
117 # any hosts that it is redirected to. With the increasing
118 # usage of temporary AWS URLs, this difference now matters as
119 # AWS will reject any request that has authentication both in
120 # the query parameters (from the redirect) and in the
121 # Authorization header.
122 fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)
107 123
108 uri = ud.url.split(";")[0] 124 uri = ud.url.split(";")[0]
109 if os.path.exists(ud.localpath): 125 fetchcmd += " --continue --directory-prefix=%s '%s'" % (dldir, uri)
110 # file exists, but we didnt complete it.. trying again..
111 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
112 else:
113 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
114
115 self._runwget(ud, d, fetchcmd, False) 126 self._runwget(ud, d, fetchcmd, False)
116 127
117 # Sanity check since wget can pretend it succeeded when it didn't 128 # Sanity check since wget can pretend it succeeded when it didn't
118 # Also, this used to happen if sourceforge sent us to the mirror page 129 # Also, this used to happen if sourceforge sent us to the mirror page
119 if not os.path.exists(ud.localpath): 130 if not os.path.exists(localpath):
120 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri) 131 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)
121 132
122 if os.path.getsize(ud.localpath) == 0: 133 if os.path.getsize(localpath) == 0:
123 os.remove(ud.localpath) 134 os.remove(localpath)
124 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri) 135 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
125 136
137 # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
138 # original file, which might be a race (imagine two recipes referencing the same
139 # source, one with an incorrect checksum)
140 bb.fetch2.verify_checksum(ud, d, localpath=localpath, fatal_nochecksum=False)
141
142 # Remove the ".tmp" and move the file into position atomically
143 # Our lock prevents multiple writers but mirroring code may grab incomplete files
144 os.rename(localpath, localpath[:-4])
145
126 return True 146 return True
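A minimal sketch of the download-to-".tmp"-then-rename flow introduced above (path illustrative): os.rename() is atomic within one filesystem, so mirroring code can never observe a half-written file under the final name.

    import os

    localpath = "/downloads/foo-1.0.tar.gz.tmp"
    # ... wget writes into localpath; size and checksum are verified first ...
    os.rename(localpath, localpath[:-4])  # drop the ".tmp" suffix atomically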
127 147
128 def checkstatus(self, fetch, ud, d, try_again=True): 148 def checkstatus(self, fetch, ud, d, try_again=True):
@@ -209,12 +229,17 @@ class Wget(FetchMethod):
209 # We let the request fail and expect it to be 229 # We let the request fail and expect it to be
210 # tried once more ("try_again" in check_status()), 230 # tried once more ("try_again" in check_status()),
211 # with the dead connection removed from the cache. 231 # with the dead connection removed from the cache.
212 # If it still fails, we give up, which can happend for bad 232 # If it still fails, we give up, which can happen for bad
213 # HTTP proxy settings. 233 # HTTP proxy settings.
214 fetch.connection_cache.remove_connection(h.host, h.port) 234 fetch.connection_cache.remove_connection(h.host, h.port)
215 raise urllib.error.URLError(err) 235 raise urllib.error.URLError(err)
216 else: 236 else:
217 r = h.getresponse() 237 try:
238 r = h.getresponse()
239 except TimeoutError as e:
240 if fetch.connection_cache:
241 fetch.connection_cache.remove_connection(h.host, h.port)
242 raise TimeoutError(e)
218 243
219 # Pick apart the HTTPResponse object to get the addinfourl 244 # Pick apart the HTTPResponse object to get the addinfourl
220 # object initialized properly. 245 # object initialized properly.
@@ -275,71 +300,115 @@ class Wget(FetchMethod):
275 300
276 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): 301 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
277 """ 302 """
278 urllib2.HTTPRedirectHandler resets the method to GET on redirect, 303 urllib2.HTTPRedirectHandler before 3.13 has two flaws:
279 when we want to follow redirects using the original method. 304
305 It resets the method to GET on redirect when we want to follow
306 redirects using the original method (typically HEAD). This was fixed
307 in 759e8e7.
308
309 It also doesn't handle 308 (Permanent Redirect). This was fixed in
310 c379bc5.
311
312 Until we depend on Python 3.13 onwards, copy the redirect_request
313 method to fix these issues.
280 """ 314 """
281 def redirect_request(self, req, fp, code, msg, headers, newurl): 315 def redirect_request(self, req, fp, code, msg, headers, newurl):
282 newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) 316 m = req.get_method()
283 newreq.get_method = req.get_method 317 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
284 return newreq 318 or code in (301, 302, 303) and m == "POST")):
285 exported_proxies = export_proxies(d) 319 raise urllib.HTTPError(req.full_url, code, msg, headers, fp)
286 320
287 handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] 321 # Strictly (according to RFC 2616), 301 or 302 in response to
288 if exported_proxies: 322 # a POST MUST NOT cause a redirection without confirmation
289 handlers.append(urllib.request.ProxyHandler()) 323 # from the user (of urllib.request, in this case). In practice,
290 handlers.append(CacheHTTPHandler()) 324 # essentially all clients do redirect in this case, so we do
291 # Since Python 2.7.9 ssl cert validation is enabled by default 325 # the same.
292 # see PEP-0476, this causes verification errors on some https servers 326
293 # so disable by default. 327 # Be conciliant with URIs containing a space. This is mainly
294 import ssl 328 # redundant with the more complete encoding done in http_error_302(),
295 if hasattr(ssl, '_create_unverified_context'): 329 # but it is kept for compatibility with other callers.
296 handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context())) 330 newurl = newurl.replace(' ', '%20')
297 opener = urllib.request.build_opener(*handlers) 331
298 332 CONTENT_HEADERS = ("content-length", "content-type")
299 try: 333 newheaders = {k: v for k, v in req.headers.items()
300 uri = ud.url.split(";")[0] 334 if k.lower() not in CONTENT_HEADERS}
301 r = urllib.request.Request(uri) 335 return urllib.request.Request(newurl,
302 r.get_method = lambda: "HEAD" 336 method="HEAD" if m == "HEAD" else "GET",
303 # Some servers (FusionForge, as used on Alioth) require that the 337 headers=newheaders,
304 # optional Accept header is set. 338 origin_req_host=req.origin_req_host,
305 r.add_header("Accept", "*/*") 339 unverifiable=True)
306 r.add_header("User-Agent", self.user_agent) 340
307 def add_basic_auth(login_str, request): 341 http_error_308 = urllib.request.HTTPRedirectHandler.http_error_302
308 '''Adds Basic auth to http request, pass in login:password as string''' 342
309 import base64 343 # We need to update the environment here as both the proxy and HTTPS
310 encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8") 344 # handlers need variables set. The proxy needs http_proxy and friends to
311 authheader = "Basic %s" % encodeuser 345 # be set, and HTTPSHandler ends up calling into openssl to load the
312 r.add_header("Authorization", authheader) 346 # certificates. In buildtools configurations this will be looking at the
313 347 # wrong place for certificates by default: we set SSL_CERT_FILE to the
314 if ud.user and ud.pswd: 348 # right location in the buildtools environment script but as BitBake
315 add_basic_auth(ud.user + ':' + ud.pswd, r) 349 # prunes the environment, this is lost. When binaries are executed
350 # runfetchcmd ensures these values are in the environment, but this is
351 # pure Python so we need to update the environment.
352 #
353 # Avoid tramping the environment too much by using bb.utils.environment
354 # to scope the changes to the build_opener request, which is when the
355 # environment lookups happen.
356 newenv = bb.fetch2.get_fetcher_environment(d)
357
358 with bb.utils.environment(**newenv):
359 import ssl
360
361 if self.check_certs(d):
362 context = ssl.create_default_context()
363 else:
364 context = ssl._create_unverified_context()
365
366 handlers = [FixedHTTPRedirectHandler,
367 HTTPMethodFallback,
368 urllib.request.ProxyHandler(),
369 CacheHTTPHandler(),
370 urllib.request.HTTPSHandler(context=context)]
371 opener = urllib.request.build_opener(*handlers)
316 372
317 try: 373 try:
318 import netrc 374 parts = urllib.parse.urlparse(ud.url.split(";")[0])
319 n = netrc.netrc() 375 uri = "{}://{}{}".format(parts.scheme, parts.netloc, parts.path)
320 login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname) 376 r = urllib.request.Request(uri)
321 add_basic_auth("%s:%s" % (login, password), r) 377 r.get_method = lambda: "HEAD"
322 except (TypeError, ImportError, IOError, netrc.NetrcParseError): 378 # Some servers (FusionForge, as used on Alioth) require that the
323 pass 379 # optional Accept header is set.
324 380 r.add_header("Accept", "*/*")
325 with opener.open(r) as response: 381 r.add_header("User-Agent", "bitbake/{}".format(bb.__version__))
326 pass 382 def add_basic_auth(login_str, request):
327 except urllib.error.URLError as e: 383 '''Adds Basic auth to http request, pass in login:password as string'''
328 if try_again: 384 import base64
329 logger.debug2("checkstatus: trying again") 385 encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
330 return self.checkstatus(fetch, ud, d, False) 386 authheader = "Basic %s" % encodeuser
331 else: 387 r.add_header("Authorization", authheader)
332 # debug for now to avoid spamming the logs in e.g. remote sstate searches 388
333 logger.debug2("checkstatus() urlopen failed: %s" % e) 389 if ud.user and ud.pswd:
334 return False 390 add_basic_auth(ud.user + ':' + ud.pswd, r)
335 except ConnectionResetError as e: 391
336 if try_again: 392 try:
337 logger.debug2("checkstatus: trying again") 393 import netrc
338 return self.checkstatus(fetch, ud, d, False) 394 auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
339 else: 395 if auth_data:
340 # debug for now to avoid spamming the logs in e.g. remote sstate searches 396 login, _, password = auth_data
341 logger.debug2("checkstatus() urlopen failed: %s" % e) 397 add_basic_auth("%s:%s" % (login, password), r)
342 return False 398 except (FileNotFoundError, netrc.NetrcParseError):
399 pass
400
401 with opener.open(r, timeout=100) as response:
402 pass
403 except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
404 if try_again:
405 logger.debug2("checkstatus: trying again")
406 return self.checkstatus(fetch, ud, d, False)
407 else:
408 # debug for now to avoid spamming the logs in e.g. remote sstate searches
409 logger.debug2("checkstatus() urlopen failed for %s: %s" % (uri,e))
410 return False
411
343 return True 412 return True
344 413
345 def _parse_path(self, regex, s): 414 def _parse_path(self, regex, s):
@@ -416,7 +485,7 @@ class Wget(FetchMethod):
416 f = tempfile.NamedTemporaryFile() 485 f = tempfile.NamedTemporaryFile()
417 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f: 486 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
418 fetchcmd = self.basecmd 487 fetchcmd = self.basecmd
419 fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'" 488 fetchcmd += " --output-document=%s '%s'" % (f.name, uri)
420 try: 489 try:
421 self._runwget(ud, d, fetchcmd, True, workdir=workdir) 490 self._runwget(ud, d, fetchcmd, True, workdir=workdir)
422 fetchresult = f.read() 491 fetchresult = f.read()
@@ -472,7 +541,7 @@ class Wget(FetchMethod):
472 version_dir = ['', '', ''] 541 version_dir = ['', '', '']
473 version = ['', '', ''] 542 version = ['', '', '']
474 543
475 dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))") 544 dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
476 s = dirver_regex.search(dirver) 545 s = dirver_regex.search(dirver)
477 if s: 546 if s:
478 version_dir[1] = s.group('ver') 547 version_dir[1] = s.group('ver')
@@ -548,7 +617,7 @@ class Wget(FetchMethod):
548 617
549 # src.rpm extension was added only for rpm package. Can be removed if the rpm 618 # src.rpm extension was added only for rpm package. Can be removed if the rpm
550 # packaged will always be considered as having to be manually upgraded 619 # packaged will always be considered as having to be manually upgraded
551 psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)" 620 psuffix_regex = r"(tar\.\w+|tgz|zip|xz|rpm|bz2|orig\.tar\.\w+|src\.tar\.\w+|src\.tgz|svnr\d+\.tar\.\w+|stable\.tar\.\w+|src\.rpm)"
552 621
553 # match name, version and archive type of a package 622 # match name, version and archive type of a package
554 package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)" 623 package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
@@ -576,13 +645,17 @@ class Wget(FetchMethod):
576 645
577 sanity check to ensure same name and type. 646 sanity check to ensure same name and type.
578 """ 647 """
579 package = ud.path.split("/")[-1] 648 if 'downloadfilename' in ud.parm:
649 package = ud.parm['downloadfilename']
650 else:
651 package = ud.path.split("/")[-1]
580 current_version = ['', d.getVar('PV'), ''] 652 current_version = ['', d.getVar('PV'), '']
581 653
582 """possible to have no version in pkg name, such as spectrum-fw""" 654 """possible to have no version in pkg name, such as spectrum-fw"""
583 if not re.search(r"\d+", package): 655 if not re.search(r"\d+", package):
584 current_version[1] = re.sub('_', '.', current_version[1]) 656 current_version[1] = re.sub('_', '.', current_version[1])
585 current_version[1] = re.sub('-', '.', current_version[1]) 657 current_version[1] = re.sub('-', '.', current_version[1])
658 bb.debug(3, "latest_versionstring: no version found in %s" % package)
586 return (current_version[1], '') 659 return (current_version[1], '')
587 660
588 package_regex = self._init_regexes(package, ud, d) 661 package_regex = self._init_regexes(package, ud, d)
@@ -599,10 +672,10 @@ class Wget(FetchMethod):
599 # search for version matches on folders inside the path, like: 672 # search for version matches on folders inside the path, like:
600 # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz 673 # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
601 dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") 674 dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
602 m = dirver_regex.search(path) 675 m = dirver_regex.findall(path)
603 if m: 676 if m:
604 pn = d.getVar('PN') 677 pn = d.getVar('PN')
605 dirver = m.group('dirver') 678 dirver = m[-1][0]
606 679
607 dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn))) 680 dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
608 if not dirver_pn_regex.search(dirver): 681 if not dirver_pn_regex.search(dirver):
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py
index 06bad495ac..bca8ebfa09 100755
--- a/bitbake/lib/bb/main.py
+++ b/bitbake/lib/bb/main.py
@@ -12,11 +12,12 @@
12import os 12import os
13import sys 13import sys
14import logging 14import logging
15import optparse 15import argparse
16import warnings 16import warnings
17import fcntl 17import fcntl
18import time 18import time
19import traceback 19import traceback
20import datetime
20 21
21import bb 22import bb
22from bb import event 23from bb import event
@@ -43,18 +44,18 @@ def present_options(optionlist):
43 else: 44 else:
44 return optionlist[0] 45 return optionlist[0]
45 46
46class BitbakeHelpFormatter(optparse.IndentedHelpFormatter): 47class BitbakeHelpFormatter(argparse.HelpFormatter):
47 def format_option(self, option): 48 def _get_help_string(self, action):
48 # We need to do this here rather than in the text we supply to 49 # We need to do this here rather than in the text we supply to
49 # add_option() because we don't want to call list_extension_modules() 50 # add_option() because we don't want to call list_extension_modules()
50 # on every execution (since it imports all of the modules) 51 # on every execution (since it imports all of the modules)
51 # Note also that we modify option.help rather than the returned text 52 # Note also that we modify option.help rather than the returned text
52 # - this is so that we don't have to re-format the text ourselves 53 # - this is so that we don't have to re-format the text ourselves
53 if option.dest == 'ui': 54 if action.dest == 'ui':
54 valid_uis = list_extension_modules(bb.ui, 'main') 55 valid_uis = list_extension_modules(bb.ui, 'main')
55 option.help = option.help.replace('@CHOICES@', present_options(valid_uis)) 56 return action.help.replace('@CHOICES@', present_options(valid_uis))
56 57
57 return optparse.IndentedHelpFormatter.format_option(self, option) 58 return action.help
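A self-contained sketch of the argparse hook adopted above: overriding _get_help_string() keeps @CHOICES@ a cheap placeholder until help text is actually rendered (the UI names here are illustrative):

    import argparse

    class ChoicesHelpFormatter(argparse.HelpFormatter):
        def _get_help_string(self, action):
            # Expand the placeholder lazily, only when help output is built
            if action.dest == "ui":
                return action.help.replace("@CHOICES@", "knotty or ncurses")
            return action.help

    parser = argparse.ArgumentParser(formatter_class=ChoicesHelpFormatter)
    parser.add_argument("-u", "--ui", default="knotty",
                        help="The user interface to use (@CHOICES@ - default %(default)s).")
    parser.print_help()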
58 59
59def list_extension_modules(pkg, checkattr): 60def list_extension_modules(pkg, checkattr):
60 """ 61 """
@@ -112,189 +113,209 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
112 warnlog.warning(s) 113 warnlog.warning(s)
113 114
114warnings.showwarning = _showwarning 115warnings.showwarning = _showwarning
115warnings.filterwarnings("ignore")
116warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
117warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
118warnings.filterwarnings("ignore", category=ImportWarning)
119warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
120warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
121
122 116
123def create_bitbake_parser(): 117def create_bitbake_parser():
124 parser = optparse.OptionParser( 118 parser = argparse.ArgumentParser(
125 formatter=BitbakeHelpFormatter(), 119 description="""\
126 version="BitBake Build Tool Core version %s" % bb.__version__, 120 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
127 usage="""%prog [options] [recipename/target recipe:do_task ...] 121 will provide the layer, BBFILES and other configuration information.
128 122 """,
129 Executes the specified task (default is 'build') for a given set of target recipes (.bb files). 123 formatter_class=BitbakeHelpFormatter,
130 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which 124 allow_abbrev=False,
131 will provide the layer, BBFILES and other configuration information.""") 125 add_help=False, # help is manually added below in a specific argument group
132 126 )
133 parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None, 127
134 help="Execute tasks from a specific .bb recipe directly. WARNING: Does " 128 general_group = parser.add_argument_group('General options')
135 "not handle any dependencies from other recipes.") 129 task_group = parser.add_argument_group('Task control options')
136 130 exec_group = parser.add_argument_group('Execution control options')
137 parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True, 131 logging_group = parser.add_argument_group('Logging/output control options')
138 help="Continue as much as possible after an error. While the target that " 132 server_group = parser.add_argument_group('Server options')
139 "failed and anything depending on it cannot be built, as much as " 133 config_group = parser.add_argument_group('Configuration options')
140 "possible will be built before stopping.") 134
141 135 general_group.add_argument("targets", nargs="*", metavar="recipename/target",
142 parser.add_option("-f", "--force", action="store_true", dest="force", default=False, 136 help="Execute the specified task (default is 'build') for these target "
143 help="Force the specified targets/task to run (invalidating any " 137 "recipes (.bb files).")
144 "existing stamp file).") 138
145 139 general_group.add_argument("-s", "--show-versions", action="store_true",
146 parser.add_option("-c", "--cmd", action="store", dest="cmd", 140 help="Show current and preferred versions of all recipes.")
147 help="Specify the task to execute. The exact options available " 141
148 "depend on the metadata. Some examples might be 'compile'" 142 general_group.add_argument("-e", "--environment", action="store_true",
149 " or 'populate_sysroot' or 'listtasks' may give a list of " 143 dest="show_environment",
150 "the tasks available.") 144 help="Show the global or per-recipe environment complete with information"
151 145 " about where variables were set/changed.")
152 parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp", 146
153 help="Invalidate the stamp for the specified task such as 'compile' " 147 general_group.add_argument("-g", "--graphviz", action="store_true", dest="dot_graph",
154 "and then run the default task for the specified target(s).") 148 help="Save dependency tree information for the specified "
155 149 "targets in the dot syntax.")
156 parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
157 help="Read the specified file before bitbake.conf.")
158
159 parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
160 help="Read the specified file after bitbake.conf.")
161
162 parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
163 help="Enable tracing of shell tasks (with 'set -x'). "
164 "Also print bb.note(...) messages to stdout (in "
165 "addition to writing them to ${T}/log.do_<task>).")
166
167 parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
168 help="Increase the debug level. You can specify this "
169 "more than once. -D sets the debug level to 1, "
170 "where only bb.debug(1, ...) messages are printed "
171 "to stdout; -DD sets the debug level to 2, where "
172 "both bb.debug(1, ...) and bb.debug(2, ...) "
173 "messages are printed; etc. Without -D, no debug "
174 "messages are printed. Note that -D only affects "
175 "output to stdout. All debug messages are written "
176 "to ${T}/log.do_taskname, regardless of the debug "
177 "level.")
178
179 parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
180 help="Output less log message data to the terminal. You can specify this more than once.")
181
182 parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
183 help="Don't execute, just go through the motions.")
184
185 parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
186 default=[], metavar="SIGNATURE_HANDLER",
187 help="Dump out the signature construction information, with no task "
188 "execution. The SIGNATURE_HANDLER parameter is passed to the "
189 "handler. Two common values are none and printdiff but the handler "
190 "may define more/less. none means only dump the signature, printdiff"
191 " means compare the dumped signature with the cached one.")
192
193 parser.add_option("-p", "--parse-only", action="store_true",
194 dest="parse_only", default=False,
195 help="Quit after parsing the BB recipes.")
196
197 parser.add_option("-s", "--show-versions", action="store_true",
198 dest="show_versions", default=False,
199 help="Show current and preferred versions of all recipes.")
200
201 parser.add_option("-e", "--environment", action="store_true",
202 dest="show_environment", default=False,
203 help="Show the global or per-recipe environment complete with information"
204 " about where variables were set/changed.")
205
206 parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
207 help="Save dependency tree information for the specified "
208 "targets in the dot syntax.")
209
210 parser.add_option("-I", "--ignore-deps", action="append",
211 dest="extra_assume_provided", default=[],
212 help="Assume these dependencies don't exist and are already provided "
213 "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
214 "graphs more appealing")
215
216 parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
217 help="Show debug logging for the specified logging domains")
218
219 parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
220 help="Profile the command and save reports.")
221 150
222 # @CHOICES@ is substituted out by BitbakeHelpFormatter above 151 # @CHOICES@ is substituted out by BitbakeHelpFormatter above
223 parser.add_option("-u", "--ui", action="store", dest="ui", 152 general_group.add_argument("-u", "--ui",
224 default=os.environ.get('BITBAKE_UI', 'knotty'), 153 default=os.environ.get('BITBAKE_UI', 'knotty'),
225 help="The user interface to use (@CHOICES@ - default %default).") 154 help="The user interface to use (@CHOICES@ - default %(default)s).")
226 155
227 parser.add_option("", "--token", action="store", dest="xmlrpctoken", 156 general_group.add_argument("--version", action="store_true",
228 default=os.environ.get("BBTOKEN"), 157 help="Show programs version and exit.")
229 help="Specify the connection token to be used when connecting " 158
230 "to a remote server.") 159 general_group.add_argument('-h', '--help', action='help',
231 160 help='Show this help message and exit.')
232 parser.add_option("", "--revisions-changed", action="store_true", 161
233 dest="revisions_changed", default=False, 162
234 help="Set the exit code depending on whether upstream floating " 163 task_group.add_argument("-f", "--force", action="store_true",
235 "revisions have changed or not.") 164 help="Force the specified targets/task to run (invalidating any "
236 165 "existing stamp file).")
237 parser.add_option("", "--server-only", action="store_true", 166
238 dest="server_only", default=False, 167 task_group.add_argument("-c", "--cmd",
239 help="Run bitbake without a UI, only starting a server " 168 help="Specify the task to execute. The exact options available "
240 "(cooker) process.") 169 "depend on the metadata. Some examples might be 'compile'"
241 170 " or 'populate_sysroot' or 'listtasks' may give a list of "
242 parser.add_option("-B", "--bind", action="store", dest="bind", default=False, 171 "the tasks available.")
243 help="The name/address for the bitbake xmlrpc server to bind to.") 172
244 173 task_group.add_argument("-C", "--clear-stamp", dest="invalidate_stamp",
245 parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout", 174 help="Invalidate the stamp for the specified task such as 'compile' "
246 default=os.getenv("BB_SERVER_TIMEOUT"), 175 "and then run the default task for the specified target(s).")
247 help="Set timeout to unload bitbake server due to inactivity, " 176
248 "set to -1 means no unload, " 177 task_group.add_argument("--runall", action="append", default=[],
249 "default: Environment variable BB_SERVER_TIMEOUT.") 178 help="Run the specified task for any recipe in the taskgraph of the "
250 179 "specified target (even if it wouldn't otherwise have run).")
251 parser.add_option("", "--no-setscene", action="store_true", 180
252 dest="nosetscene", default=False, 181 task_group.add_argument("--runonly", action="append",
253 help="Do not run any setscene tasks. sstate will be ignored and " 182 help="Run only the specified task within the taskgraph of the "
254 "everything needed, built.") 183 "specified targets (and any task dependencies those tasks may have).")
255 184
256 parser.add_option("", "--skip-setscene", action="store_true", 185 task_group.add_argument("--no-setscene", action="store_true",
257 dest="skipsetscene", default=False, 186 dest="nosetscene",
258 help="Skip setscene tasks if they would be executed. Tasks previously " 187 help="Do not run any setscene tasks. sstate will be ignored and "
259 "restored from sstate will be kept, unlike --no-setscene") 188 "everything needed, built.")
260 189
261 parser.add_option("", "--setscene-only", action="store_true", 190 task_group.add_argument("--skip-setscene", action="store_true",
262 dest="setsceneonly", default=False, 191 dest="skipsetscene",
263 help="Only run setscene tasks, don't run any real tasks.") 192 help="Skip setscene tasks if they would be executed. Tasks previously "
264 193 "restored from sstate will be kept, unlike --no-setscene.")
265 parser.add_option("", "--remote-server", action="store", dest="remote_server", 194
266 default=os.environ.get("BBSERVER"), 195 task_group.add_argument("--setscene-only", action="store_true",
267 help="Connect to the specified server.") 196 dest="setsceneonly",
268 197 help="Only run setscene tasks, don't run any real tasks.")
269 parser.add_option("-m", "--kill-server", action="store_true", 198
270 dest="kill_server", default=False, 199
271 help="Terminate any running bitbake server.") 200 exec_group.add_argument("-n", "--dry-run", action="store_true",
272 201 help="Don't execute, just go through the motions.")
273 parser.add_option("", "--observe-only", action="store_true", 202
274 dest="observe_only", default=False, 203 exec_group.add_argument("-p", "--parse-only", action="store_true",
275 help="Connect to a server as an observing-only client.") 204 help="Quit after parsing the BB recipes.")
276 205
277 parser.add_option("", "--status-only", action="store_true", 206 exec_group.add_argument("-k", "--continue", action="store_false", dest="halt",
278 dest="status_only", default=False, 207 help="Continue as much as possible after an error. While the target that "
279 help="Check the status of the remote bitbake server.") 208 "failed and anything depending on it cannot be built, as much as "
280 209 "possible will be built before stopping.")
281 parser.add_option("-w", "--write-log", action="store", dest="writeeventlog", 210
282 default=os.environ.get("BBEVENTLOG"), 211 exec_group.add_argument("-P", "--profile", action="store_true",
283 help="Writes the event log of the build to a bitbake event json file. " 212 help="Profile the command and save reports.")
284 "Use '' (empty string) to assign the name automatically.") 213
285 214 exec_group.add_argument("-S", "--dump-signatures", action="append",
286 parser.add_option("", "--runall", action="append", dest="runall", 215 default=[], metavar="SIGNATURE_HANDLER",
287 help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).") 216 help="Dump out the signature construction information, with no task "
288 217 "execution. The SIGNATURE_HANDLER parameter is passed to the "
289 parser.add_option("", "--runonly", action="append", dest="runonly", 218 "handler. Two common values are none and printdiff but the handler "
290 help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).") 219 "may define more/less. none means only dump the signature, printdiff"
220 " means recursively compare the dumped signature with the most recent"
221 " one in a local build or sstate cache (can be used to find out why tasks re-run"
222 " when that is not expected)")
223
224 exec_group.add_argument("--revisions-changed", action="store_true",
225 help="Set the exit code depending on whether upstream floating "
226 "revisions have changed or not.")
227
228 exec_group.add_argument("-b", "--buildfile",
229 help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
230 "not handle any dependencies from other recipes.")
231
232 logging_group.add_argument("-D", "--debug", action="count", default=0,
233 help="Increase the debug level. You can specify this "
234 "more than once. -D sets the debug level to 1, "
235 "where only bb.debug(1, ...) messages are printed "
236 "to stdout; -DD sets the debug level to 2, where "
237 "both bb.debug(1, ...) and bb.debug(2, ...) "
238 "messages are printed; etc. Without -D, no debug "
239 "messages are printed. Note that -D only affects "
240 "output to stdout. All debug messages are written "
241 "to ${T}/log.do_taskname, regardless of the debug "
242 "level.")
243
244 logging_group.add_argument("-l", "--log-domains", action="append", dest="debug_domains",
245 default=[],
246 help="Show debug logging for the specified logging domains.")
247
248 logging_group.add_argument("-v", "--verbose", action="store_true",
249 help="Enable tracing of shell tasks (with 'set -x'). "
250 "Also print bb.note(...) messages to stdout (in "
251 "addition to writing them to ${T}/log.do_<task>).")
252
253 logging_group.add_argument("-q", "--quiet", action="count", default=0,
254 help="Output less log message data to the terminal. You can specify this "
255 "more than once.")
256
257 logging_group.add_argument("-w", "--write-log", dest="writeeventlog",
258 default=os.environ.get("BBEVENTLOG"),
259 help="Writes the event log of the build to a bitbake event json file. "
260 "Use '' (empty string) to assign the name automatically.")
261
262
263 server_group.add_argument("-B", "--bind", default=False,
264 help="The name/address for the bitbake xmlrpc server to bind to.")
265
266 server_group.add_argument("-T", "--idle-timeout", type=float, dest="server_timeout",
267 default=os.getenv("BB_SERVER_TIMEOUT"),
268 help="Set timeout to unload bitbake server due to inactivity, "
269 "set to -1 means no unload, "
270 "default: Environment variable BB_SERVER_TIMEOUT.")
271
272 server_group.add_argument("--remote-server",
273 default=os.environ.get("BBSERVER"),
274 help="Connect to the specified server.")
275
276 server_group.add_argument("-m", "--kill-server", action="store_true",
277 help="Terminate any running bitbake server.")
278
279 server_group.add_argument("--token", dest="xmlrpctoken",
280 default=os.environ.get("BBTOKEN"),
281 help="Specify the connection token to be used when connecting "
282 "to a remote server.")
283
284 server_group.add_argument("--observe-only", action="store_true",
285 help="Connect to a server as an observing-only client.")
286
287 server_group.add_argument("--status-only", action="store_true",
288 help="Check the status of the remote bitbake server.")
289
290 server_group.add_argument("--server-only", action="store_true",
291 help="Run bitbake without a UI, only starting a server "
292 "(cooker) process.")
293
294
295 config_group.add_argument("-r", "--read", action="append", dest="prefile", default=[],
296 help="Read the specified file before bitbake.conf.")
297
298 config_group.add_argument("-R", "--postread", action="append", dest="postfile", default=[],
299 help="Read the specified file after bitbake.conf.")
300
301
302 config_group.add_argument("-I", "--ignore-deps", action="append",
303 dest="extra_assume_provided", default=[],
304 help="Assume these dependencies don't exist and are already provided "
305 "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
306 "graphs more appealing.")
307
291 return parser 308 return parser
292 309
293 310
294class BitBakeConfigParameters(cookerdata.ConfigParameters): 311class BitBakeConfigParameters(cookerdata.ConfigParameters):
295 def parseCommandLine(self, argv=sys.argv): 312 def parseCommandLine(self, argv=sys.argv):
296 parser = create_bitbake_parser() 313 parser = create_bitbake_parser()
297 options, targets = parser.parse_args(argv) 314 options = parser.parse_intermixed_args(argv[1:])
315
316 if options.version:
317 print("BitBake Build Tool Core version %s" % bb.__version__)
318 sys.exit(0)
298 319
299 if options.quiet and options.verbose: 320 if options.quiet and options.verbose:
300 parser.error("options --quiet and --verbose are mutually exclusive") 321 parser.error("options --quiet and --verbose are mutually exclusive")
@@ -326,7 +347,7 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
326 else: 347 else:
327 options.xmlrpcinterface = (None, 0) 348 options.xmlrpcinterface = (None, 0)
328 349
329 return options, targets[1:] 350 return options, options.targets
330 351
331 352
332def bitbake_main(configParams, configuration): 353def bitbake_main(configParams, configuration):
@@ -391,6 +412,9 @@ def bitbake_main(configParams, configuration):
391 412
392 return 1 413 return 1
393 414
415def timestamp():
416 return datetime.datetime.now().strftime('%H:%M:%S.%f')
417
394def setup_bitbake(configParams, extrafeatures=None): 418def setup_bitbake(configParams, extrafeatures=None):
395 # Ensure logging messages get sent to the UI as events 419 # Ensure logging messages get sent to the UI as events
396 handler = bb.event.LogHandler() 420 handler = bb.event.LogHandler()
@@ -398,6 +422,11 @@ def setup_bitbake(configParams, extrafeatures=None):
398 # In status only mode there are no logs and no UI 422 # In status only mode there are no logs and no UI
399 logger.addHandler(handler) 423 logger.addHandler(handler)
400 424
425 if configParams.dump_signatures:
426 if extrafeatures is None:
427 extrafeatures = []
428 extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO)
429
401 if configParams.server_only: 430 if configParams.server_only:
402 featureset = [] 431 featureset = []
403 ui_module = None 432 ui_module = None
@@ -425,7 +454,7 @@ def setup_bitbake(configParams, extrafeatures=None):
425 retries = 8 454 retries = 8
426 while retries: 455 while retries:
427 try: 456 try:
428 topdir, lock = lockBitbake() 457 topdir, lock, lockfile = lockBitbake()
429 sockname = topdir + "/bitbake.sock" 458 sockname = topdir + "/bitbake.sock"
430 if lock: 459 if lock:
431 if configParams.status_only or configParams.kill_server: 460 if configParams.status_only or configParams.kill_server:
@@ -436,18 +465,22 @@ def setup_bitbake(configParams, extrafeatures=None):
436 logger.info("Starting bitbake server...") 465 logger.info("Starting bitbake server...")
437 # Clear the event queue since we already displayed messages 466 # Clear the event queue since we already displayed messages
438 bb.event.ui_queue = [] 467 bb.event.ui_queue = []
439 server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface) 468 server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface, configParams.profile)
440 469
441 else: 470 else:
442 logger.info("Reconnecting to bitbake server...") 471 logger.info("Reconnecting to bitbake server...")
443 if not os.path.exists(sockname): 472 if not os.path.exists(sockname):
444 logger.info("Previous bitbake instance shutting down?, waiting to retry...") 473 logger.info("Previous bitbake instance shutting down?, waiting to retry... (%s)" % timestamp())
474 procs = bb.server.process.get_lockfile_process_msg(lockfile)
475 if procs:
476 logger.info("Processes holding bitbake.lock (missing socket %s):\n%s" % (sockname, procs))
477 logger.info("Directory listing: %s" % (str(os.listdir(topdir))))
445 i = 0 478 i = 0
446 lock = None 479 lock = None
447 # Wait for 5s or until we can get the lock 480 # Wait for 5s or until we can get the lock
448 while not lock and i < 50: 481 while not lock and i < 50:
449 time.sleep(0.1) 482 time.sleep(0.1)
450 _, lock = lockBitbake() 483 _, lock, _ = lockBitbake()
451 i += 1 484 i += 1
452 if lock: 485 if lock:
453 bb.utils.unlockfile(lock) 486 bb.utils.unlockfile(lock)
@@ -466,9 +499,9 @@ def setup_bitbake(configParams, extrafeatures=None):
466 retries -= 1 499 retries -= 1
467 tryno = 8 - retries 500 tryno = 8 - retries
468 if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)): 501 if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)):
469 logger.info("Retrying server connection (#%d)..." % tryno) 502 logger.info("Retrying server connection (#%d)... (%s)" % (tryno, timestamp()))
470 else: 503 else:
471 logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc())) 504 logger.info("Retrying server connection (#%d)... (%s, %s)" % (tryno, traceback.format_exc(), timestamp()))
472 505
473 if not retries: 506 if not retries:
474 bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).") 507 bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).")
@@ -497,5 +530,5 @@ def lockBitbake():
497 bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?") 530 bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?")
498 raise BBMainFatal 531 raise BBMainFatal
499 lockfile = topdir + "/bitbake.lock" 532 lockfile = topdir + "/bitbake.lock"
500 return topdir, bb.utils.lockfile(lockfile, False, False) 533 return topdir, bb.utils.lockfile(lockfile, False, False), lockfile
501 534
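
The hunks above complete the switch from optparse to grouped argparse: options move into argument groups, defaults live on add_argument(), and parse_intermixed_args() lets build targets appear anywhere among the options. A minimal sketch of the pattern, assuming a "targets" positional that the real parser declares in a hunk not shown here:

    import argparse

    parser = argparse.ArgumentParser()
    task_group = parser.add_argument_group("Task control options")
    task_group.add_argument("-f", "--force", action="store_true")
    # Assumption: bitbake declares its positional targets roughly like this.
    parser.add_argument("targets", nargs="*")

    # Targets and options may be freely interleaved on the command line:
    opts = parser.parse_intermixed_args(["virtual/kernel", "-f", "quilt-native"])
    assert opts.targets == ["virtual/kernel", "quilt-native"] and opts.force

parse_intermixed_args() requires Python 3.7 or newer, which is also why parseCommandLine() can now return options.targets instead of slicing the leftover argv.
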
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
index 98f2109ed2..f928210351 100644
--- a/bitbake/lib/bb/monitordisk.py
+++ b/bitbake/lib/bb/monitordisk.py
@@ -76,7 +76,12 @@ def getDiskData(BBDirs):
76 return None 76 return None
77 77
78 action = pathSpaceInodeRe.group(1) 78 action = pathSpaceInodeRe.group(1)
79 if action not in ("ABORT", "STOPTASKS", "WARN"): 79 if action == "ABORT":
80 # Emit a deprecation warning
 81            logger.warnonce("The BB_DISKMON_DIRS \"ABORT\" action has been renamed to \"HALT\"; please update your configuration")
82 action = "HALT"
83
84 if action not in ("HALT", "STOPTASKS", "WARN"):
80 printErr("Unknown disk space monitor action: %s" % action) 85 printErr("Unknown disk space monitor action: %s" % action)
81 return None 86 return None
82 87
@@ -177,7 +182,7 @@ class diskMonitor:
177 # use them to avoid printing too many warning messages 182 # use them to avoid printing too many warning messages
178 self.preFreeS = {} 183 self.preFreeS = {}
179 self.preFreeI = {} 184 self.preFreeI = {}
180 # This is for STOPTASKS and ABORT, to avoid printing the message 185 # This is for STOPTASKS and HALT, to avoid printing the message
181 # repeatedly while waiting for the tasks to finish 186 # repeatedly while waiting for the tasks to finish
182 self.checked = {} 187 self.checked = {}
183 for k in self.devDict: 188 for k in self.devDict:
@@ -219,8 +224,8 @@ class diskMonitor:
219 self.checked[k] = True 224 self.checked[k] = True
220 rq.finish_runqueue(False) 225 rq.finish_runqueue(False)
221 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) 226 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
222 elif action == "ABORT" and not self.checked[k]: 227 elif action == "HALT" and not self.checked[k]:
223 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") 228 logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
224 self.checked[k] = True 229 self.checked[k] = True
225 rq.finish_runqueue(True) 230 rq.finish_runqueue(True)
226 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) 231 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
@@ -229,9 +234,10 @@ class diskMonitor:
229 freeInode = st.f_favail 234 freeInode = st.f_favail
230 235
231 if minInode and freeInode < minInode: 236 if minInode and freeInode < minInode:
232 # Some filesystems use dynamic inodes so can't run out 237 # Some filesystems use dynamic inodes so can't run out.
233 # (e.g. btrfs). This is reported by the inode count being 0. 238 # This is reported by the inode count being 0 (btrfs) or the free
234 if st.f_files == 0: 239 # inode count being -1 (cephfs).
240 if st.f_files == 0 or st.f_favail == -1:
235 self.devDict[k][2] = None 241 self.devDict[k][2] = None
236 continue 242 continue
237 # Always show warning, the self.checked would always be False if the action is WARN 243 # Always show warning, the self.checked would always be False if the action is WARN
@@ -245,8 +251,8 @@ class diskMonitor:
245 self.checked[k] = True 251 self.checked[k] = True
246 rq.finish_runqueue(False) 252 rq.finish_runqueue(False)
247 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) 253 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
248 elif action == "ABORT" and not self.checked[k]: 254 elif action == "HALT" and not self.checked[k]:
249 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") 255 logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
250 self.checked[k] = True 256 self.checked[k] = True
251 rq.finish_runqueue(True) 257 rq.finish_runqueue(True)
252 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) 258 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
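
Only the action keyword changes for users; the BB_DISKMON_DIRS entry syntax of action,directory,minimum-free-space,minimum-free-inodes is untouched. An illustrative configuration (the paths and thresholds are examples, not defaults):

    BB_DISKMON_DIRS = "\
        STOPTASKS,${TMPDIR},1G,100K \
        HALT,${TMPDIR},100M,1K \
        HALT,${DL_DIR},100M,1K"

Existing files that still say ABORT keep working: the action is remapped to HALT and the one-time deprecation warning above is emitted.
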
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 291b38ff7f..4f616ff42e 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -30,7 +30,9 @@ class BBLogFormatter(logging.Formatter):
30 PLAIN = logging.INFO + 1 30 PLAIN = logging.INFO + 1
31 VERBNOTE = logging.INFO + 2 31 VERBNOTE = logging.INFO + 2
32 ERROR = logging.ERROR 32 ERROR = logging.ERROR
33 ERRORONCE = logging.ERROR - 1
33 WARNING = logging.WARNING 34 WARNING = logging.WARNING
35 WARNONCE = logging.WARNING - 1
34 CRITICAL = logging.CRITICAL 36 CRITICAL = logging.CRITICAL
35 37
36 levelnames = { 38 levelnames = {
@@ -42,7 +44,9 @@ class BBLogFormatter(logging.Formatter):
42 PLAIN : '', 44 PLAIN : '',
43 VERBNOTE: 'NOTE', 45 VERBNOTE: 'NOTE',
44 WARNING : 'WARNING', 46 WARNING : 'WARNING',
47 WARNONCE : 'WARNING',
45 ERROR : 'ERROR', 48 ERROR : 'ERROR',
49 ERRORONCE : 'ERROR',
46 CRITICAL: 'ERROR', 50 CRITICAL: 'ERROR',
47 } 51 }
48 52
@@ -58,7 +62,9 @@ class BBLogFormatter(logging.Formatter):
58 PLAIN : BASECOLOR, 62 PLAIN : BASECOLOR,
59 VERBNOTE: BASECOLOR, 63 VERBNOTE: BASECOLOR,
60 WARNING : YELLOW, 64 WARNING : YELLOW,
65 WARNONCE : YELLOW,
61 ERROR : RED, 66 ERROR : RED,
67 ERRORONCE : RED,
62 CRITICAL: RED, 68 CRITICAL: RED,
63 } 69 }
64 70
@@ -83,10 +89,6 @@ class BBLogFormatter(logging.Formatter):
83 msg = logging.Formatter.format(self, record) 89 msg = logging.Formatter.format(self, record)
84 if hasattr(record, 'bb_exc_formatted'): 90 if hasattr(record, 'bb_exc_formatted'):
85 msg += '\n' + ''.join(record.bb_exc_formatted) 91 msg += '\n' + ''.join(record.bb_exc_formatted)
86 elif hasattr(record, 'bb_exc_info'):
87 etype, value, tb = record.bb_exc_info
88 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
89 msg += '\n' + ''.join(formatted)
90 return msg 92 return msg
91 93
92 def colorize(self, record): 94 def colorize(self, record):
@@ -121,6 +123,22 @@ class BBLogFilter(object):
121 return True 123 return True
122 return False 124 return False
123 125
126class LogFilterShowOnce(logging.Filter):
127 def __init__(self):
128 self.seen_warnings = set()
129 self.seen_errors = set()
130
131 def filter(self, record):
132 if record.levelno == bb.msg.BBLogFormatter.WARNONCE:
133 if record.msg in self.seen_warnings:
134 return False
135 self.seen_warnings.add(record.msg)
136 if record.levelno == bb.msg.BBLogFormatter.ERRORONCE:
137 if record.msg in self.seen_errors:
138 return False
139 self.seen_errors.add(record.msg)
140 return True
141
124class LogFilterGEQLevel(logging.Filter): 142class LogFilterGEQLevel(logging.Filter):
125 def __init__(self, level): 143 def __init__(self, level):
126 self.strlevel = str(level) 144 self.strlevel = str(level)
@@ -206,8 +224,9 @@ def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers
206 """Standalone logger creation function""" 224 """Standalone logger creation function"""
207 logger = logging.getLogger(name) 225 logger = logging.getLogger(name)
208 console = logging.StreamHandler(output) 226 console = logging.StreamHandler(output)
227 console.addFilter(bb.msg.LogFilterShowOnce())
209 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") 228 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
210 if color == 'always' or (color == 'auto' and output.isatty()): 229 if color == 'always' or (color == 'auto' and output.isatty() and os.environ.get('NO_COLOR', '') == ''):
211 format.enable_color() 230 format.enable_color()
212 console.setFormatter(format) 231 console.setFormatter(format)
213 if preserve_handlers: 232 if preserve_handlers:
@@ -293,10 +312,17 @@ def setLoggingConfig(defaultconfig, userconfigfile=None):
293 312
294 # Convert all level parameters to integers in case users want to use the 313 # Convert all level parameters to integers in case users want to use the
295 # bitbake defined level names 314 # bitbake defined level names
296 for h in logconfig["handlers"].values(): 315 for name, h in logconfig["handlers"].items():
297 if "level" in h: 316 if "level" in h:
298 h["level"] = bb.msg.stringToLevel(h["level"]) 317 h["level"] = bb.msg.stringToLevel(h["level"])
299 318
319 # Every handler needs its own instance of the once filter.
320 once_filter_name = name + ".showonceFilter"
321 logconfig.setdefault("filters", {})[once_filter_name] = {
322 "()": "bb.msg.LogFilterShowOnce",
323 }
324 h.setdefault("filters", []).append(once_filter_name)
325
300 for l in logconfig["loggers"].values(): 326 for l in logconfig["loggers"].values():
301 if "level" in l: 327 if "level" in l:
302 l["level"] = bb.msg.stringToLevel(l["level"]) 328 l["level"] = bb.msg.stringToLevel(l["level"])
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index c01807ba87..d428d8a4b4 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,32 @@ class SkipPackage(SkipRecipe):
49__mtime_cache = {} 49__mtime_cache = {}
50def cached_mtime(f): 50def cached_mtime(f):
51 if f not in __mtime_cache: 51 if f not in __mtime_cache:
52 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 52 res = os.stat(f)
53 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
53 return __mtime_cache[f] 54 return __mtime_cache[f]
54 55
55def cached_mtime_noerror(f): 56def cached_mtime_noerror(f):
56 if f not in __mtime_cache: 57 if f not in __mtime_cache:
57 try: 58 try:
58 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 59 res = os.stat(f)
60 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
59 except OSError: 61 except OSError:
60 return 0 62 return 0
61 return __mtime_cache[f] 63 return __mtime_cache[f]
62 64
65def check_mtime(f, mtime):
66 try:
67 res = os.stat(f)
68 current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
69 __mtime_cache[f] = current_mtime
70 except OSError:
71 current_mtime = 0
72 return current_mtime == mtime
73
63def update_mtime(f): 74def update_mtime(f):
64 try: 75 try:
65 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 76 res = os.stat(f)
77 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
66 except OSError: 78 except OSError:
67 if f in __mtime_cache: 79 if f in __mtime_cache:
68 del __mtime_cache[f] 80 del __mtime_cache[f]
@@ -99,12 +111,12 @@ def supports(fn, data):
99 return 1 111 return 1
100 return 0 112 return 0
101 113
102def handle(fn, data, include = 0): 114def handle(fn, data, include=0, baseconfig=False):
103 """Call the handler that is appropriate for this file""" 115 """Call the handler that is appropriate for this file"""
104 for h in handlers: 116 for h in handlers:
105 if h['supports'](fn, data): 117 if h['supports'](fn, data):
106 with data.inchistory.include(fn): 118 with data.inchistory.include(fn):
107 return h['handle'](fn, data, include) 119 return h['handle'](fn, data, include, baseconfig)
108 raise ParseError("not a BitBake file", fn) 120 raise ParseError("not a BitBake file", fn)
109 121
110def init(fn, data): 122def init(fn, data):
@@ -113,6 +125,8 @@ def init(fn, data):
113 return h['init'](data) 125 return h['init'](data)
114 126
115def init_parser(d): 127def init_parser(d):
128 if hasattr(bb.parse, "siggen"):
129 bb.parse.siggen.exit()
116 bb.parse.siggen = bb.siggen.init(d) 130 bb.parse.siggen = bb.siggen.init(d)
117 131
118def resolve_file(fn, d): 132def resolve_file(fn, d):
@@ -162,4 +176,41 @@ def get_file_depends(d):
162 dep_files.append(os.path.abspath(fn)) 176 dep_files.append(os.path.abspath(fn))
163 return " ".join(dep_files) 177 return " ".join(dep_files)
164 178
179def vardeps(*varnames):
180 """
181 Function decorator that can be used to instruct the bitbake dependency
182 parsing to add a dependency on the specified variables names
183
184 Example:
185
186 @bb.parse.vardeps("FOO", "BAR")
187 def my_function():
188 ...
189
190 """
191 def inner(f):
192 if not hasattr(f, "bb_vardeps"):
193 f.bb_vardeps = set()
194 f.bb_vardeps |= set(varnames)
195 return f
196 return inner
197
198def vardepsexclude(*varnames):
199 """
200 Function decorator that can be used to instruct the bitbake dependency
201 parsing to ignore dependencies on the specified variable names in the code
202
203 Example:
204
205 @bb.parse.vardepsexclude("FOO", "BAR")
206 def my_function():
207 ...
208 """
209 def inner(f):
210 if not hasattr(f, "bb_vardepsexclude"):
211 f.bb_vardepsexclude = set()
212 f.bb_vardepsexclude |= set(varnames)
213 return f
214 return inner
215
165from bb.parse.parse_py import __version__, ConfHandler, BBHandler 216from bb.parse.parse_py import __version__, ConfHandler, BBHandler
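
The cache rework above keys file state on a (st_mtime_ns, st_size, st_ino) triple instead of the bare second-resolution ST_MTIME, so a rewrite within the same timestamp tick, or an atomic rename-over, still invalidates the entry. A sketch of the idea (not bitbake code; the file name and contents are illustrative):

    import os
    import tempfile

    def stat_triple(path):
        res = os.stat(path)
        # The same triple the new __mtime_cache entries store.
        return (res.st_mtime_ns, res.st_size, res.st_ino)

    with tempfile.TemporaryDirectory() as d:
        path = os.path.join(d, "local.conf")
        with open(path, "w") as f:
            f.write('BB_NUMBER_THREADS = "8"\n')
        before = stat_triple(path)
        # An atomic rename-over can preserve the size (and, on filesystems
        # with coarse timestamps, the mtime), but the inode differs, so the
        # cached triple still changes.
        tmp = path + ".tmp"
        with open(tmp, "w") as f:
            f.write('BB_NUMBER_THREADS = "4"\n')
        os.replace(tmp, path)
        assert stat_triple(path) != before

check_mtime() follows the same comparison while also refreshing the cache entry, and treats a file that fails to stat() as changed.
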
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 50a88f7da7..49a0788038 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -9,6 +9,7 @@
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11
12import sys
12import bb 13import bb
13from bb import methodpool 14from bb import methodpool
14from bb.parse import logger 15from bb.parse import logger
@@ -42,6 +43,21 @@ class IncludeNode(AstNode):
42 else: 43 else:
43 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False) 44 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
44 45
46class IncludeAllNode(AstNode):
47 def __init__(self, filename, lineno, what_file):
48 AstNode.__init__(self, filename, lineno)
49 self.what_file = what_file
50
51 def eval(self, data):
52 """
53 Include the file and evaluate the statements
54 """
55 s = data.expand(self.what_file)
56 logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)
57
58 for path in data.getVar("BBPATH").split(":"):
59 bb.parse.ConfHandler.include(self.filename, os.path.join(path, s), self.lineno, data, False)
60
45class ExportNode(AstNode): 61class ExportNode(AstNode):
46 def __init__(self, filename, lineno, var): 62 def __init__(self, filename, lineno, var):
47 AstNode.__init__(self, filename, lineno) 63 AstNode.__init__(self, filename, lineno)
@@ -130,9 +146,16 @@ class DataNode(AstNode):
130 else: 146 else:
131 val = groupd["value"] 147 val = groupd["value"]
132 148
149 if ":append" in key or ":remove" in key or ":prepend" in key:
150 if op in ["append", "prepend", "postdot", "predot", "ques"]:
151 bb.warn(key + " " + groupd[op] + " is not a recommended operator combination, please replace it.")
152
133 flag = None 153 flag = None
134 if 'flag' in groupd and groupd['flag'] is not None: 154 if 'flag' in groupd and groupd['flag'] is not None:
135 flag = groupd['flag'] 155 if groupd["lazyques"]:
156 flag = "_defaultval_flag_"+groupd['flag']
157 else:
158 flag = groupd['flag']
136 elif groupd["lazyques"]: 159 elif groupd["lazyques"]:
137 flag = "_defaultval" 160 flag = "_defaultval"
138 161
@@ -145,7 +168,7 @@ class DataNode(AstNode):
145 data.setVar(key, val, parsing=True, **loginfo) 168 data.setVar(key, val, parsing=True, **loginfo)
146 169
147class MethodNode(AstNode): 170class MethodNode(AstNode):
148 tr_tbl = str.maketrans('/.+-@%&', '_______') 171 tr_tbl = str.maketrans('/.+-@%&~', '________')
149 172
150 def __init__(self, filename, lineno, func_name, body, python, fakeroot): 173 def __init__(self, filename, lineno, func_name, body, python, fakeroot):
151 AstNode.__init__(self, filename, lineno) 174 AstNode.__init__(self, filename, lineno)
@@ -206,10 +229,12 @@ class ExportFuncsNode(AstNode):
206 229
207 def eval(self, data): 230 def eval(self, data):
208 231
232 sentinel = " # Export function set\n"
209 for func in self.n: 233 for func in self.n:
210 calledfunc = self.classname + "_" + func 234 calledfunc = self.classname + "_" + func
211 235
212 if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False): 236 basevar = data.getVar(func, False)
237 if basevar and sentinel not in basevar:
213 continue 238 continue
214 239
215 if data.getVar(func, False): 240 if data.getVar(func, False):
@@ -219,29 +244,30 @@ class ExportFuncsNode(AstNode):
219 for flag in [ "func", "python" ]: 244 for flag in [ "func", "python" ]:
220 if data.getVarFlag(calledfunc, flag, False): 245 if data.getVarFlag(calledfunc, flag, False):
221 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False)) 246 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
222 for flag in [ "dirs" ]: 247 for flag in ["dirs", "cleandirs", "fakeroot"]:
223 if data.getVarFlag(func, flag, False): 248 if data.getVarFlag(func, flag, False):
224 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False)) 249 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
225 data.setVarFlag(func, "filename", "autogenerated") 250 data.setVarFlag(func, "filename", "autogenerated")
226 data.setVarFlag(func, "lineno", 1) 251 data.setVarFlag(func, "lineno", 1)
227 252
228 if data.getVarFlag(calledfunc, "python", False): 253 if data.getVarFlag(calledfunc, "python", False):
229 data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True) 254 data.setVar(func, sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
230 else: 255 else:
231 if "-" in self.classname: 256 if "-" in self.classname:
232 bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc)) 257 bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
233 data.setVar(func, " " + calledfunc + "\n", parsing=True) 258 data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
234 data.setVarFlag(func, 'export_func', '1')
235 259
236class AddTaskNode(AstNode): 260class AddTaskNode(AstNode):
237 def __init__(self, filename, lineno, func, before, after): 261 def __init__(self, filename, lineno, tasks, before, after):
238 AstNode.__init__(self, filename, lineno) 262 AstNode.__init__(self, filename, lineno)
239 self.func = func 263 self.tasks = tasks
240 self.before = before 264 self.before = before
241 self.after = after 265 self.after = after
242 266
243 def eval(self, data): 267 def eval(self, data):
244 bb.build.addtask(self.func, self.before, self.after, data) 268 tasks = self.tasks.split()
269 for task in tasks:
270 bb.build.addtask(task, self.before, self.after, data)
245 271
246class DelTaskNode(AstNode): 272class DelTaskNode(AstNode):
247 def __init__(self, filename, lineno, tasks): 273 def __init__(self, filename, lineno, tasks):
@@ -265,6 +291,41 @@ class BBHandlerNode(AstNode):
265 data.setVarFlag(h, "handler", 1) 291 data.setVarFlag(h, "handler", 1)
266 data.setVar('__BBHANDLERS', bbhands) 292 data.setVar('__BBHANDLERS', bbhands)
267 293
294class PyLibNode(AstNode):
295 def __init__(self, filename, lineno, libdir, namespace):
296 AstNode.__init__(self, filename, lineno)
297 self.libdir = libdir
298 self.namespace = namespace
299
300 def eval(self, data):
301 global_mods = (data.getVar("BB_GLOBAL_PYMODULES") or "").split()
302 for m in global_mods:
303 if m not in bb.utils._context:
304 bb.utils._context[m] = __import__(m)
305
306 libdir = data.expand(self.libdir)
307 if libdir not in sys.path:
308 sys.path.append(libdir)
309 try:
310 bb.utils._context[self.namespace] = __import__(self.namespace)
311 toimport = getattr(bb.utils._context[self.namespace], "BBIMPORTS", [])
312 for i in toimport:
313 bb.utils._context[self.namespace] = __import__(self.namespace + "." + i)
314 mod = getattr(bb.utils._context[self.namespace], i)
315 fn = getattr(mod, "__file__")
316 funcs = {}
317 for f in dir(mod):
318 if f.startswith("_"):
319 continue
320 fcall = getattr(mod, f)
321 if not callable(fcall):
322 continue
323 funcs[f] = fcall
324 bb.codeparser.add_module_functions(fn, funcs, "%s.%s" % (self.namespace, i))
325
326 except AttributeError as e:
327 bb.error("Error importing OE modules: %s" % str(e))
328
268class InheritNode(AstNode): 329class InheritNode(AstNode):
269 def __init__(self, filename, lineno, classes): 330 def __init__(self, filename, lineno, classes):
270 AstNode.__init__(self, filename, lineno) 331 AstNode.__init__(self, filename, lineno)
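
PyLibNode backs the new addpylib conf directive: it appends a directory to sys.path, imports the named namespace, and walks the submodules listed in the namespace's BBIMPORTS attribute so their functions get registered with bb.codeparser. OE-Core's layer.conf uses it as:

    addpylib ${LAYERDIR}/lib oe

with the namespace package then declaring something like (submodule names illustrative):

    BBIMPORTS = ["data", "path", "utils"]
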
@@ -273,9 +334,68 @@ class InheritNode(AstNode):
273 def eval(self, data): 334 def eval(self, data):
274 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data) 335 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
275 336
337class InheritDeferredNode(AstNode):
338 def __init__(self, filename, lineno, classes):
339 AstNode.__init__(self, filename, lineno)
340 self.inherit = (classes, filename, lineno)
341
342 def eval(self, data):
343 bb.parse.BBHandler.inherit_defer(*self.inherit, data)
344
345class AddFragmentsNode(AstNode):
346 def __init__(self, filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable):
347 AstNode.__init__(self, filename, lineno)
348 self.fragments_path_prefix = fragments_path_prefix
349 self.fragments_variable = fragments_variable
350 self.flagged_variables_list_variable = flagged_variables_list_variable
351 self.builtin_fragments_variable = builtin_fragments_variable
352
353 def eval(self, data):
354 # No need to use mark_dependency since we would only match a fragment
355 # from a specific layer and there can only be a single layer with a
356 # given namespace.
357 def find_fragment(layers, layerid, full_fragment_name):
358 for layerpath in layers.split():
359 candidate_fragment_path = os.path.join(layerpath, full_fragment_name)
360 if os.path.exists(candidate_fragment_path) and bb.utils.get_file_layer(candidate_fragment_path, data) == layerid:
361 return candidate_fragment_path
362 return None
363
364 def check_and_set_builtin_fragment(fragment, data, builtin_fragments):
365 prefix, value = fragment.split('/', 1)
366 if prefix in builtin_fragments.keys():
367 data.setVar(builtin_fragments[prefix], value)
368 return True
369 return False
370
371 fragments = data.getVar(self.fragments_variable)
372 layers = data.getVar('BBLAYERS')
373 flagged_variables = data.getVar(self.flagged_variables_list_variable).split()
374 builtin_fragments = {f[0]:f[1] for f in [f.split(':') for f in data.getVar(self.builtin_fragments_variable).split()] }
375
376 if not fragments:
377 return
378 for f in fragments.split():
379 if check_and_set_builtin_fragment(f, data, builtin_fragments):
380 continue
381 layerid, fragment_name = f.split('/', 1)
382 full_fragment_name = data.expand("{}/{}.conf".format(self.fragments_path_prefix, fragment_name))
383 fragment_path = find_fragment(layers, layerid, full_fragment_name)
384 if fragment_path:
385 bb.parse.ConfHandler.include(self.filename, fragment_path, self.lineno, data, "include fragment")
386 for flagged_var in flagged_variables:
387 val = data.getVar(flagged_var)
388 data.setVarFlag(flagged_var, f, val)
389 data.setVar(flagged_var, None)
390 else:
391 bb.error("Could not find fragment {} in enabled layers: {}".format(f, layers))
392
276def handleInclude(statements, filename, lineno, m, force): 393def handleInclude(statements, filename, lineno, m, force):
277 statements.append(IncludeNode(filename, lineno, m.group(1), force)) 394 statements.append(IncludeNode(filename, lineno, m.group(1), force))
278 395
396def handleIncludeAll(statements, filename, lineno, m):
397 statements.append(IncludeAllNode(filename, lineno, m.group(1)))
398
279def handleExport(statements, filename, lineno, m): 399def handleExport(statements, filename, lineno, m):
280 statements.append(ExportNode(filename, lineno, m.group(1))) 400 statements.append(ExportNode(filename, lineno, m.group(1)))
281 401
@@ -297,38 +417,81 @@ def handlePythonMethod(statements, filename, lineno, funcname, modulename, body)
297def handleExportFuncs(statements, filename, lineno, m, classname): 417def handleExportFuncs(statements, filename, lineno, m, classname):
298 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname)) 418 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
299 419
300def handleAddTask(statements, filename, lineno, m): 420def handleAddTask(statements, filename, lineno, tasks, before, after):
301 func = m.group("func") 421 statements.append(AddTaskNode(filename, lineno, tasks, before, after))
302 before = m.group("before")
303 after = m.group("after")
304 if func is None:
305 return
306
307 statements.append(AddTaskNode(filename, lineno, func, before, after))
308 422
309def handleDelTask(statements, filename, lineno, m): 423def handleDelTask(statements, filename, lineno, tasks):
310 func = m.group(1) 424 statements.append(DelTaskNode(filename, lineno, tasks))
311 if func is None:
312 return
313
314 statements.append(DelTaskNode(filename, lineno, func))
315 425
316def handleBBHandlers(statements, filename, lineno, m): 426def handleBBHandlers(statements, filename, lineno, m):
317 statements.append(BBHandlerNode(filename, lineno, m.group(1))) 427 statements.append(BBHandlerNode(filename, lineno, m.group(1)))
318 428
429def handlePyLib(statements, filename, lineno, m):
430 statements.append(PyLibNode(filename, lineno, m.group(1), m.group(2)))
431
319def handleInherit(statements, filename, lineno, m): 432def handleInherit(statements, filename, lineno, m):
320 classes = m.group(1) 433 classes = m.group(1)
321 statements.append(InheritNode(filename, lineno, classes)) 434 statements.append(InheritNode(filename, lineno, classes))
322 435
436def handleInheritDeferred(statements, filename, lineno, m):
437 classes = m.group(1)
438 statements.append(InheritDeferredNode(filename, lineno, classes))
439
440def handleAddFragments(statements, filename, lineno, m):
441 fragments_path_prefix = m.group(1)
442 fragments_variable = m.group(2)
443 flagged_variables_list_variable = m.group(3)
444 builtin_fragments_variable = m.group(4)
445 statements.append(AddFragmentsNode(filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable))
446
323def runAnonFuncs(d): 447def runAnonFuncs(d):
324 code = [] 448 code = []
325 for funcname in d.getVar("__BBANONFUNCS", False) or []: 449 for funcname in d.getVar("__BBANONFUNCS", False) or []:
326 code.append("%s(d)" % funcname) 450 code.append("%s(d)" % funcname)
327 bb.utils.better_exec("\n".join(code), {"d": d}) 451 bb.utils.better_exec("\n".join(code), {"d": d})
328 452
453# Handle recipe level PREFERRED_PROVIDERs
454def handleVirtRecipeProviders(tasklist, d):
455 depends = (d.getVar("DEPENDS") or "").split()
456 virtprovs = (d.getVar("BB_RECIPE_VIRTUAL_PROVIDERS") or "").split()
457 newdeps = []
458 for dep in depends:
459 if dep in virtprovs:
460 newdep = d.getVar("PREFERRED_PROVIDER_" + dep)
461 if not newdep:
462 bb.fatal("Error, recipe virtual provider PREFERRED_PROVIDER_%s not set" % dep)
463 newdeps.append(newdep)
464 else:
465 newdeps.append(dep)
466 d.setVar("DEPENDS", " ".join(newdeps))
467 for task in tasklist:
468 taskdeps = (d.getVarFlag(task, "depends") or "").split()
469 remapped = []
470 for entry in taskdeps:
471 r, t = entry.split(":")
472 if r in virtprovs:
473 r = d.getVar("PREFERRED_PROVIDER_" + r)
474 remapped.append("%s:%s" % (r, t))
475 d.setVarFlag(task, "depends", " ".join(remapped))
476
329def finalize(fn, d, variant = None): 477def finalize(fn, d, variant = None):
330 saved_handlers = bb.event.get_handlers().copy() 478 saved_handlers = bb.event.get_handlers().copy()
331 try: 479 try:
480 # Found renamed variables. Exit immediately
481 if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
482 raise bb.BBHandledException()
483
484 inherits = [x[0] for x in (d.getVar('__BBDEFINHERITS', False) or [('',)])]
485 bb.event.fire(bb.event.RecipePreDeferredInherits(fn, inherits), d)
486
487 while True:
488 inherits = d.getVar('__BBDEFINHERITS', False) or []
489 if not inherits:
490 break
491 inherit, filename, lineno = inherits.pop(0)
492 d.setVar('__BBDEFINHERITS', inherits)
493 bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
494
332 for var in d.getVar('__BBHANDLERS', False) or []: 495 for var in d.getVar('__BBHANDLERS', False) or []:
333 # try to add the handler 496 # try to add the handler
334 handlerfn = d.getVarFlag(var, "filename", False) 497 handlerfn = d.getVarFlag(var, "filename", False)
@@ -347,12 +510,16 @@ def finalize(fn, d, variant = None):
347 510
348 tasklist = d.getVar('__BBTASKS', False) or [] 511 tasklist = d.getVar('__BBTASKS', False) or []
349 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d) 512 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
513 handleVirtRecipeProviders(tasklist, d)
350 bb.build.add_tasks(tasklist, d) 514 bb.build.add_tasks(tasklist, d)
351 515
352 bb.parse.siggen.finalise(fn, d, variant) 516 bb.parse.siggen.finalise(fn, d, variant)
353 517
354 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d)) 518 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
355 519
520 if d.getVar('__BBAUTOREV_SEEN') and d.getVar('__BBSRCREV_SEEN') and not d.getVar("__BBAUTOREV_ACTED_UPON"):
521 bb.fatal("AUTOREV/SRCPV set too late for the fetcher to work properly, please set the variables earlier in parsing. Erroring instead of later obtuse build failures.")
522
356 bb.event.fire(bb.event.RecipeParsed(fn), d) 523 bb.event.fire(bb.event.RecipeParsed(fn), d)
357 finally: 524 finally:
358 bb.event.set_handlers(saved_handlers) 525 bb.event.set_handlers(saved_handlers)
@@ -414,7 +581,7 @@ def multi_finalize(fn, d):
414 d.setVar("BBEXTENDVARIANT", variantmap[name]) 581 d.setVar("BBEXTENDVARIANT", variantmap[name])
415 else: 582 else:
416 d.setVar("PN", "%s-%s" % (pn, name)) 583 d.setVar("PN", "%s-%s" % (pn, name))
417 bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d) 584 bb.parse.BBHandler.inherit_defer(extendedmap[name], fn, 0, d)
418 585
419 safe_d.setVar("BBCLASSEXTEND", extended) 586 safe_d.setVar("BBCLASSEXTEND", extended)
420 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) 587 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
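
With AddTaskNode now taking a whitespace-separated task list (and the before/after splitting moved into BBHandler's feeder(), below), one addtask line can register several tasks that share the same ordering constraints. An illustrative metadata line (the task names are examples):

    addtask do_deploy_extra do_stage_extra after do_install before do_build

Each listed task receives the same before/after relationships via the per-task bb.build.addtask() calls in AddTaskNode.eval().
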
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index f8988b8631..008fec2308 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -19,14 +19,12 @@ from . import ConfHandler
19from .. import resolve_file, ast, logger, ParseError 19from .. import resolve_file, ast, logger, ParseError
20from .ConfHandler import include, init 20from .ConfHandler import include, init
21 21
22# For compatibility 22__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
23bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
24
25__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
26__inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) 23__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
24__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
27__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) 25__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
28__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") 26__addtask_regexp__ = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)")
29__deltask_regexp__ = re.compile(r"deltask\s+(.+)") 27__deltask_regexp__ = re.compile(r"deltask\s+([^#\n]+)(?P<comment>#.*|.*?)")
30__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" ) 28__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
31__def_regexp__ = re.compile(r"def\s+(\w+).*:" ) 29__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
32__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" ) 30__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
@@ -36,6 +34,7 @@ __infunc__ = []
36__inpython__ = False 34__inpython__ = False
37__body__ = [] 35__body__ = []
38__classname__ = "" 36__classname__ = ""
37__residue__ = []
39 38
40cached_statements = {} 39cached_statements = {}
41 40
@@ -43,31 +42,56 @@ def supports(fn, d):
43 """Return True if fn has a supported extension""" 42 """Return True if fn has a supported extension"""
44 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] 43 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
45 44
46def inherit(files, fn, lineno, d): 45def inherit_defer(expression, fn, lineno, d):
46 inherit = (expression, fn, lineno)
47 inherits = d.getVar('__BBDEFINHERITS', False) or []
48 inherits.append(inherit)
49 d.setVar('__BBDEFINHERITS', inherits)
50
51def inherit(files, fn, lineno, d, deferred=False):
47 __inherit_cache = d.getVar('__inherit_cache', False) or [] 52 __inherit_cache = d.getVar('__inherit_cache', False) or []
53 #if "${" in files and not deferred:
54 # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
48 files = d.expand(files).split() 55 files = d.expand(files).split()
49 for file in files: 56 for file in files:
50 if not os.path.isabs(file) and not file.endswith(".bbclass"): 57 defer = (d.getVar("BB_DEFER_BBCLASSES") or "").split()
51 file = os.path.join('classes', '%s.bbclass' % file) 58 if not deferred and file in defer:
52 59 inherit_defer(file, fn, lineno, d)
53 if not os.path.isabs(file): 60 continue
54 bbpath = d.getVar("BBPATH") 61 classtype = d.getVar("__bbclasstype", False)
55 abs_fn, attempts = bb.utils.which(bbpath, file, history=True) 62 origfile = file
56 for af in attempts: 63 for t in ["classes-" + classtype, "classes"]:
57 if af != abs_fn: 64 file = origfile
58 bb.parse.mark_dependency(d, af) 65 if not os.path.isabs(file) and not file.endswith(".bbclass"):
59 if abs_fn: 66 file = os.path.join(t, '%s.bbclass' % file)
60 file = abs_fn 67
68 if not os.path.isabs(file):
69 bbpath = d.getVar("BBPATH")
70 abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
71 for af in attempts:
72 if af != abs_fn:
73 bb.parse.mark_dependency(d, af)
74 if abs_fn:
75 file = abs_fn
76
77 if os.path.exists(file):
78 break
79
80 if not os.path.exists(file):
81 raise ParseError("Could not inherit file %s" % (file), fn, lineno)
61 82
62 if not file in __inherit_cache: 83 if not file in __inherit_cache:
63 logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno)) 84 logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
64 __inherit_cache.append( file ) 85 __inherit_cache.append( file )
65 d.setVar('__inherit_cache', __inherit_cache) 86 d.setVar('__inherit_cache', __inherit_cache)
66 include(fn, file, lineno, d, "inherit") 87 try:
88 bb.parse.handle(file, d, True)
89 except (IOError, OSError) as exc:
90 raise ParseError("Could not inherit file %s: %s" % (fn, exc.strerror), fn, lineno)
67 __inherit_cache = d.getVar('__inherit_cache', False) or [] 91 __inherit_cache = d.getVar('__inherit_cache', False) or []
68 92
69def get_statements(filename, absolute_filename, base_name): 93def get_statements(filename, absolute_filename, base_name):
70 global cached_statements 94 global cached_statements, __residue__, __body__
71 95
72 try: 96 try:
73 return cached_statements[absolute_filename] 97 return cached_statements[absolute_filename]
@@ -87,12 +111,17 @@ def get_statements(filename, absolute_filename, base_name):
87 # add a blank line to close out any python definition 111 # add a blank line to close out any python definition
88 feeder(lineno, "", filename, base_name, statements, eof=True) 112 feeder(lineno, "", filename, base_name, statements, eof=True)
89 113
114 if __residue__:
115 raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno)
116 if __body__:
117 raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno)
118
90 if filename.endswith(".bbclass") or filename.endswith(".inc"): 119 if filename.endswith(".bbclass") or filename.endswith(".inc"):
91 cached_statements[absolute_filename] = statements 120 cached_statements[absolute_filename] = statements
92 return statements 121 return statements
93 122
94def handle(fn, d, include): 123def handle(fn, d, include, baseconfig=False):
95 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__ 124 global __infunc__, __body__, __residue__, __classname__
96 __body__ = [] 125 __body__ = []
97 __infunc__ = [] 126 __infunc__ = []
98 __classname__ = "" 127 __classname__ = ""
@@ -144,7 +173,7 @@ def handle(fn, d, include):
144 return d 173 return d
145 174
146def feeder(lineno, s, fn, root, statements, eof=False): 175def feeder(lineno, s, fn, root, statements, eof=False):
147 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__ 176 global __inpython__, __infunc__, __body__, __residue__, __classname__
148 177
149 # Check tabs in python functions: 178 # Check tabs in python functions:
150 # - def py_funcname(): covered by __inpython__ 179 # - def py_funcname(): covered by __inpython__
@@ -181,10 +210,10 @@ def feeder(lineno, s, fn, root, statements, eof=False):
181 210
182 if s and s[0] == '#': 211 if s and s[0] == '#':
183 if len(__residue__) != 0 and __residue__[0][0] != "#": 212 if len(__residue__) != 0 and __residue__[0][0] != "#":
184 bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s)) 213 bb.fatal("There is a comment on line %s of file %s:\n'''\n%s\n'''\nwhich is in the middle of a multiline expression. This syntax is invalid, please correct it." % (lineno, fn, s))
185 214
186 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"): 215 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
187 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) 216 bb.fatal("There is a confusing multiline partially commented expression on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (lineno - len(__residue__), fn, "\n".join(__residue__)))
188 217
189 if s and s[-1] == '\\': 218 if s and s[-1] == '\\':
190 __residue__.append(s[:-1]) 219 __residue__.append(s[:-1])
@@ -220,29 +249,38 @@ def feeder(lineno, s, fn, root, statements, eof=False):
220 249
221 m = __addtask_regexp__.match(s) 250 m = __addtask_regexp__.match(s)
222 if m: 251 if m:
223 if len(m.group().split()) == 2: 252 after = ""
224 # Check and warn for "addtask task1 task2" 253 before = ""
225 m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s) 254
226 if m2 and m2.group('ignores'): 255 # This code splits on 'before' and 'after' instead of on whitespace so we can defer
227 logger.warning('addtask ignored: "%s"' % m2.group('ignores')) 256 # evaluation to as late as possible.
228 257 tasks = m.group(1).split(" before ")[0].split(" after ")[0]
229 # Check and warn for "addtask task1 before task2 before task3", the 258
230 # similar to "after" 259 for exp in m.group(1).split(" before "):
231 taskexpression = s.split() 260 exp2 = exp.split(" after ")
232 for word in ('before', 'after'): 261 if len(exp2) > 1:
233 if taskexpression.count(word) > 1: 262 after = after + " ".join(exp2[1:])
234 logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
235 263
236 # Check and warn for having task with exprssion as part of task name 264 for exp in m.group(1).split(" after "):
265 exp2 = exp.split(" before ")
266 if len(exp2) > 1:
267 before = before + " ".join(exp2[1:])
268
269 # Check and warn for having task with a keyword as part of task name
270 taskexpression = s.split()
237 for te in taskexpression: 271 for te in taskexpression:
238 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ): 272 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ):
239 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn) 273 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn)
240 ast.handleAddTask(statements, fn, lineno, m) 274
275 if tasks is not None:
276 ast.handleAddTask(statements, fn, lineno, tasks, before, after)
241 return 277 return
242 278
243 m = __deltask_regexp__.match(s) 279 m = __deltask_regexp__.match(s)
244 if m: 280 if m:
245 ast.handleDelTask(statements, fn, lineno, m) 281 task = m.group(1)
282 if task is not None:
283 ast.handleDelTask(statements, fn, lineno, task)
246 return 284 return
247 285
248 m = __addhandler_regexp__.match(s) 286 m = __addhandler_regexp__.match(s)
@@ -255,7 +293,12 @@ def feeder(lineno, s, fn, root, statements, eof=False):
255 ast.handleInherit(statements, fn, lineno, m) 293 ast.handleInherit(statements, fn, lineno, m)
256 return 294 return
257 295
258 return ConfHandler.feeder(lineno, s, fn, statements) 296 m = __inherit_def_regexp__.match(s)
297 if m:
298 ast.handleInheritDeferred(statements, fn, lineno, m)
299 return
300
301 return ConfHandler.feeder(lineno, s, fn, statements, conffile=False)
259 302
260# Add us to the handlers list 303# Add us to the handlers list
261from .. import handlers 304from .. import handlers
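
inherit_defer queues the raw expression on __BBDEFINHERITS and only expands it when ast.finalize() drains the list, after the rest of the recipe has parsed, so conditional inherits see final variable values. An illustrative recipe line (the class names are examples; bb.utils.contains is the real helper):

    inherit_defer ${@bb.utils.contains('PACKAGECONFIG', 'gui', 'gtk-icon-cache', '', d)}

Classes named in BB_DEFER_BBCLASSES are deferred automatically even when written as a plain inherit, per the check added to inherit() above.
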
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index f171c5c932..9ddbae123d 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,10 +20,10 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
20__config_regexp__ = re.compile( r""" 20__config_regexp__ = re.compile( r"""
21 ^ 21 ^
22 (?P<exp>export\s+)? 22 (?P<exp>export\s+)?
23 (?P<var>[a-zA-Z0-9\-_+.${}/~]+?) 23 (?P<var>[a-zA-Z0-9\-_+.${}/~:]*?)
24 (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])? 24 (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@/]*)\])?
25 25
26 \s* ( 26 (?P<whitespace>\s*) (
27 (?P<colon>:=) | 27 (?P<colon>:=) |
28 (?P<lazyques>\?\?=) | 28 (?P<lazyques>\?\?=) |
29 (?P<ques>\?=) | 29 (?P<ques>\?=) |
@@ -32,7 +32,7 @@ __config_regexp__ = re.compile( r"""
32 (?P<predot>=\.) | 32 (?P<predot>=\.) |
33 (?P<postdot>\.=) | 33 (?P<postdot>\.=) |
34 = 34 =
35 ) \s* 35 ) (?P<whitespace2>\s*)
36 36
37 (?!'[^']*'[^']*'$) 37 (?!'[^']*'[^']*'$)
38 (?!\"[^\"]*\"[^\"]*\"$) 38 (?!\"[^\"]*\"[^\"]*\"$)
@@ -43,15 +43,15 @@ __config_regexp__ = re.compile( r"""
43 """, re.X) 43 """, re.X)
44__include_regexp__ = re.compile( r"include\s+(.+)" ) 44__include_regexp__ = re.compile( r"include\s+(.+)" )
45__require_regexp__ = re.compile( r"require\s+(.+)" ) 45__require_regexp__ = re.compile( r"require\s+(.+)" )
46__includeall_regexp__ = re.compile( r"include_all\s+(.+)" )
46__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 47__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
47__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 48__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
48__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" ) 49__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
50__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
51__addfragments_regexp__ = re.compile(r"addfragments\s+(.+)\s+(.+)\s+(.+)\s+(.+)" )
49 52
50def init(data): 53def init(data):
51 topdir = data.getVar('TOPDIR', False) 54 return
52 if not topdir:
53 data.setVar('TOPDIR', os.getcwd())
54
55 55
56def supports(fn, d): 56def supports(fn, d):
57 return fn[-5:] == ".conf" 57 return fn[-5:] == ".conf"
@@ -105,12 +105,12 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
105# We have an issue where a UI might want to enforce particular settings such as 105# We have an issue where a UI might want to enforce particular settings such as
106# an empty DISTRO variable. If configuration files do something like assigning 106# an empty DISTRO variable. If configuration files do something like assigning
107# a weak default, it turns out to be very difficult to filter out these changes, 107# a weak default, it turns out to be very difficult to filter out these changes,
108# particularly when the weak default might appear halfway through parsing a chain 108# particularly when the weak default might appear halfway through parsing a chain
109# of configuration files. We therefore let the UIs hook into configuration file 109# of configuration files. We therefore let the UIs hook into configuration file
110# parsing. This turns out to be a hard problem to solve any other way. 110# parsing. This turns out to be a hard problem to solve any other way.
111confFilters = [] 111confFilters = []
112 112
113def handle(fn, data, include): 113def handle(fn, data, include, baseconfig=False):
114 init(data) 114 init(data)
115 115
116 if include == 0: 116 if include == 0:
@@ -128,21 +128,26 @@ def handle(fn, data, include):
128 s = f.readline() 128 s = f.readline()
129 if not s: 129 if not s:
130 break 130 break
131 origlineno = lineno
132 origline = s
131 w = s.strip() 133 w = s.strip()
132 # skip empty lines 134 # skip empty lines
133 if not w: 135 if not w:
134 continue 136 continue
135 s = s.rstrip() 137 s = s.rstrip()
136 while s[-1] == '\\': 138 while s[-1] == '\\':
137 s2 = f.readline().rstrip() 139 line = f.readline()
140 origline += line
141 s2 = line.rstrip()
138 lineno = lineno + 1 142 lineno = lineno + 1
139 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" : 143 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
140 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) 144 bb.fatal("There is a confusing multiline, partially commented expression starting on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (origlineno, fn, origline))
145
141 s = s[:-1] + s2 146 s = s[:-1] + s2
142 # skip comments 147 # skip comments
143 if s[0] == '#': 148 if s[0] == '#':
144 continue 149 continue
145 feeder(lineno, s, abs_fn, statements) 150 feeder(lineno, s, abs_fn, statements, baseconfig=baseconfig)
146 151
147 # DONE WITH PARSING... time to evaluate 152 # DONE WITH PARSING... time to evaluate
148 data.setVar('FILE', abs_fn) 153 data.setVar('FILE', abs_fn)
@@ -150,17 +155,21 @@ def handle(fn, data, include):
150 if oldfile: 155 if oldfile:
151 data.setVar('FILE', oldfile) 156 data.setVar('FILE', oldfile)
152 157
153 f.close()
154
155 for f in confFilters: 158 for f in confFilters:
156 f(fn, data) 159 f(fn, data)
157 160
158 return data 161 return data
159 162
160def feeder(lineno, s, fn, statements): 163# baseconfig is set for the bblayers/layer.conf cookerdata config parsing
164# The function is also used by BBHandler, conffile would be False
165def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
161 m = __config_regexp__.match(s) 166 m = __config_regexp__.match(s)
162 if m: 167 if m:
163 groupd = m.groupdict() 168 groupd = m.groupdict()
169 if groupd['var'] == "":
170 raise ParseError("Empty variable name in assignment: '%s'" % s, fn, lineno);
171 if not groupd['whitespace'] or not groupd['whitespace2']:
172 logger.warning("%s:%s has a lack of whitespace around the assignment: '%s'" % (fn, lineno, s))
164 ast.handleData(statements, fn, lineno, groupd) 173 ast.handleData(statements, fn, lineno, groupd)
165 return 174 return
166 175
@@ -174,6 +183,11 @@ def feeder(lineno, s, fn, statements):
174 ast.handleInclude(statements, fn, lineno, m, True) 183 ast.handleInclude(statements, fn, lineno, m, True)
175 return 184 return
176 185
186 m = __includeall_regexp__.match(s)
187 if m:
188 ast.handleIncludeAll(statements, fn, lineno, m)
189 return
190
177 m = __export_regexp__.match(s) 191 m = __export_regexp__.match(s)
178 if m: 192 if m:
179 ast.handleExport(statements, fn, lineno, m) 193 ast.handleExport(statements, fn, lineno, m)
@@ -189,6 +203,16 @@ def feeder(lineno, s, fn, statements):
189 ast.handleUnsetFlag(statements, fn, lineno, m) 203 ast.handleUnsetFlag(statements, fn, lineno, m)
190 return 204 return
191 205
206 m = __addpylib_regexp__.match(s)
207 if baseconfig and conffile and m:
208 ast.handlePyLib(statements, fn, lineno, m)
209 return
210
211 m = __addfragments_regexp__.match(s)
212 if m:
213 ast.handleAddFragments(statements, fn, lineno, m)
214 return
215
192 raise ParseError("unparsed line: '%s'" % s, fn, lineno); 216 raise ParseError("unparsed line: '%s'" % s, fn, lineno);
193 217
194# Add us to the handlers list 218# Add us to the handlers list
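
The tightened __config_regexp__ above now captures the whitespace on either side of the assignment operator, letting the parser reject empty variable names and warn when whitespace is missing. A simplified sketch of those two checks; the cut-down regex below only models plain '=' assignments, not the full operator set:

    import re

    # cut-down stand-in for __config_regexp__, keeping only the named
    # groups the new checks consume
    cfg_re = re.compile(r"^(?P<var>[a-zA-Z0-9\-_+.${}/~:]*?)"
                        r"(?P<whitespace>\s*)=(?P<whitespace2>\s*)(?P<value>.*)$")

    for line in ('FOO = "bar"', 'FOO="bar"'):
        groupd = cfg_re.match(line).groupdict()
        if groupd['var'] == "":
            raise SyntaxError("Empty variable name in assignment: '%s'" % line)
        if not groupd['whitespace'] or not groupd['whitespace2']:
            print("warning: lack of whitespace around the assignment: '%s'" % line)
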
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
deleted file mode 100644
index c6a209fb3f..0000000000
--- a/bitbake/lib/bb/persist_data.py
+++ /dev/null
@@ -1,301 +0,0 @@
1"""BitBake Persistent Data Store
2
3Used to store data in a central location such that other threads/tasks can
4access them at some future date. Acts as a convenience wrapper around sqlite,
5currently, providing a key/value store accessed by 'domain'.
6"""
7
8# Copyright (C) 2007 Richard Purdie
9# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13
14import collections
15import contextlib
16import functools
17import logging
18import os.path
19import sqlite3
20import sys
21import warnings
22from collections import Mapping
23
24sqlversion = sqlite3.sqlite_version_info
25if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
26 raise Exception("sqlite3 version 3.3.0 or later is required.")
27
28
29logger = logging.getLogger("BitBake.PersistData")
30
31@functools.total_ordering
32class SQLTable(collections.MutableMapping):
33 class _Decorators(object):
34 @staticmethod
35 def retry(*, reconnect=True):
36 """
37 Decorator that restarts a function if a database locked sqlite
38 exception occurs. If reconnect is True, the database connection
39 will be closed and reopened each time a failure occurs
40 """
41 def retry_wrapper(f):
42 def wrap_func(self, *args, **kwargs):
43 # Reconnect if necessary
44 if self.connection is None and reconnect:
45 self.reconnect()
46
47 count = 0
48 while True:
49 try:
50 return f(self, *args, **kwargs)
51 except sqlite3.OperationalError as exc:
52 if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
53 count = count + 1
54 if reconnect:
55 self.reconnect()
56 continue
57 raise
58 return wrap_func
59 return retry_wrapper
60
61 @staticmethod
62 def transaction(f):
63 """
64 Decorator that starts a database transaction and creates a database
65 cursor for performing queries. If no exception is thrown, the
66 database results are committed. If an exception occurs, the database
67 is rolled back. In all cases, the cursor is closed after the
68 function ends.
69
70 Note that the cursor is passed as an extra argument to the function
71 after `self` and before any of the normal arguments
72 """
73 def wrap_func(self, *args, **kwargs):
74 # Context manager will COMMIT the database on success,
75 # or ROLLBACK on an exception
76 with self.connection:
77 # Automatically close the cursor when done
78 with contextlib.closing(self.connection.cursor()) as cursor:
79 return f(self, cursor, *args, **kwargs)
80 return wrap_func
81
82 """Object representing a table/domain in the database"""
83 def __init__(self, cachefile, table):
84 self.cachefile = cachefile
85 self.table = table
86
87 self.connection = None
88 self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table)
89
90 @_Decorators.retry(reconnect=False)
91 @_Decorators.transaction
92 def _setup_database(self, cursor):
93 cursor.execute("pragma synchronous = off;")
94 # Enable WAL and keep the autocheckpoint length small (the default is
95 # usually 1000). Persistent caches are usually read-mostly, so keeping
96 # this short will keep readers running quickly
97 cursor.execute("pragma journal_mode = WAL;")
98 cursor.execute("pragma wal_autocheckpoint = 100;")
99
100 def reconnect(self):
101 if self.connection is not None:
102 self.connection.close()
103 self.connection = sqlite3.connect(self.cachefile, timeout=5)
104 self.connection.text_factory = str
105 self._setup_database()
106
107 @_Decorators.retry()
108 @_Decorators.transaction
109 def _execute_single(self, cursor, *query):
110 """
111 Executes a single query and discards the results. This correctly closes
112 the database cursor when finished
113 """
114 cursor.execute(*query)
115
116 @_Decorators.retry()
117 def _row_iter(self, f, *query):
118 """
119 Helper function that returns a row iterator. Each time __next__ is
120 called on the iterator, the provided function is evaluated to determine
121 the return value
122 """
123 class CursorIter(object):
124 def __init__(self, cursor):
125 self.cursor = cursor
126
127 def __iter__(self):
128 return self
129
130 def __next__(self):
131 row = self.cursor.fetchone()
132 if row is None:
133 self.cursor.close()
134 raise StopIteration
135 return f(row)
136
137 def __enter__(self):
138 return self
139
140 def __exit__(self, typ, value, traceback):
141 self.cursor.close()
142 return False
143
144 cursor = self.connection.cursor()
145 try:
146 cursor.execute(*query)
147 return CursorIter(cursor)
148 except:
149 cursor.close()
150
151 def __enter__(self):
152 self.connection.__enter__()
153 return self
154
155 def __exit__(self, *excinfo):
156 self.connection.__exit__(*excinfo)
157
158 @_Decorators.retry()
159 @_Decorators.transaction
160 def __getitem__(self, cursor, key):
161 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
162 row = cursor.fetchone()
163 if row is not None:
164 return row[1]
165 raise KeyError(key)
166
167 @_Decorators.retry()
168 @_Decorators.transaction
169 def __delitem__(self, cursor, key):
170 if key not in self:
171 raise KeyError(key)
172 cursor.execute("DELETE from %s where key=?;" % self.table, [key])
173
174 @_Decorators.retry()
175 @_Decorators.transaction
176 def __setitem__(self, cursor, key, value):
177 if not isinstance(key, str):
178 raise TypeError('Only string keys are supported')
179 elif not isinstance(value, str):
180 raise TypeError('Only string values are supported')
181
182 # Ensure the entire transaction (including SELECT) executes under write lock
183 cursor.execute("BEGIN EXCLUSIVE")
184
185 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
186 row = cursor.fetchone()
187 if row is not None:
188 cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key])
189 else:
190 cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value])
191
192 @_Decorators.retry()
193 @_Decorators.transaction
194 def __contains__(self, cursor, key):
195 cursor.execute('SELECT * from %s where key=?;' % self.table, [key])
196 return cursor.fetchone() is not None
197
198 @_Decorators.retry()
199 @_Decorators.transaction
200 def __len__(self, cursor):
201 cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
202 row = cursor.fetchone()
203 if row is not None:
204 return row[0]
205
206 def __iter__(self):
207 return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table)
208
209 def __lt__(self, other):
210 if not isinstance(other, Mapping):
211 raise NotImplemented
212
213 return len(self) < len(other)
214
215 def get_by_pattern(self, pattern):
216 return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" %
217 self.table, [pattern])
218
219 def values(self):
220 return list(self.itervalues())
221
222 def itervalues(self):
223 return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" %
224 self.table)
225
226 def items(self):
227 return list(self.iteritems())
228
229 def iteritems(self):
230 return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" %
231 self.table)
232
233 @_Decorators.retry()
234 @_Decorators.transaction
235 def clear(self, cursor):
236 cursor.execute("DELETE FROM %s;" % self.table)
237
238 def has_key(self, key):
239 return key in self
240
241
242class PersistData(object):
243 """Deprecated representation of the bitbake persistent data store"""
244 def __init__(self, d):
245 warnings.warn("Use of PersistData is deprecated. Please use "
246 "persist(domain, d) instead.",
247 category=DeprecationWarning,
248 stacklevel=2)
249
250 self.data = persist(d)
251 logger.debug("Using '%s' as the persistent data cache",
252 self.data.filename)
253
254 def addDomain(self, domain):
255 """
256 Add a domain (pending deprecation)
257 """
258 return self.data[domain]
259
260 def delDomain(self, domain):
261 """
262 Removes a domain and all the data it contains
263 """
264 del self.data[domain]
265
266 def getKeyValues(self, domain):
267 """
268 Return a list of key + value pairs for a domain
269 """
270 return list(self.data[domain].items())
271
272 def getValue(self, domain, key):
273 """
274 Return the value of a key for a domain
275 """
276 return self.data[domain][key]
277
278 def setValue(self, domain, key, value):
279 """
280 Sets the value of a key for a domain
281 """
282 self.data[domain][key] = value
283
284 def delValue(self, domain, key):
285 """
286 Deletes a key/value pair
287 """
288 del self.data[domain][key]
289
290def persist(domain, d):
291 """Convenience factory for SQLTable objects based upon metadata"""
292 import bb.utils
293 cachedir = (d.getVar("PERSISTENT_DIR") or
294 d.getVar("CACHE"))
295 if not cachedir:
296 logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
297 sys.exit(1)
298
299 bb.utils.mkdirhier(cachedir)
300 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
301 return SQLTable(cachefile, domain)
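
For context on the deletion above: the removed module exposed a dict-like, sqlite-backed table through the persist() factory. Typical (now removed) usage looked roughly like the sketch below, where d is assumed to be a datastore with PERSISTENT_DIR or CACHE set:

    import bb.persist_data

    table = bb.persist_data.persist("MY_DOMAIN", d)  # returns an SQLTable
    table["key"] = "value"   # __setitem__ -> INSERT or UPDATE under a write lock
    print(table["key"])      # __getitem__ -> SELECT, raises KeyError if absent
    del table["key"]         # __delitem__ -> DELETE
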
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
index 7c3995cce5..4c7b6d39df 100644
--- a/bitbake/lib/bb/process.py
+++ b/bitbake/lib/bb/process.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -60,7 +62,7 @@ class Popen(subprocess.Popen):
60 "close_fds": True, 62 "close_fds": True,
61 "preexec_fn": subprocess_setup, 63 "preexec_fn": subprocess_setup,
62 "stdout": subprocess.PIPE, 64 "stdout": subprocess.PIPE,
63 "stderr": subprocess.STDOUT, 65 "stderr": subprocess.PIPE,
64 "stdin": subprocess.PIPE, 66 "stdin": subprocess.PIPE,
65 "shell": False, 67 "shell": False,
66 } 68 }
@@ -142,7 +144,7 @@ def _logged_communicate(pipe, log, input, extrafiles):
142 while pipe.poll() is None: 144 while pipe.poll() is None:
143 read_all_pipes(log, rin, outdata, errdata) 145 read_all_pipes(log, rin, outdata, errdata)
144 146
145 # Pocess closed, drain all pipes... 147 # Process closed, drain all pipes...
146 read_all_pipes(log, rin, outdata, errdata) 148 read_all_pipes(log, rin, outdata, errdata)
147 finally: 149 finally:
148 log.flush() 150 log.flush()
@@ -181,5 +183,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
181 stderr = stderr.decode("utf-8") 183 stderr = stderr.decode("utf-8")
182 184
183 if pipe.returncode != 0: 185 if pipe.returncode != 0:
186 if log:
187 # Don't duplicate the output in the exception if logging it
188 raise ExecutionError(cmd, pipe.returncode, None, None)
184 raise ExecutionError(cmd, pipe.returncode, stdout, stderr) 189 raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
185 return stdout, stderr 190 return stdout, stderr
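
Two behavioural notes on the bb.process changes above: stderr is now captured on its own pipe instead of being merged into stdout, and ExecutionError carries no captured output when a log was supplied (the log already holds it). A small usage sketch:

    import bb.process

    try:
        stdout, stderr = bb.process.run(["sh", "-c", "echo out; echo err >&2; exit 1"])
    except bb.process.ExecutionError as exc:
        # with no log argument the captured output rides on the exception;
        # with log=... both exc.stdout and exc.stderr would be None
        print(exc.exitcode, exc.stdout, exc.stderr)
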
diff --git a/bitbake/lib/bb/progress.py b/bitbake/lib/bb/progress.py
index d051ba0198..9518be77fb 100644
--- a/bitbake/lib/bb/progress.py
+++ b/bitbake/lib/bb/progress.py
@@ -94,12 +94,15 @@ class LineFilterProgressHandler(ProgressHandler):
94 while True: 94 while True:
95 breakpos = self._linebuffer.find('\n') + 1 95 breakpos = self._linebuffer.find('\n') + 1
96 if breakpos == 0: 96 if breakpos == 0:
97 break 97 # for the case when the line with progress ends with only '\r'
98 breakpos = self._linebuffer.find('\r') + 1
99 if breakpos == 0:
100 break
98 line = self._linebuffer[:breakpos] 101 line = self._linebuffer[:breakpos]
99 self._linebuffer = self._linebuffer[breakpos:] 102 self._linebuffer = self._linebuffer[breakpos:]
100 # Drop any line feeds and anything that precedes them 103 # Drop any line feeds and anything that precedes them
101 lbreakpos = line.rfind('\r') + 1 104 lbreakpos = line.rfind('\r') + 1
102 if lbreakpos: 105 if lbreakpos and lbreakpos != breakpos:
103 line = line[lbreakpos:] 106 line = line[lbreakpos:]
104 if self.writeline(filter_color(line)): 107 if self.writeline(filter_color(line)):
105 super().write(line) 108 super().write(line)
@@ -145,7 +148,7 @@ class MultiStageProgressReporter:
145 for tasks made up of python code spread across multiple 148 for tasks made up of python code spread across multiple
146 classes / functions - the progress reporter object can 149 classes / functions - the progress reporter object can
147 be passed around or stored at the object level and calls 150 be passed around or stored at the object level and calls
148 to next_stage() and update() made whereever needed. 151 to next_stage() and update() made wherever needed.
149 """ 152 """
150 def __init__(self, d, stage_weights, debug=False): 153 def __init__(self, d, stage_weights, debug=False):
151 """ 154 """
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index b5a6cd0090..e11a4637d1 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -38,16 +38,17 @@ def findProviders(cfgData, dataCache, pkg_pn = None):
38 localdata = data.createCopy(cfgData) 38 localdata = data.createCopy(cfgData)
39 bb.data.expandKeys(localdata) 39 bb.data.expandKeys(localdata)
40 40
41 required = {}
41 preferred_versions = {} 42 preferred_versions = {}
42 latest_versions = {} 43 latest_versions = {}
43 44
44 for pn in pkg_pn: 45 for pn in pkg_pn:
45 (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn) 46 (last_ver, last_file, pref_ver, pref_file, req) = findBestProvider(pn, localdata, dataCache, pkg_pn)
46 preferred_versions[pn] = (pref_ver, pref_file) 47 preferred_versions[pn] = (pref_ver, pref_file)
47 latest_versions[pn] = (last_ver, last_file) 48 latest_versions[pn] = (last_ver, last_file)
49 required[pn] = req
48 50
49 return (latest_versions, preferred_versions) 51 return (latest_versions, preferred_versions, required)
50
51 52
52def allProviders(dataCache): 53def allProviders(dataCache):
53 """ 54 """
@@ -59,7 +60,6 @@ def allProviders(dataCache):
59 all_providers[pn].append((ver, fn)) 60 all_providers[pn].append((ver, fn))
60 return all_providers 61 return all_providers
61 62
62
63def sortPriorities(pn, dataCache, pkg_pn = None): 63def sortPriorities(pn, dataCache, pkg_pn = None):
64 """ 64 """
65 Reorder pkg_pn by file priority and default preference 65 Reorder pkg_pn by file priority and default preference
@@ -87,6 +87,21 @@ def sortPriorities(pn, dataCache, pkg_pn = None):
87 87
88 return tmp_pn 88 return tmp_pn
89 89
90def versionVariableMatch(cfgData, keyword, pn):
91 """
92 Return the value of the <keyword>_VERSION variable if set.
93 """
94
95 # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
96 # hence we do this manually rather than use OVERRIDES
97 ver = cfgData.getVar("%s_VERSION:pn-%s" % (keyword, pn))
98 if not ver:
99 ver = cfgData.getVar("%s_VERSION_%s" % (keyword, pn))
100 if not ver:
101 ver = cfgData.getVar("%s_VERSION" % keyword)
102
103 return ver
104
90def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): 105def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
91 """ 106 """
92 Check if the version pe,pv,pr is the preferred one. 107 Check if the version pe,pv,pr is the preferred one.
@@ -102,19 +117,28 @@ def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
102 117
103def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): 118def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
104 """ 119 """
105 Find the first provider in pkg_pn with a PREFERRED_VERSION set. 120 Find the first provider in pkg_pn with REQUIRED_VERSION or PREFERRED_VERSION set.
106 """ 121 """
107 122
108 preferred_file = None 123 preferred_file = None
109 preferred_ver = None 124 preferred_ver = None
125 required = False
110 126
111 # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot 127 required_v = versionVariableMatch(cfgData, "REQUIRED", pn)
112 # hence we do this manually rather than use OVERRIDES 128 preferred_v = versionVariableMatch(cfgData, "PREFERRED", pn)
113 preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn) 129
114 if not preferred_v: 130 itemstr = ""
115 preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn) 131 if item:
116 if not preferred_v: 132 itemstr = " (for item %s)" % item
117 preferred_v = cfgData.getVar("PREFERRED_VERSION") 133
134 if required_v is not None:
135 if preferred_v is not None:
136 logger.warning("REQUIRED_VERSION and PREFERRED_VERSION for package %s%s are both set using REQUIRED_VERSION %s", pn, itemstr, required_v)
137 else:
138 logger.debug("REQUIRED_VERSION is set for package %s%s", pn, itemstr)
139 # REQUIRED_VERSION always takes precedence over PREFERRED_VERSION
140 preferred_v = required_v
141 required = True
118 142
119 if preferred_v: 143 if preferred_v:
120 m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v) 144 m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
@@ -147,11 +171,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
147 pv_str = preferred_v 171 pv_str = preferred_v
148 if not (preferred_e is None): 172 if not (preferred_e is None):
149 pv_str = '%s:%s' % (preferred_e, pv_str) 173 pv_str = '%s:%s' % (preferred_e, pv_str)
150 itemstr = ""
151 if item:
152 itemstr = " (for item %s)" % item
153 if preferred_file is None: 174 if preferred_file is None:
154 logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr) 175 if not required:
176 logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr)
155 available_vers = [] 177 available_vers = []
156 for file_set in pkg_pn: 178 for file_set in pkg_pn:
157 for f in file_set: 179 for f in file_set:
@@ -163,12 +185,16 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
163 available_vers.append(ver_str) 185 available_vers.append(ver_str)
164 if available_vers: 186 if available_vers:
165 available_vers.sort() 187 available_vers.sort()
166 logger.warn("versions of %s available: %s", pn, ' '.join(available_vers)) 188 logger.warning("versions of %s available: %s", pn, ' '.join(available_vers))
189 if required:
190 logger.error("required version %s of %s not available%s", pv_str, pn, itemstr)
167 else: 191 else:
168 logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) 192 if required:
169 193 logger.debug("selecting %s as REQUIRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
170 return (preferred_ver, preferred_file) 194 else:
195 logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
171 196
197 return (preferred_ver, preferred_file, required)
172 198
173def findLatestProvider(pn, cfgData, dataCache, file_set): 199def findLatestProvider(pn, cfgData, dataCache, file_set):
174 """ 200 """
@@ -189,7 +215,6 @@ def findLatestProvider(pn, cfgData, dataCache, file_set):
189 215
190 return (latest, latest_f) 216 return (latest, latest_f)
191 217
192
193def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): 218def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
194 """ 219 """
195 If there is a PREFERRED_VERSION, find the highest-priority bbfile 220 If there is a PREFERRED_VERSION, find the highest-priority bbfile
@@ -198,17 +223,16 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
198 """ 223 """
199 224
200 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn) 225 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
201 # Find the highest priority provider with a PREFERRED_VERSION set 226 # Find the highest priority provider with a REQUIRED_VERSION or PREFERRED_VERSION set
202 (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item) 227 (preferred_ver, preferred_file, required) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
203 # Find the latest version of the highest priority provider 228 # Find the latest version of the highest priority provider
204 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0]) 229 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
205 230
206 if preferred_file is None: 231 if not required and preferred_file is None:
207 preferred_file = latest_f 232 preferred_file = latest_f
208 preferred_ver = latest 233 preferred_ver = latest
209 234
210 return (latest, latest_f, preferred_ver, preferred_file) 235 return (latest, latest_f, preferred_ver, preferred_file, required)
211
212 236
213def _filterProviders(providers, item, cfgData, dataCache): 237def _filterProviders(providers, item, cfgData, dataCache):
214 """ 238 """
@@ -234,10 +258,13 @@ def _filterProviders(providers, item, cfgData, dataCache):
234 258
235 logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys()))) 259 logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
236 260
237 # First add PREFERRED_VERSIONS 261 # First add REQUIRED_VERSIONS or PREFERRED_VERSIONS
238 for pn in sorted(pkg_pn): 262 for pn in sorted(pkg_pn):
239 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn) 263 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
240 preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item) 264 preferred_ver, preferred_file, required = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
265 if required and preferred_file is None:
266 return eligible
267 preferred_versions[pn] = (preferred_ver, preferred_file)
241 if preferred_versions[pn][1]: 268 if preferred_versions[pn][1]:
242 eligible.append(preferred_versions[pn][1]) 269 eligible.append(preferred_versions[pn][1])
243 270
@@ -249,7 +276,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
249 eligible.append(preferred_versions[pn][1]) 276 eligible.append(preferred_versions[pn][1])
250 277
251 if not eligible: 278 if not eligible:
252 logger.error("no eligible providers for %s", item)
253 return eligible 279 return eligible
254 280
255 # If pn == item, give it a slight default preference 281 # If pn == item, give it a slight default preference
@@ -266,7 +292,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
266 292
267 return eligible 293 return eligible
268 294
269
270def filterProviders(providers, item, cfgData, dataCache): 295def filterProviders(providers, item, cfgData, dataCache):
271 """ 296 """
272 Take a list of providers and filter/reorder according to the 297 Take a list of providers and filter/reorder according to the
@@ -371,8 +396,8 @@ def getRuntimeProviders(dataCache, rdepend):
371 return rproviders 396 return rproviders
372 397
373 # Only search dynamic packages if we can't find anything in other variables 398 # Only search dynamic packages if we can't find anything in other variables
374 for pattern in dataCache.packages_dynamic: 399 for pat_key in dataCache.packages_dynamic:
375 pattern = pattern.replace(r'+', r"\+") 400 pattern = pat_key.replace(r'+', r"\+")
376 if pattern in regexp_cache: 401 if pattern in regexp_cache:
377 regexp = regexp_cache[pattern] 402 regexp = regexp_cache[pattern]
378 else: 403 else:
@@ -383,12 +408,11 @@ def getRuntimeProviders(dataCache, rdepend):
383 raise 408 raise
384 regexp_cache[pattern] = regexp 409 regexp_cache[pattern] = regexp
385 if regexp.match(rdepend): 410 if regexp.match(rdepend):
386 rproviders += dataCache.packages_dynamic[pattern] 411 rproviders += dataCache.packages_dynamic[pat_key]
387 logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend) 412 logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend)
388 413
389 return rproviders 414 return rproviders
390 415
391
392def buildWorldTargetList(dataCache, task=None): 416def buildWorldTargetList(dataCache, task=None):
393 """ 417 """
394 Build package list for "bitbake world" 418 Build package list for "bitbake world"
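
The provider changes above introduce REQUIRED_VERSION alongside PREFERRED_VERSION, with REQUIRED_VERSION taking precedence (and a warning when both are set). A sketch of the lookup order implemented by versionVariableMatch(), using a plain dict in place of the datastore:

    def version_variable_match(getvar, keyword, pn):
        # pn may contain '_' (e.g. gcc-cross-x86_64), so the override form
        # is probed by hand rather than relying on OVERRIDES
        return (getvar("%s_VERSION:pn-%s" % (keyword, pn))
                or getvar("%s_VERSION_%s" % (keyword, pn))
                or getvar("%s_VERSION" % keyword))

    cfg = {
        "PREFERRED_VERSION:pn-gcc": "11.%",
        "REQUIRED_VERSION:pn-gcc": "12.1",
    }
    required_v = version_variable_match(cfg.get, "REQUIRED", "gcc")
    preferred_v = version_variable_match(cfg.get, "PREFERRED", "gcc")
    # REQUIRED_VERSION always wins over PREFERRED_VERSION
    print(required_v if required_v is not None else preferred_v)  # -> 12.1
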
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 54ef245a63..80f3d3282f 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -14,6 +14,7 @@ import os
14import sys 14import sys
15import stat 15import stat
16import errno 16import errno
17import itertools
17import logging 18import logging
18import re 19import re
19import bb 20import bb
@@ -24,6 +25,7 @@ import pickle
24from multiprocessing import Process 25from multiprocessing import Process
25import shlex 26import shlex
26import pprint 27import pprint
28import time
27 29
28bblogger = logging.getLogger("BitBake") 30bblogger = logging.getLogger("BitBake")
29logger = logging.getLogger("BitBake.RunQueue") 31logger = logging.getLogger("BitBake.RunQueue")
@@ -85,15 +87,19 @@ class RunQueueStats:
85 """ 87 """
86 Holds statistics on the tasks handled by the associated runQueue 88 Holds statistics on the tasks handled by the associated runQueue
87 """ 89 """
88 def __init__(self, total): 90 def __init__(self, total, setscene_total):
89 self.completed = 0 91 self.completed = 0
90 self.skipped = 0 92 self.skipped = 0
91 self.failed = 0 93 self.failed = 0
92 self.active = 0 94 self.active = 0
95 self.setscene_active = 0
96 self.setscene_covered = 0
97 self.setscene_notcovered = 0
98 self.setscene_total = setscene_total
93 self.total = total 99 self.total = total
94 100
95 def copy(self): 101 def copy(self):
96 obj = self.__class__(self.total) 102 obj = self.__class__(self.total, self.setscene_total)
97 obj.__dict__.update(self.__dict__) 103 obj.__dict__.update(self.__dict__)
98 return obj 104 return obj
99 105
@@ -112,10 +118,18 @@ class RunQueueStats:
112 def taskActive(self): 118 def taskActive(self):
113 self.active = self.active + 1 119 self.active = self.active + 1
114 120
121 def updateCovered(self, covered, notcovered):
122 self.setscene_covered = covered
123 self.setscene_notcovered = notcovered
124
125 def updateActiveSetscene(self, active):
126 self.setscene_active = active
127
115# These values indicate the next step due to be run in the 128# These values indicate the next step due to be run in the
116# runQueue state machine 129# runQueue state machine
117runQueuePrepare = 2 130runQueuePrepare = 2
118runQueueSceneInit = 3 131runQueueSceneInit = 3
132runQueueDumpSigs = 4
119runQueueRunning = 6 133runQueueRunning = 6
120runQueueFailed = 7 134runQueueFailed = 7
121runQueueCleanUp = 8 135runQueueCleanUp = 8
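
RunQueueStats now tracks the setscene phase alongside executed tasks, so UIs can report covered/notcovered setscene counts and the number of active setscene tasks. An illustrative (not prescriptive) use of the new counters:

    from bb.runqueue import RunQueueStats

    stats = RunQueueStats(100, 40)   # total tasks, setscene total
    stats.taskActive()               # one executing task
    stats.updateActiveSetscene(3)    # three setscene tasks in flight
    stats.updateCovered(25, 5)       # 25 covered, 5 not covered so far
    print(stats.setscene_covered, stats.setscene_notcovered, stats.setscene_total)
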
@@ -143,11 +157,82 @@ class RunQueueScheduler(object):
143 self.stamps = {} 157 self.stamps = {}
144 for tid in self.rqdata.runtaskentries: 158 for tid in self.rqdata.runtaskentries:
145 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 159 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
146 self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 160 self.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
147 if tid in self.rq.runq_buildable: 161 if tid in self.rq.runq_buildable:
148 self.buildable.append(tid) 162 self.buildable.add(tid)
149 163
150 self.rev_prio_map = None 164 self.rev_prio_map = None
165 self.is_pressure_usable()
166
167 def is_pressure_usable(self):
168 """
169 If monitoring pressure, return True if the pressure files can be opened and read. For example,
170 openSUSE /proc/pressure/* files have readable file permissions, but reading them returns the
171 error EOPNOTSUPP (Operation not supported).
172 """
173 if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
174 try:
175 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
176 open("/proc/pressure/io") as io_pressure_fds, \
177 open("/proc/pressure/memory") as memory_pressure_fds:
178
179 self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
180 self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
181 self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
182 self.prev_pressure_time = time.time()
183 self.check_pressure = True
184 except:
185 bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
186 self.check_pressure = False
187 else:
188 self.check_pressure = False
189
190 def exceeds_max_pressure(self):
191 """
192 Monitor the difference in total pressure at least once per second, if
193 BB_PRESSURE_MAX_{CPU|IO|MEMORY} are set, return True if above threshold.
194 """
195 if self.check_pressure:
196 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
197 open("/proc/pressure/io") as io_pressure_fds, \
198 open("/proc/pressure/memory") as memory_pressure_fds:
199 # extract "total" from /proc/pressure/{cpu|io}
200 curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
201 curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
202 curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
203 now = time.time()
204 tdiff = now - self.prev_pressure_time
205 psi_accumulation_interval = 1.0
206 cpu_pressure = (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff
207 io_pressure = (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff
208 memory_pressure = (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff
209 exceeds_cpu_pressure = self.rq.max_cpu_pressure and cpu_pressure > self.rq.max_cpu_pressure
210 exceeds_io_pressure = self.rq.max_io_pressure and io_pressure > self.rq.max_io_pressure
211 exceeds_memory_pressure = self.rq.max_memory_pressure and memory_pressure > self.rq.max_memory_pressure
212
213 if tdiff > psi_accumulation_interval:
214 self.prev_cpu_pressure = curr_cpu_pressure
215 self.prev_io_pressure = curr_io_pressure
216 self.prev_memory_pressure = curr_memory_pressure
217 self.prev_pressure_time = now
218
219 pressure_state = (exceeds_cpu_pressure, exceeds_io_pressure, exceeds_memory_pressure)
220 pressure_values = (round(cpu_pressure,1), self.rq.max_cpu_pressure, round(io_pressure,1), self.rq.max_io_pressure, round(memory_pressure,1), self.rq.max_memory_pressure)
221 if hasattr(self, "pressure_state") and pressure_state != self.pressure_state:
222 bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s (CPU: %s/%s, IO: %s/%s, Mem: %s/%s) - using %s/%s bitbake threads" % (pressure_state + pressure_values + (len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks)))
223 self.pressure_state = pressure_state
224 return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
225 elif self.rq.max_loadfactor:
226 limit = False
227 loadfactor = float(os.getloadavg()[0]) / os.cpu_count()
228 # bb.warn("Comparing %s to %s" % (loadfactor, self.rq.max_loadfactor))
229 if loadfactor > self.rq.max_loadfactor:
230 limit = True
231 if hasattr(self, "loadfactor_limit") and limit != self.loadfactor_limit:
232 bb.note("Load average limiting set to %s as load average: %s - using %s/%s bitbake threads" % (limit, loadfactor, len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks))
233 self.loadfactor_limit = limit
234 return limit
235 return False
151 236
152 def next_buildable_task(self): 237 def next_buildable_task(self):
153 """ 238 """
@@ -161,6 +246,12 @@ class RunQueueScheduler(object):
161 if not buildable: 246 if not buildable:
162 return None 247 return None
163 248
249 # Bitbake requires that at least one task be active. Only check for pressure if
250 # this is the case, otherwise the pressure limitation could result in no tasks
251 # being active and no new tasks started, thereby at times breaking the scheduler.
252 if self.rq.stats.active and self.exceeds_max_pressure():
253 return None
254
164 # Filter out tasks that have a max number of threads that have been exceeded 255 # Filter out tasks that have a max number of threads that have been exceeded
165 skip_buildable = {} 256 skip_buildable = {}
166 for running in self.rq.runq_running.difference(self.rq.runq_complete): 257 for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -191,11 +282,11 @@ class RunQueueScheduler(object):
191 best = None 282 best = None
192 bestprio = None 283 bestprio = None
193 for tid in buildable: 284 for tid in buildable:
194 taskname = taskname_from_tid(tid)
195 if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
196 continue
197 prio = self.rev_prio_map[tid] 285 prio = self.rev_prio_map[tid]
198 if bestprio is None or bestprio > prio: 286 if bestprio is None or bestprio > prio:
287 taskname = taskname_from_tid(tid)
288 if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
289 continue
199 stamp = self.stamps[tid] 290 stamp = self.stamps[tid]
200 if stamp in self.rq.build_stamps.values(): 291 if stamp in self.rq.build_stamps.values():
201 continue 292 continue
@@ -374,10 +465,9 @@ class RunQueueData:
374 self.rq = rq 465 self.rq = rq
375 self.warn_multi_bb = False 466 self.warn_multi_bb = False
376 467
377 self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or "" 468 self.multi_provider_allowed = (cfgData.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
378 self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split() 469 self.setscene_ignore_tasks = get_setscene_enforce_ignore_tasks(cfgData, targets)
379 self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData, targets) 470 self.setscene_ignore_tasks_checked = False
380 self.setscenewhitelist_checked = False
381 self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1") 471 self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1")
382 self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() 472 self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
383 473
@@ -387,7 +477,6 @@ class RunQueueData:
387 self.runtaskentries = {} 477 self.runtaskentries = {}
388 478
389 def runq_depends_names(self, ids): 479 def runq_depends_names(self, ids):
390 import re
391 ret = [] 480 ret = []
392 for id in ids: 481 for id in ids:
393 nam = os.path.basename(id) 482 nam = os.path.basename(id)
@@ -475,7 +564,7 @@ class RunQueueData:
475 msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends))) 564 msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends)))
476 msgs.append("\n") 565 msgs.append("\n")
477 if len(valid_chains) > 10: 566 if len(valid_chains) > 10:
478 msgs.append("Aborted dependency loops search after 10 matches.\n") 567 msgs.append("Halted dependency loops search after 10 matches.\n")
479 raise TooManyLoops 568 raise TooManyLoops
480 continue 569 continue
481 scan = False 570 scan = False
@@ -536,7 +625,7 @@ class RunQueueData:
536 next_points.append(revdep) 625 next_points.append(revdep)
537 task_done[revdep] = True 626 task_done[revdep] = True
538 endpoints = next_points 627 endpoints = next_points
539 if len(next_points) == 0: 628 if not next_points:
540 break 629 break
541 630
542 # Circular dependency sanity check 631 # Circular dependency sanity check
@@ -578,15 +667,18 @@ class RunQueueData:
578 667
579 found = False 668 found = False
580 for mc in self.taskData: 669 for mc in self.taskData:
581 if len(taskData[mc].taskentries) > 0: 670 if taskData[mc].taskentries:
582 found = True 671 found = True
583 break 672 break
584 if not found: 673 if not found:
585 # Nothing to do 674 # Nothing to do
586 return 0 675 return 0
587 676
677 bb.parse.siggen.setup_datacache(self.dataCaches)
678
588 self.init_progress_reporter.start() 679 self.init_progress_reporter.start()
589 self.init_progress_reporter.next_stage() 680 self.init_progress_reporter.next_stage()
681 bb.event.check_for_interrupts(self.cooker.data)
590 682
591 # Step A - Work out a list of tasks to run 683 # Step A - Work out a list of tasks to run
592 # 684 #
@@ -632,9 +724,13 @@ class RunQueueData:
632 frommc = mcdependency[1] 724 frommc = mcdependency[1]
633 mcdep = mcdependency[2] 725 mcdep = mcdependency[2]
634 deptask = mcdependency[4] 726 deptask = mcdependency[4]
727 if mcdep not in taskData:
728 bb.fatal("Multiconfig '%s' is referenced in multiconfig dependency '%s' but not enabled in BBMULTICONFIG?" % (mcdep, dep))
635 if mc == frommc: 729 if mc == frommc:
636 fn = taskData[mcdep].build_targets[pn][0] 730 fn = taskData[mcdep].build_targets[pn][0]
637 newdep = '%s:%s' % (fn,deptask) 731 newdep = '%s:%s' % (fn,deptask)
732 if newdep not in taskData[mcdep].taskentries:
733 bb.fatal("Task mcdepends on non-existent task %s" % (newdep))
638 taskData[mc].taskentries[tid].tdepends.append(newdep) 734 taskData[mc].taskentries[tid].tdepends.append(newdep)
639 735
640 for mc in taskData: 736 for mc in taskData:
@@ -733,6 +829,7 @@ class RunQueueData:
733 #self.dump_data() 829 #self.dump_data()
734 830
735 self.init_progress_reporter.next_stage() 831 self.init_progress_reporter.next_stage()
832 bb.event.check_for_interrupts(self.cooker.data)
736 833
737 # Resolve recursive 'recrdeptask' dependencies (Part B) 834 # Resolve recursive 'recrdeptask' dependencies (Part B)
738 # 835 #
@@ -762,7 +859,7 @@ class RunQueueData:
762 # Find the dependency chain endpoints 859 # Find the dependency chain endpoints
763 endpoints = set() 860 endpoints = set()
764 for tid in self.runtaskentries: 861 for tid in self.runtaskentries:
765 if len(deps[tid]) == 0: 862 if not deps[tid]:
766 endpoints.add(tid) 863 endpoints.add(tid)
767 # Iterate the chains collating dependencies 864 # Iterate the chains collating dependencies
768 while endpoints: 865 while endpoints:
@@ -773,11 +870,11 @@ class RunQueueData:
773 cumulativedeps[dep].update(cumulativedeps[tid]) 870 cumulativedeps[dep].update(cumulativedeps[tid])
774 if tid in deps[dep]: 871 if tid in deps[dep]:
775 deps[dep].remove(tid) 872 deps[dep].remove(tid)
776 if len(deps[dep]) == 0: 873 if not deps[dep]:
777 next.add(dep) 874 next.add(dep)
778 endpoints = next 875 endpoints = next
779 #for tid in deps: 876 #for tid in deps:
780 # if len(deps[tid]) != 0: 877 # if deps[tid]:
781 # bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid])) 878 # bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid]))
782 879
783 # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to 880 # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to
@@ -829,6 +926,7 @@ class RunQueueData:
829 self.runtaskentries[tid].depends.difference_update(recursivetasksselfref) 926 self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
830 927
831 self.init_progress_reporter.next_stage() 928 self.init_progress_reporter.next_stage()
929 bb.event.check_for_interrupts(self.cooker.data)
832 930
833 #self.dump_data() 931 #self.dump_data()
834 932
@@ -867,7 +965,7 @@ class RunQueueData:
867 bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname) 965 bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
868 else: 966 else:
869 logger.verbose("Invalidate task %s, %s", taskname, fn) 967 logger.verbose("Invalidate task %s, %s", taskname, fn)
870 bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], taskfn) 968 bb.parse.siggen.invalidate_task(taskname, taskfn)
871 969
872 self.target_tids = [] 970 self.target_tids = []
873 for (mc, target, task, fn) in self.targets: 971 for (mc, target, task, fn) in self.targets:
@@ -910,47 +1008,54 @@ class RunQueueData:
910 mark_active(tid, 1) 1008 mark_active(tid, 1)
911 1009
912 self.init_progress_reporter.next_stage() 1010 self.init_progress_reporter.next_stage()
1011 bb.event.check_for_interrupts(self.cooker.data)
913 1012
914 # Step C - Prune all inactive tasks 1013 # Step C - Prune all inactive tasks
915 # 1014 #
916 # Once all active tasks are marked, prune the ones we don't need. 1015 # Once all active tasks are marked, prune the ones we don't need.
917 1016
918 delcount = {}
919 for tid in list(self.runtaskentries.keys()):
920 if tid not in runq_build:
921 delcount[tid] = self.runtaskentries[tid]
922 del self.runtaskentries[tid]
923
924 # Handle --runall 1017 # Handle --runall
925 if self.cooker.configuration.runall: 1018 if self.cooker.configuration.runall:
926 # re-run the mark_active and then drop unused tasks from new list 1019 # re-run the mark_active and then drop unused tasks from new list
927 runq_build = {}
928 1020
929 for task in self.cooker.configuration.runall: 1021 runall_tids = set()
930 if not task.startswith("do_"): 1022 added = True
931 task = "do_{0}".format(task) 1023 while added:
932 runall_tids = set() 1024 reduced_tasklist = set(self.runtaskentries.keys())
933 for tid in list(self.runtaskentries): 1025 for tid in list(self.runtaskentries.keys()):
934 wanttid = "{0}:{1}".format(fn_from_tid(tid), task) 1026 if tid not in runq_build:
935 if wanttid in delcount: 1027 reduced_tasklist.remove(tid)
936 self.runtaskentries[wanttid] = delcount[wanttid] 1028 runq_build = {}
937 if wanttid in self.runtaskentries:
938 runall_tids.add(wanttid)
939
940 for tid in list(runall_tids):
941 mark_active(tid,1)
942 if self.cooker.configuration.force:
943 invalidate_task(tid, False)
944 1029
945 for tid in list(self.runtaskentries.keys()): 1030 orig = runall_tids
946 if tid not in runq_build: 1031 runall_tids = set()
947 delcount[tid] = self.runtaskentries[tid] 1032 for task in self.cooker.configuration.runall:
948 del self.runtaskentries[tid] 1033 if not task.startswith("do_"):
1034 task = "do_{0}".format(task)
1035 for tid in reduced_tasklist:
1036 wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
1037 if wanttid in self.runtaskentries:
1038 runall_tids.add(wanttid)
1039
1040 for tid in list(runall_tids):
1041 mark_active(tid, 1)
1042 self.target_tids.append(tid)
1043 if self.cooker.configuration.force:
1044 invalidate_task(tid, False)
1045 added = runall_tids - orig
1046
1047 delcount = set()
1048 for tid in list(self.runtaskentries.keys()):
1049 if tid not in runq_build:
1050 delcount.add(tid)
1051 del self.runtaskentries[tid]
949 1052
950 if len(self.runtaskentries) == 0: 1053 if self.cooker.configuration.runall:
1054 if not self.runtaskentries:
951 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets))) 1055 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
952 1056
953 self.init_progress_reporter.next_stage() 1057 self.init_progress_reporter.next_stage()
1058 bb.event.check_for_interrupts(self.cooker.data)
954 1059
955 # Handle runonly 1060 # Handle runonly
956 if self.cooker.configuration.runonly: 1061 if self.cooker.configuration.runonly:
@@ -960,19 +1065,19 @@ class RunQueueData:
960 for task in self.cooker.configuration.runonly: 1065 for task in self.cooker.configuration.runonly:
961 if not task.startswith("do_"): 1066 if not task.startswith("do_"):
962 task = "do_{0}".format(task) 1067 task = "do_{0}".format(task)
963 runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == task } 1068 runonly_tids = [k for k in self.runtaskentries.keys() if taskname_from_tid(k) == task]
964 1069
965 for tid in list(runonly_tids): 1070 for tid in runonly_tids:
966 mark_active(tid,1) 1071 mark_active(tid, 1)
967 if self.cooker.configuration.force: 1072 if self.cooker.configuration.force:
968 invalidate_task(tid, False) 1073 invalidate_task(tid, False)
969 1074
970 for tid in list(self.runtaskentries.keys()): 1075 for tid in list(self.runtaskentries.keys()):
971 if tid not in runq_build: 1076 if tid not in runq_build:
972 delcount[tid] = self.runtaskentries[tid] 1077 delcount.add(tid)
973 del self.runtaskentries[tid] 1078 del self.runtaskentries[tid]
974 1079
975 if len(self.runtaskentries) == 0: 1080 if not self.runtaskentries:
976 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets))) 1081 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets)))
977 1082
978 # 1083 #
@@ -980,8 +1085,8 @@ class RunQueueData:
980 # 1085 #
981 1086
982 # Check to make sure we still have tasks to run 1087 # Check to make sure we still have tasks to run
983 if len(self.runtaskentries) == 0: 1088 if not self.runtaskentries:
984 if not taskData[''].abort: 1089 if not taskData[''].halt:
985 bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.") 1090 bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
986 else: 1091 else:
987 bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.") 1092 bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
@@ -991,6 +1096,7 @@ class RunQueueData:
991 logger.verbose("Assign Weightings") 1096 logger.verbose("Assign Weightings")
992 1097
993 self.init_progress_reporter.next_stage() 1098 self.init_progress_reporter.next_stage()
1099 bb.event.check_for_interrupts(self.cooker.data)
994 1100
995 # Generate a list of reverse dependencies to ease future calculations 1101 # Generate a list of reverse dependencies to ease future calculations
996 for tid in self.runtaskentries: 1102 for tid in self.runtaskentries:
@@ -998,13 +1104,14 @@ class RunQueueData:
998 self.runtaskentries[dep].revdeps.add(tid) 1104 self.runtaskentries[dep].revdeps.add(tid)
999 1105
1000 self.init_progress_reporter.next_stage() 1106 self.init_progress_reporter.next_stage()
1107 bb.event.check_for_interrupts(self.cooker.data)
1001 1108
1002 # Identify tasks at the end of dependency chains 1109 # Identify tasks at the end of dependency chains
1003 # Error on circular dependency loops (length two) 1110 # Error on circular dependency loops (length two)
1004 endpoints = [] 1111 endpoints = []
1005 for tid in self.runtaskentries: 1112 for tid in self.runtaskentries:
1006 revdeps = self.runtaskentries[tid].revdeps 1113 revdeps = self.runtaskentries[tid].revdeps
1007 if len(revdeps) == 0: 1114 if not revdeps:
1008 endpoints.append(tid) 1115 endpoints.append(tid)
1009 for dep in revdeps: 1116 for dep in revdeps:
1010 if dep in self.runtaskentries[tid].depends: 1117 if dep in self.runtaskentries[tid].depends:
@@ -1014,12 +1121,14 @@ class RunQueueData:
1014 logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints)) 1121 logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
1015 1122
1016 self.init_progress_reporter.next_stage() 1123 self.init_progress_reporter.next_stage()
1124 bb.event.check_for_interrupts(self.cooker.data)
1017 1125
1018 # Calculate task weights 1126 # Calculate task weights
1019 # Check for higher-length circular dependencies 1127
1020 self.runq_weight = self.calculate_task_weights(endpoints) 1128 self.runq_weight = self.calculate_task_weights(endpoints)
1021 1129
1022 self.init_progress_reporter.next_stage() 1130 self.init_progress_reporter.next_stage()
1131 bb.event.check_for_interrupts(self.cooker.data)
1023 1132
1024 # Sanity Check - Check for multiple tasks building the same provider 1133 # Sanity Check - Check for multiple tasks building the same provider
1025 for mc in self.dataCaches: 1134 for mc in self.dataCaches:
@@ -1040,7 +1149,7 @@ class RunQueueData:
1040 for prov in prov_list: 1149 for prov in prov_list:
1041 if len(prov_list[prov]) < 2: 1150 if len(prov_list[prov]) < 2:
1042 continue 1151 continue
1043 if prov in self.multi_provider_whitelist: 1152 if prov in self.multi_provider_allowed:
1044 continue 1153 continue
1045 seen_pn = [] 1154 seen_pn = []
1046 # If two versions of the same PN are being built its fatal, we don't support it. 1155 # If two versions of the same PN are being built its fatal, we don't support it.
@@ -1050,12 +1159,12 @@ class RunQueueData:
1050 seen_pn.append(pn) 1159 seen_pn.append(pn)
1051 else: 1160 else:
1052 bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn)) 1161 bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
1053 msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov])) 1162 msgs = ["Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))]
1054 # 1163 #
1055 # Construct a list of things which uniquely depend on each provider 1164 # Construct a list of things which uniquely depend on each provider
1056 # since this may help the user figure out which dependency is triggering this warning 1165 # since this may help the user figure out which dependency is triggering this warning
1057 # 1166 #
1058 msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from." 1167 msgs.append("\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from.")
1059 deplist = {} 1168 deplist = {}
1060 commondeps = None 1169 commondeps = None
1061 for provfn in prov_list[prov]: 1170 for provfn in prov_list[prov]:
@@ -1075,12 +1184,12 @@ class RunQueueData:
1075 commondeps &= deps 1184 commondeps &= deps
1076 deplist[provfn] = deps 1185 deplist[provfn] = deps
1077 for provfn in deplist: 1186 for provfn in deplist:
1078 msg += "\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps)) 1187 msgs.append("\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps)))
1079 # 1188 #
1080 # Construct a list of provides and runtime providers for each recipe 1189 # Construct a list of provides and runtime providers for each recipe
1081 # (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC) 1190 # (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC)
1082 # 1191 #
1083 msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful." 1192 msgs.append("\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful.")
1084 provide_results = {} 1193 provide_results = {}
1085 rprovide_results = {} 1194 rprovide_results = {}
1086 commonprovs = None 1195 commonprovs = None
@@ -1107,30 +1216,20 @@ class RunQueueData:
1107 else: 1216 else:
1108 commonrprovs &= rprovides 1217 commonrprovs &= rprovides
1109 rprovide_results[provfn] = rprovides 1218 rprovide_results[provfn] = rprovides
1110 #msg += "\nCommon provides:\n %s" % ("\n ".join(commonprovs)) 1219 #msgs.append("\nCommon provides:\n %s" % ("\n ".join(commonprovs)))
1111 #msg += "\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs)) 1220 #msgs.append("\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs)))
1112 for provfn in prov_list[prov]: 1221 for provfn in prov_list[prov]:
1113 msg += "\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs)) 1222 msgs.append("\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs)))
1114 msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)) 1223 msgs.append("\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)))
1115 1224
1116 if self.warn_multi_bb: 1225 if self.warn_multi_bb:
1117 logger.verbnote(msg) 1226 logger.verbnote("".join(msgs))
1118 else: 1227 else:
1119 logger.error(msg) 1228 logger.error("".join(msgs))
1120 1229
1121 self.init_progress_reporter.next_stage() 1230 self.init_progress_reporter.next_stage()
1122
1123 # Create a whitelist usable by the stamp checks
1124 self.stampfnwhitelist = {}
1125 for mc in self.taskData:
1126 self.stampfnwhitelist[mc] = []
1127 for entry in self.stampwhitelist.split():
1128 if entry not in self.taskData[mc].build_targets:
1129 continue
1130 fn = self.taskData.build_targets[entry][0]
1131 self.stampfnwhitelist[mc].append(fn)
1132
1133 self.init_progress_reporter.next_stage() 1231 self.init_progress_reporter.next_stage()
1232 bb.event.check_for_interrupts(self.cooker.data)
1134 1233
1135 # Iterate over the task list looking for tasks with a 'setscene' function 1234 # Iterate over the task list looking for tasks with a 'setscene' function
1136 self.runq_setscene_tids = set() 1235 self.runq_setscene_tids = set()
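The msg string is replaced with a msgs list that is emitted once with "".join(msgs): appending fragments to a list and joining at the end avoids the repeated reallocation that += on a growing string causes in CPython, which matters when the multi-provider report gets large. A small illustration of the pattern, with made-up provider data:

    # Accumulate report fragments, then join exactly once at the end.
    prov_list = {"virtual/kernel": ["linux-a.bb", "linux-b.bb"]}
    msgs = []
    for prov, recipes in prov_list.items():
        msgs.append("Multiple .bb files are due to be built which each provide %s:\n  %s"
                    % (prov, "\n  ".join(recipes)))
        msgs.append("\nA list of tasks depending on these providers may help explain "
                    "where the dependency comes from.")
    print("".join(msgs))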
@@ -1143,6 +1242,7 @@ class RunQueueData:
1143 self.runq_setscene_tids.add(tid) 1242 self.runq_setscene_tids.add(tid)
1144 1243
1145 self.init_progress_reporter.next_stage() 1244 self.init_progress_reporter.next_stage()
1245 bb.event.check_for_interrupts(self.cooker.data)
1146 1246
1147 # Invalidate task if force mode active 1247 # Invalidate task if force mode active
1148 if self.cooker.configuration.force: 1248 if self.cooker.configuration.force:
@@ -1159,6 +1259,7 @@ class RunQueueData:
1159 invalidate_task(fn + ":" + st, True) 1259 invalidate_task(fn + ":" + st, True)
1160 1260
1161 self.init_progress_reporter.next_stage() 1261 self.init_progress_reporter.next_stage()
1262 bb.event.check_for_interrupts(self.cooker.data)
1162 1263
1163 # Create and print to the logs a virtual/xxxx -> PN (fn) table 1264 # Create and print to the logs a virtual/xxxx -> PN (fn) table
1164 for mc in taskData: 1265 for mc in taskData:
@@ -1171,30 +1272,45 @@ class RunQueueData:
1171 bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc]) 1272 bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])
1172 1273
1173 self.init_progress_reporter.next_stage() 1274 self.init_progress_reporter.next_stage()
1275 bb.event.check_for_interrupts(self.cooker.data)
1174 1276
1175 bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) 1277 bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
1176 1278
1279 starttime = time.time()
1280 lasttime = starttime
1281
1177 # Iterate over the task list and call into the siggen code 1282 # Iterate over the task list and call into the siggen code
1178 dealtwith = set() 1283 dealtwith = set()
1179 todeal = set(self.runtaskentries) 1284 todeal = set(self.runtaskentries)
1180 while len(todeal) > 0: 1285 while todeal:
1286 ready = set()
1181 for tid in todeal.copy(): 1287 for tid in todeal.copy():
1182 if len(self.runtaskentries[tid].depends - dealtwith) == 0: 1288 if not (self.runtaskentries[tid].depends - dealtwith):
1183 dealtwith.add(tid) 1289 self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1184 todeal.remove(tid) 1290 # get_taskhash for a given tid *must* be called before get_unihash* below
1185 self.prepare_task_hash(tid) 1291 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1292 ready.add(tid)
1293 unihashes = bb.parse.siggen.get_unihashes(ready)
1294 for tid in ready:
1295 dealtwith.add(tid)
1296 todeal.remove(tid)
1297 self.runtaskentries[tid].unihash = unihashes[tid]
1298
1299 bb.event.check_for_interrupts(self.cooker.data)
1300
1301 if time.time() > (lasttime + 30):
1302 lasttime = time.time()
1303 hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime))
1304
1305 endtime = time.time()
1306 if (endtime-starttime > 60):
1307 hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime))
1186 1308
1187 bb.parse.siggen.writeout_file_checksum_cache() 1309 bb.parse.siggen.writeout_file_checksum_cache()
1188 1310
1189 #self.dump_data() 1311 #self.dump_data()
1190 return len(self.runtaskentries) 1312 return len(self.runtaskentries)
1191 1313
1192 def prepare_task_hash(self, tid):
1193 dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid))
1194 bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc)
1195 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc)
1196 self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
1197
1198 def dump_data(self): 1314 def dump_data(self):
1199 """ 1315 """
1200 Dump some debug information on the internal data structures 1316 Dump some debug information on the internal data structures
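The rewritten setup loop drops the per-task prepare_task_hash() helper in favour of wave scheduling: each pass gathers every task whose dependencies have already been handled into a ready set, computes their task hashes, and then resolves all of their unihashes in a single batched get_unihashes() call, which saves round trips when a remote hash-equivalence server is in use. A stripped-down sketch of the same dependency-wave loop, with a made-up task table and a stand-in for the batched server call:

    # Tasks and their dependencies (illustrative).
    depends = {"a": set(), "b": {"a"}, "c": {"a"}, "d": {"b", "c"}}

    def get_unihashes(tids):
        # Stand-in for one batched round trip to a hash server.
        return {tid: "unihash-" + tid for tid in tids}

    dealtwith, todeal, unihash = set(), set(depends), {}
    while todeal:
        # Everything whose dependencies are all handled runs in this wave.
        ready = {tid for tid in todeal if not (depends[tid] - dealtwith)}
        results = get_unihashes(ready)
        for tid in ready:
            dealtwith.add(tid)
            todeal.remove(tid)
            unihash[tid] = results[tid]
    print(unihash)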
@@ -1218,7 +1334,6 @@ class RunQueue:
1218 self.cfgData = cfgData 1334 self.cfgData = cfgData
1219 self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets) 1335 self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)
1220 1336
1221 self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile"
1222 self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None 1337 self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
1223 self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None 1338 self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None
1224 1339
@@ -1237,30 +1352,40 @@ class RunQueue:
1237 self.worker = {} 1352 self.worker = {}
1238 self.fakeworker = {} 1353 self.fakeworker = {}
1239 1354
1355 @staticmethod
1356 def send_pickled_data(worker, data, name):
1357 msg = bytearray()
1358 msg.extend(b"<" + name.encode() + b">")
1359 pickled_data = pickle.dumps(data)
1360 msg.extend(len(pickled_data).to_bytes(4, 'big'))
1361 msg.extend(pickled_data)
1362 msg.extend(b"</" + name.encode() + b">")
1363 worker.stdin.write(msg)
1364
1240 def _start_worker(self, mc, fakeroot = False, rqexec = None): 1365 def _start_worker(self, mc, fakeroot = False, rqexec = None):
1241 logger.debug("Starting bitbake-worker") 1366 logger.debug("Starting bitbake-worker")
1242 magic = "decafbad" 1367 magic = "decafbad"
1243 if self.cooker.configuration.profile: 1368 if self.cooker.configuration.profile:
1244 magic = "decafbadbad" 1369 magic = "decafbadbad"
1370 fakerootlogs = None
1371
1372 workerscript = os.path.realpath(os.path.dirname(__file__) + "/../../bin/bitbake-worker")
1245 if fakeroot: 1373 if fakeroot:
1246 magic = magic + "beef" 1374 magic = magic + "beef"
1247 mcdata = self.cooker.databuilder.mcdata[mc] 1375 mcdata = self.cooker.databuilder.mcdata[mc]
1248 fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD")) 1376 fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD"))
1249 fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split() 1377 fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
1250 env = os.environ.copy() 1378 env = os.environ.copy()
1251 for key, value in (var.split('=') for var in fakerootenv): 1379 for key, value in (var.split('=',1) for var in fakerootenv):
1252 env[key] = value 1380 env[key] = value
1253 worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env) 1381 worker = subprocess.Popen(fakerootcmd + [sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
1382 fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs
1254 else: 1383 else:
1255 worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE) 1384 worker = subprocess.Popen([sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
1256 bb.utils.nonblockingfd(worker.stdout) 1385 bb.utils.nonblockingfd(worker.stdout)
1257 workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec) 1386 workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)
1258 1387
1259 workerdata = { 1388 workerdata = {
1260 "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
1261 "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
1262 "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
1263 "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
1264 "sigdata" : bb.parse.siggen.get_taskdata(), 1389 "sigdata" : bb.parse.siggen.get_taskdata(),
1265 "logdefaultlevel" : bb.msg.loggerDefaultLogLevel, 1390 "logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
1266 "build_verbose_shell" : self.cooker.configuration.build_verbose_shell, 1391 "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
@@ -1274,9 +1399,9 @@ class RunQueue:
1274 "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"), 1399 "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"),
1275 } 1400 }
1276 1401
1277 worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>") 1402 RunQueue.send_pickled_data(worker, self.cooker.configuration, "cookerconfig")
1278 worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>") 1403 RunQueue.send_pickled_data(worker, self.cooker.extraconfigdata, "extraconfigdata")
1279 worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>") 1404 RunQueue.send_pickled_data(worker, workerdata, "workerdata")
1280 worker.stdin.flush() 1405 worker.stdin.flush()
1281 1406
1282 return RunQueueWorker(worker, workerpipe) 1407 return RunQueueWorker(worker, workerpipe)
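send_pickled_data() frames every message to the worker as <name>, a 4-byte big-endian payload length, the pickled payload, then </name>, so the receiving side can read an exact number of bytes rather than scanning the stream for a closing tag. A sketch of a matching read side, assuming a blocking binary stream; this helper is illustrative, not the bitbake-worker implementation:

    import io
    import pickle

    def read_pickled_data(stream):
        # Read "<name>", then the 4-byte length, the payload, and "</name>".
        header = b""
        while not header.endswith(b">"):
            header += stream.read(1)
        name = header[1:-1].decode()
        size = int.from_bytes(stream.read(4), "big")
        payload = pickle.loads(stream.read(size))
        assert stream.read(len(name) + 3) == b"</" + name.encode() + b">"
        return name, payload

    # Round-trip using the same framing as send_pickled_data():
    buf = io.BytesIO()
    data = pickle.dumps({"magic": "decafbad"})
    buf.write(b"<workerdata>" + len(data).to_bytes(4, "big") + data + b"</workerdata>")
    buf.seek(0)
    print(read_pickled_data(buf))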
@@ -1286,7 +1411,7 @@ class RunQueue:
1286 return 1411 return
1287 logger.debug("Teardown for bitbake-worker") 1412 logger.debug("Teardown for bitbake-worker")
1288 try: 1413 try:
1289 worker.process.stdin.write(b"<quit></quit>") 1414 RunQueue.send_pickled_data(worker.process, b"", "quit")
1290 worker.process.stdin.flush() 1415 worker.process.stdin.flush()
1291 worker.process.stdin.close() 1416 worker.process.stdin.close()
1292 except IOError: 1417 except IOError:
@@ -1298,12 +1423,12 @@ class RunQueue:
1298 continue 1423 continue
1299 worker.pipe.close() 1424 worker.pipe.close()
1300 1425
1301 def start_worker(self): 1426 def start_worker(self, rqexec):
1302 if self.worker: 1427 if self.worker:
1303 self.teardown_workers() 1428 self.teardown_workers()
1304 self.teardown = False 1429 self.teardown = False
1305 for mc in self.rqdata.dataCaches: 1430 for mc in self.rqdata.dataCaches:
1306 self.worker[mc] = self._start_worker(mc) 1431 self.worker[mc] = self._start_worker(mc, False, rqexec)
1307 1432
1308 def start_fakeworker(self, rqexec, mc): 1433 def start_fakeworker(self, rqexec, mc):
1309 if not mc in self.fakeworker: 1434 if not mc in self.fakeworker:
@@ -1345,15 +1470,7 @@ class RunQueue:
1345 if taskname is None: 1470 if taskname is None:
1346 taskname = tn 1471 taskname = tn
1347 1472
1348 if self.stamppolicy == "perfile": 1473 stampfile = bb.parse.siggen.stampfile_mcfn(taskname, taskfn)
1349 fulldeptree = False
1350 else:
1351 fulldeptree = True
1352 stampwhitelist = []
1353 if self.stamppolicy == "whitelist":
1354 stampwhitelist = self.rqdata.stampfnwhitelist[mc]
1355
1356 stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
1357 1474
1358 # If the stamp is missing, it's not current 1475 # If the stamp is missing, it's not current
1359 if not os.access(stampfile, os.F_OK): 1476 if not os.access(stampfile, os.F_OK):
@@ -1365,7 +1482,7 @@ class RunQueue:
1365 logger.debug2("%s.%s is nostamp\n", fn, taskname) 1482 logger.debug2("%s.%s is nostamp\n", fn, taskname)
1366 return False 1483 return False
1367 1484
1368 if taskname != "do_setscene" and taskname.endswith("_setscene"): 1485 if taskname.endswith("_setscene"):
1369 return True 1486 return True
1370 1487
1371 if cache is None: 1488 if cache is None:
@@ -1376,15 +1493,15 @@ class RunQueue:
1376 for dep in self.rqdata.runtaskentries[tid].depends: 1493 for dep in self.rqdata.runtaskentries[tid].depends:
1377 if iscurrent: 1494 if iscurrent:
1378 (mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep) 1495 (mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep)
1379 stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCaches[mc2], taskfn2) 1496 stampfile2 = bb.parse.siggen.stampfile_mcfn(taskname2, taskfn2)
1380 stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCaches[mc2], taskfn2) 1497 stampfile3 = bb.parse.siggen.stampfile_mcfn(taskname2 + "_setscene", taskfn2)
1381 t2 = get_timestamp(stampfile2) 1498 t2 = get_timestamp(stampfile2)
1382 t3 = get_timestamp(stampfile3) 1499 t3 = get_timestamp(stampfile3)
1383 if t3 and not t2: 1500 if t3 and not t2:
1384 continue 1501 continue
1385 if t3 and t3 > t2: 1502 if t3 and t3 > t2:
1386 continue 1503 continue
1387 if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): 1504 if fn == fn2:
1388 if not t2: 1505 if not t2:
1389 logger.debug2('Stampfile %s does not exist', stampfile2) 1506 logger.debug2('Stampfile %s does not exist', stampfile2)
1390 iscurrent = False 1507 iscurrent = False
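The stamp check above treats a task as current only when its stamp is at least as new as every dependency's stamp, with one twist: a dependency whose _setscene stamp exists and is newer than its normal stamp is trusted and skipped. A condensed sketch of the same timestamp rule over plain files; the paths and helper names are illustrative:

    import os

    def get_timestamp(path):
        try:
            return os.stat(path).st_mtime
        except OSError:
            return None

    def dep_is_ok(task_stamp, dep_stamp, dep_setscene_stamp):
        t1 = get_timestamp(task_stamp)
        t2 = get_timestamp(dep_stamp)
        t3 = get_timestamp(dep_setscene_stamp)
        if t3 and not t2:
            return True   # a setscene stamp alone covers the dependency
        if t3 and t2 and t3 > t2:
            return True   # a newer setscene stamp wins
        return bool(t1 and t2 and t2 <= t1)  # otherwise dep must predate the task

    print(dep_is_ok("/tmp/none.do_compile", "/tmp/none.do_fetch",
                    "/tmp/none.do_fetch_setscene"))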
@@ -1434,10 +1551,11 @@ class RunQueue:
1434 """ 1551 """
1435 Run the tasks in a queue prepared by rqdata.prepare() 1552 Run the tasks in a queue prepared by rqdata.prepare()
1436 Upon failure, optionally try to recover the build using any alternate providers 1553 Upon failure, optionally try to recover the build using any alternate providers
1437 (if the abort on failure configuration option isn't set) 1554 (if the halt on failure configuration option isn't set)
1438 """ 1555 """
1439 1556
1440 retval = True 1557 retval = True
1558 bb.event.check_for_interrupts(self.cooker.data)
1441 1559
1442 if self.state is runQueuePrepare: 1560 if self.state is runQueuePrepare:
1443 # NOTE: if you add, remove or significantly refactor the stages of this 1561 # NOTE: if you add, remove or significantly refactor the stages of this
@@ -1466,31 +1584,37 @@ class RunQueue:
1466 1584
1467 if not self.dm_event_handler_registered: 1585 if not self.dm_event_handler_registered:
1468 res = bb.event.register(self.dm_event_handler_name, 1586 res = bb.event.register(self.dm_event_handler_name,
1469 lambda x: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False, 1587 lambda x, y: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
1470 ('bb.event.HeartbeatEvent',), data=self.cfgData) 1588 ('bb.event.HeartbeatEvent',), data=self.cfgData)
1471 self.dm_event_handler_registered = True 1589 self.dm_event_handler_registered = True
1472 1590
1473 dump = self.cooker.configuration.dump_signatures 1591 self.rqdata.init_progress_reporter.next_stage()
1474 if dump: 1592 self.rqexe = RunQueueExecute(self)
1593
1594 dumpsigs = self.cooker.configuration.dump_signatures
1595 if dumpsigs:
1475 self.rqdata.init_progress_reporter.finish() 1596 self.rqdata.init_progress_reporter.finish()
1476 if 'printdiff' in dump: 1597 if 'printdiff' in dumpsigs:
1477 invalidtasks = self.print_diffscenetasks() 1598 self.invalidtasks_dump = self.print_diffscenetasks()
1478 self.dump_signatures(dump) 1599 self.state = runQueueDumpSigs
1479 if 'printdiff' in dump: 1600
1480 self.write_diffscenetasks(invalidtasks) 1601 if self.state is runQueueDumpSigs:
1602 dumpsigs = self.cooker.configuration.dump_signatures
1603 retval = self.dump_signatures(dumpsigs)
1604 if retval is False:
1605 if 'printdiff' in dumpsigs:
1606 self.write_diffscenetasks(self.invalidtasks_dump)
1481 self.state = runQueueComplete 1607 self.state = runQueueComplete
1482 1608
1483 if self.state is runQueueSceneInit: 1609 if self.state is runQueueSceneInit:
1484 self.rqdata.init_progress_reporter.next_stage() 1610 self.start_worker(self.rqexe)
1485 self.start_worker() 1611 self.rqdata.init_progress_reporter.finish()
1486 self.rqdata.init_progress_reporter.next_stage()
1487 self.rqexe = RunQueueExecute(self)
1488 1612
1489 # If we don't have any setscene functions, skip execution 1613 # If we don't have any setscene functions, skip execution
1490 if len(self.rqdata.runq_setscene_tids) == 0: 1614 if not self.rqdata.runq_setscene_tids:
1491 logger.info('No setscene tasks') 1615 logger.info('No setscene tasks')
1492 for tid in self.rqdata.runtaskentries: 1616 for tid in self.rqdata.runtaskentries:
1493 if len(self.rqdata.runtaskentries[tid].depends) == 0: 1617 if not self.rqdata.runtaskentries[tid].depends:
1494 self.rqexe.setbuildable(tid) 1618 self.rqexe.setbuildable(tid)
1495 self.rqexe.tasks_notcovered.add(tid) 1619 self.rqexe.tasks_notcovered.add(tid)
1496 self.rqexe.sqdone = True 1620 self.rqexe.sqdone = True
@@ -1563,43 +1687,62 @@ class RunQueue:
1563 else: 1687 else:
1564 self.rqexe.finish() 1688 self.rqexe.finish()
1565 1689
1566 def rq_dump_sigfn(self, fn, options): 1690 def _rq_dump_sigtid(self, tids):
1567 bb_cache = bb.cache.NoCache(self.cooker.databuilder) 1691 for tid in tids:
1568 mc = bb.runqueue.mc_from_tid(fn) 1692 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
1569 the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn)) 1693 dataCaches = self.rqdata.dataCaches
1570 siggen = bb.parse.siggen 1694 bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)
1571 dataCaches = self.rqdata.dataCaches
1572 siggen.dump_sigfn(fn, dataCaches, options)
1573 1695
1574 def dump_signatures(self, options): 1696 def dump_signatures(self, options):
1575 fns = set() 1697 if not hasattr(self, "dumpsigs_launched"):
1576 bb.note("Reparsing files to collect dependency data") 1698 if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
1699 bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")
1577 1700
1578 for tid in self.rqdata.runtaskentries: 1701 bb.note("Writing task signature files")
1579 fn = fn_from_tid(tid) 1702
1580 fns.add(fn) 1703 max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
1581 1704 def chunkify(l, n):
1582 max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) 1705 return [l[i::n] for i in range(n)]
1583 # We cannot use the real multiprocessing.Pool easily due to some local data 1706 dumpsigs_tids = chunkify(list(self.rqdata.runtaskentries), max_process)
1584 # that can't be pickled. This is a cheap multi-process solution. 1707
1585 launched = [] 1708 # We cannot use the real multiprocessing.Pool easily due to some local data
1586 while fns: 1709 # that can't be pickled. This is a cheap multi-process solution.
1587 if len(launched) < max_process: 1710 self.dumpsigs_launched = []
1588 p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options)) 1711
1712 for tids in dumpsigs_tids:
1713 p = Process(target=self._rq_dump_sigtid, args=(tids, ))
1589 p.start() 1714 p.start()
1590 launched.append(p) 1715 self.dumpsigs_launched.append(p)
1591 for q in launched: 1716
1592 # The finished processes are joined when calling is_alive() 1717 return 1.0
1593 if not q.is_alive(): 1718
1594 launched.remove(q) 1719 for q in self.dumpsigs_launched:
1595 for p in launched: 1720 # The finished processes are joined when calling is_alive()
1721 if not q.is_alive():
1722 self.dumpsigs_launched.remove(q)
1723
1724 if self.dumpsigs_launched:
1725 return 1.0
1726
1727 for p in self.dumpsigs_launched:
1596 p.join() 1728 p.join()
1597 1729
1598 bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options) 1730 bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options)
1599 1731
1600 return 1732 return False
1601 1733
1602 def print_diffscenetasks(self): 1734 def print_diffscenetasks(self):
1735 def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
1736 invalidtasks = []
1737 for t in taskdepends[task].depends:
1738 if t not in valid and t not in visited_invalid:
1739 invalidtasks.extend(get_root_invalid_tasks(t, taskdepends, valid, noexec, visited_invalid))
1740 visited_invalid.add(t)
1741
1742 direct_invalid = [t for t in taskdepends[task].depends if t not in valid]
1743 if not direct_invalid and task not in noexec:
1744 invalidtasks = [task]
1745 return invalidtasks
1603 1746
1604 noexec = [] 1747 noexec = []
1605 tocheck = set() 1748 tocheck = set()
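The chunkify() helper above, [l[i::n] for i in range(n)], deals the task list out round-robin into n near-equal slices, one per signature-dumping process, so a run of adjacent tasks from one recipe does not all land in the same worker. For example:

    def chunkify(l, n):
        # Slice l round-robin into n pieces: element i goes to piece i % n.
        return [l[i::n] for i in range(n)]

    print(chunkify(list(range(10)), 3))
    # [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]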
@@ -1633,46 +1776,49 @@ class RunQueue:
1633 valid_new.add(dep) 1776 valid_new.add(dep)
1634 1777
1635 invalidtasks = set() 1778 invalidtasks = set()
1636 for tid in self.rqdata.runtaskentries:
1637 if tid not in valid_new and tid not in noexec:
1638 invalidtasks.add(tid)
1639 1779
1640 found = set() 1780 toptasks = set(["{}:{}".format(t[3], t[2]) for t in self.rqdata.targets])
1641 processed = set() 1781 for tid in toptasks:
1642 for tid in invalidtasks:
1643 toprocess = set([tid]) 1782 toprocess = set([tid])
1644 while toprocess: 1783 while toprocess:
1645 next = set() 1784 next = set()
1785 visited_invalid = set()
1646 for t in toprocess: 1786 for t in toprocess:
1647 for dep in self.rqdata.runtaskentries[t].depends: 1787 if t not in valid_new and t not in noexec:
1648 if dep in invalidtasks: 1788 invalidtasks.update(get_root_invalid_tasks(t, self.rqdata.runtaskentries, valid_new, noexec, visited_invalid))
1649 found.add(tid) 1789 continue
1650 if dep not in processed: 1790 if t in self.rqdata.runq_setscene_tids:
1651 processed.add(dep) 1791 for dep in self.rqexe.sqdata.sq_deps[t]:
1652 next.add(dep) 1792 next.add(dep)
1793 continue
1794
1795 for dep in self.rqdata.runtaskentries[t].depends:
1796 next.add(dep)
1797
1653 toprocess = next 1798 toprocess = next
1654 if tid in found:
1655 toprocess = set()
1656 1799
1657 tasklist = [] 1800 tasklist = []
1658 for tid in invalidtasks.difference(found): 1801 for tid in invalidtasks:
1659 tasklist.append(tid) 1802 tasklist.append(tid)
1660 1803
1661 if tasklist: 1804 if tasklist:
1662 bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist)) 1805 bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
1663 1806
1664 return invalidtasks.difference(found) 1807 return invalidtasks
1665 1808
1666 def write_diffscenetasks(self, invalidtasks): 1809 def write_diffscenetasks(self, invalidtasks):
1810 bb.siggen.check_siggen_version(bb.siggen)
1667 1811
1668 # Define recursion callback 1812 # Define recursion callback
1669 def recursecb(key, hash1, hash2): 1813 def recursecb(key, hash1, hash2):
1670 hashes = [hash1, hash2] 1814 hashes = [hash1, hash2]
1815 bb.debug(1, "Recursively looking for recipe {} hashes {}".format(key, hashes))
1671 hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData) 1816 hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
1817 bb.debug(1, "Found hashfiles:\n{}".format(hashfiles))
1672 1818
1673 recout = [] 1819 recout = []
1674 if len(hashfiles) == 2: 1820 if len(hashfiles) == 2:
1675 out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb) 1821 out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
1676 recout.extend(list(' ' + l for l in out2)) 1822 recout.extend(list(' ' + l for l in out2))
1677 else: 1823 else:
1678 recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2)) 1824 recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
@@ -1683,20 +1829,25 @@ class RunQueue:
1683 for tid in invalidtasks: 1829 for tid in invalidtasks:
1684 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 1830 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
1685 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 1831 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
1686 h = self.rqdata.runtaskentries[tid].hash 1832 h = self.rqdata.runtaskentries[tid].unihash
1687 matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData) 1833 bb.debug(1, "Looking for recipe {} task {}".format(pn, taskname))
1834 matches = bb.siggen.find_siginfo(pn, taskname, [], self.cooker.databuilder.mcdata[mc])
1835 bb.debug(1, "Found hashfiles:\n{}".format(matches))
1688 match = None 1836 match = None
1689 for m in matches: 1837 for m in matches.values():
1690 if h in m: 1838 if h in m['path']:
1691 match = m 1839 match = m['path']
1692 if match is None: 1840 if match is None:
1693 bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h) 1841 bb.fatal("Can't find a task we're supposed to have written out? (hash: %s tid: %s)?" % (h, tid))
1694 matches = {k : v for k, v in iter(matches.items()) if h not in k} 1842 matches = {k : v for k, v in iter(matches.items()) if h not in k}
1843 matches_local = {k : v for k, v in iter(matches.items()) if h not in k and not v['sstate']}
1844 if matches_local:
1845 matches = matches_local
1695 if matches: 1846 if matches:
1696 latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1] 1847 latestmatch = matches[sorted(matches.keys(), key=lambda h: matches[h]['time'])[-1]]['path']
1697 prevh = __find_sha256__.search(latestmatch).group(0) 1848 prevh = __find_sha256__.search(latestmatch).group(0)
1698 output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb) 1849 output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
1699 bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output)) 1850 bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, most recent matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
1700 1851
1701 1852
1702 class RunQueueExecute: 1853 class RunQueueExecute:
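find_siginfo() now returns a dict whose values carry 'path', 'time' and 'sstate' fields, and the diff logic above prefers locally written (non-sstate) siginfo files before picking the newest match by 'time' to compare against. A sketch of that selection step, with a made-up result set:

    # Hypothetical find_siginfo()-style results: hash -> metadata.
    matches = {
        "h1": {"path": "/stamps/task.sigdata.h1", "time": 100, "sstate": False},
        "h2": {"path": "/sstate/task.siginfo.h2", "time": 200, "sstate": True},
        "h3": {"path": "/stamps/task.sigdata.h3", "time": 150, "sstate": False},
    }

    # Prefer locally written siginfo over files pulled from sstate...
    local = {k: v for k, v in matches.items() if not v["sstate"]}
    if local:
        matches = local

    # ...then diff against the most recently written match.
    latest = matches[sorted(matches, key=lambda k: matches[k]["time"])[-1]]["path"]
    print(latest)   # /stamps/task.sigdata.h3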
@@ -1709,6 +1860,10 @@ class RunQueueExecute:
1709 1860
1710 self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1) 1861 self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
1711 self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed" 1862 self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
1863 self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
1864 self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
1865 self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
1866 self.max_loadfactor = self.cfgData.getVar("BB_LOADFACTOR_MAX")
1712 1867
1713 self.sq_buildable = set() 1868 self.sq_buildable = set()
1714 self.sq_running = set() 1869 self.sq_running = set()
@@ -1726,6 +1881,8 @@ class RunQueueExecute:
1726 self.build_stamps2 = [] 1881 self.build_stamps2 = []
1727 self.failed_tids = [] 1882 self.failed_tids = []
1728 self.sq_deferred = {} 1883 self.sq_deferred = {}
1884 self.sq_needed_harddeps = set()
1885 self.sq_harddep_deferred = set()
1729 1886
1730 self.stampcache = {} 1887 self.stampcache = {}
1731 1888
@@ -1733,17 +1890,39 @@ class RunQueueExecute:
1733 self.holdoff_need_update = True 1890 self.holdoff_need_update = True
1734 self.sqdone = False 1891 self.sqdone = False
1735 1892
1736 self.stats = RunQueueStats(len(self.rqdata.runtaskentries)) 1893 self.stats = RunQueueStats(len(self.rqdata.runtaskentries), len(self.rqdata.runq_setscene_tids))
1737 self.sq_stats = RunQueueStats(len(self.rqdata.runq_setscene_tids))
1738
1739 for mc in rq.worker:
1740 rq.worker[mc].pipe.setrunqueueexec(self)
1741 for mc in rq.fakeworker:
1742 rq.fakeworker[mc].pipe.setrunqueueexec(self)
1743 1894
1744 if self.number_tasks <= 0: 1895 if self.number_tasks <= 0:
1745 bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks) 1896 bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
1746 1897
1898 lower_limit = 1.0
1899 upper_limit = 1000000.0
1900 if self.max_cpu_pressure:
1901 self.max_cpu_pressure = float(self.max_cpu_pressure)
1902 if self.max_cpu_pressure < lower_limit:
1903 bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
1904 if self.max_cpu_pressure > upper_limit:
1905 bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
1906
1907 if self.max_io_pressure:
1908 self.max_io_pressure = float(self.max_io_pressure)
1909 if self.max_io_pressure < lower_limit:
1910 bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
1911 if self.max_io_pressure > upper_limit:
1912 bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
1913
1914 if self.max_memory_pressure:
1915 self.max_memory_pressure = float(self.max_memory_pressure)
1916 if self.max_memory_pressure < lower_limit:
1917 bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
1918 if self.max_memory_pressure > upper_limit:
1919 bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
1920
1921 if self.max_loadfactor:
1922 self.max_loadfactor = float(self.max_loadfactor)
1923 if self.max_loadfactor <= 0:
1924 bb.fatal("Invalid BB_LOADFACTOR_MAX %s, needs to be greater than zero." % (self.max_loadfactor))
1925
1747 # List of setscene tasks which we've covered 1926 # List of setscene tasks which we've covered
1748 self.scenequeue_covered = set() 1927 self.scenequeue_covered = set()
1749 # List of tasks which are covered (including setscene ones) 1928 # List of tasks which are covered (including setscene ones)
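The new BB_PRESSURE_MAX_CPU/IO/MEMORY knobs are floats validated against a floor of 1.0 above, and at runtime the scheduler weighs them against Linux pressure-stall information before launching more tasks. A minimal sketch of reading the "some" totals from the PSI interface, assuming a Linux kernel with /proc/pressure; this helper is illustrative, not BitBake's:

    def read_psi_total(resource):
        # /proc/pressure/cpu looks like:
        #   some avg10=0.00 avg60=0.00 avg300=0.00 total=12345
        # 'total' is cumulative stall time in microseconds; a scheduler can
        # compare its delta between samples against the configured limit.
        with open("/proc/pressure/%s" % resource) as f:
            for line in f:
                if line.startswith("some"):
                    return int(line.split("total=")[1])
        return 0

    for res in ("cpu", "io", "memory"):
        try:
            print(res, read_psi_total(res))
        except OSError:
            print(res, "PSI not available on this kernel")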
@@ -1753,11 +1932,6 @@ class RunQueueExecute:
1753 self.tasks_notcovered = set() 1932 self.tasks_notcovered = set()
1754 self.scenequeue_notneeded = set() 1933 self.scenequeue_notneeded = set()
1755 1934
1756 # We can't skip specified target tasks which aren't setscene tasks
1757 self.cantskip = set(self.rqdata.target_tids)
1758 self.cantskip.difference_update(self.rqdata.runq_setscene_tids)
1759 self.cantskip.intersection_update(self.rqdata.runtaskentries)
1760
1761 schedulers = self.get_schedulers() 1935 schedulers = self.get_schedulers()
1762 for scheduler in schedulers: 1936 for scheduler in schedulers:
1763 if self.scheduler == scheduler.name: 1937 if self.scheduler == scheduler.name:
@@ -1768,11 +1942,29 @@ class RunQueueExecute:
1768 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % 1942 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
1769 (self.scheduler, ", ".join(obj.name for obj in schedulers))) 1943 (self.scheduler, ", ".join(obj.name for obj in schedulers)))
1770 1944
1771 #if len(self.rqdata.runq_setscene_tids) > 0: 1945 #if self.rqdata.runq_setscene_tids:
1772 self.sqdata = SQData() 1946 self.sqdata = SQData()
1773 build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self) 1947 build_scenequeue_data(self.sqdata, self.rqdata, self)
1948
1949 update_scenequeue_data(self.sqdata.sq_revdeps, self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=True)
1950
1951 # Compute a list of 'stale' sstate tasks where the current hash does not match the one
1952 # in any stamp files. Pass the list out to metadata as an event.
1953 found = {}
1954 for tid in self.rqdata.runq_setscene_tids:
1955 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
1956 stamps = bb.build.find_stale_stamps(taskname, taskfn)
1957 if stamps:
1958 if mc not in found:
1959 found[mc] = {}
1960 found[mc][tid] = stamps
1961 for mc in found:
1962 event = bb.event.StaleSetSceneTasks(found[mc])
1963 bb.event.fire(event, self.cooker.databuilder.mcdata[mc])
1964
1965 self.build_taskdepdata_cache()
1774 1966
1775 def runqueue_process_waitpid(self, task, status): 1967 def runqueue_process_waitpid(self, task, status, fakerootlog=None):
1776 1968
1777 # self.build_stamps[pid] may not exist when use shared work directory. 1969 # self.build_stamps[pid] may not exist when use shared work directory.
1778 if task in self.build_stamps: 1970 if task in self.build_stamps:
@@ -1785,9 +1977,10 @@ class RunQueueExecute:
1785 else: 1977 else:
1786 self.sq_task_complete(task) 1978 self.sq_task_complete(task)
1787 self.sq_live.remove(task) 1979 self.sq_live.remove(task)
1980 self.stats.updateActiveSetscene(len(self.sq_live))
1788 else: 1981 else:
1789 if status != 0: 1982 if status != 0:
1790 self.task_fail(task, status) 1983 self.task_fail(task, status, fakerootlog=fakerootlog)
1791 else: 1984 else:
1792 self.task_complete(task) 1985 self.task_complete(task)
1793 return True 1986 return True
@@ -1795,20 +1988,20 @@ class RunQueueExecute:
1795 def finish_now(self): 1988 def finish_now(self):
1796 for mc in self.rq.worker: 1989 for mc in self.rq.worker:
1797 try: 1990 try:
1798 self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>") 1991 RunQueue.send_pickled_data(self.rq.worker[mc].process, b"", "finishnow")
1799 self.rq.worker[mc].process.stdin.flush() 1992 self.rq.worker[mc].process.stdin.flush()
1800 except IOError: 1993 except IOError:
1801 # worker must have died? 1994 # worker must have died?
1802 pass 1995 pass
1803 for mc in self.rq.fakeworker: 1996 for mc in self.rq.fakeworker:
1804 try: 1997 try:
1805 self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>") 1998 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, b"", "finishnow")
1806 self.rq.fakeworker[mc].process.stdin.flush() 1999 self.rq.fakeworker[mc].process.stdin.flush()
1807 except IOError: 2000 except IOError:
1808 # worker must have died? 2001 # worker must have died?
1809 pass 2002 pass
1810 2003
1811 if len(self.failed_tids) != 0: 2004 if self.failed_tids:
1812 self.rq.state = runQueueFailed 2005 self.rq.state = runQueueFailed
1813 return 2006 return
1814 2007
@@ -1818,13 +2011,13 @@ class RunQueueExecute:
1818 def finish(self): 2011 def finish(self):
1819 self.rq.state = runQueueCleanUp 2012 self.rq.state = runQueueCleanUp
1820 2013
1821 active = self.stats.active + self.sq_stats.active 2014 active = self.stats.active + len(self.sq_live)
1822 if active > 0: 2015 if active > 0:
1823 bb.event.fire(runQueueExitWait(active), self.cfgData) 2016 bb.event.fire(runQueueExitWait(active), self.cfgData)
1824 self.rq.read_workers() 2017 self.rq.read_workers()
1825 return self.rq.active_fds() 2018 return self.rq.active_fds()
1826 2019
1827 if len(self.failed_tids) != 0: 2020 if self.failed_tids:
1828 self.rq.state = runQueueFailed 2021 self.rq.state = runQueueFailed
1829 return True 2022 return True
1830 2023
@@ -1851,7 +2044,7 @@ class RunQueueExecute:
1851 return valid 2044 return valid
1852 2045
1853 def can_start_task(self): 2046 def can_start_task(self):
1854 active = self.stats.active + self.sq_stats.active 2047 active = self.stats.active + len(self.sq_live)
1855 can_start = active < self.number_tasks 2048 can_start = active < self.number_tasks
1856 return can_start 2049 return can_start
1857 2050
@@ -1871,8 +2064,7 @@ class RunQueueExecute:
1871 try: 2064 try:
1872 module = __import__(modname, fromlist=(name,)) 2065 module = __import__(modname, fromlist=(name,))
1873 except ImportError as exc: 2066 except ImportError as exc:
1874 logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) 2067 bb.fatal("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
1875 raise SystemExit(1)
1876 else: 2068 else:
1877 schedulers.add(getattr(module, name)) 2069 schedulers.add(getattr(module, name))
1878 return schedulers 2070 return schedulers
@@ -1902,21 +2094,52 @@ class RunQueueExecute:
1902 self.setbuildable(revdep) 2094 self.setbuildable(revdep)
1903 logger.debug("Marking task %s as buildable", revdep) 2095 logger.debug("Marking task %s as buildable", revdep)
1904 2096
2097 found = None
2098 for t in sorted(self.sq_deferred.copy()):
2099 if self.sq_deferred[t] == task:
2100 # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
2101 # We shouldn't allow all to run at once as it is prone to races.
2102 if not found:
2103 bb.debug(1, "Deferred task %s now buildable" % t)
2104 del self.sq_deferred[t]
2105 update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
2106 found = t
2107 else:
2108 bb.debug(1, "Deferring %s after %s" % (t, found))
2109 self.sq_deferred[t] = found
2110
1905 def task_complete(self, task): 2111 def task_complete(self, task):
1906 self.stats.taskCompleted() 2112 self.stats.taskCompleted()
1907 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData) 2113 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
1908 self.task_completeoutright(task) 2114 self.task_completeoutright(task)
1909 self.runq_tasksrun.add(task) 2115 self.runq_tasksrun.add(task)
1910 2116
1911 def task_fail(self, task, exitcode): 2117 def task_fail(self, task, exitcode, fakerootlog=None):
1912 """ 2118 """
1913 Called when a task has failed 2119 Called when a task has failed
1914 Updates the state engine with the failure 2120 Updates the state engine with the failure
1915 """ 2121 """
1916 self.stats.taskFailed() 2122 self.stats.taskFailed()
1917 self.failed_tids.append(task) 2123 self.failed_tids.append(task)
1918 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData) 2124
1919 if self.rqdata.taskData[''].abort: 2125 fakeroot_log = []
2126 if fakerootlog and os.path.exists(fakerootlog):
2127 with open(fakerootlog) as fakeroot_log_file:
2128 fakeroot_failed = False
2129 for line in reversed(fakeroot_log_file.readlines()):
2130 for fakeroot_error in ['mismatch', 'error', 'fatal']:
2131 if fakeroot_error in line.lower():
2132 fakeroot_failed = True
2133 if 'doing new pid setup and server start' in line:
2134 break
2135 fakeroot_log.append(line)
2136
2137 if not fakeroot_failed:
2138 fakeroot_log = []
2139
2140 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData)
2141
2142 if self.rqdata.taskData[''].halt:
1920 self.rq.state = runQueueCleanUp 2143 self.rq.state = runQueueCleanUp
1921 2144
1922 def task_skip(self, task, reason): 2145 def task_skip(self, task, reason):
@@ -1931,7 +2154,7 @@ class RunQueueExecute:
1931 err = False 2154 err = False
1932 if not self.sqdone: 2155 if not self.sqdone:
1933 logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered))) 2156 logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
1934 completeevent = sceneQueueComplete(self.sq_stats, self.rq) 2157 completeevent = sceneQueueComplete(self.stats, self.rq)
1935 bb.event.fire(completeevent, self.cfgData) 2158 bb.event.fire(completeevent, self.cfgData)
1936 if self.sq_deferred: 2159 if self.sq_deferred:
1937 logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred)) 2160 logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred))
@@ -1943,6 +2166,10 @@ class RunQueueExecute:
1943 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks)) 2166 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks))
1944 err = True 2167 err = True
1945 2168
2169 for tid in self.scenequeue_covered.intersection(self.scenequeue_notcovered):
2170 # No task should end up in both covered and uncovered, that is a bug.
2171 logger.error("Setscene task %s in both covered and notcovered." % tid)
2172
1946 for tid in self.rqdata.runq_setscene_tids: 2173 for tid in self.rqdata.runq_setscene_tids:
1947 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered: 2174 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered:
1948 err = True 2175 err = True
@@ -1961,7 +2188,7 @@ class RunQueueExecute:
1961 if x not in self.tasks_scenequeue_done: 2188 if x not in self.tasks_scenequeue_done:
1962 logger.error("Task %s was never processed by the setscene code" % x) 2189 logger.error("Task %s was never processed by the setscene code" % x)
1963 err = True 2190 err = True
1964 if len(self.rqdata.runtaskentries[x].depends) == 0 and x not in self.runq_buildable: 2191 if not self.rqdata.runtaskentries[x].depends and x not in self.runq_buildable:
1965 logger.error("Task %s was never marked as buildable by the setscene code" % x) 2192 logger.error("Task %s was never marked as buildable by the setscene code" % x)
1966 err = True 2193 err = True
1967 return err 2194 return err
@@ -1979,13 +2206,24 @@ class RunQueueExecute:
1979 if not hasattr(self, "sorted_setscene_tids"): 2206 if not hasattr(self, "sorted_setscene_tids"):
1980 # Don't want to sort this set every execution 2207 # Don't want to sort this set every execution
1981 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids) 2208 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids)
2209 # Resume looping where we left off when we returned to feed the mainloop
2210 self.setscene_tids_generator = itertools.cycle(self.rqdata.runq_setscene_tids)
1982 2211
1983 task = None 2212 task = None
1984 if not self.sqdone and self.can_start_task(): 2213 if not self.sqdone and self.can_start_task():
1985 # Find the next setscene to run 2214 loopcount = 0
1986 for nexttask in self.sorted_setscene_tids: 2215 # Find the next setscene to run, exit the loop when we've processed all tids or found something to execute
1987 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values(): 2216 while loopcount < len(self.rqdata.runq_setscene_tids):
1988 if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]): 2217 loopcount += 1
2218 nexttask = next(self.setscene_tids_generator)
2219 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
2220 if nexttask in self.sq_deferred and self.sq_deferred[nexttask] not in self.runq_complete:
2221 # Skip deferred tasks quickly before the 'expensive' tests below - this is key to performant multiconfig builds
2222 continue
2223 if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
2224 nexttask not in self.sq_needed_harddeps and \
2225 self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
2226 self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
1989 if nexttask not in self.rqdata.target_tids: 2227 if nexttask not in self.rqdata.target_tids:
1990 logger.debug2("Skipping setscene for task %s" % nexttask) 2228 logger.debug2("Skipping setscene for task %s" % nexttask)
1991 self.sq_task_skip(nexttask) 2229 self.sq_task_skip(nexttask)
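Replacing the sorted for-loop with an itertools.cycle generator lets the scheduler resume scanning where the previous call to this function left off, instead of re-testing the same early tids every time it returns to feed the main loop, while loopcount bounds each call to one full pass. A toy illustration of that resumable round-robin scan:

    import itertools

    tids = ["a", "b", "c", "d"]
    scan = itertools.cycle(tids)   # persists across calls, like setscene_tids_generator

    def next_runnable(runnable):
        # Examine each tid at most once per call, continuing from the
        # position where the previous call stopped.
        for _ in range(len(tids)):
            tid = next(scan)
            if tid in runnable:
                return tid
        return None

    print(next_runnable({"c"}))   # c
    print(next_runnable({"a"}))   # a (scan resumed after 'c' and wrapped around)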
@@ -1993,13 +2231,25 @@ class RunQueueExecute:
1993 if nexttask in self.sq_deferred: 2231 if nexttask in self.sq_deferred:
1994 del self.sq_deferred[nexttask] 2232 del self.sq_deferred[nexttask]
1995 return True 2233 return True
2234 if nexttask in self.sqdata.sq_harddeps_rev and not self.sqdata.sq_harddeps_rev[nexttask].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2235 logger.debug2("Deferring %s due to hard dependencies" % nexttask)
2236 updated = False
2237 for dep in self.sqdata.sq_harddeps_rev[nexttask]:
2238 if dep not in self.sq_needed_harddeps:
2239 logger.debug2("Enabling task %s as it is a hard dependency" % dep)
2240 self.sq_buildable.add(dep)
2241 self.sq_needed_harddeps.add(dep)
2242 updated = True
2243 self.sq_harddep_deferred.add(nexttask)
2244 if updated:
2245 return True
2246 continue
1996 # If covered tasks are running, need to wait for them to complete 2247 # If covered tasks are running, need to wait for them to complete
1997 for t in self.sqdata.sq_covered_tasks[nexttask]: 2248 for t in self.sqdata.sq_covered_tasks[nexttask]:
1998 if t in self.runq_running and t not in self.runq_complete: 2249 if t in self.runq_running and t not in self.runq_complete:
1999 continue 2250 continue
2000 if nexttask in self.sq_deferred: 2251 if nexttask in self.sq_deferred:
2001 if self.sq_deferred[nexttask] not in self.runq_complete: 2252 # Deferred tasks that were still blocked were skipped above, so this one can now be processed
2002 continue
2003 logger.debug("Task %s no longer deferred" % nexttask) 2253 logger.debug("Task %s no longer deferred" % nexttask)
2004 del self.sq_deferred[nexttask] 2254 del self.sq_deferred[nexttask]
2005 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) 2255 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
@@ -2007,8 +2257,6 @@ class RunQueueExecute:
2007 logger.debug("%s didn't become valid, skipping setscene" % nexttask) 2257 logger.debug("%s didn't become valid, skipping setscene" % nexttask)
2008 self.sq_task_failoutright(nexttask) 2258 self.sq_task_failoutright(nexttask)
2009 return True 2259 return True
2010 else:
2011 self.sqdata.outrightfail.remove(nexttask)
2012 if nexttask in self.sqdata.outrightfail: 2260 if nexttask in self.sqdata.outrightfail:
2013 logger.debug2('No package found, so skipping setscene task %s', nexttask) 2261 logger.debug2('No package found, so skipping setscene task %s', nexttask)
2014 self.sq_task_failoutright(nexttask) 2262 self.sq_task_failoutright(nexttask)
@@ -2040,28 +2288,42 @@ class RunQueueExecute:
2040 self.sq_task_failoutright(task) 2288 self.sq_task_failoutright(task)
2041 return True 2289 return True
2042 2290
2043 startevent = sceneQueueTaskStarted(task, self.sq_stats, self.rq) 2291 startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
2044 bb.event.fire(startevent, self.cfgData) 2292 bb.event.fire(startevent, self.cfgData)
2045 2293
2046 taskdepdata = self.sq_build_taskdepdata(task)
2047
2048 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] 2294 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
2049 taskhash = self.rqdata.get_task_hash(task) 2295 realfn = bb.cache.virtualfn2realfn(taskfn)[0]
2050 unihash = self.rqdata.get_task_unihash(task) 2296 runtask = {
2297 'fn' : taskfn,
2298 'task' : task,
2299 'taskname' : taskname,
2300 'taskhash' : self.rqdata.get_task_hash(task),
2301 'unihash' : self.rqdata.get_task_unihash(task),
2302 'quieterrors' : True,
2303 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
2304 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
2305 'taskdepdata' : self.sq_build_taskdepdata(task),
2306 'dry_run' : False,
2307 'taskdep': taskdep,
2308 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
2309 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
2310 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
2311 }
2312
2051 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: 2313 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
2052 if not mc in self.rq.fakeworker: 2314 if not mc in self.rq.fakeworker:
2053 self.rq.start_fakeworker(self, mc) 2315 self.rq.start_fakeworker(self, mc)
2054 self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>") 2316 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
2055 self.rq.fakeworker[mc].process.stdin.flush() 2317 self.rq.fakeworker[mc].process.stdin.flush()
2056 else: 2318 else:
2057 self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>") 2319 RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
2058 self.rq.worker[mc].process.stdin.flush() 2320 self.rq.worker[mc].process.stdin.flush()
2059 2321
2060 self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 2322 self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2061 self.build_stamps2.append(self.build_stamps[task]) 2323 self.build_stamps2.append(self.build_stamps[task])
2062 self.sq_running.add(task) 2324 self.sq_running.add(task)
2063 self.sq_live.add(task) 2325 self.sq_live.add(task)
2064 self.sq_stats.taskActive() 2326 self.stats.updateActiveSetscene(len(self.sq_live))
2065 if self.can_start_task(): 2327 if self.can_start_task():
2066 return True 2328 return True
2067 2329
@@ -2092,9 +2354,9 @@ class RunQueueExecute:
2092 if task is not None: 2354 if task is not None:
2093 (mc, fn, taskname, taskfn) = split_tid_mcfn(task) 2355 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
2094 2356
2095 if self.rqdata.setscenewhitelist is not None: 2357 if self.rqdata.setscene_ignore_tasks is not None:
2096 if self.check_setscenewhitelist(task): 2358 if self.check_setscene_ignore_tasks(task):
2097 self.task_fail(task, "setscene whitelist") 2359 self.task_fail(task, "setscene ignore_tasks")
2098 return True 2360 return True
2099 2361
2100 if task in self.tasks_covered: 2362 if task in self.tasks_covered:
@@ -2117,18 +2379,32 @@ class RunQueueExecute:
2117 self.runq_running.add(task) 2379 self.runq_running.add(task)
2118 self.stats.taskActive() 2380 self.stats.taskActive()
2119 if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): 2381 if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
2120 bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn) 2382 bb.build.make_stamp_mcfn(taskname, taskfn)
2121 self.task_complete(task) 2383 self.task_complete(task)
2122 return True 2384 return True
2123 else: 2385 else:
2124 startevent = runQueueTaskStarted(task, self.stats, self.rq) 2386 startevent = runQueueTaskStarted(task, self.stats, self.rq)
2125 bb.event.fire(startevent, self.cfgData) 2387 bb.event.fire(startevent, self.cfgData)
2126 2388
2127 taskdepdata = self.build_taskdepdata(task)
2128
2129 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] 2389 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
2130 taskhash = self.rqdata.get_task_hash(task) 2390 realfn = bb.cache.virtualfn2realfn(taskfn)[0]
2131 unihash = self.rqdata.get_task_unihash(task) 2391 runtask = {
2392 'fn' : taskfn,
2393 'task' : task,
2394 'taskname' : taskname,
2395 'taskhash' : self.rqdata.get_task_hash(task),
2396 'unihash' : self.rqdata.get_task_unihash(task),
2397 'quieterrors' : False,
2398 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
2399 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
2400 'taskdepdata' : self.build_taskdepdata(task),
2401 'dry_run' : self.rqdata.setscene_enforce,
2402 'taskdep': taskdep,
2403 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
2404 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
2405 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
2406 }
2407
2132 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): 2408 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
2133 if not mc in self.rq.fakeworker: 2409 if not mc in self.rq.fakeworker:
2134 try: 2410 try:
@@ -2138,31 +2414,31 @@ class RunQueueExecute:
2138 self.rq.state = runQueueFailed 2414 self.rq.state = runQueueFailed
2139 self.stats.taskFailed() 2415 self.stats.taskFailed()
2140 return True 2416 return True
2141 self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") 2417 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
2142 self.rq.fakeworker[mc].process.stdin.flush() 2418 self.rq.fakeworker[mc].process.stdin.flush()
2143 else: 2419 else:
2144 self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") 2420 RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
2145 self.rq.worker[mc].process.stdin.flush() 2421 self.rq.worker[mc].process.stdin.flush()
2146 2422
2147 self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 2423 self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2148 self.build_stamps2.append(self.build_stamps[task]) 2424 self.build_stamps2.append(self.build_stamps[task])
2149 self.runq_running.add(task) 2425 self.runq_running.add(task)
2150 self.stats.taskActive() 2426 self.stats.taskActive()
2151 if self.can_start_task(): 2427 if self.can_start_task():
2152 return True 2428 return True
2153 2429
2154 if self.stats.active > 0 or self.sq_stats.active > 0: 2430 if self.stats.active > 0 or self.sq_live:
2155 self.rq.read_workers() 2431 self.rq.read_workers()
2156 return self.rq.active_fds() 2432 return self.rq.active_fds()
2157 2433
2158 # No more tasks can be run. If we have deferred setscene tasks we should run them. 2434 # No more tasks can be run. If we have deferred setscene tasks we should run them.
2159 if self.sq_deferred: 2435 if self.sq_deferred:
2160 tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0]) 2436 deferred_tid = list(self.sq_deferred.keys())[0]
2161 logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid) 2437 blocking_tid = self.sq_deferred.pop(deferred_tid)
2162 self.sq_task_failoutright(tid) 2438 logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s blocked by %s" % (deferred_tid, blocking_tid))
2163 return True 2439 return True
2164 2440
2165 if len(self.failed_tids) != 0: 2441 if self.failed_tids:
2166 self.rq.state = runQueueFailed 2442 self.rq.state = runQueueFailed
2167 return True 2443 return True
2168 2444
@@ -2195,6 +2471,25 @@ class RunQueueExecute:
2195 ret.add(dep) 2471 ret.add(dep)
2196 return ret 2472 return ret
2197 2473
2474 # Build the individual cache entries in advance once to save time
2475 def build_taskdepdata_cache(self):
2476 taskdepdata_cache = {}
2477 for task in self.rqdata.runtaskentries:
2478 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
2479 taskdepdata_cache[task] = bb.TaskData(
2480 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2481 taskname = taskname,
2482 fn = fn,
2483 deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends),
2484 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2485 taskhash = self.rqdata.runtaskentries[task].hash,
2486 unihash = self.rqdata.runtaskentries[task].unihash,
2487 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2488 taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps,
2489 )
2490
2491 self.taskdepdata_cache = taskdepdata_cache
2492
2198 # We filter out multiconfig dependencies from taskdepdata we pass to the tasks 2493 # We filter out multiconfig dependencies from taskdepdata we pass to the tasks
2199 # as most code can't handle them 2494 # as most code can't handle them
2200 def build_taskdepdata(self, task): 2495 def build_taskdepdata(self, task):
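build_taskdepdata_cache builds one record per task up front so later walks only have to patch the single field that can change. A simplified sketch of the pattern, using a cut-down stand-in for bb.TaskData (the real namedtuple carries more fields):

    from collections import namedtuple

    # Reduced stand-in for bb.TaskData.
    TaskData = namedtuple("TaskData", "pn taskname deps unihash")

    tid = "mc::recipe.bb:do_compile"
    cache = {tid: TaskData("recipe", "do_compile", set(), "aaa")}

    # Later, refresh only the mutable field instead of rebuilding
    # the whole record on every build_taskdepdata() call.
    cache[tid] = cache[tid]._replace(unihash="bbb")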
@@ -2206,15 +2501,11 @@ class RunQueueExecute:
2206 while next: 2501 while next:
2207 additional = [] 2502 additional = []
2208 for revdep in next: 2503 for revdep in next:
2209 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) 2504 self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace(
2210 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2505 unihash=self.rqdata.runtaskentries[revdep].unihash
2211 deps = self.rqdata.runtaskentries[revdep].depends 2506 )
2212 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2507 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
2213 taskhash = self.rqdata.runtaskentries[revdep].hash 2508 for revdep2 in self.taskdepdata_cache[revdep].deps:
2214 unihash = self.rqdata.runtaskentries[revdep].unihash
2215 deps = self.filtermcdeps(task, mc, deps)
2216 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
2217 for revdep2 in deps:
2218 if revdep2 not in taskdepdata: 2509 if revdep2 not in taskdepdata:
2219 additional.append(revdep2) 2510 additional.append(revdep2)
2220 next = additional 2511 next = additional
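The rewritten build_taskdepdata is a breadth-first walk over the dependency graph that now reuses the precomputed cache entries. A self-contained sketch of that traversal shape, with a hypothetical deps_of callback in place of the BitBake structures:

    def collect_depdata(start, deps_of):
        # Breadth-first walk from `start`, recording each task once;
        # deps_of(tid) returns that task's direct dependencies.
        seen = {}
        frontier = [start]
        while frontier:
            additional = []
            for tid in frontier:
                seen[tid] = deps_of(tid)
                for dep in seen[tid]:
                    if dep not in seen and dep not in additional:
                        additional.append(dep)
            frontier = additional
        return seen

    print(collect_depdata("a", {"a": ["b", "c"], "b": ["c"], "c": []}.__getitem__))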
@@ -2228,7 +2519,7 @@ class RunQueueExecute:
2228 return 2519 return
2229 2520
2230 notcovered = set(self.scenequeue_notcovered) 2521 notcovered = set(self.scenequeue_notcovered)
2231 notcovered |= self.cantskip 2522 notcovered |= self.sqdata.cantskip
2232 for tid in self.scenequeue_notcovered: 2523 for tid in self.scenequeue_notcovered:
2233 notcovered |= self.sqdata.sq_covered_tasks[tid] 2524 notcovered |= self.sqdata.sq_covered_tasks[tid]
2234 notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids) 2525 notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids)
@@ -2241,7 +2532,7 @@ class RunQueueExecute:
2241 covered.intersection_update(self.tasks_scenequeue_done) 2532 covered.intersection_update(self.tasks_scenequeue_done)
2242 2533
2243 for tid in notcovered | covered: 2534 for tid in notcovered | covered:
2244 if len(self.rqdata.runtaskentries[tid].depends) == 0: 2535 if not self.rqdata.runtaskentries[tid].depends:
2245 self.setbuildable(tid) 2536 self.setbuildable(tid)
2246 elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete): 2537 elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete):
2247 self.setbuildable(tid) 2538 self.setbuildable(tid)
@@ -2273,10 +2564,16 @@ class RunQueueExecute:
2273 self.updated_taskhash_queue.remove((tid, unihash)) 2564 self.updated_taskhash_queue.remove((tid, unihash))
2274 2565
2275 if unihash != self.rqdata.runtaskentries[tid].unihash: 2566 if unihash != self.rqdata.runtaskentries[tid].unihash:
2276 hashequiv_logger.verbose("Task %s unihash changed to %s" % (tid, unihash)) 2567 # Make sure we rehash any other tasks with the same task hash that are deferred behind this one.
2277 self.rqdata.runtaskentries[tid].unihash = unihash 2568 torehash = [tid]
2278 bb.parse.siggen.set_unihash(tid, unihash) 2569 for deftid in self.sq_deferred:
2279 toprocess.add(tid) 2570 if self.sq_deferred[deftid] == tid:
2571 torehash.append(deftid)
2572 for hashtid in torehash:
2573 hashequiv_logger.verbose("Task %s unihash changed to %s" % (hashtid, unihash))
2574 self.rqdata.runtaskentries[hashtid].unihash = unihash
2575 bb.parse.siggen.set_unihash(hashtid, unihash)
2576 toprocess.add(hashtid)
2280 2577
2281 # Work out all tasks which depend upon these 2578 # Work out all tasks which depend upon these
2282 total = set() 2579 total = set()
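The hunk above propagates a changed unihash to every task deferred behind the updated one, since deferral is keyed on a shared taskhash. A small sketch of that propagation, assuming the same deferred_tid -> blocking_tid mapping:

    def propagate_unihash(tid, unihash, sq_deferred, unihashes):
        # Tasks deferred behind tid share its taskhash, so when tid's
        # unihash changes they must pick up the new value as well.
        torehash = [tid] + [d for d, blocker in sq_deferred.items() if blocker == tid]
        for t in torehash:
            unihashes[t] = unihash
        return torehash

    uni = {}
    print(propagate_unihash("a", "h1", {"b": "a", "c": "x"}, uni))  # ['a', 'b']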
@@ -2294,23 +2591,33 @@ class RunQueueExecute:
2294 # Now iterate those tasks in dependency order to regenerate their taskhash/unihash 2591 # Now iterate those tasks in dependency order to regenerate their taskhash/unihash
2295 next = set() 2592 next = set()
2296 for p in total: 2593 for p in total:
2297 if len(self.rqdata.runtaskentries[p].depends) == 0: 2594 if not self.rqdata.runtaskentries[p].depends:
2298 next.add(p) 2595 next.add(p)
2299 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): 2596 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
2300 next.add(p) 2597 next.add(p)
2301 2598
2599 starttime = time.time()
2600 lasttime = starttime
2601
2302 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled 2602 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled
2303 while next: 2603 while next:
2304 current = next.copy() 2604 current = next.copy()
2305 next = set() 2605 next = set()
2606 ready = {}
2306 for tid in current: 2607 for tid in current:
2307 if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): 2608 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
2308 continue 2609 continue
2610 # get_taskhash for a given tid *must* be called before get_unihash* below
2611 ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
2612
2613 unihashes = bb.parse.siggen.get_unihashes(ready.keys())
2614
2615 for tid in ready:
2309 orighash = self.rqdata.runtaskentries[tid].hash 2616 orighash = self.rqdata.runtaskentries[tid].hash
2310 dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid)) 2617 newhash = ready[tid]
2311 newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc)
2312 origuni = self.rqdata.runtaskentries[tid].unihash 2618 origuni = self.rqdata.runtaskentries[tid].unihash
2313 newuni = bb.parse.siggen.get_unihash(tid) 2619 newuni = unihashes[tid]
2620
2314 # FIXME, need to check it can come from sstate at all for determinism? 2621 # FIXME, need to check it can come from sstate at all for determinism?
2315 remapped = False 2622 remapped = False
2316 if newuni == origuni: 2623 if newuni == origuni:
@@ -2331,12 +2638,21 @@ class RunQueueExecute:
2331 next |= self.rqdata.runtaskentries[tid].revdeps 2638 next |= self.rqdata.runtaskentries[tid].revdeps
2332 total.remove(tid) 2639 total.remove(tid)
2333 next.intersection_update(total) 2640 next.intersection_update(total)
2641 bb.event.check_for_interrupts(self.cooker.data)
2642
2643 if time.time() > (lasttime + 30):
2644 lasttime = time.time()
2645 hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime))
2646
2647 endtime = time.time()
2648 if (endtime-starttime > 60):
2649 hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime))
2334 2650
2335 if changed: 2651 if changed:
2336 for mc in self.rq.worker: 2652 for mc in self.rq.worker:
2337 self.rq.worker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") 2653 RunQueue.send_pickled_data(self.rq.worker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
2338 for mc in self.rq.fakeworker: 2654 for mc in self.rq.fakeworker:
2339 self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") 2655 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
2340 2656
2341 hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed))) 2657 hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
2342 2658
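The reworked rehash loop gathers the new taskhashes for every ready task first and then resolves their unihashes in one batched call, rather than one server round trip per task. A sketch of the batching shape, where query_unihashes stands in for the bb.parse.siggen.get_unihashes call:

    def rehash_batch(ready_tids, taskhash_of, query_unihashes):
        # Compute the batch of new taskhashes, then resolve all of
        # their unihashes in a single query.
        hashes = {tid: taskhash_of(tid) for tid in ready_tids}
        return hashes, query_unihashes(hashes.keys())

    hashes, unis = rehash_batch(
        ["a", "b"],
        lambda tid: "hash-" + tid,
        lambda tids: {tid: "uni-" + tid for tid in tids},
    )
    print(hashes, unis)

The 30-second progress messages and the over-60s warning added above bracket this loop, presumably because large rehashes can take significant time.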
@@ -2370,7 +2686,7 @@ class RunQueueExecute:
2370 self.tasks_scenequeue_done.remove(tid) 2686 self.tasks_scenequeue_done.remove(tid)
2371 for dep in self.sqdata.sq_covered_tasks[tid]: 2687 for dep in self.sqdata.sq_covered_tasks[tid]:
2372 if dep in self.runq_complete and dep not in self.runq_tasksrun: 2688 if dep in self.runq_complete and dep not in self.runq_tasksrun:
2373 bb.error("Task %s marked as completed but now needing to rerun? Aborting build." % dep) 2689 bb.error("Task %s marked as completed but now needing to rerun? Halting build." % dep)
2374 self.failed_tids.append(tid) 2690 self.failed_tids.append(tid)
2375 self.rq.state = runQueueCleanUp 2691 self.rq.state = runQueueCleanUp
2376 return 2692 return
@@ -2383,17 +2699,6 @@ class RunQueueExecute:
2383 self.sq_buildable.remove(tid) 2699 self.sq_buildable.remove(tid)
2384 if tid in self.sq_running: 2700 if tid in self.sq_running:
2385 self.sq_running.remove(tid) 2701 self.sq_running.remove(tid)
2386 harddepfail = False
2387 for t in self.sqdata.sq_harddeps:
2388 if tid in self.sqdata.sq_harddeps[t] and t in self.scenequeue_notcovered:
2389 harddepfail = True
2390 break
2391 if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2392 if tid not in self.sq_buildable:
2393 self.sq_buildable.add(tid)
2394 if len(self.sqdata.sq_revdeps[tid]) == 0:
2395 self.sq_buildable.add(tid)
2396
2397 if tid in self.sqdata.outrightfail: 2702 if tid in self.sqdata.outrightfail:
2398 self.sqdata.outrightfail.remove(tid) 2703 self.sqdata.outrightfail.remove(tid)
2399 if tid in self.scenequeue_notcovered: 2704 if tid in self.scenequeue_notcovered:
@@ -2404,7 +2709,7 @@ class RunQueueExecute:
2404 self.scenequeue_notneeded.remove(tid) 2709 self.scenequeue_notneeded.remove(tid)
2405 2710
2406 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2711 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2407 self.sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", self.rqdata.dataCaches[mc], taskfn, noextra=True) 2712 self.sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2408 2713
2409 if tid in self.stampcache: 2714 if tid in self.stampcache:
2410 del self.stampcache[tid] 2715 del self.stampcache[tid]
@@ -2412,29 +2717,67 @@ class RunQueueExecute:
2412 if tid in self.build_stamps: 2717 if tid in self.build_stamps:
2413 del self.build_stamps[tid] 2718 del self.build_stamps[tid]
2414 2719
2415 update_tasks.append((tid, harddepfail, tid in self.sqdata.valid)) 2720 update_tasks.append(tid)
2416 2721
2417 if update_tasks: 2722 update_tasks2 = []
2723 for tid in update_tasks:
2724 harddepfail = False
2725 for t in self.sqdata.sq_harddeps_rev[tid]:
2726 if t in self.scenequeue_notcovered:
2727 harddepfail = True
2728 break
2729 if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2730 if tid not in self.sq_buildable:
2731 self.sq_buildable.add(tid)
2732 if not self.sqdata.sq_revdeps[tid]:
2733 self.sq_buildable.add(tid)
2734
2735 update_tasks2.append((tid, harddepfail, tid in self.sqdata.valid))
2736
2737 if update_tasks2:
2418 self.sqdone = False 2738 self.sqdone = False
2419 update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False) 2739 for mc in sorted(self.sqdata.multiconfigs):
2740 for tid in sorted([t[0] for t in update_tasks2]):
2741 if mc_from_tid(tid) != mc:
2742 continue
2743 h = pending_hash_index(tid, self.rqdata)
2744 if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
2745 self.sq_deferred[tid] = self.sqdata.hashes[h]
2746 bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
2747 update_scenequeue_data([t[0] for t in update_tasks2], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
2420 2748
2421 for (tid, harddepfail, origvalid) in update_tasks: 2749 for (tid, harddepfail, origvalid) in update_tasks2:
2422 if tid in self.sqdata.valid and not origvalid: 2750 if tid in self.sqdata.valid and not origvalid:
2423 hashequiv_logger.verbose("Setscene task %s became valid" % tid) 2751 hashequiv_logger.verbose("Setscene task %s became valid" % tid)
2424 if harddepfail: 2752 if harddepfail:
2753 logger.debug2("%s has an unavailable hard dependency so skipping" % (tid))
2425 self.sq_task_failoutright(tid) 2754 self.sq_task_failoutright(tid)
2426 2755
2427 if changed: 2756 if changed:
2757 self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
2758 self.sq_needed_harddeps = set()
2759 self.sq_harddep_deferred = set()
2428 self.holdoff_need_update = True 2760 self.holdoff_need_update = True
2429 2761
2430 def scenequeue_updatecounters(self, task, fail=False): 2762 def scenequeue_updatecounters(self, task, fail=False):
2431 2763
2432 for dep in sorted(self.sqdata.sq_deps[task]): 2764 if fail and task in self.sqdata.sq_harddeps:
2433 if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]: 2765 for dep in sorted(self.sqdata.sq_harddeps[task]):
2766 if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
2767 # dependency may already have been processed, e.g. a noexec setscene task
2768 continue
2769 noexec, stamppresent = check_setscene_stamps(dep, self.rqdata, self.rq, self.stampcache)
2770 if noexec or stamppresent:
2771 continue
2434 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) 2772 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
2435 self.sq_task_failoutright(dep) 2773 self.sq_task_failoutright(dep)
2436 continue 2774 continue
2437 if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): 2775
2776 # For performance, only compute allcovered once if needed
2777 if self.sqdata.sq_deps[task]:
2778 allcovered = self.scenequeue_covered | self.scenequeue_notcovered
2779 for dep in sorted(self.sqdata.sq_deps[task]):
2780 if self.sqdata.sq_revdeps[dep].issubset(allcovered):
2438 if dep not in self.sq_buildable: 2781 if dep not in self.sq_buildable:
2439 self.sq_buildable.add(dep) 2782 self.sq_buildable.add(dep)
2440 2783
@@ -2452,6 +2795,14 @@ class RunQueueExecute:
2452 new.add(dep) 2795 new.add(dep)
2453 next = new 2796 next = new
2454 2797
2798 # If this task was one which other setscene tasks have a hard dependency upon, we need
2799 # to walk through the hard dependencies and allow execution of those which have completed dependencies.
2800 if task in self.sqdata.sq_harddeps:
2801 for dep in self.sq_harddep_deferred.copy():
2802 if self.sqdata.sq_harddeps_rev[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2803 self.sq_harddep_deferred.remove(dep)
2804
2805 self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
2455 self.holdoff_need_update = True 2806 self.holdoff_need_update = True
2456 2807
2457 def sq_task_completeoutright(self, task): 2808 def sq_task_completeoutright(self, task):
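scenequeue_updatecounters now also walks the deferred hard-dependency set and releases any setscene task whose hard dependencies have all been processed. A simplified sketch of that release step (the real code keeps sq_harddep_deferred as a set alongside the sq_harddeps_rev map; this collapses the two into one dict):

    def release_harddep_deferred(deferred, processed):
        # Release tasks whose hard dependencies are all covered or
        # notcovered; deferred maps tid -> set of hard-dependency tids.
        released = [tid for tid in sorted(deferred) if deferred[tid] <= processed]
        for tid in released:
            del deferred[tid]
        return released

    d = {"b:do_image_setscene": {"a:do_deploy_setscene"}}
    print(release_harddep_deferred(d, {"a:do_deploy_setscene"}))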
@@ -2466,22 +2817,20 @@ class RunQueueExecute:
2466 self.scenequeue_updatecounters(task) 2817 self.scenequeue_updatecounters(task)
2467 2818
2468 def sq_check_taskfail(self, task): 2819 def sq_check_taskfail(self, task):
2469 if self.rqdata.setscenewhitelist is not None: 2820 if self.rqdata.setscene_ignore_tasks is not None:
2470 realtask = task.split('_setscene')[0] 2821 realtask = task.split('_setscene')[0]
2471 (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask) 2822 (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask)
2472 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2823 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2473 if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): 2824 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
2474 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene")) 2825 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene"))
2475 self.rq.state = runQueueCleanUp 2826 self.rq.state = runQueueCleanUp
2476 2827
2477 def sq_task_complete(self, task): 2828 def sq_task_complete(self, task):
2478 self.sq_stats.taskCompleted() 2829 bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
2479 bb.event.fire(sceneQueueTaskCompleted(task, self.sq_stats, self.rq), self.cfgData)
2480 self.sq_task_completeoutright(task) 2830 self.sq_task_completeoutright(task)
2481 2831
2482 def sq_task_fail(self, task, result): 2832 def sq_task_fail(self, task, result):
2483 self.sq_stats.taskFailed() 2833 bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
2484 bb.event.fire(sceneQueueTaskFailed(task, self.sq_stats, result, self), self.cfgData)
2485 self.scenequeue_notcovered.add(task) 2834 self.scenequeue_notcovered.add(task)
2486 self.scenequeue_updatecounters(task, True) 2835 self.scenequeue_updatecounters(task, True)
2487 self.sq_check_taskfail(task) 2836 self.sq_check_taskfail(task)
@@ -2489,8 +2838,6 @@ class RunQueueExecute:
2489 def sq_task_failoutright(self, task): 2838 def sq_task_failoutright(self, task):
2490 self.sq_running.add(task) 2839 self.sq_running.add(task)
2491 self.sq_buildable.add(task) 2840 self.sq_buildable.add(task)
2492 self.sq_stats.taskSkipped()
2493 self.sq_stats.taskCompleted()
2494 self.scenequeue_notcovered.add(task) 2841 self.scenequeue_notcovered.add(task)
2495 self.scenequeue_updatecounters(task, True) 2842 self.scenequeue_updatecounters(task, True)
2496 2843
@@ -2498,8 +2845,6 @@ class RunQueueExecute:
2498 self.sq_running.add(task) 2845 self.sq_running.add(task)
2499 self.sq_buildable.add(task) 2846 self.sq_buildable.add(task)
2500 self.sq_task_completeoutright(task) 2847 self.sq_task_completeoutright(task)
2501 self.sq_stats.taskSkipped()
2502 self.sq_stats.taskCompleted()
2503 2848
2504 def sq_build_taskdepdata(self, task): 2849 def sq_build_taskdepdata(self, task):
2505 def getsetscenedeps(tid): 2850 def getsetscenedeps(tid):
@@ -2525,12 +2870,19 @@ class RunQueueExecute:
2525 additional = [] 2870 additional = []
2526 for revdep in next: 2871 for revdep in next:
2527 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) 2872 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
2528 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2529 deps = getsetscenedeps(revdep) 2873 deps = getsetscenedeps(revdep)
2530 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2874
2531 taskhash = self.rqdata.runtaskentries[revdep].hash 2875 taskdepdata[revdep] = bb.TaskData(
2532 unihash = self.rqdata.runtaskentries[revdep].unihash 2876 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2533 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash] 2877 taskname = taskname,
2878 fn = fn,
2879 deps = deps,
2880 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2881 taskhash = self.rqdata.runtaskentries[revdep].hash,
2882 unihash = self.rqdata.runtaskentries[revdep].unihash,
2883 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2884 taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps,
2885 )
2534 for revdep2 in deps: 2886 for revdep2 in deps:
2535 if revdep2 not in taskdepdata: 2887 if revdep2 not in taskdepdata:
2536 additional.append(revdep2) 2888 additional.append(revdep2)
@@ -2539,8 +2891,8 @@ class RunQueueExecute:
2539 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n")) 2891 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
2540 return taskdepdata 2892 return taskdepdata
2541 2893
2542 def check_setscenewhitelist(self, tid): 2894 def check_setscene_ignore_tasks(self, tid):
2543 # Check task that is going to run against the whitelist 2895 # Check task that is going to run against the ignore tasks list
2544 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2896 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2545 # Ignore covered tasks 2897 # Ignore covered tasks
2546 if tid in self.tasks_covered: 2898 if tid in self.tasks_covered:
@@ -2554,14 +2906,15 @@ class RunQueueExecute:
2554 return False 2906 return False
2555 2907
2556 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2908 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2557 if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): 2909 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
2558 if tid in self.rqdata.runq_setscene_tids: 2910 if tid in self.rqdata.runq_setscene_tids:
2559 msg = 'Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname) 2911 msg = ['Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)]
2560 else: 2912 else:
2561 msg = 'Task %s.%s attempted to execute unexpectedly' % (pn, taskname) 2913 msg = ['Task %s.%s attempted to execute unexpectedly' % (pn, taskname)]
2562 for t in self.scenequeue_notcovered: 2914 for t in self.scenequeue_notcovered:
2563 msg = msg + "\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash) 2915 msg.append("\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash))
2564 logger.error(msg + '\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered)) 2916 msg.append('\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered))
2917 logger.error("".join(msg))
2565 return True 2918 return True
2566 return False 2919 return False
2567 2920
@@ -2573,6 +2926,7 @@ class SQData(object):
2573 self.sq_revdeps = {} 2926 self.sq_revdeps = {}
2574 # Injected inter-setscene task dependencies 2927 # Injected inter-setscene task dependencies
2575 self.sq_harddeps = {} 2928 self.sq_harddeps = {}
2929 self.sq_harddeps_rev = {}
2576 # Cache of stamp files so duplicates can't run in parallel 2930 # Cache of stamp files so duplicates can't run in parallel
2577 self.stamps = {} 2931 self.stamps = {}
2578 # Setscene tasks directly depended upon by the build 2932 # Setscene tasks directly depended upon by the build
@@ -2582,12 +2936,17 @@ class SQData(object):
2582 # A list of normal tasks a setscene task covers 2936 # A list of normal tasks a setscene task covers
2583 self.sq_covered_tasks = {} 2937 self.sq_covered_tasks = {}
2584 2938
2585def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): 2939def build_scenequeue_data(sqdata, rqdata, sqrq):
2586 2940
2587 sq_revdeps = {} 2941 sq_revdeps = {}
2588 sq_revdeps_squash = {} 2942 sq_revdeps_squash = {}
2589 sq_collated_deps = {} 2943 sq_collated_deps = {}
2590 2944
2945 # We can't skip specified target tasks which aren't setscene tasks
2946 sqdata.cantskip = set(rqdata.target_tids)
2947 sqdata.cantskip.difference_update(rqdata.runq_setscene_tids)
2948 sqdata.cantskip.intersection_update(rqdata.runtaskentries)
2949
2591 # We need to construct a dependency graph for the setscene functions. Intermediate 2950 # We need to construct a dependency graph for the setscene functions. Intermediate
2592 # dependencies between the setscene tasks only complicate the code. This code 2951 # dependencies between the setscene tasks only complicate the code. This code
2593 # therefore aims to collapse the huge runqueue dependency tree into a smaller one 2952 # therefore aims to collapse the huge runqueue dependency tree into a smaller one
@@ -2600,7 +2959,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2600 for tid in rqdata.runtaskentries: 2959 for tid in rqdata.runtaskentries:
2601 sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps) 2960 sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps)
2602 sq_revdeps_squash[tid] = set() 2961 sq_revdeps_squash[tid] = set()
2603 if (len(sq_revdeps[tid]) == 0) and tid not in rqdata.runq_setscene_tids: 2962 if not sq_revdeps[tid] and tid not in rqdata.runq_setscene_tids:
2604 #bb.warn("Added endpoint %s" % (tid)) 2963 #bb.warn("Added endpoint %s" % (tid))
2605 endpoints[tid] = set() 2964 endpoints[tid] = set()
2606 2965
@@ -2634,16 +2993,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2634 sq_revdeps_squash[point] = set() 2993 sq_revdeps_squash[point] = set()
2635 if point in rqdata.runq_setscene_tids: 2994 if point in rqdata.runq_setscene_tids:
2636 sq_revdeps_squash[point] = tasks 2995 sq_revdeps_squash[point] = tasks
2637 tasks = set()
2638 continue 2996 continue
2639 for dep in rqdata.runtaskentries[point].depends: 2997 for dep in rqdata.runtaskentries[point].depends:
2640 if point in sq_revdeps[dep]: 2998 if point in sq_revdeps[dep]:
2641 sq_revdeps[dep].remove(point) 2999 sq_revdeps[dep].remove(point)
2642 if tasks: 3000 if tasks:
2643 sq_revdeps_squash[dep] |= tasks 3001 sq_revdeps_squash[dep] |= tasks
2644 if len(sq_revdeps[dep]) == 0 and dep not in rqdata.runq_setscene_tids: 3002 if not sq_revdeps[dep] and dep not in rqdata.runq_setscene_tids:
2645 newendpoints[dep] = task 3003 newendpoints[dep] = task
2646 if len(newendpoints) != 0: 3004 if newendpoints:
2647 process_endpoints(newendpoints) 3005 process_endpoints(newendpoints)
2648 3006
2649 process_endpoints(endpoints) 3007 process_endpoints(endpoints)
@@ -2655,16 +3013,16 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2655 # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon 3013 # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon
2656 new = True 3014 new = True
2657 for tid in rqdata.runtaskentries: 3015 for tid in rqdata.runtaskentries:
2658 if len(rqdata.runtaskentries[tid].revdeps) == 0: 3016 if not rqdata.runtaskentries[tid].revdeps:
2659 sqdata.unskippable.add(tid) 3017 sqdata.unskippable.add(tid)
2660 sqdata.unskippable |= sqrq.cantskip 3018 sqdata.unskippable |= sqdata.cantskip
2661 while new: 3019 while new:
2662 new = False 3020 new = False
2663 orig = sqdata.unskippable.copy() 3021 orig = sqdata.unskippable.copy()
2664 for tid in sorted(orig, reverse=True): 3022 for tid in sorted(orig, reverse=True):
2665 if tid in rqdata.runq_setscene_tids: 3023 if tid in rqdata.runq_setscene_tids:
2666 continue 3024 continue
2667 if len(rqdata.runtaskentries[tid].depends) == 0: 3025 if not rqdata.runtaskentries[tid].depends:
2668 # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable 3026 # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable
2669 sqrq.setbuildable(tid) 3027 sqrq.setbuildable(tid)
2670 sqdata.unskippable |= rqdata.runtaskentries[tid].depends 3028 sqdata.unskippable |= rqdata.runtaskentries[tid].depends
@@ -2676,14 +3034,13 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2676 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries)) 3034 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries))
2677 3035
2678 # Sanity check all dependencies could be changed to setscene task references 3036 # Sanity check all dependencies could be changed to setscene task references
2679 for taskcounter, tid in enumerate(rqdata.runtaskentries): 3037 for tid in rqdata.runtaskentries:
2680 if tid in rqdata.runq_setscene_tids: 3038 if tid in rqdata.runq_setscene_tids:
2681 pass 3039 pass
2682 elif len(sq_revdeps_squash[tid]) != 0: 3040 elif sq_revdeps_squash[tid]:
2683 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.") 3041 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.")
2684 else: 3042 else:
2685 del sq_revdeps_squash[tid] 3043 del sq_revdeps_squash[tid]
2686 rqdata.init_progress_reporter.update(taskcounter)
2687 3044
2688 rqdata.init_progress_reporter.next_stage() 3045 rqdata.init_progress_reporter.next_stage()
2689 3046
@@ -2694,7 +3051,9 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2694 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 3051 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2695 realtid = tid + "_setscene" 3052 realtid = tid + "_setscene"
2696 idepends = rqdata.taskData[mc].taskentries[realtid].idepends 3053 idepends = rqdata.taskData[mc].taskentries[realtid].idepends
2697 sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", rqdata.dataCaches[mc], taskfn, noextra=True) 3054 sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
3055
3056 sqdata.sq_harddeps_rev[tid] = set()
2698 for (depname, idependtask) in idepends: 3057 for (depname, idependtask) in idepends:
2699 3058
2700 if depname not in rqdata.taskData[mc].build_targets: 3059 if depname not in rqdata.taskData[mc].build_targets:
@@ -2707,20 +3066,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2707 if deptid not in rqdata.runtaskentries: 3066 if deptid not in rqdata.runtaskentries:
2708 bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask)) 3067 bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask))
2709 3068
3069 logger.debug2("Adding hard setscene dependency %s for %s" % (deptid, tid))
3070
2710 if not deptid in sqdata.sq_harddeps: 3071 if not deptid in sqdata.sq_harddeps:
2711 sqdata.sq_harddeps[deptid] = set() 3072 sqdata.sq_harddeps[deptid] = set()
2712 sqdata.sq_harddeps[deptid].add(tid) 3073 sqdata.sq_harddeps[deptid].add(tid)
2713 3074 sqdata.sq_harddeps_rev[tid].add(deptid)
2714 sq_revdeps_squash[tid].add(deptid)
2715 # Have to zero this to avoid circular dependencies
2716 sq_revdeps_squash[deptid] = set()
2717 3075
2718 rqdata.init_progress_reporter.next_stage() 3076 rqdata.init_progress_reporter.next_stage()
2719 3077
2720 for task in sqdata.sq_harddeps:
2721 for dep in sqdata.sq_harddeps[task]:
2722 sq_revdeps_squash[dep].add(task)
2723
2724 rqdata.init_progress_reporter.next_stage() 3078 rqdata.init_progress_reporter.next_stage()
2725 3079
2726 #for tid in sq_revdeps_squash: 3080 #for tid in sq_revdeps_squash:
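The harddeps hunk maintains matching forward (sq_harddeps) and reverse (sq_harddeps_rev) maps so either direction of the relationship can be looked up cheaply. A sketch of keeping the two in step:

    def add_harddep(sq_harddeps, sq_harddeps_rev, deptid, tid):
        # Record that tid hard-depends on deptid, in both directions.
        sq_harddeps.setdefault(deptid, set()).add(tid)
        sq_harddeps_rev.setdefault(tid, set()).add(deptid)

    fwd, rev = {}, {}
    add_harddep(fwd, rev, "a:do_deploy_setscene", "b:do_image_setscene")
    print(fwd, rev)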
@@ -2744,16 +3098,47 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2744 sqdata.multiconfigs = set() 3098 sqdata.multiconfigs = set()
2745 for tid in sqdata.sq_revdeps: 3099 for tid in sqdata.sq_revdeps:
2746 sqdata.multiconfigs.add(mc_from_tid(tid)) 3100 sqdata.multiconfigs.add(mc_from_tid(tid))
2747 if len(sqdata.sq_revdeps[tid]) == 0: 3101 if not sqdata.sq_revdeps[tid]:
2748 sqrq.sq_buildable.add(tid) 3102 sqrq.sq_buildable.add(tid)
2749 3103
2750 rqdata.init_progress_reporter.finish() 3104 rqdata.init_progress_reporter.next_stage()
2751 3105
2752 sqdata.noexec = set() 3106 sqdata.noexec = set()
2753 sqdata.stamppresent = set() 3107 sqdata.stamppresent = set()
2754 sqdata.valid = set() 3108 sqdata.valid = set()
2755 3109
2756 update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True) 3110 sqdata.hashes = {}
3111 sqrq.sq_deferred = {}
3112 for mc in sorted(sqdata.multiconfigs):
3113 for tid in sorted(sqdata.sq_revdeps):
3114 if mc_from_tid(tid) != mc:
3115 continue
3116 h = pending_hash_index(tid, rqdata)
3117 if h not in sqdata.hashes:
3118 sqdata.hashes[h] = tid
3119 else:
3120 sqrq.sq_deferred[tid] = sqdata.hashes[h]
3121 bb.debug(1, "Deferring %s after %s" % (tid, sqdata.hashes[h]))
3122
3123def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
3124
3125 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
3126
3127 taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
3128
3129 if 'noexec' in taskdep and taskname in taskdep['noexec']:
3130 bb.build.make_stamp_mcfn(taskname + "_setscene", taskfn)
3131 return True, False
3132
3133 if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
3134 logger.debug2('Setscene stamp current for task %s', tid)
3135 return False, True
3136
3137 if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
3138 logger.debug2('Normal stamp current for task %s', tid)
3139 return False, True
3140
3141 return False, False
2757 3142
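check_setscene_stamps condenses the stamp handling into a (noexec, stamppresent) pair that update_scenequeue_data below branches on. A toy restatement of that contract, with hypothetical predicate callbacks in place of the datastore and stamp checks:

    def classify_setscene(tid, is_noexec, stamp_current):
        # noexec tasks are stamped and skipped; stamped tasks are
        # skipped; everything else still needs its hash validated.
        if is_noexec(tid):
            return True, False
        return False, stamp_current(tid)

    print(classify_setscene("r.bb:do_fetch", lambda t: False, lambda t: True))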
2758def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True): 3143def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True):
2759 3144
@@ -2764,55 +3149,42 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
2764 sqdata.stamppresent.remove(tid) 3149 sqdata.stamppresent.remove(tid)
2765 if tid in sqdata.valid: 3150 if tid in sqdata.valid:
2766 sqdata.valid.remove(tid) 3151 sqdata.valid.remove(tid)
3152 if tid in sqdata.outrightfail:
3153 sqdata.outrightfail.remove(tid)
2767 3154
2768 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 3155 noexec, stamppresent = check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=True)
2769
2770 taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
2771 3156
2772 if 'noexec' in taskdep and taskname in taskdep['noexec']: 3157 if noexec:
2773 sqdata.noexec.add(tid) 3158 sqdata.noexec.add(tid)
2774 sqrq.sq_task_skip(tid) 3159 sqrq.sq_task_skip(tid)
2775 bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn) 3160 logger.debug2("%s is noexec so skipping setscene" % (tid))
2776 continue
2777
2778 if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
2779 logger.debug2('Setscene stamp current for task %s', tid)
2780 sqdata.stamppresent.add(tid)
2781 sqrq.sq_task_skip(tid)
2782 continue 3161 continue
2783 3162
2784 if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache): 3163 if stamppresent:
2785 logger.debug2('Normal stamp current for task %s', tid)
2786 sqdata.stamppresent.add(tid) 3164 sqdata.stamppresent.add(tid)
2787 sqrq.sq_task_skip(tid) 3165 sqrq.sq_task_skip(tid)
3166 logger.debug2("%s has a valid stamp, skipping" % (tid))
2788 continue 3167 continue
2789 3168
2790 tocheck.add(tid) 3169 tocheck.add(tid)
2791 3170
2792 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary) 3171 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
2793 3172
2794 sqdata.hashes = {} 3173 for tid in tids:
2795 for mc in sorted(sqdata.multiconfigs): 3174 if tid in sqdata.stamppresent:
2796 for tid in sorted(sqdata.sq_revdeps): 3175 continue
2797 if mc_from_tid(tid) != mc: 3176 if tid in sqdata.valid:
2798 continue 3177 continue
2799 if tid in sqdata.stamppresent: 3178 if tid in sqdata.noexec:
2800 continue 3179 continue
2801 if tid in sqdata.valid: 3180 if tid in sqrq.scenequeue_covered:
2802 continue 3181 continue
2803 if tid in sqdata.noexec: 3182 if tid in sqrq.scenequeue_notcovered:
2804 continue 3183 continue
2805 if tid in sqrq.scenequeue_notcovered: 3184 if tid in sqrq.sq_deferred:
2806 continue 3185 continue
2807 sqdata.outrightfail.add(tid) 3186 sqdata.outrightfail.add(tid)
2808 3187 logger.debug2("%s already handled (fallthrough), skipping" % (tid))
2809 h = pending_hash_index(tid, rqdata)
2810 if h not in sqdata.hashes:
2811 sqdata.hashes[h] = tid
2812 else:
2813 sqrq.sq_deferred[tid] = sqdata.hashes[h]
2814 bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
2815
2816 3188
2817class TaskFailure(Exception): 3189class TaskFailure(Exception):
2818 """ 3190 """
@@ -2876,12 +3248,16 @@ class runQueueTaskFailed(runQueueEvent):
2876 """ 3248 """
2877 Event notifying a task failed 3249 Event notifying a task failed
2878 """ 3250 """
2879 def __init__(self, task, stats, exitcode, rq): 3251 def __init__(self, task, stats, exitcode, rq, fakeroot_log=None):
2880 runQueueEvent.__init__(self, task, stats, rq) 3252 runQueueEvent.__init__(self, task, stats, rq)
2881 self.exitcode = exitcode 3253 self.exitcode = exitcode
3254 self.fakeroot_log = fakeroot_log
2882 3255
2883 def __str__(self): 3256 def __str__(self):
2884 return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode) 3257 if self.fakeroot_log:
3258 return "Task (%s) failed with exit code '%s' \nPseudo log:\n%s" % (self.taskstring, self.exitcode, self.fakeroot_log)
3259 else:
3260 return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode)
2885 3261
2886class sceneQueueTaskFailed(sceneQueueEvent): 3262class sceneQueueTaskFailed(sceneQueueEvent):
2887 """ 3263 """
@@ -2933,18 +3309,16 @@ class runQueuePipe():
2933 """ 3309 """
2934 Abstraction for a pipe between a worker thread and the server 3310 Abstraction for a pipe between a worker thread and the server
2935 """ 3311 """
2936 def __init__(self, pipein, pipeout, d, rq, rqexec): 3312 def __init__(self, pipein, pipeout, d, rq, rqexec, fakerootlogs=None):
2937 self.input = pipein 3313 self.input = pipein
2938 if pipeout: 3314 if pipeout:
2939 pipeout.close() 3315 pipeout.close()
2940 bb.utils.nonblockingfd(self.input) 3316 bb.utils.nonblockingfd(self.input)
2941 self.queue = b"" 3317 self.queue = bytearray()
2942 self.d = d 3318 self.d = d
2943 self.rq = rq 3319 self.rq = rq
2944 self.rqexec = rqexec 3320 self.rqexec = rqexec
2945 3321 self.fakerootlogs = fakerootlogs
2946 def setrunqueueexec(self, rqexec):
2947 self.rqexec = rqexec
2948 3322
2949 def read(self): 3323 def read(self):
2950 for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]: 3324 for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]:
@@ -2956,13 +3330,13 @@ class runQueuePipe():
2956 3330
2957 start = len(self.queue) 3331 start = len(self.queue)
2958 try: 3332 try:
2959 self.queue = self.queue + (self.input.read(102400) or b"") 3333 self.queue.extend(self.input.read(512 * 1024) or b"")
2960 except (OSError, IOError) as e: 3334 except (OSError, IOError) as e:
2961 if e.errno != errno.EAGAIN: 3335 if e.errno != errno.EAGAIN:
2962 raise 3336 raise
2963 end = len(self.queue) 3337 end = len(self.queue)
2964 found = True 3338 found = True
2965 while found and len(self.queue): 3339 while found and self.queue:
2966 found = False 3340 found = False
2967 index = self.queue.find(b"</event>") 3341 index = self.queue.find(b"</event>")
2968 while index != -1 and self.queue.startswith(b"<event>"): 3342 while index != -1 and self.queue.startswith(b"<event>"):
@@ -2987,7 +3361,11 @@ class runQueuePipe():
2987 task, status = pickle.loads(self.queue[10:index]) 3361 task, status = pickle.loads(self.queue[10:index])
2988 except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e: 3362 except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e:
2989 bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index])) 3363 bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index]))
2990 self.rqexec.runqueue_process_waitpid(task, status) 3364 (_, _, _, taskfn) = split_tid_mcfn(task)
3365 fakerootlog = None
3366 if self.fakerootlogs and taskfn and taskfn in self.fakerootlogs:
3367 fakerootlog = self.fakerootlogs[taskfn]
3368 self.rqexec.runqueue_process_waitpid(task, status, fakerootlog=fakerootlog)
2991 found = True 3369 found = True
2992 self.queue = self.queue[index+11:] 3370 self.queue = self.queue[index+11:]
2993 index = self.queue.find(b"</exitcode>") 3371 index = self.queue.find(b"</exitcode>")
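runQueuePipe.read scans an accumulating bytearray for tag-delimited pickled frames, leaving any trailing partial frame in the buffer for the next read. A self-contained sketch of that framing logic for the <event> tag (assuming, as the real protocol does, that payloads never contain the closing tag):

    import pickle

    def drain_events(queue):
        # Pull complete <event>...</event> frames off the front of
        # the buffer; a partial trailing frame stays for next time.
        events = []
        while queue.startswith(b"<event>"):
            end = queue.find(b"</event>")
            if end == -1:
                break
            events.append(pickle.loads(queue[7:end]))
            del queue[:end + len(b"</event>")]
        return events

    buf = bytearray(b"<event>" + pickle.dumps("hello") + b"</event><exitc")
    print(drain_events(buf), bytes(buf))  # ['hello'] b'<exitc'

Switching the buffer from bytes to bytearray, as the hunk above does, avoids reallocating the whole queue on every append.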
@@ -2996,16 +3374,16 @@ class runQueuePipe():
2996 def close(self): 3374 def close(self):
2997 while self.read(): 3375 while self.read():
2998 continue 3376 continue
2999 if len(self.queue) > 0: 3377 if self.queue:
3000 print("Warning, worker left partial message: %s" % self.queue) 3378 print("Warning, worker left partial message: %s" % self.queue)
3001 self.input.close() 3379 self.input.close()
3002 3380
3003def get_setscene_enforce_whitelist(d, targets): 3381def get_setscene_enforce_ignore_tasks(d, targets):
3004 if d.getVar('BB_SETSCENE_ENFORCE') != '1': 3382 if d.getVar('BB_SETSCENE_ENFORCE') != '1':
3005 return None 3383 return None
3006 whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split() 3384 ignore_tasks = (d.getVar("BB_SETSCENE_ENFORCE_IGNORE_TASKS") or "").split()
3007 outlist = [] 3385 outlist = []
3008 for item in whitelist[:]: 3386 for item in ignore_tasks[:]:
3009 if item.startswith('%:'): 3387 if item.startswith('%:'):
3010 for (mc, target, task, fn) in targets: 3388 for (mc, target, task, fn) in targets:
3011 outlist.append(target + ':' + item.split(':')[1]) 3389 outlist.append(target + ':' + item.split(':')[1])
@@ -3013,12 +3391,12 @@ def get_setscene_enforce_whitelist(d, targets):
3013 outlist.append(item) 3391 outlist.append(item)
3014 return outlist 3392 return outlist
3015 3393
3016def check_setscene_enforce_whitelist(pn, taskname, whitelist): 3394def check_setscene_enforce_ignore_tasks(pn, taskname, ignore_tasks):
3017 import fnmatch 3395 import fnmatch
3018 if whitelist is not None: 3396 if ignore_tasks is not None:
3019 item = '%s:%s' % (pn, taskname) 3397 item = '%s:%s' % (pn, taskname)
3020 for whitelist_item in whitelist: 3398 for ignore_task in ignore_tasks:
3021 if fnmatch.fnmatch(item, whitelist_item): 3399 if fnmatch.fnmatch(item, ignore_task):
3022 return True 3400 return True
3023 return False 3401 return False
3024 return True 3402 return True
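After the renaming, check_setscene_enforce_ignore_tasks reduces to a glob match of pn:taskname against the configured patterns, with None meaning enforcement is disabled. An equivalent sketch:

    import fnmatch

    def task_allowed(pn, taskname, ignore_tasks):
        # None disables enforcement, so any task may run; otherwise
        # "pn:taskname" must match one of the glob patterns.
        if ignore_tasks is None:
            return True
        item = "%s:%s" % (pn, taskname)
        return any(fnmatch.fnmatch(item, pattern) for pattern in ignore_tasks)

    print(task_allowed("busybox", "do_compile", ["busybox:*"]))  # True
    print(task_allowed("busybox", "do_compile", []))             # False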
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index b27b4aefe0..4b35be62cd 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -26,6 +26,9 @@ import errno
26import re 26import re
27import datetime 27import datetime
28import pickle 28import pickle
29import traceback
30import gc
31import stat
29import bb.server.xmlrpcserver 32import bb.server.xmlrpcserver
30from bb import daemonize 33from bb import daemonize
31from multiprocessing import queues 34from multiprocessing import queues
@@ -35,9 +38,46 @@ logger = logging.getLogger('BitBake')
35class ProcessTimeout(SystemExit): 38class ProcessTimeout(SystemExit):
36 pass 39 pass
37 40
41def currenttime():
42 return datetime.datetime.now().strftime('%H:%M:%S.%f')
43
38def serverlog(msg): 44def serverlog(msg):
39 print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg) 45 print(str(os.getpid()) + " " + currenttime() + " " + msg)
40 sys.stdout.flush() 46 #Seems a flush here triggers filesystem sync-like behaviour and long hangs in the server
47 #sys.stdout.flush()
48
49#
50# When we have lockfile issues, try to find information about which process is
51# using the lockfile
52#
53def get_lockfile_process_msg(lockfile):
54 # Some systems may not have lsof available
55 procs = None
56 try:
57 procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
58 except subprocess.CalledProcessError:
59 # File was deleted?
60 pass
61 except OSError as e:
62 if e.errno != errno.ENOENT:
63 raise
64 if procs is None:
65 # Fall back to fuser if lsof is unavailable
66 try:
67 procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
68 except subprocess.CalledProcessError:
69 # File was deleted?
70 pass
71 except OSError as e:
72 if e.errno != errno.ENOENT:
73 raise
74 if procs:
75 return procs.decode("utf-8")
76 return None
77
78class idleFinish():
79 def __init__(self, msg):
80 self.msg = msg
41 81
42class ProcessServer(): 82class ProcessServer():
43 profile_filename = "profile.log" 83 profile_filename = "profile.log"
@@ -56,12 +96,19 @@ class ProcessServer():
56 self.maxuiwait = 30 96 self.maxuiwait = 30
57 self.xmlrpc = False 97 self.xmlrpc = False
58 98
99 self.idle = None
100 # Need a lock for _idlefuns changes
59 self._idlefuns = {} 101 self._idlefuns = {}
102 self._idlefuncsLock = threading.Lock()
103 self.idle_cond = threading.Condition(self._idlefuncsLock)
60 104
61 self.bitbake_lock = lock 105 self.bitbake_lock = lock
62 self.bitbake_lock_name = lockname 106 self.bitbake_lock_name = lockname
63 self.sock = sock 107 self.sock = sock
64 self.sockname = sockname 108 self.sockname = sockname
109 # It is possible the directory may be renamed. Cache the inode of the socket file
110 # so we can tell if things changed.
111 self.sockinode = os.stat(self.sockname)[stat.ST_INO]
65 112
66 self.server_timeout = server_timeout 113 self.server_timeout = server_timeout
67 self.timeout = self.server_timeout 114 self.timeout = self.server_timeout
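Caching the socket file's inode lets the shutdown path later confirm that the file it is about to unlink is still the one this server created, even if the build directory was renamed in between. A minimal demonstration of the same re-stat-and-compare idea on a temporary file:

    import os
    import stat
    import tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    created_inode = os.stat(path)[stat.ST_INO]

    # Only delete the file if it is still the one we created; a file
    # recreated at the same path would have a different inode.
    if os.stat(path)[stat.ST_INO] == created_inode:
        os.unlink(path)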
@@ -70,7 +117,9 @@ class ProcessServer():
70 def register_idle_function(self, function, data): 117 def register_idle_function(self, function, data):
71 """Register a function to be called while the server is idle""" 118 """Register a function to be called while the server is idle"""
72 assert hasattr(function, '__call__') 119 assert hasattr(function, '__call__')
73 self._idlefuns[function] = data 120 with bb.utils.lock_timeout(self._idlefuncsLock):
121 self._idlefuns[function] = data
122 serverlog("Registering idle function %s" % str(function))
74 123
75 def run(self): 124 def run(self):
76 125
@@ -109,6 +158,31 @@ class ProcessServer():
109 158
110 return ret 159 return ret
111 160
161 def _idle_check(self):
162 return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None
163
164 def wait_for_idle(self, timeout=30):
165 # Wait for the idle loop to have cleared
166 with bb.utils.lock_timeout(self._idlefuncsLock):
167 return self.idle_cond.wait_for(self._idle_check, timeout) is not False
168
169 def set_async_cmd(self, cmd):
170 with bb.utils.lock_timeout(self._idlefuncsLock):
171 ret = self.idle_cond.wait_for(self._idle_check, 30)
172 if ret is False:
173 return False
174 self.cooker.command.currentAsyncCommand = cmd
175 return True
176
177 def clear_async_cmd(self):
178 with bb.utils.lock_timeout(self._idlefuncsLock):
179 self.cooker.command.currentAsyncCommand = None
180 self.idle_cond.notify_all()
181
182 def get_async_cmd(self):
183 with bb.utils.lock_timeout(self._idlefuncsLock):
184 return self.cooker.command.currentAsyncCommand
185
112 def main(self): 186 def main(self):
113 self.cooker.pre_serve() 187 self.cooker.pre_serve()
114 188
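The new gating serialises asynchronous commands behind a Condition: set_async_cmd waits for the current command slot to clear before claiming it, and clear_async_cmd wakes the waiters. A reduced standalone analogue (class and method names are illustrative, not the server API):

    import threading

    class AsyncGate:
        # One async command at a time; callers block on a Condition
        # until the slot clears or the timeout expires.
        def __init__(self):
            self._cond = threading.Condition()
            self.current = None

        def set_cmd(self, cmd, timeout=30):
            with self._cond:
                if not self._cond.wait_for(lambda: self.current is None, timeout):
                    return False
                self.current = cmd
                return True

        def clear_cmd(self):
            with self._cond:
                self.current = None
                self._cond.notify_all()

    gate = AsyncGate()
    assert gate.set_cmd("buildTargets")
    gate.clear_cmd()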
@@ -123,14 +197,19 @@ class ProcessServer():
123 fds.append(self.xmlrpc) 197 fds.append(self.xmlrpc)
124 seendata = False 198 seendata = False
125 serverlog("Entering server connection loop") 199 serverlog("Entering server connection loop")
200 serverlog("Lockfile is: %s\nSocket is %s (%s)" % (self.bitbake_lock_name, self.sockname, os.path.exists(self.sockname)))
126 201
127 def disconnect_client(self, fds): 202 def disconnect_client(self, fds):
128 serverlog("Disconnecting Client") 203 serverlog("Disconnecting Client (socket: %s)" % os.path.exists(self.sockname))
129 if self.controllersock: 204 if self.controllersock:
130 fds.remove(self.controllersock) 205 fds.remove(self.controllersock)
131 self.controllersock.close() 206 self.controllersock.close()
132 self.controllersock = False 207 self.controllersock = False
133 if self.haveui: 208 if self.haveui:
209 # Wait for the idle loop to have cleared (30s max)
210 if not self.wait_for_idle(30):
211 serverlog("Idle loop didn't finish queued commands after 30s, exiting.")
212 self.quit = True
134 fds.remove(self.command_channel) 213 fds.remove(self.command_channel)
135 bb.event.unregister_UIHhandler(self.event_handle, True) 214 bb.event.unregister_UIHhandler(self.event_handle, True)
136 self.command_channel_reply.writer.close() 215 self.command_channel_reply.writer.close()
@@ -142,12 +221,12 @@ class ProcessServer():
142 self.cooker.clientComplete() 221 self.cooker.clientComplete()
143 self.haveui = False 222 self.haveui = False
144 ready = select.select(fds,[],[],0)[0] 223 ready = select.select(fds,[],[],0)[0]
145 if newconnections: 224 if newconnections and not self.quit:
146 serverlog("Starting new client") 225 serverlog("Starting new client")
147 conn = newconnections.pop(-1) 226 conn = newconnections.pop(-1)
148 fds.append(conn) 227 fds.append(conn)
149 self.controllersock = conn 228 self.controllersock = conn
150 elif self.timeout is None and not ready: 229 elif not self.timeout and not ready:
151 serverlog("No timeout, exiting.") 230 serverlog("No timeout, exiting.")
152 self.quit = True 231 self.quit = True
153 232
@@ -214,11 +293,14 @@ class ProcessServer():
214 continue 293 continue
215 try: 294 try:
216 serverlog("Running command %s" % command) 295 serverlog("Running command %s" % command)
217 self.command_channel_reply.send(self.cooker.command.runCommand(command)) 296 reply = self.cooker.command.runCommand(command, self)
218 serverlog("Command Completed") 297 serverlog("Sending reply %s" % repr(reply))
298 self.command_channel_reply.send(reply)
299 serverlog("Command Completed (socket: %s)" % os.path.exists(self.sockname))
219 except Exception as e: 300 except Exception as e:
220 serverlog('Exception in server main event loop running command %s (%s)' % (command, str(e))) 301 stack = traceback.format_exc()
221 logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e))) 302 serverlog('Exception in server main event loop running command %s (%s)' % (command, stack))
303 logger.exception('Exception in server main event loop running command %s (%s)' % (command, stack))
222 304
223 if self.xmlrpc in ready: 305 if self.xmlrpc in ready:
224 self.xmlrpc.handle_requests() 306 self.xmlrpc.handle_requests()
@@ -239,21 +321,42 @@ class ProcessServer():
239 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout) 321 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout)
240 seendata = True 322 seendata = True
241 323
242 ready = self.idle_commands(.1, fds) 324 if not self.idle:
325 self.idle = threading.Thread(target=self.idle_thread)
326 self.idle.start()
327 elif self.idle and not self.idle.is_alive():
328 serverlog("Idle thread terminated, main thread exiting too")
329 bb.error("Idle thread terminated, main thread exiting too")
330 self.quit = True
243 331
244 if len(threading.enumerate()) != 1: 332 nextsleep = 1.0
245 serverlog("More than one thread left?: " + str(threading.enumerate())) 333 if self.xmlrpc:
334 nextsleep = self.xmlrpc.get_timeout(nextsleep)
335 try:
336 ready = select.select(fds,[],[],nextsleep)[0]
337 except InterruptedError:
338 # Ignore EINTR
339 ready = []
340
341 if self.idle:
342 self.idle.join()
246 343
247 serverlog("Exiting") 344 serverlog("Exiting (socket: %s)" % os.path.exists(self.sockname))
248 # Remove the socket file so we don't get any more connections to avoid races 345 # Remove the socket file so we don't get any more connections to avoid races
346 # The build directory could have been renamed so if the file isn't the one we created
347 # we shouldn't delete it.
249 try: 348 try:
250 os.unlink(self.sockname) 349 sockinode = os.stat(self.sockname)[stat.ST_INO]
251 except: 350 if sockinode == self.sockinode:
252 pass 351 os.unlink(self.sockname)
352 else:
353 serverlog("bitbake.sock inode mismatch (%s vs %s), not deleting." % (sockinode, self.sockinode))
354 except Exception as err:
355 serverlog("Removing socket file '%s' failed (%s)" % (self.sockname, err))
253 self.sock.close() 356 self.sock.close()
254 357
255 try: 358 try:
256 self.cooker.shutdown(True) 359 self.cooker.shutdown(True, idle=False)
257 self.cooker.notifier.stop() 360 self.cooker.notifier.stop()
258 self.cooker.confignotifier.stop() 361 self.cooker.confignotifier.stop()
259 except: 362 except:
@@ -261,6 +364,9 @@ class ProcessServer():
261 364
262 self.cooker.post_serve() 365 self.cooker.post_serve()
263 366
367 if len(threading.enumerate()) != 1:
368 serverlog("More than one thread left?: " + str(threading.enumerate()))
369
264 # Flush logs before we release the lock 370 # Flush logs before we release the lock
265 sys.stdout.flush() 371 sys.stdout.flush()
266 sys.stderr.flush() 372 sys.stderr.flush()
@@ -276,20 +382,21 @@ class ProcessServer():
276 except FileNotFoundError: 382 except FileNotFoundError:
277 return None 383 return None
278 384
279 lockcontents = get_lock_contents(lockfile)
280 serverlog("Original lockfile contents: " + str(lockcontents))
281
282 lock.close() 385 lock.close()
283 lock = None 386 lock = None
284 387
285 while not lock: 388 while not lock:
286 i = 0 389 i = 0
287 lock = None 390 lock = None
391 if not os.path.exists(os.path.basename(lockfile)):
392 serverlog("Lockfile directory gone, exiting.")
393 return
394
288 while not lock and i < 30: 395 while not lock and i < 30:
289 lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False) 396 lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False)
290 if not lock: 397 if not lock:
291 newlockcontents = get_lock_contents(lockfile) 398 newlockcontents = get_lock_contents(lockfile)
292 if newlockcontents != lockcontents: 399 if not newlockcontents[0].startswith((f"{os.getpid()}\n", f"{os.getpid()} ")):
293 # A new server was started, the lockfile contents changed, we can exit 400 # A new server was started, the lockfile contents changed, we can exit
294 serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents)) 401 serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents))
295 return 402 return
@@ -303,87 +410,95 @@ class ProcessServer():
303 return 410 return
304 411
305 if not lock: 412 if not lock:
306 # Some systems may not have lsof available 413 procs = get_lockfile_process_msg(lockfile)
307 procs = None 414 msg = ["Delaying shutdown due to active processes which appear to be holding bitbake.lock"]
308 try:
309 procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
310 except subprocess.CalledProcessError:
311 # File was deleted?
312 continue
313 except OSError as e:
314 if e.errno != errno.ENOENT:
315 raise
316 if procs is None:
317 # Fall back to fuser if lsof is unavailable
318 try:
319 procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
320 except subprocess.CalledProcessError:
321 # File was deleted?
322 continue
323 except OSError as e:
324 if e.errno != errno.ENOENT:
325 raise
326
327 msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
328 if procs: 415 if procs:
329 msg += ":\n%s" % str(procs.decode("utf-8")) 416 msg.append(":\n%s" % procs)
330 serverlog(msg) 417 serverlog("".join(msg))
331 418
332 def idle_commands(self, delay, fds=None): 419 def idle_thread(self):
333 nextsleep = delay 420 if self.cooker.configuration.profile:
334 if not fds:
335 fds = []
336
337 for function, data in list(self._idlefuns.items()):
338 try: 421 try:
339 retval = function(self, data, False) 422 import cProfile as profile
340 if retval is False: 423 except:
341 del self._idlefuns[function] 424 import profile
342 nextsleep = None 425 prof = profile.Profile()
343 elif retval is True:
344 nextsleep = None
345 elif isinstance(retval, float) and nextsleep:
346 if (retval < nextsleep):
347 nextsleep = retval
348 elif nextsleep is None:
349 continue
350 else:
351 fds = fds + retval
352 except SystemExit:
353 raise
354 except Exception as exc:
355 if not isinstance(exc, bb.BBHandledException):
356 logger.exception('Running idle function')
357 del self._idlefuns[function]
358 self.quit = True
359 426
360 # Create new heartbeat event? 427 ret = profile.Profile.runcall(prof, self.idle_thread_internal)
361 now = time.time() 428
362 if now >= self.next_heartbeat: 429 prof.dump_stats("profile-mainloop.log")
363 # We might have missed heartbeats. Just trigger once in 430 bb.utils.process_profilelog("profile-mainloop.log")
364 # that case and continue after the usual delay. 431 serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed")
365 self.next_heartbeat += self.heartbeat_seconds
366 if self.next_heartbeat <= now:
367 self.next_heartbeat = now + self.heartbeat_seconds
368 if hasattr(self.cooker, "data"):
369 heartbeat = bb.event.HeartbeatEvent(now)
370 bb.event.fire(heartbeat, self.cooker.data)
371 if nextsleep and now + nextsleep > self.next_heartbeat:
372 # Shorten timeout so that we wake up in time for
373 # the heartbeat.
374 nextsleep = self.next_heartbeat - now
375
376 if nextsleep is not None:
377 if self.xmlrpc:
378 nextsleep = self.xmlrpc.get_timeout(nextsleep)
379 try:
380 return select.select(fds,[],[],nextsleep)[0]
381 except InterruptedError:
382 # Ignore EINTR
383 return []
384 else: 432 else:
385 return select.select(fds,[],[],0)[0] 433 self.idle_thread_internal()
434
435 def idle_thread_internal(self):
436 def remove_idle_func(function):
437 with bb.utils.lock_timeout(self._idlefuncsLock):
438 del self._idlefuns[function]
439 self.idle_cond.notify_all()
440
441 while not self.quit:
442 nextsleep = 1.0
443 fds = []
444
445 with bb.utils.lock_timeout(self._idlefuncsLock):
446 items = list(self._idlefuns.items())
386 447
448 for function, data in items:
449 try:
450 retval = function(self, data, False)
451 if isinstance(retval, idleFinish):
452 serverlog("Removing idle function %s at idleFinish" % str(function))
453 remove_idle_func(function)
454 self.cooker.command.finishAsyncCommand(retval.msg)
455 nextsleep = None
456 elif retval is False:
457 serverlog("Removing idle function %s" % str(function))
458 remove_idle_func(function)
459 nextsleep = None
460 elif retval is True:
461 nextsleep = None
462 elif isinstance(retval, float) and nextsleep:
463 if (retval < nextsleep):
464 nextsleep = retval
465 elif nextsleep is None:
466 continue
467 else:
468 fds = fds + retval
469 except SystemExit:
470 raise
471 except Exception as exc:
472 if not isinstance(exc, bb.BBHandledException):
473 logger.exception('Running idle function')
474 remove_idle_func(function)
475 serverlog("Exception %s broke the idle_thread, exiting" % traceback.format_exc())
476 self.quit = True
477
478 # Create new heartbeat event?
479 now = time.time()
480 if items and bb.event._heartbeat_enabled and now >= self.next_heartbeat:
481 # We might have missed heartbeats. Just trigger once in
482 # that case and continue after the usual delay.
483 self.next_heartbeat += self.heartbeat_seconds
484 if self.next_heartbeat <= now:
485 self.next_heartbeat = now + self.heartbeat_seconds
486 if hasattr(self.cooker, "data"):
487 heartbeat = bb.event.HeartbeatEvent(now)
488 try:
489 bb.event.fire(heartbeat, self.cooker.data)
490 except Exception as exc:
491 if not isinstance(exc, bb.BBHandledException):
492 logger.exception('Running heartbeat function')
493                             serverlog("Exception %s broke the idle_thread, exiting" % traceback.format_exc())
494 self.quit = True
495 if nextsleep and bb.event._heartbeat_enabled and now + nextsleep > self.next_heartbeat:
496                 # Shorten timeout so that we wake up in time for
497 # the heartbeat.
498 nextsleep = self.next_heartbeat - now
499
500 if nextsleep is not None:
501 select.select(fds,[],[],nextsleep)[0]
387 502
388class ServerCommunicator(): 503class ServerCommunicator():
389 def __init__(self, connection, recv): 504 def __init__(self, connection, recv):
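
The heartbeat bookkeeping in idle_thread_internal above deserves a note: if the server stalls past one or more intervals, a single HeartbeatEvent is fired and the schedule is resynchronised, rather than firing a burst of catch-up events. A minimal standalone sketch of that catch-up logic (the class and callback names here are illustrative, not BitBake API):

    import time

    class HeartbeatScheduler:
        # Mirrors the catch-up logic in idle_thread_internal above.
        def __init__(self, heartbeat_seconds, fire):
            self.heartbeat_seconds = heartbeat_seconds
            self.fire = fire  # stand-in for bb.event.fire()
            self.next_heartbeat = time.time() + heartbeat_seconds

        def tick(self, nextsleep):
            now = time.time()
            if now >= self.next_heartbeat:
                # Missed beats collapse into one event, then resynchronise.
                self.next_heartbeat += self.heartbeat_seconds
                if self.next_heartbeat <= now:
                    self.next_heartbeat = now + self.heartbeat_seconds
                self.fire(now)
            if nextsleep and now + nextsleep > self.next_heartbeat:
                # Shorten the sleep so we wake in time for the next beat.
                nextsleep = self.next_heartbeat - now
            return nextsleep

    sched = HeartbeatScheduler(1.0, lambda now: print("heartbeat at", now))
    time.sleep(1.1)
    print(sched.tick(5.0))  # sleep is shortened to land on the next beat
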
@@ -391,12 +506,18 @@ class ServerCommunicator():
391 self.recv = recv 506 self.recv = recv
392 507
393     def runCommand(self, command):
394         self.connection.send(command)
395         if not self.recv.poll(30):
396             logger.info("No reply from server in 30s")
397             if not self.recv.poll(30):
398                 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
399         ret, exc = self.recv.get()

508     def runCommand(self, command):
509         try:
510             self.connection.send(command)
511         except BrokenPipeError as e:
512             raise BrokenPipeError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
513         if not self.recv.poll(30):
514             logger.info("No reply from server in 30s (for command %s at %s)" % (command[0], currenttime()))
515             if not self.recv.poll(30):
516                 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s at %s)" % currenttime())
517         try:
518             ret, exc = self.recv.get()
519         except EOFError as e:
520             raise EOFError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
400 # Should probably turn all exceptions in exc back into exceptions? 521 # Should probably turn all exceptions in exc back into exceptions?
401 # For now, at least handle BBHandledException 522 # For now, at least handle BBHandledException
402 if exc and ("BBHandledException" in exc or "SystemExit" in exc): 523 if exc and ("BBHandledException" in exc or "SystemExit" in exc):
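
The new BrokenPipeError/EOFError handling follows a pattern worth naming: the low-level IPC failure is re-raised with a message stating the likely cause, while `from e` keeps the original exception chained in the traceback. A minimal sketch of the same idiom (the pipe here is a stand-in for the server connection):

    import multiprocessing

    def send_command(conn, command):
        try:
            conn.send(command)
        except BrokenPipeError as e:
            # Chain the original error; the message names the probable cause
            raise BrokenPipeError("server may have died or been forcibly "
                                  "stopped, e.g. OOM killed") from e

    recv, send = multiprocessing.Pipe(duplex=False)
    send_command(send, ["getVariable", "PN"])
    print(recv.recv())  # ['getVariable', 'PN']
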
@@ -429,6 +550,7 @@ class BitBakeProcessServerConnection(object):
429 self.socket_connection = sock 550 self.socket_connection = sock
430 551
431 def terminate(self): 552 def terminate(self):
553 self.events.close()
432 self.socket_connection.close() 554 self.socket_connection.close()
433 self.connection.connection.close() 555 self.connection.connection.close()
434 self.connection.recv.close() 556 self.connection.recv.close()
@@ -439,13 +561,14 @@ start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'
439 561
440class BitBakeServer(object): 562class BitBakeServer(object):
441 563
442 def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface): 564 def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface, profile):
443 565
444 self.server_timeout = server_timeout 566 self.server_timeout = server_timeout
445 self.xmlrpcinterface = xmlrpcinterface 567 self.xmlrpcinterface = xmlrpcinterface
446 self.featureset = featureset 568 self.featureset = featureset
447 self.sockname = sockname 569 self.sockname = sockname
448 self.bitbake_lock = lock 570 self.bitbake_lock = lock
571 self.profile = profile
449 self.readypipe, self.readypipein = os.pipe() 572 self.readypipe, self.readypipein = os.pipe()
450 573
451 # Place the log in the builddirectory alongside the lock file 574 # Place the log in the builddirectory alongside the lock file
@@ -466,7 +589,7 @@ class BitBakeServer(object):
466 try: 589 try:
467 r = ready.get() 590 r = ready.get()
468 except EOFError: 591 except EOFError:
469 # Trap the child exitting/closing the pipe and error out 592 # Trap the child exiting/closing the pipe and error out
470 r = None 593 r = None
471 if not r or r[0] != "r": 594 if not r or r[0] != "r":
472 ready.close() 595 ready.close()
@@ -509,9 +632,9 @@ class BitBakeServer(object):
509 os.set_inheritable(self.bitbake_lock.fileno(), True) 632 os.set_inheritable(self.bitbake_lock.fileno(), True)
510 os.set_inheritable(self.readypipein, True) 633 os.set_inheritable(self.readypipein, True)
511 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server") 634 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
512 os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1])) 635 os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(int(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
513 636
514def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface): 637def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile):
515 638
516 import bb.cookerdata 639 import bb.cookerdata
517 import bb.cooker 640 import bb.cooker
@@ -523,6 +646,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
523 646
524 # Create server control socket 647 # Create server control socket
525 if os.path.exists(sockname): 648 if os.path.exists(sockname):
649 serverlog("WARNING: removing existing socket file '%s'" % sockname)
526 os.unlink(sockname) 650 os.unlink(sockname)
527 651
528 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 652 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -539,7 +663,8 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
539 writer = ConnectionWriter(readypipeinfd) 663 writer = ConnectionWriter(readypipeinfd)
540 try: 664 try:
541 featureset = [] 665 featureset = []
542 cooker = bb.cooker.BBCooker(featureset, server.register_idle_function) 666 cooker = bb.cooker.BBCooker(featureset, server)
667 cooker.configuration.profile = profile
543 except bb.BBHandledException: 668 except bb.BBHandledException:
544 return None 669 return None
545 writer.send("r") 670 writer.send("r")
@@ -549,7 +674,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
549 674
550 server.run() 675 server.run()
551 finally: 676 finally:
552 # Flush any ,essages/errors to the logfile before exit 677 # Flush any messages/errors to the logfile before exit
553 sys.stdout.flush() 678 sys.stdout.flush()
554 sys.stderr.flush() 679 sys.stderr.flush()
555 680
@@ -654,23 +779,18 @@ class BBUIEventQueue:
654 self.reader = ConnectionReader(readfd) 779 self.reader = ConnectionReader(readfd)
655 780
656 self.t = threading.Thread() 781 self.t = threading.Thread()
657 self.t.setDaemon(True)
658 self.t.run = self.startCallbackHandler 782 self.t.run = self.startCallbackHandler
659 self.t.start() 783 self.t.start()
660 784
661 def getEvent(self): 785 def getEvent(self):
662         self.eventQueueLock.acquire()
663
664         if len(self.eventQueue) == 0:
665             self.eventQueueLock.release()
666             return None
667
668         item = self.eventQueue.pop(0)
669
670         if len(self.eventQueue) == 0:
671             self.eventQueueNotify.clear()
672
673         self.eventQueueLock.release()
674         return item

786         with bb.utils.lock_timeout(self.eventQueueLock):
787             if len(self.eventQueue) == 0:
788                 return None
789
790             item = self.eventQueue.pop(0)
791             if len(self.eventQueue) == 0:
792                 self.eventQueueNotify.clear()
793
794             return item
675 795
676 def waitEvent(self, delay): 796 def waitEvent(self, delay):
@@ -678,10 +798,9 @@ class BBUIEventQueue:
678 return self.getEvent() 798 return self.getEvent()
679 799
680 def queue_event(self, event): 800 def queue_event(self, event):
681 self.eventQueueLock.acquire() 801 with bb.utils.lock_timeout(self.eventQueueLock):
682 self.eventQueue.append(event) 802 self.eventQueue.append(event)
683 self.eventQueueNotify.set() 803 self.eventQueueNotify.set()
684 self.eventQueueLock.release()
685 804
686 def send_event(self, event): 805 def send_event(self, event):
687 self.queue_event(pickle.loads(event)) 806 self.queue_event(pickle.loads(event))
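
bb.utils.lock_timeout, which replaces the bare acquire/release pairs above, is not itself shown in this diff; a plausible minimal equivalent is a context manager that raises instead of deadlocking silently (the timeout value and error text here are assumptions):

    import threading
    from contextlib import contextmanager

    @contextmanager
    def lock_timeout(lock, timeout=300):
        # Fail loudly rather than hang forever on a stuck lock
        if not lock.acquire(timeout=timeout):
            raise RuntimeError("Timed out waiting for lock, possible deadlock")
        try:
            yield lock
        finally:
            lock.release()

    queue_lock = threading.Lock()
    with lock_timeout(queue_lock):
        pass  # critical section, e.g. popping from the event queue
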
@@ -690,13 +809,17 @@ class BBUIEventQueue:
690 bb.utils.set_process_name("UIEventQueue") 809 bb.utils.set_process_name("UIEventQueue")
691 while True: 810 while True:
692 try: 811 try:
693                 self.reader.wait()
694                 event = self.reader.get()
695                 self.queue_event(event)
696             except EOFError:
697                 # Easiest way to exit is to close the file descriptor to cause an exit
698                 break
699         self.reader.close()

812                 ready = self.reader.wait(0.25)
813                 if ready:
814                     event = self.reader.get()
815                     self.queue_event(event)
816             except (EOFError, OSError, TypeError):
817                 # Easiest way to exit is to close the file descriptor to cause an exit
818                 break
819
820     def close(self):
821         self.reader.close()
822         self.t.join()
700 823
701class ConnectionReader(object): 824class ConnectionReader(object):
702 825
@@ -711,7 +834,7 @@ class ConnectionReader(object):
711 return self.reader.poll(timeout) 834 return self.reader.poll(timeout)
712 835
713 def get(self): 836 def get(self):
714 with self.rlock: 837 with bb.utils.lock_timeout(self.rlock):
715 res = self.reader.recv_bytes() 838 res = self.reader.recv_bytes()
716 return multiprocessing.reduction.ForkingPickler.loads(res) 839 return multiprocessing.reduction.ForkingPickler.loads(res)
717 840
@@ -730,10 +853,31 @@ class ConnectionWriter(object):
730 # Why bb.event needs this I have no idea 853 # Why bb.event needs this I have no idea
731 self.event = self 854 self.event = self
732 855
856 def _send(self, obj):
857 gc.disable()
858 with bb.utils.lock_timeout(self.wlock):
859 self.writer.send_bytes(obj)
860 gc.enable()
861
733 def send(self, obj): 862 def send(self, obj):
734 obj = multiprocessing.reduction.ForkingPickler.dumps(obj) 863 obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
735 with self.wlock: 864 # See notes/code in CookerParser
736 self.writer.send_bytes(obj) 865 # We must not terminate holding this lock else processes will hang.
866 # For SIGTERM, raising afterwards avoids this.
867 # For SIGINT, we don't want to have written partial data to the pipe.
868 # pthread_sigmask block/unblock would be nice but doesn't work, https://bugs.python.org/issue47139
869 process = multiprocessing.current_process()
870 if process and hasattr(process, "queue_signals"):
871 with bb.utils.lock_timeout(process.signal_threadlock):
872 process.queue_signals = True
873 self._send(obj)
874 process.queue_signals = False
875
876 while len(process.signal_received) > 0:
877 sig = process.signal_received.pop()
878 process.handle_sig(sig, None)
879 else:
880 self._send(obj)
737 881
738 def fileno(self): 882 def fileno(self):
739 return self.writer.fileno() 883 return self.writer.fileno()
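
The rewritten send() above defers signal delivery while the write lock is held: signals that arrive mid-write are parked on the process object and replayed once the full pickle has hit the pipe, so a SIGTERM can never leave a partial message behind. A simplified sketch of the queue-and-replay idea (the attribute names mimic those the worker process is assumed to provide; this is not the BitBake implementation itself):

    import signal

    class SignalQueue:
        def __init__(self):
            self.queue_signals = False
            self.signal_received = []

        def handler(self, sig, frame):
            if self.queue_signals:
                self.signal_received.append(sig)  # defer until the write completes
            else:
                raise KeyboardInterrupt()

    sq = SignalQueue()
    signal.signal(signal.SIGUSR1, sq.handler)
    sq.queue_signals = True
    # ... critical section: write the complete message to the pipe ...
    sq.queue_signals = False
    while sq.signal_received:
        sq.handler(sq.signal_received.pop(), None)  # replay deferred signals
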
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py
index 2fa71be667..ebc271aca4 100644
--- a/bitbake/lib/bb/server/xmlrpcserver.py
+++ b/bitbake/lib/bb/server/xmlrpcserver.py
@@ -11,8 +11,11 @@ import hashlib
11import time 11import time
12import inspect 12import inspect
13from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler 13from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
14import bb.server.xmlrpcclient
14 15
15import bb 16import bb
17import bb.cooker
18import bb.event
16 19
17# This request handler checks if the request has a "Bitbake-token" header 20# This request handler checks if the request has a "Bitbake-token" header
18# field (this comes from the client side) and compares it with its internal 21# field (this comes from the client side) and compares it with its internal
@@ -53,7 +56,7 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
53 56
54 def __init__(self, interface, cooker, parent): 57 def __init__(self, interface, cooker, parent):
55 # Use auto port configuration 58 # Use auto port configuration
56 if (interface[1] == -1): 59 if interface[1] == -1:
57 interface = (interface[0], 0) 60 interface = (interface[0], 0)
58 SimpleXMLRPCServer.__init__(self, interface, 61 SimpleXMLRPCServer.__init__(self, interface,
59 requestHandler=BitBakeXMLRPCRequestHandler, 62 requestHandler=BitBakeXMLRPCRequestHandler,
@@ -86,11 +89,12 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
86 def handle_requests(self): 89 def handle_requests(self):
87 self._handle_request_noblock() 90 self._handle_request_noblock()
88 91
89class BitBakeXMLRPCServerCommands(): 92class BitBakeXMLRPCServerCommands:
90 93
91 def __init__(self, server): 94 def __init__(self, server):
92 self.server = server 95 self.server = server
93 self.has_client = False 96 self.has_client = False
97 self.event_handle = None
94 98
95 def registerEventHandler(self, host, port): 99 def registerEventHandler(self, host, port):
96 """ 100 """
@@ -99,8 +103,8 @@ class BitBakeXMLRPCServerCommands():
99 s, t = bb.server.xmlrpcclient._create_server(host, port) 103 s, t = bb.server.xmlrpcclient._create_server(host, port)
100 104
101 # we don't allow connections if the cooker is running 105 # we don't allow connections if the cooker is running
102 if (self.server.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]): 106 if self.server.cooker.state in [bb.cooker.State.PARSING, bb.cooker.State.RUNNING]:
103 return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.server.cooker.state) 107 return None, f"Cooker is busy: {self.server.cooker.state.name}"
104 108
105 self.event_handle = bb.event.register_UIHhandler(s, True) 109 self.event_handle = bb.event.register_UIHhandler(s, True)
106 return self.event_handle, 'OK' 110 return self.event_handle, 'OK'
@@ -117,7 +121,7 @@ class BitBakeXMLRPCServerCommands():
117 """ 121 """
118 Run a cooker command on the server 122 Run a cooker command on the server
119 """ 123 """
120 return self.server.cooker.command.runCommand(command, self.server.readonly) 124 return self.server.cooker.command.runCommand(command, self.server.parent, self.server.readonly)
121 125
122 def getEventHandle(self): 126 def getEventHandle(self):
123 return self.event_handle 127 return self.event_handle
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 0d88c6ec68..a6163b55ea 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -11,6 +13,10 @@ import pickle
11import bb.data 13import bb.data
12import difflib 14import difflib
13import simplediff 15import simplediff
16import json
17import types
18from contextlib import contextmanager
19import bb.compress.zstd
14from bb.checksum import FileChecksumCache 20from bb.checksum import FileChecksumCache
15from bb import runqueue 21from bb import runqueue
16import hashserv 22import hashserv
@@ -19,6 +25,35 @@ import hashserv.client
19logger = logging.getLogger('BitBake.SigGen') 25logger = logging.getLogger('BitBake.SigGen')
20hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') 26hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
21 27
28# find_siginfo and find_siginfo_version are set by the metadata siggen
29# The minimum version of the find_siginfo function we need
30find_siginfo_minversion = 2
31
32HASHSERV_ENVVARS = [
33 "SSL_CERT_DIR",
34 "SSL_CERT_FILE",
35 "NO_PROXY",
36 "HTTPS_PROXY",
37 "HTTP_PROXY"
38]
39
40def check_siggen_version(siggen):
41 if not hasattr(siggen, "find_siginfo_version"):
42 bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
43 if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
44 bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))
45
46class SetEncoder(json.JSONEncoder):
47 def default(self, obj):
48 if isinstance(obj, set) or isinstance(obj, frozenset):
49 return dict(_set_object=list(sorted(obj)))
50 return json.JSONEncoder.default(self, obj)
51
52def SetDecoder(dct):
53 if '_set_object' in dct:
54 return frozenset(dct['_set_object'])
55 return dct
56
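
SetEncoder and SetDecoder exist because JSON has no set type: sets are serialised as a sorted list under a `_set_object` marker key and decoded back to frozensets, keeping siginfo files deterministic. Round-tripping with the two hooks (a standalone copy for illustration):

    import json

    class SetEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, (set, frozenset)):
                return dict(_set_object=sorted(obj))
            return json.JSONEncoder.default(self, obj)

    def SetDecoder(dct):
        if '_set_object' in dct:
            return frozenset(dct['_set_object'])
        return dct

    blob = json.dumps({"deps": {"zlib", "openssl"}}, cls=SetEncoder)
    data = json.loads(blob, object_hook=SetDecoder)
    assert data["deps"] == frozenset({"zlib", "openssl"})
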
22def init(d): 57def init(d):
23 siggens = [obj for obj in globals().values() 58 siggens = [obj for obj in globals().values()
24 if type(obj) is type and issubclass(obj, SignatureGenerator)] 59 if type(obj) is type and issubclass(obj, SignatureGenerator)]
@@ -27,7 +62,6 @@ def init(d):
27 for sg in siggens: 62 for sg in siggens:
28 if desired == sg.name: 63 if desired == sg.name:
29 return sg(d) 64 return sg(d)
30 break
31 else: 65 else:
32 logger.error("Invalid signature generator '%s', using default 'noop'\n" 66 logger.error("Invalid signature generator '%s', using default 'noop'\n"
33 "Available generators: %s", desired, 67 "Available generators: %s", desired,
@@ -39,11 +73,6 @@ class SignatureGenerator(object):
39 """ 73 """
40 name = "noop" 74 name = "noop"
41 75
42 # If the derived class supports multiconfig datacaches, set this to True
43 # The default is False for backward compatibility with derived signature
44 # generators that do not understand multiconfig caches
45 supports_multiconfig_datacaches = False
46
47 def __init__(self, data): 76 def __init__(self, data):
48 self.basehash = {} 77 self.basehash = {}
49 self.taskhash = {} 78 self.taskhash = {}
@@ -61,9 +90,39 @@ class SignatureGenerator(object):
61 def postparsing_clean_cache(self): 90 def postparsing_clean_cache(self):
62 return 91 return
63 92
93 def setup_datacache(self, datacaches):
94 self.datacaches = datacaches
95
96 def setup_datacache_from_datastore(self, mcfn, d):
97 # In task context we have no cache so setup internal data structures
98 # from the fully parsed data store provided
99
100 mc = d.getVar("__BBMULTICONFIG", False) or ""
101 tasks = d.getVar('__BBTASKS', False)
102
103 self.datacaches = {}
104 self.datacaches[mc] = types.SimpleNamespace()
105 setattr(self.datacaches[mc], "stamp", {})
106 self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
107 setattr(self.datacaches[mc], "stamp_extrainfo", {})
108 self.datacaches[mc].stamp_extrainfo[mcfn] = {}
109 for t in tasks:
110 flag = d.getVarFlag(t, "stamp-extra-info")
111 if flag:
112 self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
113
114 def get_cached_unihash(self, tid):
115 return None
116
64 def get_unihash(self, tid): 117 def get_unihash(self, tid):
118 unihash = self.get_cached_unihash(tid)
119 if unihash:
120 return unihash
65 return self.taskhash[tid] 121 return self.taskhash[tid]
66 122
123 def get_unihashes(self, tids):
124 return {tid: self.get_unihash(tid) for tid in tids}
125
67 def prep_taskhash(self, tid, deps, dataCaches): 126 def prep_taskhash(self, tid, deps, dataCaches):
68 return 127 return
69 128
@@ -75,17 +134,51 @@ class SignatureGenerator(object):
75 """Write/update the file checksum cache onto disk""" 134 """Write/update the file checksum cache onto disk"""
76 return 135 return
77 136
137 def stampfile_base(self, mcfn):
138 mc = bb.runqueue.mc_from_tid(mcfn)
139 return self.datacaches[mc].stamp[mcfn]
140
141 def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
142 mc = bb.runqueue.mc_from_tid(mcfn)
143 stamp = self.datacaches[mc].stamp[mcfn]
144 if not stamp:
145 return
146
147 stamp_extrainfo = ""
148 if extrainfo:
149 taskflagname = taskname
150 if taskname.endswith("_setscene"):
151 taskflagname = taskname.replace("_setscene", "")
152 stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
153
154 return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)
155
78 def stampfile(self, stampbase, file_name, taskname, extrainfo): 156 def stampfile(self, stampbase, file_name, taskname, extrainfo):
79 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 157 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
80 158
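
The stamp name built by stampfile() is just the stamp base, task name and extra info joined with dots, with trailing dots stripped when a field is empty; for instance (paths are illustrative):

    def stampfile(stampbase, taskname, extrainfo):
        # Same composition as the method above, minus the unused file_name
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    print(stampfile("/tmp/stamps/zlib-1.3", "do_compile", "x86_64"))
    # /tmp/stamps/zlib-1.3.do_compile.x86_64
    print(stampfile("/tmp/stamps/zlib-1.3", "do_compile", ""))
    # /tmp/stamps/zlib-1.3.do_compile
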
159 def stampcleanmask_mcfn(self, taskname, mcfn):
160 mc = bb.runqueue.mc_from_tid(mcfn)
161 stamp = self.datacaches[mc].stamp[mcfn]
162 if not stamp:
163 return []
164
165 taskflagname = taskname
166 if taskname.endswith("_setscene"):
167 taskflagname = taskname.replace("_setscene", "")
168 stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
169
170 return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)
171
81 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo): 172 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
82 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 173 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
83 174
84 def dump_sigtask(self, fn, task, stampbase, runtime): 175 def dump_sigtask(self, mcfn, task, stampbase, runtime):
85 return 176 return
86 177
87 def invalidate_task(self, task, d, fn): 178 def invalidate_task(self, task, mcfn):
88 bb.build.del_stamp(task, d, fn) 179 mc = bb.runqueue.mc_from_tid(mcfn)
180 stamp = self.datacaches[mc].stamp[mcfn]
181 bb.utils.remove(stamp)
89 182
90 def dump_sigs(self, dataCache, options): 183 def dump_sigs(self, dataCache, options):
91 return 184 return
@@ -111,37 +204,13 @@ class SignatureGenerator(object):
111 def set_setscene_tasks(self, setscene_tasks): 204 def set_setscene_tasks(self, setscene_tasks):
112 return 205 return
113 206
207     def exit(self):
208         return

114     @classmethod
115     def get_data_caches(cls, dataCaches, mc):
116 """
117 This function returns the datacaches that should be passed to signature
118 generator functions. If the signature generator supports multiconfig
119 caches, the entire dictionary of data caches is sent, otherwise a
120 special proxy is sent that support both index access to all
121 multiconfigs, and also direct access for the default multiconfig.
122
123 The proxy class allows code in this class itself to always use
124 multiconfig aware code (to ease maintenance), but derived classes that
125 are unaware of multiconfig data caches can still access the default
126 multiconfig as expected.
127
128 Do not override this function in derived classes; it will be removed in
129 the future when support for multiconfig data caches is mandatory
130 """
131 class DataCacheProxy(object):
132 def __init__(self):
133 pass
134
135 def __getitem__(self, key):
136 return dataCaches[key]
137
138 def __getattr__(self, name):
139 return getattr(dataCaches[mc], name)
140
141 if cls.supports_multiconfig_datacaches:
142 return dataCaches
143 209
144         return DataCacheProxy()

210 def build_pnid(mc, pn, taskname):
211     if mc:
212         return "mc:" + mc + ":" + pn + ":" + taskname
213     return pn + ":" + taskname
145 214
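
build_pnid() produces the human-readable dependency identifiers that now replace raw file paths in siginfo data; for example:

    def build_pnid(mc, pn, taskname):
        if mc:
            return "mc:" + mc + ":" + pn + ":" + taskname
        return pn + ":" + taskname

    print(build_pnid("", "zlib", "do_compile"))      # zlib:do_compile
    print(build_pnid("musl", "zlib", "do_compile"))  # mc:musl:zlib:do_compile
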
146class SignatureGeneratorBasic(SignatureGenerator): 215class SignatureGeneratorBasic(SignatureGenerator):
147 """ 216 """
@@ -152,15 +221,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
152 self.basehash = {} 221 self.basehash = {}
153 self.taskhash = {} 222 self.taskhash = {}
154 self.unihash = {} 223 self.unihash = {}
155 self.taskdeps = {}
156 self.runtaskdeps = {} 224 self.runtaskdeps = {}
157 self.file_checksum_values = {} 225 self.file_checksum_values = {}
158 self.taints = {} 226 self.taints = {}
159 self.gendeps = {}
160 self.lookupcache = {}
161 self.setscenetasks = set() 227 self.setscenetasks = set()
162 self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split()) 228 self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
163 self.taskwhitelist = None 229 self.taskhash_ignore_tasks = None
164 self.init_rundepcheck(data) 230 self.init_rundepcheck(data)
165 checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") 231 checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
166 if checksum_cache_file: 232 if checksum_cache_file:
@@ -175,21 +241,21 @@ class SignatureGeneratorBasic(SignatureGenerator):
175 self.tidtopn = {} 241 self.tidtopn = {}
176 242
177 def init_rundepcheck(self, data): 243 def init_rundepcheck(self, data):
178 self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None 244 self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
179 if self.taskwhitelist: 245 if self.taskhash_ignore_tasks:
180 self.twl = re.compile(self.taskwhitelist) 246 self.twl = re.compile(self.taskhash_ignore_tasks)
181 else: 247 else:
182 self.twl = None 248 self.twl = None
183 249
184 def _build_data(self, fn, d): 250 def _build_data(self, mcfn, d):
185 251
186 ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1') 252 ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
187 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basewhitelist) 253 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)
188 254
189 taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn) 255 taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)
190 256
191 for task in tasklist: 257 for task in tasklist:
192 tid = fn + ":" + task 258 tid = mcfn + ":" + task
193 if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]: 259 if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
194 bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid])) 260 bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
195 bb.error("The following commands may help:") 261 bb.error("The following commands may help:")
@@ -200,11 +266,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
200 bb.error("%s -Sprintdiff\n" % cmd) 266 bb.error("%s -Sprintdiff\n" % cmd)
201 self.basehash[tid] = basehash[tid] 267 self.basehash[tid] = basehash[tid]
202 268
203 self.taskdeps[fn] = taskdeps 269 return taskdeps, gendeps, lookupcache
204 self.gendeps[fn] = gendeps
205 self.lookupcache[fn] = lookupcache
206
207 return taskdeps
208 270
209 def set_setscene_tasks(self, setscene_tasks): 271 def set_setscene_tasks(self, setscene_tasks):
210 self.setscenetasks = set(setscene_tasks) 272 self.setscenetasks = set(setscene_tasks)
@@ -212,35 +274,47 @@ class SignatureGeneratorBasic(SignatureGenerator):
212 def finalise(self, fn, d, variant): 274 def finalise(self, fn, d, variant):
213 275
214 mc = d.getVar("__BBMULTICONFIG", False) or "" 276 mc = d.getVar("__BBMULTICONFIG", False) or ""
277 mcfn = fn
215 if variant or mc: 278 if variant or mc:
216 fn = bb.cache.realfn2virtual(fn, variant, mc) 279 mcfn = bb.cache.realfn2virtual(fn, variant, mc)
217 280
218 try: 281 try:
219 taskdeps = self._build_data(fn, d) 282 taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
220 except bb.parse.SkipRecipe: 283 except bb.parse.SkipRecipe:
221 raise 284 raise
222 except: 285 except:
223 bb.warn("Error during finalise of %s" % fn) 286 bb.warn("Error during finalise of %s" % mcfn)
224 raise 287 raise
225 288
289 basehashes = {}
290 for task in taskdeps:
291 basehashes[task] = self.basehash[mcfn + ":" + task]
292
293 d.setVar("__siggen_basehashes", basehashes)
294 d.setVar("__siggen_gendeps", gendeps)
295 d.setVar("__siggen_varvals", lookupcache)
296 d.setVar("__siggen_taskdeps", taskdeps)
297
226 #Slow but can be useful for debugging mismatched basehashes 298 #Slow but can be useful for debugging mismatched basehashes
227 #for task in self.taskdeps[fn]: 299 #self.setup_datacache_from_datastore(mcfn, d)
228 # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) 300 #for task in taskdeps:
301 # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
229 302
230 for task in taskdeps: 303 def setup_datacache_from_datastore(self, mcfn, d):
231 d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task]) 304 super().setup_datacache_from_datastore(mcfn, d)
232 305
233 def postparsing_clean_cache(self): 306 mc = bb.runqueue.mc_from_tid(mcfn)
234 # 307 for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
235 # After parsing we can remove some things from memory to reduce our memory footprint 308 if not hasattr(self.datacaches[mc], attr):
236 # 309 setattr(self.datacaches[mc], attr, {})
237 self.gendeps = {} 310 self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
238 self.lookupcache = {} 311 self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
239 self.taskdeps = {} 312 self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")
240 313
241 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches): 314 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
242 # Return True if we should keep the dependency, False to drop it 315 # Return True if we should keep the dependency, False to drop it
243 # We only manipulate the dependencies for packages not in the whitelist 316 # We only manipulate the dependencies for packages not in the ignore
317 # list
244 if self.twl and not self.twl.search(recipename): 318 if self.twl and not self.twl.search(recipename):
245 # then process the actual dependencies 319 # then process the actual dependencies
246 if self.twl.search(depname): 320 if self.twl.search(depname):
@@ -258,38 +332,37 @@ class SignatureGeneratorBasic(SignatureGenerator):
258 332
259 def prep_taskhash(self, tid, deps, dataCaches): 333 def prep_taskhash(self, tid, deps, dataCaches):
260 334
261         (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
262
263         self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
264         self.runtaskdeps[tid] = []
265         self.file_checksum_values[tid] = []
266         recipename = dataCaches[mc].pkg_fn[fn]
267
268         self.tidtopn[tid] = recipename
269
270         for dep in sorted(deps, key=clean_basepath):
271             (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
272             depname = dataCaches[depmc].pkg_fn[depmcfn]
273             if not self.supports_multiconfig_datacaches and mc != depmc:
274                 # If the signature generator doesn't understand multiconfig
275                 # data caches, any dependency not in the same multiconfig must
276                 # be skipped for backward compatibility
277                 continue
278             if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches):
279                 continue
280             if dep not in self.taskhash:
281                 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
282             self.runtaskdeps[tid].append(dep)
283
284         if task in dataCaches[mc].file_checksums[fn]:

335         (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)
336
337         self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
338         self.runtaskdeps[tid] = []
339         self.file_checksum_values[tid] = []
340         recipename = dataCaches[mc].pkg_fn[mcfn]
341
342         self.tidtopn[tid] = recipename
343         # save hashfn for deps into siginfo?
344         for dep in deps:
345             (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
346             dep_pn = dataCaches[depmc].pkg_fn[depmcfn]
347
348             if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
349                 continue
350
351             if dep not in self.taskhash:
352                 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
353
354             dep_pnid = build_pnid(depmc, dep_pn, deptask)
355             self.runtaskdeps[tid].append((dep_pnid, dep))
356
357         if task in dataCaches[mc].file_checksums[mcfn]:
285 if self.checksum_cache: 358 if self.checksum_cache:
286 checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) 359 checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
287 else: 360 else:
288 checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) 361 checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
289 for (f,cs) in checksums: 362 for (f,cs) in checksums:
290 self.file_checksum_values[tid].append((f,cs)) 363 self.file_checksum_values[tid].append((f,cs))
291 364
292 taskdep = dataCaches[mc].task_deps[fn] 365 taskdep = dataCaches[mc].task_deps[mcfn]
293 if 'nostamp' in taskdep and task in taskdep['nostamp']: 366 if 'nostamp' in taskdep and task in taskdep['nostamp']:
294 # Nostamp tasks need an implicit taint so that they force any dependent tasks to run 367 # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
295 if tid in self.taints and self.taints[tid].startswith("nostamp:"): 368 if tid in self.taints and self.taints[tid].startswith("nostamp:"):
@@ -300,32 +373,34 @@ class SignatureGeneratorBasic(SignatureGenerator):
300 taint = str(uuid.uuid4()) 373 taint = str(uuid.uuid4())
301 self.taints[tid] = "nostamp:" + taint 374 self.taints[tid] = "nostamp:" + taint
302 375
303 taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn]) 376 taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
304 if taint: 377 if taint:
305 self.taints[tid] = taint 378 self.taints[tid] = taint
306 logger.warning("%s is tainted from a forced run" % tid) 379 logger.warning("%s is tainted from a forced run" % tid)
307 380
308 return 381 return set(dep for _, dep in self.runtaskdeps[tid])
309 382
310 def get_taskhash(self, tid, deps, dataCaches): 383 def get_taskhash(self, tid, deps, dataCaches):
311 384
312 data = self.basehash[tid] 385 data = self.basehash[tid]
313 for dep in self.runtaskdeps[tid]: 386 for dep in sorted(self.runtaskdeps[tid]):
314 data = data + self.get_unihash(dep) 387 data += self.get_unihash(dep[1])
315 388
316 for (f, cs) in self.file_checksum_values[tid]: 389 for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
317 if cs: 390 if cs:
318                 data = data + cs

391                 if "/./" in f:
392                     data += "./" + f.split("/./")[1]
393                 data += cs
319 394
320 if tid in self.taints: 395 if tid in self.taints:
321 if self.taints[tid].startswith("nostamp:"): 396 if self.taints[tid].startswith("nostamp:"):
322 data = data + self.taints[tid][8:] 397 data += self.taints[tid][8:]
323 else: 398 else:
324 data = data + self.taints[tid] 399 data += self.taints[tid]
325 400
326 h = hashlib.sha256(data.encode("utf-8")).hexdigest() 401 h = hashlib.sha256(data.encode("utf-8")).hexdigest()
327 self.taskhash[tid] = h 402 self.taskhash[tid] = h
328 #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task]) 403 #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
329 return h 404 return h
330 405
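
Stripped of the class plumbing, get_taskhash() reduces to hashing a deterministic concatenation: the basehash, then dependency unihashes in sorted order, then file checksums, then any taint. A minimal sketch with made-up inputs:

    import hashlib

    def taskhash(basehash, dep_unihashes, file_checksums, taint=None):
        data = basehash
        for _, unihash in sorted(dep_unihashes.items()):
            data += unihash
        for _, cs in sorted(file_checksums):
            data += cs
        if taint:
            data += taint
        return hashlib.sha256(data.encode("utf-8")).hexdigest()

    print(taskhash("aa11", {"zlib:do_compile": "bb22"}, [("a.patch", "cc33")]))
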
331 def writeout_file_checksum_cache(self): 406 def writeout_file_checksum_cache(self):
@@ -340,9 +415,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
340 def save_unitaskhashes(self): 415 def save_unitaskhashes(self):
341 self.unihash_cache.save(self.unitaskhashes) 416 self.unihash_cache.save(self.unitaskhashes)
342 417
343 def dump_sigtask(self, fn, task, stampbase, runtime): 418 def dump_sigtask(self, mcfn, task, stampbase, runtime):
344
345         tid = fn + ":" + task

419         tid = mcfn + ":" + task
420         mc = bb.runqueue.mc_from_tid(mcfn)
346 referencestamp = stampbase 421 referencestamp = stampbase
347 if isinstance(runtime, str) and runtime.startswith("customfile"): 422 if isinstance(runtime, str) and runtime.startswith("customfile"):
348 sigfile = stampbase 423 sigfile = stampbase
@@ -357,29 +432,34 @@ class SignatureGeneratorBasic(SignatureGenerator):
357 432
358 data = {} 433 data = {}
359 data['task'] = task 434 data['task'] = task
360 data['basewhitelist'] = self.basewhitelist 435 data['basehash_ignore_vars'] = self.basehash_ignore_vars
361 data['taskwhitelist'] = self.taskwhitelist 436 data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
362 data['taskdeps'] = self.taskdeps[fn][task] 437 data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
363 data['basehash'] = self.basehash[tid] 438 data['basehash'] = self.basehash[tid]
364 data['gendeps'] = {} 439 data['gendeps'] = {}
365 data['varvals'] = {} 440 data['varvals'] = {}
366 data['varvals'][task] = self.lookupcache[fn][task] 441 data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
367 for dep in self.taskdeps[fn][task]: 442 for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
368 if dep in self.basewhitelist: 443 if dep in self.basehash_ignore_vars:
369 continue 444 continue
370 data['gendeps'][dep] = self.gendeps[fn][dep] 445 data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
371 data['varvals'][dep] = self.lookupcache[fn][dep] 446 data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
372 447
373 if runtime and tid in self.taskhash: 448 if runtime and tid in self.taskhash:
374 data['runtaskdeps'] = self.runtaskdeps[tid] 449 data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
375 data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]] 450 data['file_checksum_values'] = []
451 for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
452 if "/./" in f:
453 data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
454 else:
455 data['file_checksum_values'].append((os.path.basename(f), cs))
376 data['runtaskhashes'] = {} 456 data['runtaskhashes'] = {}
377 for dep in data['runtaskdeps']: 457 for dep in self.runtaskdeps[tid]:
378 data['runtaskhashes'][dep] = self.get_unihash(dep) 458 data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
379 data['taskhash'] = self.taskhash[tid] 459 data['taskhash'] = self.taskhash[tid]
380 data['unihash'] = self.get_unihash(tid) 460 data['unihash'] = self.get_unihash(tid)
381 461
382 taint = self.read_taint(fn, task, referencestamp) 462 taint = self.read_taint(mcfn, task, referencestamp)
383 if taint: 463 if taint:
384 data['taint'] = taint 464 data['taint'] = taint
385 465
@@ -396,13 +476,13 @@ class SignatureGeneratorBasic(SignatureGenerator):
396 bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid)) 476 bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
397 sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash) 477 sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
398 478
399 fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.") 479 fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
400 try: 480 try:
401 with os.fdopen(fd, "wb") as stream: 481 with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
402 p = pickle.dump(data, stream, -1) 482 json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
403 stream.flush() 483 f.flush()
404 os.chmod(tmpfile, 0o664) 484 os.chmod(tmpfile, 0o664)
405 os.rename(tmpfile, sigfile) 485 bb.utils.rename(tmpfile, sigfile)
406 except (OSError, IOError) as err: 486 except (OSError, IOError) as err:
407 try: 487 try:
408 os.unlink(tmpfile) 488 os.unlink(tmpfile)
@@ -410,18 +490,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
410 pass 490 pass
411 raise err 491 raise err
412 492
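
The write path above uses the standard atomic-write idiom: dump to a temporary file in the same directory, then rename over the final name, so a concurrent reader never sees a partial sigfile. The same pattern with plain tempfile/os (the diff itself goes through bb.utils wrappers and zstd-compressed JSON):

    import json
    import os
    import tempfile

    def atomic_json_write(path, data):
        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(path) or ".",
                                       prefix="sigtask.")
        try:
            with os.fdopen(fd, "w") as f:
                json.dump(data, f, sort_keys=True)
                f.flush()
            os.chmod(tmpfile, 0o664)
            os.rename(tmpfile, path)  # atomic within one filesystem
        except OSError:
            os.unlink(tmpfile)
            raise

    atomic_json_write("demo.sig", {"task": "do_compile"})
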
413 def dump_sigfn(self, fn, dataCaches, options):
414 if fn in self.taskdeps:
415 for task in self.taskdeps[fn]:
416 tid = fn + ":" + task
417 mc = bb.runqueue.mc_from_tid(tid)
418 if tid not in self.taskhash:
419 continue
420 if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
421 bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
422 bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
423 self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
424
425class SignatureGeneratorBasicHash(SignatureGeneratorBasic): 493class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
426 name = "basichash" 494 name = "basichash"
427 495
@@ -432,11 +500,11 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
432 # If task is not in basehash, then error 500 # If task is not in basehash, then error
433 return self.basehash[tid] 501 return self.basehash[tid]
434 502
435 def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False): 503 def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
436 if taskname != "do_setscene" and taskname.endswith("_setscene"): 504 if taskname.endswith("_setscene"):
437 tid = fn + ":" + taskname[:-9] 505 tid = mcfn + ":" + taskname[:-9]
438 else: 506 else:
439 tid = fn + ":" + taskname 507 tid = mcfn + ":" + taskname
440 if clean: 508 if clean:
441 h = "*" 509 h = "*"
442 else: 510 else:
@@ -444,29 +512,99 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
444 512
445 return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.') 513 return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
446 514
447 def stampcleanmask(self, stampbase, fn, taskname, extrainfo): 515 def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
448 return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True) 516 return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)
449
450     def invalidate_task(self, task, d, fn):
451         bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
452         bb.build.write_taint(task, d, fn)

517
518     def invalidate_task(self, task, mcfn):
519         bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))
520
521         mc = bb.runqueue.mc_from_tid(mcfn)
522         stamp = self.datacaches[mc].stamp[mcfn]
523
524         taintfn = stamp + '.' + task + '.taint'
525
526         import uuid
527         bb.utils.mkdirhier(os.path.dirname(taintfn))
528         # The specific content of the taint file is not really important,
529         # we just need it to be random, so a random UUID is used
530         with open(taintfn, 'w') as taintf:
531             taintf.write(str(uuid.uuid4()))
453 532
454class SignatureGeneratorUniHashMixIn(object): 533class SignatureGeneratorUniHashMixIn(object):
455 def __init__(self, data): 534 def __init__(self, data):
456 self.extramethod = {} 535 self.extramethod = {}
536 # NOTE: The cache only tracks hashes that exist. Hashes that don't
537 # exist are always queried from the server since it is possible for
538 # hashes to appear over time, but much less likely for them to
539 # disappear
540 self.unihash_exists_cache = set()
541 self.username = None
542 self.password = None
543 self.env = {}
544
545 origenv = data.getVar("BB_ORIGENV")
546 for e in HASHSERV_ENVVARS:
547 value = data.getVar(e)
548 if not value and origenv:
549 value = origenv.getVar(e)
550 if value:
551 self.env[e] = value
457 super().__init__(data) 552 super().__init__(data)
458 553
459 def get_taskdata(self): 554 def get_taskdata(self):
460 return (self.server, self.method, self.extramethod) + super().get_taskdata() 555 return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()
461 556
462 def set_taskdata(self, data): 557 def set_taskdata(self, data):
463 self.server, self.method, self.extramethod = data[:3] 558 self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
464 super().set_taskdata(data[3:]) 559 super().set_taskdata(data[6:])
560
561 def get_hashserv_creds(self):
562 if self.username and self.password:
563 return {
564 "username": self.username,
565 "password": self.password,
566 }
567
568 return {}
569
570 @contextmanager
571 def _client_env(self):
572 orig_env = os.environ.copy()
573 try:
574 for k, v in self.env.items():
575 os.environ[k] = v
576
577 yield
578 finally:
579 for k, v in self.env.items():
580 if k in orig_env:
581 os.environ[k] = orig_env[k]
582 else:
583 del os.environ[k]
465 584
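
_client_env() above is a save-patch-restore pattern for os.environ: the proxy and SSL variables must be visible to the hash server client only for the duration of a call, and afterwards each key is restored or deleted. The same idiom in isolation:

    import os
    from contextlib import contextmanager

    @contextmanager
    def patched_env(overlay):
        saved = os.environ.copy()
        try:
            os.environ.update(overlay)
            yield
        finally:
            for k in overlay:
                if k in saved:
                    os.environ[k] = saved[k]  # restore the previous value
                else:
                    del os.environ[k]         # the key did not exist before

    with patched_env({"HTTPS_PROXY": "http://proxy.example:3128"}):
        print(os.environ["HTTPS_PROXY"])  # visible only inside the block
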
585 @contextmanager
466 def client(self): 586 def client(self):
467         if getattr(self, '_client', None) is None:
468             self._client = hashserv.create_client(self.server)
469         return self._client

587         with self._client_env():
588             if getattr(self, '_client', None) is None:
589                 self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
590             yield self._client
591
592 def reset(self, data):
593 self.__close_clients()
594 return super().reset(data)
595
596 def exit(self):
597 self.__close_clients()
598 return super().exit()
599
600 def __close_clients(self):
601 with self._client_env():
602 if getattr(self, '_client', None) is not None:
603 self._client.close()
604 self._client = None
605 if getattr(self, '_client_pool', None) is not None:
606 self._client_pool.close()
607 self._client_pool = None
470 608
471 def get_stampfile_hash(self, tid): 609 def get_stampfile_hash(self, tid):
472 if tid in self.taskhash: 610 if tid in self.taskhash:
@@ -499,7 +637,7 @@ class SignatureGeneratorUniHashMixIn(object):
499 return None 637 return None
500 return unihash 638 return unihash
501 639
502 def get_unihash(self, tid): 640 def get_cached_unihash(self, tid):
503 taskhash = self.taskhash[tid] 641 taskhash = self.taskhash[tid]
504 642
505         # If it's not a setscene task we can return 643         # If it's not a setscene task we can return
@@ -514,40 +652,96 @@ class SignatureGeneratorUniHashMixIn(object):
514 self.unihash[tid] = unihash 652 self.unihash[tid] = unihash
515 return unihash 653 return unihash
516 654
517         # In the absence of being able to discover a unique hash from the
518         # server, make it be equivalent to the taskhash. The unique "hash" only
519         # really needs to be a unique string (not even necessarily a hash), but
520         # making it match the taskhash has a few advantages:
521         #
522         # 1) All of the sstate code that assumes hashes can be the same
523         # 2) It provides maximal compatibility with builders that don't use
524         #    an equivalency server
525         # 3) The value is easy for multiple independent builders to derive the
526         #    same unique hash from the same input. This means that if the
527         #    independent builders find the same taskhash, but it isn't reported
528         #    to the server, there is a better chance that they will agree on
529         #    the unique hash.
530         unihash = taskhash
531
532         try:
533             method = self.method
534             if tid in self.extramethod:
535                 method = method + self.extramethod[tid]
536             data = self.client().get_unihash(method, self.taskhash[tid])
537             if data:
538                 unihash = data

655         return None
656
657     def _get_method(self, tid):
658         method = self.method
659         if tid in self.extramethod:
660             method = method + self.extramethod[tid]
661
662         return method
663
664 def unihashes_exist(self, query):
665 if len(query) == 0:
666 return {}
667
668 query_keys = []
669 result = {}
670 for key, unihash in query.items():
671 if unihash in self.unihash_exists_cache:
672 result[key] = True
673 else:
674 query_keys.append(key)
675
676 if query_keys:
677 with self.client() as client:
678 query_result = client.unihash_exists_batch(query[k] for k in query_keys)
679
680 for idx, key in enumerate(query_keys):
681 exists = query_result[idx]
682 if exists:
683 self.unihash_exists_cache.add(query[key])
684 result[key] = exists
685
686 return result
687
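
Note the asymmetry in unihashes_exist(): only positive answers are cached, since a hash that is absent today may be uploaded tomorrow, while one that exists is assumed not to disappear (as the comment in __init__ above spells out). The caching skeleton, with the batched server RPC stubbed out:

    class ExistsCache:
        def __init__(self, query_server):
            self.known = set()  # only hashes confirmed to exist
            self.query_server = query_server  # stand-in for the hashserv batch RPC

        def exists(self, hashes):
            result = {h: True for h in hashes if h in self.known}
            missing = [h for h in hashes if h not in self.known]
            if missing:
                for h, found in zip(missing, self.query_server(missing)):
                    if found:  # negative answers are never cached
                        self.known.add(h)
                    result[h] = found
            return result

    cache = ExistsCache(lambda hs: [h.startswith("aa") for h in hs])
    print(cache.exists(["aa11", "bb22"]))  # {'aa11': True, 'bb22': False}
    print(cache.known)                     # {'aa11'}
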
688 def get_unihash(self, tid):
689 return self.get_unihashes([tid])[tid]
690
691 def get_unihashes(self, tids):
692 """
693         For an iterable of tids, returns a dictionary that maps each tid to a
694 unihash
695 """
696 result = {}
697 query_tids = []
698
699 for tid in tids:
700 unihash = self.get_cached_unihash(tid)
701 if unihash:
702 result[tid] = unihash
703 else:
704 query_tids.append(tid)
705
706 if query_tids:
707 unihashes = []
708 try:
709 with self.client() as client:
710 unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
711 except (ConnectionError, FileNotFoundError) as e:
712 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
713
714 for idx, tid in enumerate(query_tids):
715 # In the absence of being able to discover a unique hash from the
716 # server, make it be equivalent to the taskhash. The unique "hash" only
717 # really needs to be a unique string (not even necessarily a hash), but
718 # making it match the taskhash has a few advantages:
719 #
720 # 1) All of the sstate code that assumes hashes can be the same
721 # 2) It provides maximal compatibility with builders that don't use
722 # an equivalency server
723 # 3) The value is easy for multiple independent builders to derive the
724 # same unique hash from the same input. This means that if the
725 # independent builders find the same taskhash, but it isn't reported
726 # to the server, there is a better chance that they will agree on
727 # the unique hash.
728 taskhash = self.taskhash[tid]
729
730 if unihashes and unihashes[idx]:
731 unihash = unihashes[idx]
539             # A unique hash equal to the taskhash is not very interesting,
540             # so it is reported at debug level 2. If they differ, that
541             # is much more interesting, so it is reported at debug level 1
542             hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
543         else:
544             hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
545         except hashserv.client.HashConnectionError as e:
546             bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
547
548         self.set_unihash(tid, unihash)
549         self.unihash[tid] = unihash
550         return unihash

732                 # A unique hash equal to the taskhash is not very interesting,
733                 # so it is reported at debug level 2. If they differ, that
734                 # is much more interesting, so it is reported at debug level 1
735                 hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
736             else:
737                 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
738                 unihash = taskhash
739
740             self.set_unihash(tid, unihash)
741             self.unihash[tid] = unihash
742             result[tid] = unihash
743
744         return result
551 745
552 def report_unihash(self, path, task, d): 746 def report_unihash(self, path, task, d):
553 import importlib 747 import importlib
@@ -556,14 +750,14 @@ class SignatureGeneratorUniHashMixIn(object):
556 unihash = d.getVar('BB_UNIHASH') 750 unihash = d.getVar('BB_UNIHASH')
557 report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1' 751 report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
558 tempdir = d.getVar('T') 752 tempdir = d.getVar('T')
559 fn = d.getVar('BB_FILENAME') 753 mcfn = d.getVar('BB_FILENAME')
560 tid = fn + ':do_' + task 754 tid = mcfn + ':do_' + task
561 key = tid + ':' + taskhash 755 key = tid + ':' + taskhash
562 756
563 if self.setscenetasks and tid not in self.setscenetasks: 757 if self.setscenetasks and tid not in self.setscenetasks:
564 return 758 return
565 759
566 # This can happen if locked sigs are in action. Detect and just abort 760 # This can happen if locked sigs are in action. Detect and just exit
567 if taskhash != self.taskhash[tid]: 761 if taskhash != self.taskhash[tid]:
568 return 762 return
569 763
@@ -611,17 +805,19 @@ class SignatureGeneratorUniHashMixIn(object):
611 if tid in self.extramethod: 805 if tid in self.extramethod:
612 method = method + self.extramethod[tid] 806 method = method + self.extramethod[tid]
613 807
614 data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data) 808 with self.client() as client:
809 data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)
810
615 new_unihash = data['unihash'] 811 new_unihash = data['unihash']
616 812
617 if new_unihash != unihash: 813 if new_unihash != unihash:
618 hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server)) 814 hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
619 bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d) 815 bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
620 self.set_unihash(tid, new_unihash) 816 self.set_unihash(tid, new_unihash)
621 d.setVar('BB_UNIHASH', new_unihash) 817 d.setVar('BB_UNIHASH', new_unihash)
622 else: 818 else:
623 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) 819 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
624 except hashserv.client.HashConnectionError as e: 820 except (ConnectionError, FileNotFoundError) as e:
625 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 821 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
626 finally: 822 finally:
627 if sigfile: 823 if sigfile:
@@ -642,7 +838,9 @@ class SignatureGeneratorUniHashMixIn(object):
642 if tid in self.extramethod: 838 if tid in self.extramethod:
643 method = method + self.extramethod[tid] 839 method = method + self.extramethod[tid]
644 840
645 data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) 841 with self.client() as client:
842 data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
843
646 hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) 844 hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
647 845
648 if data is None: 846 if data is None:
@@ -661,7 +859,7 @@ class SignatureGeneratorUniHashMixIn(object):
661 # TODO: What to do here? 859 # TODO: What to do here?
662 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) 860 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
663 861
664 except hashserv.client.HashConnectionError as e: 862 except (ConnectionError, FileNotFoundError) as e:
665 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 863 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
666 864
667 return False 865 return False
@@ -676,19 +874,18 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
676 self.server = data.getVar('BB_HASHSERVE') 874 self.server = data.getVar('BB_HASHSERVE')
677 self.method = "sstate_output_hash" 875 self.method = "sstate_output_hash"
678 876
679 #
680 # Dummy class used for bitbake-selftest
681 #
682 class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash):
683     name = "TestMulticonfigDepends"
684     supports_multiconfig_datacaches = True

877 def clean_checksum_file_path(file_checksum_tuple):
878     f, cs = file_checksum_tuple
879     if "/./" in f:
880         return "./" + f.split("/./")[1]
881     return os.path.basename(f)
685 882
686def dump_this_task(outfile, d): 883def dump_this_task(outfile, d):
687 import bb.parse 884 import bb.parse
688 fn = d.getVar("BB_FILENAME") 885 mcfn = d.getVar("BB_FILENAME")
689 task = "do_" + d.getVar("BB_CURRENTTASK") 886 task = "do_" + d.getVar("BB_CURRENTTASK")
690 referencestamp = bb.build.stamp_internal(task, d, None, True) 887 referencestamp = bb.parse.siggen.stampfile_base(mcfn)
691 bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp) 888 bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)
692 889
693def init_colors(enable_color): 890def init_colors(enable_color):
694 """Initialise colour dict for passing to compare_sigfiles()""" 891 """Initialise colour dict for passing to compare_sigfiles()"""
@@ -741,38 +938,15 @@ def list_inline_diff(oldlist, newlist, colors=None):
741 ret.append(item) 938 ret.append(item)
742 return '[%s]' % (', '.join(ret)) 939 return '[%s]' % (', '.join(ret))
743 940
744def clean_basepath(basepath): 941# Handle renamed fields
745 basepath, dir, recipe_task = basepath.rsplit("/", 2) 942def handle_renames(data):
746 cleaned = dir + '/' + recipe_task 943 if 'basewhitelist' in data:
747 944 data['basehash_ignore_vars'] = data['basewhitelist']
748 if basepath[0] == '/': 945 del data['basewhitelist']
749 return cleaned 946 if 'taskwhitelist' in data:
750 947 data['taskhash_ignore_tasks'] = data['taskwhitelist']
751 if basepath.startswith("mc:") and basepath.count(':') >= 2: 948 del data['taskwhitelist']
752 mc, mc_name, basepath = basepath.split(":", 2)
753 mc_suffix = ':mc:' + mc_name
754 else:
755 mc_suffix = ''
756
757 # mc stuff now removed from basepath. Whatever was next, if present will be the first
758 # suffix. ':/', recipe path start, marks the end of this. Something like
759 # 'virtual:a[:b[:c]]:/path...' (b and c being optional)
760 if basepath[0] != '/':
761 cleaned += ':' + basepath.split(':/', 1)[0]
762
763 return cleaned + mc_suffix
764 949
765def clean_basepaths(a):
766 b = {}
767 for x in a:
768 b[clean_basepath(x)] = a[x]
769 return b
770
771def clean_basepaths_list(a):
772 b = []
773 for x in a:
774 b.append(clean_basepath(x))
775 return b
776 950
777def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): 951def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
778 output = [] 952 output = []
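
handle_renames() lets the new code keep reading sigdata written before the whitelist terminology was retired. A minimal check of the mapping (function body copied from the hunk above):

    def handle_renames(data):
        # Migrate pre-rename sigdata keys to the new names.
        if 'basewhitelist' in data:
            data['basehash_ignore_vars'] = data['basewhitelist']
            del data['basewhitelist']
        if 'taskwhitelist' in data:
            data['taskhash_ignore_tasks'] = data['taskwhitelist']
            del data['taskwhitelist']

    data = {'basewhitelist': {'DATE', 'TIME'}, 'taskwhitelist': None}
    handle_renames(data)
    assert data == {'basehash_ignore_vars': {'DATE', 'TIME'}, 'taskhash_ignore_tasks': None}
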
@@ -794,20 +968,29 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
794 formatparams.update(values) 968 formatparams.update(values)
795 return formatstr.format(**formatparams) 969 return formatstr.format(**formatparams)
796 970
797 with open(a, 'rb') as f: 971 try:
798 p1 = pickle.Unpickler(f) 972 with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
799 a_data = p1.load() 973 a_data = json.load(f, object_hook=SetDecoder)
800 with open(b, 'rb') as f: 974 except (TypeError, OSError) as err:
801 p2 = pickle.Unpickler(f) 975 bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
802 b_data = p2.load() 976 raise err
803 977 try:
804 def dict_diff(a, b, whitelist=set()): 978 with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
979 b_data = json.load(f, object_hook=SetDecoder)
980 except (TypeError, OSError) as err:
981 bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
982 raise err
983
984 for data in [a_data, b_data]:
985 handle_renames(data)
986
987 def dict_diff(a, b, ignored_vars=set()):
805 sa = set(a.keys()) 988 sa = set(a.keys())
806 sb = set(b.keys()) 989 sb = set(b.keys())
807 common = sa & sb 990 common = sa & sb
808 changed = set() 991 changed = set()
809 for i in common: 992 for i in common:
810 if a[i] != b[i] and i not in whitelist: 993 if a[i] != b[i] and i not in ignored_vars:
811 changed.add(i) 994 changed.add(i)
812 added = sb - sa 995 added = sb - sa
813 removed = sa - sb 996 removed = sa - sb
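
The hunk above swaps pickle.Unpickler for zstd-compressed JSON. JSON has no native set type, so sigdata values such as basehash_ignore_vars need a tagging convention on the way out and an object_hook (SetDecoder) on the way back in. A sketch of the idea; the exact tag used by bb.siggen's encoder/decoder pair is an assumption here:

    import json

    def set_encoder(obj):
        # Hypothetical tag: the diff only shows SetDecoder being used as an
        # object_hook, not the tagging scheme itself.
        if isinstance(obj, (set, frozenset)):
            return {"_set_object": sorted(obj)}
        raise TypeError(obj)

    def set_decoder(dct):
        if "_set_object" in dct:
            return set(dct["_set_object"])
        return dct

    data = {"basehash_ignore_vars": {"DATE", "TIME"}}
    text = json.dumps(data, default=set_encoder)
    assert json.loads(text, object_hook=set_decoder) == data
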
@@ -815,11 +998,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
815 998
816 def file_checksums_diff(a, b): 999 def file_checksums_diff(a, b):
817 from collections import Counter 1000 from collections import Counter
818 # Handle old siginfo format 1001
819 if isinstance(a, dict): 1002 # Convert lists back to tuples
820 a = [(os.path.basename(f), cs) for f, cs in a.items()] 1003 a = [(f[0], f[1]) for f in a]
821 if isinstance(b, dict): 1004 b = [(f[0], f[1]) for f in b]
822 b = [(os.path.basename(f), cs) for f, cs in b.items()] 1005
823 # Compare lists, ensuring we can handle duplicate filenames if they exist 1006 # Compare lists, ensuring we can handle duplicate filenames if they exist
824 removedcount = Counter(a) 1007 removedcount = Counter(a)
825 removedcount.subtract(b) 1008 removedcount.subtract(b)
@@ -846,15 +1029,15 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
846 removed = [x[0] for x in removed] 1029 removed = [x[0] for x in removed]
847 return changed, added, removed 1030 return changed, added, removed
848 1031
849 if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']: 1032 if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
850 output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist'])) 1033 output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
851 if a_data['basewhitelist'] and b_data['basewhitelist']: 1034 if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
852 output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])) 1035 output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))
853 1036
854 if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']: 1037 if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
855 output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist'])) 1038 output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
856 if a_data['taskwhitelist'] and b_data['taskwhitelist']: 1039 if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
857 output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])) 1040 output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))
858 1041
859 if a_data['taskdeps'] != b_data['taskdeps']: 1042 if a_data['taskdeps'] != b_data['taskdeps']:
860 output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))) 1043 output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
@@ -862,23 +1045,23 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
862 if a_data['basehash'] != b_data['basehash'] and not collapsed: 1045 if a_data['basehash'] != b_data['basehash'] and not collapsed:
863 output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash'])) 1046 output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))
864 1047
865 changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist']) 1048 changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
866 if changed: 1049 if changed:
867 for dep in changed: 1050 for dep in sorted(changed):
868 output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])) 1051 output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
869 if a_data['gendeps'][dep] and b_data['gendeps'][dep]: 1052 if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
870 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])) 1053 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
871 if added: 1054 if added:
872 for dep in added: 1055 for dep in sorted(added):
873 output.append(color_format("{color_title}Dependency on variable %s was added") % (dep)) 1056 output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
874 if removed: 1057 if removed:
875 for dep in removed: 1058 for dep in sorted(removed):
876 output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep)) 1059 output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))
877 1060
878 1061
879 changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals']) 1062 changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
880 if changed: 1063 if changed:
881 for dep in changed: 1064 for dep in sorted(changed):
882 oldval = a_data['varvals'][dep] 1065 oldval = a_data['varvals'][dep]
883 newval = b_data['varvals'][dep] 1066 newval = b_data['varvals'][dep]
884 if newval and oldval and ('\n' in oldval or '\n' in newval): 1067 if newval and oldval and ('\n' in oldval or '\n' in newval):
@@ -902,9 +1085,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
902 output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval)) 1085 output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
903 1086
904 if not 'file_checksum_values' in a_data: 1087 if not 'file_checksum_values' in a_data:
905 a_data['file_checksum_values'] = {} 1088 a_data['file_checksum_values'] = []
906 if not 'file_checksum_values' in b_data: 1089 if not 'file_checksum_values' in b_data:
907 b_data['file_checksum_values'] = {} 1090 b_data['file_checksum_values'] = []
908 1091
909 changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values']) 1092 changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
910 if changed: 1093 if changed:
@@ -931,11 +1114,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
931 a = a_data['runtaskdeps'][idx] 1114 a = a_data['runtaskdeps'][idx]
932 b = b_data['runtaskdeps'][idx] 1115 b = b_data['runtaskdeps'][idx]
933 if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed: 1116 if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
934 changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b])) 1117 changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
935 1118
936 if changed: 1119 if changed:
937 clean_a = clean_basepaths_list(a_data['runtaskdeps']) 1120 clean_a = a_data['runtaskdeps']
938 clean_b = clean_basepaths_list(b_data['runtaskdeps']) 1121 clean_b = b_data['runtaskdeps']
939 if clean_a != clean_b: 1122 if clean_a != clean_b:
940 output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors)) 1123 output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
941 else: 1124 else:
@@ -948,7 +1131,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
948 b = b_data['runtaskhashes'] 1131 b = b_data['runtaskhashes']
949 changed, added, removed = dict_diff(a, b) 1132 changed, added, removed = dict_diff(a, b)
950 if added: 1133 if added:
951 for dep in added: 1134 for dep in sorted(added):
952 bdep_found = False 1135 bdep_found = False
953 if removed: 1136 if removed:
954 for bdep in removed: 1137 for bdep in removed:
@@ -956,9 +1139,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
956 #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep)) 1139 #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
957 bdep_found = True 1140 bdep_found = True
958 if not bdep_found: 1141 if not bdep_found:
959 output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep])) 1142 output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
960 if removed: 1143 if removed:
961 for dep in removed: 1144 for dep in sorted(removed):
962 adep_found = False 1145 adep_found = False
963 if added: 1146 if added:
964 for adep in added: 1147 for adep in added:
@@ -966,11 +1149,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
966 #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep)) 1149 #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
967 adep_found = True 1150 adep_found = True
968 if not adep_found: 1151 if not adep_found:
969 output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep])) 1152 output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
970 if changed: 1153 if changed:
971 for dep in changed: 1154 for dep in sorted(changed):
972 if not collapsed: 1155 if not collapsed:
973 output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep])) 1156 output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
974 if callable(recursecb): 1157 if callable(recursecb):
975 recout = recursecb(dep, a[dep], b[dep]) 1158 recout = recursecb(dep, a[dep], b[dep])
976 if recout: 1159 if recout:
@@ -980,6 +1163,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
980 # If a dependent hash changed, might as well print the line above and then defer to the changes in 1163 # If a dependent hash changed, might as well print the line above and then defer to the changes in
981 # that hash since in all likelihood, they're the same changes this task also saw. 1164 # that hash since in all likelihood, they're the same changes this task also saw.
982 output = [output[-1]] + recout 1165 output = [output[-1]] + recout
1166 break
983 1167
984 a_taint = a_data.get('taint', None) 1168 a_taint = a_data.get('taint', None)
985 b_taint = b_data.get('taint', None) 1169 b_taint = b_data.get('taint', None)
@@ -1001,7 +1185,7 @@ def calc_basehash(sigdata):
1001 basedata = '' 1185 basedata = ''
1002 1186
1003 alldeps = sigdata['taskdeps'] 1187 alldeps = sigdata['taskdeps']
1004 for dep in alldeps: 1188 for dep in sorted(alldeps):
1005 basedata = basedata + dep 1189 basedata = basedata + dep
1006 val = sigdata['varvals'][dep] 1190 val = sigdata['varvals'][dep]
1007 if val is not None: 1191 if val is not None:
@@ -1017,6 +1201,8 @@ def calc_taskhash(sigdata):
1017 1201
1018 for c in sigdata['file_checksum_values']: 1202 for c in sigdata['file_checksum_values']:
1019 if c[1]: 1203 if c[1]:
1204 if "./" in c[0]:
1205 data = data + c[0]
1020 data = data + c[1] 1206 data = data + c[1]
1021 1207
1022 if 'taint' in sigdata: 1208 if 'taint' in sigdata:
@@ -1031,32 +1217,37 @@ def calc_taskhash(sigdata):
1031def dump_sigfile(a): 1217def dump_sigfile(a):
1032 output = [] 1218 output = []
1033 1219
1034 with open(a, 'rb') as f: 1220 try:
1035 p1 = pickle.Unpickler(f) 1221 with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
1036 a_data = p1.load() 1222 a_data = json.load(f, object_hook=SetDecoder)
1223 except (TypeError, OSError) as err:
1224 bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
1225 raise err
1226
1227 handle_renames(a_data)
1037 1228
1038 output.append("basewhitelist: %s" % (a_data['basewhitelist'])) 1229 output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))
1039 1230
1040 output.append("taskwhitelist: %s" % (a_data['taskwhitelist'])) 1231 output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))
1041 1232
1042 output.append("Task dependencies: %s" % (sorted(a_data['taskdeps']))) 1233 output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
1043 1234
1044 output.append("basehash: %s" % (a_data['basehash'])) 1235 output.append("basehash: %s" % (a_data['basehash']))
1045 1236
1046 for dep in a_data['gendeps']: 1237 for dep in sorted(a_data['gendeps']):
1047 output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep])) 1238 output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))
1048 1239
1049 for dep in a_data['varvals']: 1240 for dep in sorted(a_data['varvals']):
1050 output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep])) 1241 output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
1051 1242
1052 if 'runtaskdeps' in a_data: 1243 if 'runtaskdeps' in a_data:
1053 output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps'])) 1244 output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))
1054 1245
1055 if 'file_checksum_values' in a_data: 1246 if 'file_checksum_values' in a_data:
1056 output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values'])) 1247 output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))
1057 1248
1058 if 'runtaskhashes' in a_data: 1249 if 'runtaskhashes' in a_data:
1059 for dep in a_data['runtaskhashes']: 1250 for dep in sorted(a_data['runtaskhashes']):
1060 output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])) 1251 output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
1061 1252
1062 if 'taint' in a_data: 1253 if 'taint' in a_data:
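
Putting the siggen changes together: with bitbake's lib/ directory on sys.path, a new-format sigdata file can be inspected the same way compare_sigfiles() and dump_sigfile() now do. A sketch; the filename is illustrative and the SetDecoder import location is assumed:

    import json
    import bb.compress.zstd
    from bb.siggen import SetDecoder  # assumption: importable from bb.siggen

    with bb.compress.zstd.open("example.sigdata", "rt", encoding="utf-8", num_threads=1) as f:
        sigdata = json.load(f, object_hook=SetDecoder)
    print(sorted(sigdata.get("basehash_ignore_vars", [])))
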
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index 47bad6d1fa..66545a65af 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -39,7 +39,7 @@ class TaskData:
39 """ 39 """
40 BitBake Task Data implementation 40 BitBake Task Data implementation
41 """ 41 """
42 def __init__(self, abort = True, skiplist = None, allowincomplete = False): 42 def __init__(self, halt = True, skiplist = None, allowincomplete = False):
43 self.build_targets = {} 43 self.build_targets = {}
44 self.run_targets = {} 44 self.run_targets = {}
45 45
@@ -57,7 +57,7 @@ class TaskData:
57 self.failed_rdeps = [] 57 self.failed_rdeps = []
58 self.failed_fns = [] 58 self.failed_fns = []
59 59
60 self.abort = abort 60 self.halt = halt
61 self.allowincomplete = allowincomplete 61 self.allowincomplete = allowincomplete
62 62
63 self.skiplist = skiplist 63 self.skiplist = skiplist
@@ -328,7 +328,7 @@ class TaskData:
328 try: 328 try:
329 self.add_provider_internal(cfgData, dataCache, item) 329 self.add_provider_internal(cfgData, dataCache, item)
330 except bb.providers.NoProvider: 330 except bb.providers.NoProvider:
331 if self.abort: 331 if self.halt:
332 raise 332 raise
333 self.remove_buildtarget(item) 333 self.remove_buildtarget(item)
334 334
@@ -451,12 +451,12 @@ class TaskData:
451 for target in self.build_targets: 451 for target in self.build_targets:
452 if fn in self.build_targets[target]: 452 if fn in self.build_targets[target]:
453 self.build_targets[target].remove(fn) 453 self.build_targets[target].remove(fn)
454 if len(self.build_targets[target]) == 0: 454 if not self.build_targets[target]:
455 self.remove_buildtarget(target, missing_list) 455 self.remove_buildtarget(target, missing_list)
456 for target in self.run_targets: 456 for target in self.run_targets:
457 if fn in self.run_targets[target]: 457 if fn in self.run_targets[target]:
458 self.run_targets[target].remove(fn) 458 self.run_targets[target].remove(fn)
459 if len(self.run_targets[target]) == 0: 459 if not self.run_targets[target]:
460 self.remove_runtarget(target, missing_list) 460 self.remove_runtarget(target, missing_list)
461 461
462 def remove_buildtarget(self, target, missing_list=None): 462 def remove_buildtarget(self, target, missing_list=None):
@@ -479,7 +479,7 @@ class TaskData:
479 fn = tid.rsplit(":",1)[0] 479 fn = tid.rsplit(":",1)[0]
480 self.fail_fn(fn, missing_list) 480 self.fail_fn(fn, missing_list)
481 481
482 if self.abort and target in self.external_targets: 482 if self.halt and target in self.external_targets:
483 logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list) 483 logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
484 raise bb.providers.NoProvider(target) 484 raise bb.providers.NoProvider(target)
485 485
@@ -516,7 +516,7 @@ class TaskData:
516 self.add_provider_internal(cfgData, dataCache, target) 516 self.add_provider_internal(cfgData, dataCache, target)
517 added = added + 1 517 added = added + 1
518 except bb.providers.NoProvider: 518 except bb.providers.NoProvider:
519 if self.abort and target in self.external_targets and not self.allowincomplete: 519 if self.halt and target in self.external_targets and not self.allowincomplete:
520 raise 520 raise
521 if not self.allowincomplete: 521 if not self.allowincomplete:
522 self.remove_buildtarget(target) 522 self.remove_buildtarget(target)
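
For out-of-tree callers, the abort-to-halt rename is API-visible: anything constructing a TaskData with the old keyword needs updating. A sketch, assuming bitbake's lib/ is importable:

    import bb.taskdata

    # Previously: bb.taskdata.TaskData(abort=False, allowincomplete=True)
    td = bb.taskdata.TaskData(halt=False, allowincomplete=True)
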
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index 826a2d2f6d..c0d1362a0c 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -44,6 +44,7 @@ class VariableReferenceTest(ReferenceTest):
44 def parseExpression(self, exp): 44 def parseExpression(self, exp):
45 parsedvar = self.d.expandWithRefs(exp, None) 45 parsedvar = self.d.expandWithRefs(exp, None)
46 self.references = parsedvar.references 46 self.references = parsedvar.references
47 self.execs = parsedvar.execs
47 48
48 def test_simple_reference(self): 49 def test_simple_reference(self):
49 self.setEmptyVars(["FOO"]) 50 self.setEmptyVars(["FOO"])
@@ -61,6 +62,11 @@ class VariableReferenceTest(ReferenceTest):
61 self.parseExpression("${@d.getVar('BAR') + 'foo'}") 62 self.parseExpression("${@d.getVar('BAR') + 'foo'}")
62 self.assertReferences(set(["BAR"])) 63 self.assertReferences(set(["BAR"]))
63 64
65 def test_python_exec_reference(self):
66 self.parseExpression("${@eval('3 * 5')}")
67 self.assertReferences(set())
68 self.assertExecs(set(["eval"]))
69
64class ShellReferenceTest(ReferenceTest): 70class ShellReferenceTest(ReferenceTest):
65 71
66 def parseExpression(self, exp): 72 def parseExpression(self, exp):
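
The new test_python_exec_reference case pins down that a python expression such as ${@eval('3 * 5')} contributes eval to the execs set while adding no variable references. bb.codeparser gets this by walking the expression's AST; a self-contained illustration of the same idea (not bitbake's implementation):

    import ast

    def collect_execs(expr):
        # Collect the names of functions called directly in a Python
        # expression, similar in spirit to bb.codeparser's AST walk.
        execs = set()
        for node in ast.walk(ast.parse(expr, mode="eval")):
            if isinstance(node, ast.Call) and isinstance(node.func, ast.Name):
                execs.add(node.func.id)
        return execs

    assert collect_execs("eval('3 * 5')") == {"eval"}
    assert collect_execs("d.getVar('BAR') + 'foo'") == set()  # method call, not a bare name
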
@@ -100,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc
100 self.parseExpression("foo=$(echo bar)") 106 self.parseExpression("foo=$(echo bar)")
101 self.assertExecs(set(["echo"])) 107 self.assertExecs(set(["echo"]))
102 108
109 def test_assign_subshell_expansion_quotes(self):
110 self.parseExpression('foo="$(echo bar)"')
111 self.assertExecs(set(["echo"]))
112
113 def test_assign_subshell_expansion_nested(self):
114 self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"')
115 self.assertExecs(set(["func1", "func2", "func3"]))
116
117 def test_assign_subshell_expansion_multiple(self):
118 self.parseExpression('foo="$(func1 "$(func2)") $(func3)"')
119 self.assertExecs(set(["func1", "func2", "func3"]))
120
121 def test_assign_subshell_expansion_escaped_quotes(self):
122 self.parseExpression('foo="\\"fo\\"o$(func1)"')
123 self.assertExecs(set(["func1"]))
124
125 def test_assign_subshell_expansion_empty(self):
126 self.parseExpression('foo="bar$()foo"')
127 self.assertExecs(set())
128
129 def test_assign_subshell_backticks(self):
130 self.parseExpression("foo=`echo bar`")
131 self.assertExecs(set(["echo"]))
132
133 def test_assign_subshell_backticks_quotes(self):
134 self.parseExpression('foo="`echo bar`"')
135 self.assertExecs(set(["echo"]))
136
137 def test_assign_subshell_backticks_multiple(self):
138 self.parseExpression('foo="`func1 bar` `func2`"')
139 self.assertExecs(set(["func1", "func2"]))
140
141 def test_assign_subshell_backticks_escaped_quotes(self):
142 self.parseExpression('foo="\\"fo\\"o`func1`"')
143 self.assertExecs(set(["func1"]))
144
145 def test_assign_subshell_backticks_empty(self):
146 self.parseExpression('foo="bar``foo"')
147 self.assertExecs(set())
148
103 def test_shell_unexpanded(self): 149 def test_shell_unexpanded(self):
104 self.setEmptyVars(["QT_BASE_NAME"]) 150 self.setEmptyVars(["QT_BASE_NAME"])
105 self.parseExpression('echo "${QT_BASE_NAME}"') 151 self.parseExpression('echo "${QT_BASE_NAME}"')
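
The new subshell cases above exercise quoted, nested, escaped and empty $(...) and backtick substitutions. A toy scanner showing the behaviour the tests expect; bitbake's real ShellParser is pyshlex-based and handles far more than this:

    def command_words(s):
        # First word of each $(...) / `...` command substitution, recursing
        # into nested substitutions. Illustration only.
        words = []

        def scan(text):
            i = 0
            while i < len(text):
                if text.startswith("$(", i):
                    depth, j = 1, i + 2
                    while j < len(text) and depth:
                        if text.startswith("$(", j):
                            depth += 1
                            j += 2
                            continue
                        if text[j] == ")":
                            depth -= 1
                        j += 1
                    inner = text[i + 2:j - 1]
                    scan(inner)
                    first = inner.split(None, 1)
                    if first:
                        words.append(first[0])
                    i = j
                elif text[i] == "`":
                    j = text.find("`", i + 1)
                    if j == -1:
                        break
                    inner = text[i + 1:j]
                    scan(inner)
                    first = inner.split(None, 1)
                    if first:
                        words.append(first[0])
                    i = j + 1
                else:
                    i += 1

        scan(s)
        return set(words)

    assert command_words('foo="$(func1 "$(func2 bar$(func3))")"') == {"func1", "func2", "func3"}
    assert command_words('foo="bar$()foo"') == set()
    assert command_words('foo="`func1 bar` `func2`"') == {"func1", "func2"}
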
@@ -111,9 +157,9 @@ ${D}${libdir}/pkgconfig/*.pc
111 self.assertExecs(set(["sed"])) 157 self.assertExecs(set(["sed"]))
112 158
113 def test_parameter_expansion_modifiers(self): 159 def test_parameter_expansion_modifiers(self):
114 # - and + are also valid modifiers for parameter expansion, but are 160 # -, + and : are also valid modifiers for parameter expansion, but are
115 # valid characters in bitbake variable names, so are not included here 161 # valid characters in bitbake variable names, so are not included here
116 for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'): 162 for i in ('=', '?', '#', '%', '##', '%%'):
117 name = "foo%sbar" % i 163 name = "foo%sbar" % i
118 self.parseExpression("${%s}" % name) 164 self.parseExpression("${%s}" % name)
119 self.assertNotIn(name, self.references) 165 self.assertNotIn(name, self.references)
@@ -318,7 +364,7 @@ d.getVar(a(), False)
318 "filename": "example.bb", 364 "filename": "example.bb",
319 }) 365 })
320 366
321 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 367 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
322 368
323 self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"])) 369 self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
324 370
@@ -365,7 +411,7 @@ esac
365 self.d.setVarFlags("FOO", {"func": True}) 411 self.d.setVarFlags("FOO", {"func": True})
366 self.setEmptyVars(execs) 412 self.setEmptyVars(execs)
367 413
368 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 414 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
369 415
370 self.assertEqual(deps, set(["somevar", "inverted"] + execs)) 416 self.assertEqual(deps, set(["somevar", "inverted"] + execs))
371 417
@@ -375,7 +421,7 @@ esac
375 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") 421 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
376 self.d.setVarFlag("FOO", "vardeps", "oe_libinstall") 422 self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
377 423
378 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 424 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
379 425
380 self.assertEqual(deps, set(["oe_libinstall"])) 426 self.assertEqual(deps, set(["oe_libinstall"]))
381 427
@@ -384,7 +430,7 @@ esac
384 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") 430 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
385 self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}") 431 self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
386 432
387 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 433 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
388 434
389 self.assertEqual(deps, set(["oe_libinstall"])) 435 self.assertEqual(deps, set(["oe_libinstall"]))
390 436
@@ -399,7 +445,7 @@ esac
399 # Check dependencies 445 # Check dependencies
400 self.d.setVar('ANOTHERVAR', expr) 446 self.d.setVar('ANOTHERVAR', expr)
401 self.d.setVar('TESTVAR', 'anothervalue testval testval2') 447 self.d.setVar('TESTVAR', 'anothervalue testval testval2')
402 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d) 448 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
403 self.assertEqual(sorted(values.splitlines()), 449 self.assertEqual(sorted(values.splitlines()),
404 sorted([expr, 450 sorted([expr,
405 'TESTVAR{anothervalue} = Set', 451 'TESTVAR{anothervalue} = Set',
@@ -412,11 +458,55 @@ esac
412 # Check final value 458 # Check final value
413 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone']) 459 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
414 460
461 def test_contains_vardeps_excluded(self):
462 # Check the ignored_vars option to build_dependencies is handled by contains functionality
463 varval = '${TESTVAR2} ${@bb.utils.filter("TESTVAR", "somevalue anothervalue", d)}'
464 self.d.setVar('ANOTHERVAR', varval)
465 self.d.setVar('TESTVAR', 'anothervalue testval testval2')
466 self.d.setVar('TESTVAR2', 'testval3')
467 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(["TESTVAR"]), self.d, self.d)
468 self.assertEqual(sorted(values.splitlines()), sorted([varval]))
469 self.assertEqual(deps, set(["TESTVAR2"]))
470 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
471
472 # Check the vardepsexclude flag is handled by contains functionality
473 self.d.setVarFlag('ANOTHERVAR', 'vardepsexclude', 'TESTVAR')
474 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
475 self.assertEqual(sorted(values.splitlines()), sorted([varval]))
476 self.assertEqual(deps, set(["TESTVAR2"]))
477 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
478
479 def test_contains_vardeps_override_operators(self):
480 # Check override operators handle dependencies correctly with the contains functionality
481 expr_plain = 'testval'
482 expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
483 expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
484 expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
485 # Check dependencies
486 self.d.setVar('ANOTHERVAR', expr_plain)
487 self.d.prependVar('ANOTHERVAR', expr_prepend)
488 self.d.appendVar('ANOTHERVAR', expr_append)
489 self.d.setVar('ANOTHERVAR:remove', expr_remove)
490 self.d.setVar('TESTVAR1', 'blah')
491 self.d.setVar('TESTVAR2', 'testval2')
492 self.d.setVar('TESTVAR3', 'no-testval')
493 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
494 self.assertEqual(sorted(values.splitlines()),
495 sorted([
496 expr_prepend + expr_plain + expr_append,
497 '_remove of ' + expr_remove,
498 'TESTVAR1{testval1} = Unset',
499 'TESTVAR2{testval2} = Set',
500 'TESTVAR3{no-testval} = Set',
501 ]))
502 # Check final value
503 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
504
415 #Currently no wildcard support 505 #Currently no wildcard support
416 #def test_vardeps_wildcards(self): 506 #def test_vardeps_wildcards(self):
417 # self.d.setVar("oe_libinstall", "echo test") 507 # self.d.setVar("oe_libinstall", "echo test")
418 # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") 508 # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
419 # self.d.setVarFlag("FOO", "vardeps", "oe_*") 509 # self.d.setVarFlag("FOO", "vardeps", "oe_*")
420 # self.assertEquals(deps, set(["oe_libinstall"])) 510 # self.assertEqual(deps, set(["oe_libinstall"]))
421 511
422 512
diff --git a/bitbake/lib/bb/tests/color.py b/bitbake/lib/bb/tests/color.py
index bf03750c69..bb70cb393d 100644
--- a/bitbake/lib/bb/tests/color.py
+++ b/bitbake/lib/bb/tests/color.py
@@ -20,7 +20,7 @@ class ProgressWatcher:
20 def __init__(self): 20 def __init__(self):
21 self._reports = [] 21 self._reports = []
22 22
23 def handle_event(self, event): 23 def handle_event(self, event, d):
24 self._reports.append((event.progress, event.rate)) 24 self._reports.append((event.progress, event.rate))
25 25
26 def reports(self): 26 def reports(self):
@@ -31,7 +31,7 @@ class ColorCodeTests(unittest.TestCase):
31 def setUp(self): 31 def setUp(self):
32 self.d = bb.data.init() 32 self.d = bb.data.init()
33 self._progress_watcher = ProgressWatcher() 33 self._progress_watcher = ProgressWatcher()
34 bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event) 34 bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event, data=self.d)
35 35
36 def tearDown(self): 36 def tearDown(self):
37 bb.event.remove("bb.build.TaskProgress", None) 37 bb.event.remove("bb.build.TaskProgress", None)
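
The change above reflects the new class-handler contract: handlers now receive the datastore alongside the event, and register() takes a data argument. Assuming bitbake's lib/ is importable, a registration now looks like this (handler name is illustrative):

    import bb.data
    import bb.event

    def on_task_progress(event, d):
        # Handlers now take (event, d) rather than just (event).
        print(event.progress, event.rate)

    d = bb.data.init()
    bb.event.register("bb.build.TaskProgress", on_task_progress, data=d)
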
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py
new file mode 100644
index 0000000000..16c297b315
--- /dev/null
+++ b/bitbake/lib/bb/tests/compression.py
@@ -0,0 +1,100 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from pathlib import Path
8import bb.compress.lz4
9import bb.compress.zstd
10import contextlib
11import os
12import shutil
13import tempfile
14import unittest
15import subprocess
16
17
18class CompressionTests(object):
19 def setUp(self):
20 self._t = tempfile.TemporaryDirectory()
21 self.tmpdir = Path(self._t.name)
22 self.addCleanup(self._t.cleanup)
23
24 def _file_helper(self, mode_suffix, data):
25 tmp_file = self.tmpdir / "compressed"
26
27 with self.do_open(tmp_file, mode="w" + mode_suffix) as f:
28 f.write(data)
29
30 with self.do_open(tmp_file, mode="r" + mode_suffix) as f:
31 read_data = f.read()
32
33 self.assertEqual(read_data, data)
34
35 def test_text_file(self):
36 self._file_helper("t", "Hello")
37
38 def test_binary_file(self):
39 self._file_helper("b", "Hello".encode("utf-8"))
40
41 def _pipe_helper(self, mode_suffix, data):
42 rfd, wfd = os.pipe()
43 with open(rfd, "rb") as r, open(wfd, "wb") as w:
44 with self.do_open(r, mode="r" + mode_suffix) as decompress:
45 with self.do_open(w, mode="w" + mode_suffix) as compress:
46 compress.write(data)
47 read_data = decompress.read()
48
49 self.assertEqual(read_data, data)
50
51 def test_text_pipe(self):
52 self._pipe_helper("t", "Hello")
53
54 def test_binary_pipe(self):
55 self._pipe_helper("b", "Hello".encode("utf-8"))
56
57 def test_bad_decompress(self):
58 tmp_file = self.tmpdir / "compressed"
59 with tmp_file.open("wb") as f:
60 f.write(b"\x00")
61
62 with self.assertRaises(OSError):
63 with self.do_open(tmp_file, mode="rb", stderr=subprocess.DEVNULL) as f:
64 data = f.read()
65
66
67class LZ4Tests(CompressionTests, unittest.TestCase):
68 def setUp(self):
69 if shutil.which("lz4") is None:
70 self.skipTest("'lz4' not found")
71 super().setUp()
72
73 @contextlib.contextmanager
74 def do_open(self, *args, **kwargs):
75 with bb.compress.lz4.open(*args, **kwargs) as f:
76 yield f
77
78
79class ZStdTests(CompressionTests, unittest.TestCase):
80 def setUp(self):
81 if shutil.which("zstd") is None:
82 self.skipTest("'zstd' not found")
83 super().setUp()
84
85 @contextlib.contextmanager
86 def do_open(self, *args, **kwargs):
87 with bb.compress.zstd.open(*args, **kwargs) as f:
88 yield f
89
90
91class PZStdTests(CompressionTests, unittest.TestCase):
92 def setUp(self):
93 if shutil.which("pzstd") is None:
94 self.skipTest("'pzstd' not found")
95 super().setUp()
96
97 @contextlib.contextmanager
98 def do_open(self, *args, **kwargs):
99 with bb.compress.zstd.open(*args, num_threads=2, **kwargs) as f:
100 yield f
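
The new test module exercises bb.compress.lz4 and bb.compress.zstd, which shell out to the corresponding command-line tools. Assuming bitbake's lib/ is importable and zstd is installed, basic usage mirrors the helpers above (the path is illustrative):

    import bb.compress.zstd

    # Same open() signature the tests (and the new siggen code) use.
    with bb.compress.zstd.open("/tmp/demo.zst", "wt", encoding="utf-8", num_threads=1) as f:
        f.write("Hello")
    with bb.compress.zstd.open("/tmp/demo.zst", "rt", encoding="utf-8", num_threads=1) as f:
        assert f.read() == "Hello"
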
diff --git a/bitbake/lib/bb/tests/cooker.py b/bitbake/lib/bb/tests/cooker.py
index c82d4b7b81..9e524ae345 100644
--- a/bitbake/lib/bb/tests/cooker.py
+++ b/bitbake/lib/bb/tests/cooker.py
@@ -1,6 +1,8 @@
1# 1#
2# BitBake Tests for cooker.py 2# BitBake Tests for cooker.py
3# 3#
4# Copyright BitBake Contributors
5#
4# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
5# 7#
6 8
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index 1d4a64b109..a895f6a58e 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -60,6 +60,15 @@ class DataExpansions(unittest.TestCase):
60 val = self.d.expand("${@5*12}") 60 val = self.d.expand("${@5*12}")
61 self.assertEqual(str(val), "60") 61 self.assertEqual(str(val), "60")
62 62
63 def test_python_snippet_w_dict(self):
64 val = self.d.expand("${@{ 'green': 1, 'blue': 2 }['green']}")
65 self.assertEqual(str(val), "1")
66
67 def test_python_unexpanded_multi(self):
68 self.d.setVar("bar", "${unsetvar}")
69 val = self.d.expand("${@2*2},${foo},${@d.getVar('foo') + ' ${bar}'},${foo}")
70 self.assertEqual(str(val), "4,value_of_foo,${@d.getVar('foo') + ' ${unsetvar}'},value_of_foo")
71
63 def test_expand_in_python_snippet(self): 72 def test_expand_in_python_snippet(self):
64 val = self.d.expand("${@'boo ' + '${foo}'}") 73 val = self.d.expand("${@'boo ' + '${foo}'}")
65 self.assertEqual(str(val), "boo value_of_foo") 74 self.assertEqual(str(val), "boo value_of_foo")
@@ -68,6 +77,18 @@ class DataExpansions(unittest.TestCase):
68 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") 77 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
69 self.assertEqual(str(val), "value_of_foo value_of_bar") 78 self.assertEqual(str(val), "value_of_foo value_of_bar")
70 79
80 def test_python_snippet_function_reference(self):
81 self.d.setVar("TESTVAL", "testvalue")
82 self.d.setVar("testfunc", 'd.getVar("TESTVAL")')
83 context = bb.utils.get_context()
84 context["testfunc"] = lambda d: d.getVar("TESTVAL")
85 val = self.d.expand("${@testfunc(d)}")
86 self.assertEqual(str(val), "testvalue")
87
88 def test_python_snippet_builtin_metadata(self):
89 self.d.setVar("eval", "INVALID")
90 self.d.expand("${@eval('3')}")
91
71 def test_python_unexpanded(self): 92 def test_python_unexpanded(self):
72 self.d.setVar("bar", "${unsetvar}") 93 self.d.setVar("bar", "${unsetvar}")
73 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") 94 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
@@ -245,35 +266,35 @@ class TestConcatOverride(unittest.TestCase):
245 266
246 def test_prepend(self): 267 def test_prepend(self):
247 self.d.setVar("TEST", "${VAL}") 268 self.d.setVar("TEST", "${VAL}")
248 self.d.setVar("TEST_prepend", "${FOO}:") 269 self.d.setVar("TEST:prepend", "${FOO}:")
249 self.assertEqual(self.d.getVar("TEST"), "foo:val") 270 self.assertEqual(self.d.getVar("TEST"), "foo:val")
250 271
251 def test_append(self): 272 def test_append(self):
252 self.d.setVar("TEST", "${VAL}") 273 self.d.setVar("TEST", "${VAL}")
253 self.d.setVar("TEST_append", ":${BAR}") 274 self.d.setVar("TEST:append", ":${BAR}")
254 self.assertEqual(self.d.getVar("TEST"), "val:bar") 275 self.assertEqual(self.d.getVar("TEST"), "val:bar")
255 276
256 def test_multiple_append(self): 277 def test_multiple_append(self):
257 self.d.setVar("TEST", "${VAL}") 278 self.d.setVar("TEST", "${VAL}")
258 self.d.setVar("TEST_prepend", "${FOO}:") 279 self.d.setVar("TEST:prepend", "${FOO}:")
259 self.d.setVar("TEST_append", ":val2") 280 self.d.setVar("TEST:append", ":val2")
260 self.d.setVar("TEST_append", ":${BAR}") 281 self.d.setVar("TEST:append", ":${BAR}")
261 self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") 282 self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
262 283
263 def test_append_unset(self): 284 def test_append_unset(self):
264 self.d.setVar("TEST_prepend", "${FOO}:") 285 self.d.setVar("TEST:prepend", "${FOO}:")
265 self.d.setVar("TEST_append", ":val2") 286 self.d.setVar("TEST:append", ":val2")
266 self.d.setVar("TEST_append", ":${BAR}") 287 self.d.setVar("TEST:append", ":${BAR}")
267 self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar") 288 self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar")
268 289
269 def test_remove(self): 290 def test_remove(self):
270 self.d.setVar("TEST", "${VAL} ${BAR}") 291 self.d.setVar("TEST", "${VAL} ${BAR}")
271 self.d.setVar("TEST_remove", "val") 292 self.d.setVar("TEST:remove", "val")
272 self.assertEqual(self.d.getVar("TEST"), " bar") 293 self.assertEqual(self.d.getVar("TEST"), " bar")
273 294
274 def test_remove_cleared(self): 295 def test_remove_cleared(self):
275 self.d.setVar("TEST", "${VAL} ${BAR}") 296 self.d.setVar("TEST", "${VAL} ${BAR}")
276 self.d.setVar("TEST_remove", "val") 297 self.d.setVar("TEST:remove", "val")
277 self.d.setVar("TEST", "${VAL} ${BAR}") 298 self.d.setVar("TEST", "${VAL} ${BAR}")
278 self.assertEqual(self.d.getVar("TEST"), "val bar") 299 self.assertEqual(self.d.getVar("TEST"), "val bar")
279 300
@@ -281,42 +302,42 @@ class TestConcatOverride(unittest.TestCase):
281 # (including that whitespace is preserved) 302 # (including that whitespace is preserved)
282 def test_remove_inactive_override(self): 303 def test_remove_inactive_override(self):
283 self.d.setVar("TEST", "${VAL} ${BAR} 123") 304 self.d.setVar("TEST", "${VAL} ${BAR} 123")
284 self.d.setVar("TEST_remove_inactiveoverride", "val") 305 self.d.setVar("TEST:remove:inactiveoverride", "val")
285 self.assertEqual(self.d.getVar("TEST"), "val bar 123") 306 self.assertEqual(self.d.getVar("TEST"), "val bar 123")
286 307
287 def test_doubleref_remove(self): 308 def test_doubleref_remove(self):
288 self.d.setVar("TEST", "${VAL} ${BAR}") 309 self.d.setVar("TEST", "${VAL} ${BAR}")
289 self.d.setVar("TEST_remove", "val") 310 self.d.setVar("TEST:remove", "val")
290 self.d.setVar("TEST_TEST", "${TEST} ${TEST}") 311 self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
291 self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar") 312 self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar")
292 313
293 def test_empty_remove(self): 314 def test_empty_remove(self):
294 self.d.setVar("TEST", "") 315 self.d.setVar("TEST", "")
295 self.d.setVar("TEST_remove", "val") 316 self.d.setVar("TEST:remove", "val")
296 self.assertEqual(self.d.getVar("TEST"), "") 317 self.assertEqual(self.d.getVar("TEST"), "")
297 318
298 def test_remove_expansion(self): 319 def test_remove_expansion(self):
299 self.d.setVar("BAR", "Z") 320 self.d.setVar("BAR", "Z")
300 self.d.setVar("TEST", "${BAR}/X Y") 321 self.d.setVar("TEST", "${BAR}/X Y")
301 self.d.setVar("TEST_remove", "${BAR}/X") 322 self.d.setVar("TEST:remove", "${BAR}/X")
302 self.assertEqual(self.d.getVar("TEST"), " Y") 323 self.assertEqual(self.d.getVar("TEST"), " Y")
303 324
304 def test_remove_expansion_items(self): 325 def test_remove_expansion_items(self):
305 self.d.setVar("TEST", "A B C D") 326 self.d.setVar("TEST", "A B C D")
306 self.d.setVar("BAR", "B D") 327 self.d.setVar("BAR", "B D")
307 self.d.setVar("TEST_remove", "${BAR}") 328 self.d.setVar("TEST:remove", "${BAR}")
308 self.assertEqual(self.d.getVar("TEST"), "A C ") 329 self.assertEqual(self.d.getVar("TEST"), "A C ")
309 330
310 def test_remove_preserve_whitespace(self): 331 def test_remove_preserve_whitespace(self):
311 # When the removal isn't active, the original value should be preserved 332 # When the removal isn't active, the original value should be preserved
312 self.d.setVar("TEST", " A B") 333 self.d.setVar("TEST", " A B")
313 self.d.setVar("TEST_remove", "C") 334 self.d.setVar("TEST:remove", "C")
314 self.assertEqual(self.d.getVar("TEST"), " A B") 335 self.assertEqual(self.d.getVar("TEST"), " A B")
315 336
316 def test_remove_preserve_whitespace2(self): 337 def test_remove_preserve_whitespace2(self):
317 # When the removal is active preserve the whitespace 338 # When the removal is active preserve the whitespace
318 self.d.setVar("TEST", " A B") 339 self.d.setVar("TEST", " A B")
319 self.d.setVar("TEST_remove", "B") 340 self.d.setVar("TEST:remove", "B")
320 self.assertEqual(self.d.getVar("TEST"), " A ") 341 self.assertEqual(self.d.getVar("TEST"), " A ")
321 342
322class TestOverrides(unittest.TestCase): 343class TestOverrides(unittest.TestCase):
@@ -329,81 +350,86 @@ class TestOverrides(unittest.TestCase):
329 self.assertEqual(self.d.getVar("TEST"), "testvalue") 350 self.assertEqual(self.d.getVar("TEST"), "testvalue")
330 351
331 def test_one_override(self): 352 def test_one_override(self):
332 self.d.setVar("TEST_bar", "testvalue2") 353 self.d.setVar("TEST:bar", "testvalue2")
333 self.assertEqual(self.d.getVar("TEST"), "testvalue2") 354 self.assertEqual(self.d.getVar("TEST"), "testvalue2")
334 355
335 def test_one_override_unset(self): 356 def test_one_override_unset(self):
336 self.d.setVar("TEST2_bar", "testvalue2") 357 self.d.setVar("TEST2:bar", "testvalue2")
337 358
338 self.assertEqual(self.d.getVar("TEST2"), "testvalue2") 359 self.assertEqual(self.d.getVar("TEST2"), "testvalue2")
339 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) 360 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2:bar'])
340 361
341 def test_multiple_override(self): 362 def test_multiple_override(self):
342 self.d.setVar("TEST_bar", "testvalue2") 363 self.d.setVar("TEST:bar", "testvalue2")
343 self.d.setVar("TEST_local", "testvalue3") 364 self.d.setVar("TEST:local", "testvalue3")
344 self.d.setVar("TEST_foo", "testvalue4") 365 self.d.setVar("TEST:foo", "testvalue4")
345 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 366 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
346 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) 367 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST:foo', 'OVERRIDES', 'TEST:bar', 'TEST:local'])
347 368
348 def test_multiple_combined_overrides(self): 369 def test_multiple_combined_overrides(self):
349 self.d.setVar("TEST_local_foo_bar", "testvalue3") 370 self.d.setVar("TEST:local:foo:bar", "testvalue3")
350 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 371 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
351 372
352 def test_multiple_overrides_unset(self): 373 def test_multiple_overrides_unset(self):
353 self.d.setVar("TEST2_local_foo_bar", "testvalue3") 374 self.d.setVar("TEST2:local:foo:bar", "testvalue3")
354 self.assertEqual(self.d.getVar("TEST2"), "testvalue3") 375 self.assertEqual(self.d.getVar("TEST2"), "testvalue3")
355 376
356 def test_keyexpansion_override(self): 377 def test_keyexpansion_override(self):
357 self.d.setVar("LOCAL", "local") 378 self.d.setVar("LOCAL", "local")
358 self.d.setVar("TEST_bar", "testvalue2") 379 self.d.setVar("TEST:bar", "testvalue2")
359 self.d.setVar("TEST_${LOCAL}", "testvalue3") 380 self.d.setVar("TEST:${LOCAL}", "testvalue3")
360 self.d.setVar("TEST_foo", "testvalue4") 381 self.d.setVar("TEST:foo", "testvalue4")
361 bb.data.expandKeys(self.d) 382 bb.data.expandKeys(self.d)
362 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 383 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
363 384
364 def test_rename_override(self): 385 def test_rename_override(self):
365 self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") 386 self.d.setVar("ALTERNATIVE:ncurses-tools:class-target", "a")
366 self.d.setVar("OVERRIDES", "class-target") 387 self.d.setVar("OVERRIDES", "class-target")
367 self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") 388 self.d.renameVar("ALTERNATIVE:ncurses-tools", "ALTERNATIVE:lib32-ncurses-tools")
368 self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a") 389 self.assertEqual(self.d.getVar("ALTERNATIVE:lib32-ncurses-tools"), "a")
369 390
370 def test_underscore_override(self): 391 def test_underscore_override(self):
371 self.d.setVar("TEST_bar", "testvalue2") 392 self.d.setVar("TEST:bar", "testvalue2")
372 self.d.setVar("TEST_some_val", "testvalue3") 393 self.d.setVar("TEST:some_val", "testvalue3")
373 self.d.setVar("TEST_foo", "testvalue4") 394 self.d.setVar("TEST:foo", "testvalue4")
374 self.d.setVar("OVERRIDES", "foo:bar:some_val") 395 self.d.setVar("OVERRIDES", "foo:bar:some_val")
375 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 396 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
376 397
398 # Test an override with _<numeric> in it based on a real world OE issue
399 def test_underscore_override_2(self):
400 self.d.setVar("TARGET_ARCH", "x86_64")
401 self.d.setVar("PN", "test-${TARGET_ARCH}")
402 self.d.setVar("VERSION", "1")
403 self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2")
404 self.d.setVar("OVERRIDES", "pn-${PN}")
405 bb.data.expandKeys(self.d)
406 self.assertEqual(self.d.getVar("VERSION"), "2")
407
377 def test_remove_with_override(self): 408 def test_remove_with_override(self):
378 self.d.setVar("TEST_bar", "testvalue2") 409 self.d.setVar("TEST:bar", "testvalue2")
379 self.d.setVar("TEST_some_val", "testvalue3 testvalue5") 410 self.d.setVar("TEST:some_val", "testvalue3 testvalue5")
380 self.d.setVar("TEST_some_val_remove", "testvalue3") 411 self.d.setVar("TEST:some_val:remove", "testvalue3")
381 self.d.setVar("TEST_foo", "testvalue4") 412 self.d.setVar("TEST:foo", "testvalue4")
382 self.d.setVar("OVERRIDES", "foo:bar:some_val") 413 self.d.setVar("OVERRIDES", "foo:bar:some_val")
383 self.assertEqual(self.d.getVar("TEST"), " testvalue5") 414 self.assertEqual(self.d.getVar("TEST"), " testvalue5")
384 415
385 def test_append_and_override_1(self): 416 def test_append_and_override_1(self):
386 self.d.setVar("TEST_append", "testvalue2") 417 self.d.setVar("TEST:append", "testvalue2")
387 self.d.setVar("TEST_bar", "testvalue3") 418 self.d.setVar("TEST:bar", "testvalue3")
388 self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2") 419 self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2")
389 420
390 def test_append_and_override_2(self): 421 def test_append_and_override_2(self):
391 self.d.setVar("TEST_append_bar", "testvalue2") 422 self.d.setVar("TEST:append:bar", "testvalue2")
392 self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2") 423 self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2")
393 424
394 def test_append_and_override_3(self): 425 def test_append_and_override_3(self):
395 self.d.setVar("TEST_bar_append", "testvalue2") 426 self.d.setVar("TEST:bar:append", "testvalue2")
396 self.assertEqual(self.d.getVar("TEST"), "testvalue2") 427 self.assertEqual(self.d.getVar("TEST"), "testvalue2")
397 428
398 # Test an override with _<numeric> in it based on a real world OE issue 429 def test_append_and_unused_override(self):
399 def test_underscore_override(self): 430 # Had a bug where an unused override append could return "" instead of None
400 self.d.setVar("TARGET_ARCH", "x86_64") 431 self.d.setVar("BAR:append:unusedoverride", "testvalue2")
401 self.d.setVar("PN", "test-${TARGET_ARCH}") 432 self.assertEqual(self.d.getVar("BAR"), None)
402 self.d.setVar("VERSION", "1")
403 self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
404 self.d.setVar("OVERRIDES", "pn-${PN}")
405 bb.data.expandKeys(self.d)
406 self.assertEqual(self.d.getVar("VERSION"), "2")
407 433
408class TestKeyExpansion(unittest.TestCase): 434class TestKeyExpansion(unittest.TestCase):
409 def setUp(self): 435 def setUp(self):
@@ -424,17 +450,64 @@ class TestFlags(unittest.TestCase):
424 self.d = bb.data.init() 450 self.d = bb.data.init()
425 self.d.setVar("foo", "value of foo") 451 self.d.setVar("foo", "value of foo")
426 self.d.setVarFlag("foo", "flag1", "value of flag1") 452 self.d.setVarFlag("foo", "flag1", "value of flag1")
453 self.d.setVarFlag("foo", "_defaultval_flag_flag1", "default of flag1")
427 self.d.setVarFlag("foo", "flag2", "value of flag2") 454 self.d.setVarFlag("foo", "flag2", "value of flag2")
455 self.d.setVarFlag("foo", "_defaultval_flag_flag2", "default of flag2")
456 self.d.setVarFlag("foo", "flag3", "value of flag3")
457 self.d.setVarFlag("foo", "_defaultval_flag_flagnovalue", "default of flagnovalue")
428 458
429 def test_setflag(self): 459 def test_setflag(self):
430 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 460 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
431 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2") 461 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2")
462 self.assertDictEqual(
463 self.d.getVarFlags("foo"),
464 {
465 "flag1": "value of flag1",
466 "flag2": "value of flag2",
467 "flag3": "value of flag3",
468 "flagnovalue": "default of flagnovalue",
469 }
470 )
471 self.assertDictEqual(
472 self.d.getVarFlags("foo", internalflags=True),
473 {
474 "_content": "value of foo",
475 "flag1": "value of flag1",
476 "flag2": "value of flag2",
477 "flag3": "value of flag3",
478 "_defaultval_flag_flag1": "default of flag1",
479 "_defaultval_flag_flag2": "default of flag2",
480 "_defaultval_flag_flagnovalue": "default of flagnovalue",
481 }
482 )
432 483
433 def test_delflag(self): 484 def test_delflag(self):
434 self.d.delVarFlag("foo", "flag2") 485 self.d.delVarFlag("foo", "flag2")
486 self.d.delVarFlag("foo", "flag3")
435 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 487 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
436 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None) 488 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
437 489 self.assertDictEqual(
490 self.d.getVarFlags("foo"),
491 {
492 "flag1": "value of flag1",
493 "flagnovalue": "default of flagnovalue",
494 }
495 )
496 self.assertDictEqual(
497 self.d.getVarFlags("foo", internalflags=True),
498 {
499 "_content": "value of foo",
500 "flag1": "value of flag1",
501 "_defaultval_flag_flag1": "default of flag1",
502 "_defaultval_flag_flagnovalue": "default of flagnovalue",
503 }
504 )
505
506 def test_delvar(self):
507 self.d.delVar("foo")
508 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), None)
509 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
510 self.assertEqual(self.d.getVarFlags("foo", internalflags=True), None)
438 511
439class Contains(unittest.TestCase): 512class Contains(unittest.TestCase):
440 def setUp(self): 513 def setUp(self):
@@ -498,7 +571,7 @@ class TaskHash(unittest.TestCase):
498 d.setVar("VAR", "val") 571 d.setVar("VAR", "val")
499 # Adding an inactive removal shouldn't change the hash 572 # Adding an inactive removal shouldn't change the hash
500 d.setVar("BAR", "notbar") 573 d.setVar("BAR", "notbar")
501 d.setVar("MYCOMMAND_remove", "${BAR}") 574 d.setVar("MYCOMMAND:remove", "${BAR}")
502 nexthash = gettask_bashhash("mytask", d) 575 nexthash = gettask_bashhash("mytask", d)
503 self.assertEqual(orighash, nexthash) 576 self.assertEqual(orighash, nexthash)
504 577
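
The bulk of the data.py churn is the switch from underscore to colon override syntax (TEST_append becomes TEST:append, and so on). With bitbake's lib/ importable, the updated behaviour can be reproduced with the same calls the tests use:

    import bb.data

    d = bb.data.init()
    d.setVar("TEST", "val")
    d.setVar("TEST:prepend", "foo:")
    d.setVar("TEST:append", ":bar")
    assert d.getVar("TEST") == "foo:val:bar"

    # An append under an override that never becomes active must leave the
    # variable unset (the bug test_append_and_unused_override guards against).
    d.setVar("BAR:append:unusedoverride", "testvalue2")
    assert d.getVar("BAR") is None
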
diff --git a/bitbake/lib/bb/tests/event.py b/bitbake/lib/bb/tests/event.py
index 9ca7e9bc8e..ef61891d30 100644
--- a/bitbake/lib/bb/tests/event.py
+++ b/bitbake/lib/bb/tests/event.py
@@ -13,6 +13,7 @@ import pickle
13import threading 13import threading
14import time 14import time
15import unittest 15import unittest
16import tempfile
16from unittest.mock import Mock 17from unittest.mock import Mock
17from unittest.mock import call 18from unittest.mock import call
18 19
@@ -157,7 +158,7 @@ class EventHandlingTest(unittest.TestCase):
157 self._test_process.event_handler, 158 self._test_process.event_handler,
158 event, 159 event,
159 None) 160 None)
160 self._test_process.event_handler.assert_called_once_with(event) 161 self._test_process.event_handler.assert_called_once_with(event, None)
161 162
162 def test_fire_class_handlers(self): 163 def test_fire_class_handlers(self):
163 """ Test fire_class_handlers method """ 164 """ Test fire_class_handlers method """
@@ -175,10 +176,10 @@ class EventHandlingTest(unittest.TestCase):
         bb.event.fire_class_handlers(event1, None)
         bb.event.fire_class_handlers(event2, None)
         bb.event.fire_class_handlers(event2, None)
-        expected_event_handler1 = [call(event1)]
-        expected_event_handler2 = [call(event1),
-                                   call(event2),
-                                   call(event2)]
+        expected_event_handler1 = [call(event1, None)]
+        expected_event_handler2 = [call(event1, None),
+                                   call(event2, None),
+                                   call(event2, None)]
         self.assertEqual(self._test_process.event_handler1.call_args_list,
                          expected_event_handler1)
         self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -205,7 +206,7 @@ class EventHandlingTest(unittest.TestCase):
         bb.event.fire_class_handlers(event2, None)
         bb.event.fire_class_handlers(event2, None)
         expected_event_handler1 = []
-        expected_event_handler2 = [call(event1)]
+        expected_event_handler2 = [call(event1, None)]
         self.assertEqual(self._test_process.event_handler1.call_args_list,
                          expected_event_handler1)
         self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -223,7 +224,7 @@ class EventHandlingTest(unittest.TestCase):
         self.assertEqual(result, bb.event.Registered)
         bb.event.fire_class_handlers(event1, None)
         bb.event.fire_class_handlers(event2, None)
-        expected = [call(event1), call(event2)]
+        expected = [call(event1, None), call(event2, None)]
         self.assertEqual(self._test_process.event_handler1.call_args_list,
                          expected)
 
@@ -237,7 +238,7 @@ class EventHandlingTest(unittest.TestCase):
         self.assertEqual(result, bb.event.Registered)
         bb.event.fire_class_handlers(event1, None)
         bb.event.fire_class_handlers(event2, None)
-        expected = [call(event1), call(event2), call(event1)]
+        expected = [call(event1, None), call(event2, None), call(event1, None)]
         self.assertEqual(self._test_process.event_handler1.call_args_list,
                          expected)
 
@@ -251,7 +252,7 @@ class EventHandlingTest(unittest.TestCase):
         self.assertEqual(result, bb.event.Registered)
         bb.event.fire_class_handlers(event1, None)
         bb.event.fire_class_handlers(event2, None)
-        expected = [call(event1), call(event2), call(event1), call(event2)]
+        expected = [call(event1, None), call(event2, None), call(event1, None), call(event2, None)]
         self.assertEqual(self._test_process.event_handler1.call_args_list,
                          expected)
 
@@ -359,9 +360,10 @@ class EventHandlingTest(unittest.TestCase):
 
         event1 = bb.event.ConfigParsed()
         bb.event.fire(event1, None)
-        expected = [call(event1)]
+        expected = [call(event1, None)]
         self.assertEqual(self._test_process.event_handler1.call_args_list,
                          expected)
+        expected = [call(event1)]
         self.assertEqual(self._test_ui1.event.send.call_args_list,
                          expected)
 
@@ -450,10 +452,9 @@ class EventHandlingTest(unittest.TestCase):
             and disable threadlocks tests """
         bb.event.fire(bb.event.OperationStarted(), None)
 
-    def test_enable_threadlock(self):
+    def test_event_threadlock(self):
         """ Test enable_threadlock method """
         self._set_threadlock_test_mockups()
-        bb.event.enable_threadlock()
         self._set_and_run_threadlock_test_workers()
         # Calls to UI handlers should be in order as all the registered
         # handlers for the event coming from the first worker should be
@@ -461,20 +462,6 @@ class EventHandlingTest(unittest.TestCase):
         self.assertEqual(self._threadlock_test_calls,
                          ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"])
 
-
-    def test_disable_threadlock(self):
-        """ Test disable_threadlock method """
-        self._set_threadlock_test_mockups()
-        bb.event.disable_threadlock()
-        self._set_and_run_threadlock_test_workers()
-        # Calls to UI handlers should be intertwined together. Thanks to the
-        # delay in the registered handlers for the event coming from the first
-        # worker, the event coming from the second worker starts being
-        # processed before finishing handling the first worker event.
-        self.assertEqual(self._threadlock_test_calls,
-                         ["w1_ui1", "w2_ui1", "w1_ui2", "w2_ui2"])
-
-
 class EventClassesTest(unittest.TestCase):
     """ Event classes test class """
 
@@ -482,6 +469,8 @@ class EventClassesTest(unittest.TestCase):
 
     def setUp(self):
         bb.event.worker_pid = EventClassesTest._worker_pid
+        self.d = bb.data.init()
+        bb.parse.siggen = bb.siggen.init(self.d)
 
     def test_Event(self):
         """ Test the Event base class """
@@ -964,3 +953,24 @@ class EventClassesTest(unittest.TestCase):
         event = bb.event.FindSigInfoResult(result)
         self.assertEqual(event.result, result)
         self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_lineno_in_eventhandler(self):
+        # The error lineno is 5, not 4 since the first line is '\n'
+        error_line = """
+# Comment line1
+# Comment line2
+python test_lineno_in_eventhandler() {
+    This is an error line
+}
+addhandler test_lineno_in_eventhandler
+test_lineno_in_eventhandler[eventmask] = "bb.event.ConfigParsed"
+"""
+
+        with self.assertLogs() as logs:
+            f = tempfile.NamedTemporaryFile(suffix = '.bb')
+            f.write(bytes(error_line, "utf-8"))
+            f.flush()
+            d = bb.parse.handle(f.name, self.d)['']
+
+        output = "".join(logs.output)
+        self.assertTrue(" line 5\n" in output)
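The event.py changes above follow the new class-handler calling convention in
bb.event: handlers now receive the datastore as a second argument, which is why
every expected mock call gains ", None", while UI handlers (checked through
_test_ui1.event.send) still receive just the event. A hedged sketch of the
two-argument shape (not from the patch; assumes BitBake on PYTHONPATH):

    import bb.event

    def on_config_parsed(event, d):
        # "d" is whatever datastore was handed to bb.event.fire();
        # the tests above fire with d=None.
        print("handled %s" % type(event).__name__)

    bb.event.register("on_config_parsed", on_config_parsed,
                      mask=["bb.event.ConfigParsed"])
    bb.event.fire(bb.event.ConfigParsed(), None)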
diff --git a/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
new file mode 100644
index 0000000000..4a1eb4de13
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
@@ -0,0 +1,59 @@
1<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
2<html>
3 <head>
4 <title>Index of /debian/pool/main/m/minicom</title>
5 </head>
6 <body>
7<h1>Index of /debian/pool/main/m/minicom</h1>
8 <table>
9 <tr><th valign="top"><img src="/icons/blank.gif" alt="[ICO]"></th><th><a href="?C=N;O=D">Name</a></th><th><a href="?C=M;O=A">Last modified</a></th><th><a href="?C=S;O=A">Size</a></th></tr>
10 <tr><th colspan="4"><hr></th></tr>
11<tr><td valign="top"><img src="/icons/back.gif" alt="[PARENTDIR]"></td><td><a href="/debian/pool/main/m/">Parent Directory</a></td><td>&nbsp;</td><td align="right"> - </td></tr>
12<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.debian.tar.xz">minicom_2.7-1+deb8u1.debian.tar.xz</a></td><td align="right">2017-04-24 08:22 </td><td align="right"> 14K</td></tr>
13<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.dsc">minicom_2.7-1+deb8u1.dsc</a></td><td align="right">2017-04-24 08:22 </td><td align="right">1.9K</td></tr>
14<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_amd64.deb">minicom_2.7-1+deb8u1_amd64.deb</a></td><td align="right">2017-04-25 21:10 </td><td align="right">257K</td></tr>
15<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armel.deb">minicom_2.7-1+deb8u1_armel.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">246K</td></tr>
16<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armhf.deb">minicom_2.7-1+deb8u1_armhf.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">245K</td></tr>
17<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_i386.deb">minicom_2.7-1+deb8u1_i386.deb</a></td><td align="right">2017-04-25 21:41 </td><td align="right">258K</td></tr>
18<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.debian.tar.xz">minicom_2.7-1.1.debian.tar.xz</a></td><td align="right">2017-04-22 09:34 </td><td align="right"> 14K</td></tr>
19<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.dsc">minicom_2.7-1.1.dsc</a></td><td align="right">2017-04-22 09:34 </td><td align="right">1.9K</td></tr>
20<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_amd64.deb">minicom_2.7-1.1_amd64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
21<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_arm64.deb">minicom_2.7-1.1_arm64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">250K</td></tr>
22<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armel.deb">minicom_2.7-1.1_armel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">255K</td></tr>
23<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armhf.deb">minicom_2.7-1.1_armhf.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">254K</td></tr>
24<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_i386.deb">minicom_2.7-1.1_i386.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">266K</td></tr>
25<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips.deb">minicom_2.7-1.1_mips.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">258K</td></tr>
26<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips64el.deb">minicom_2.7-1.1_mips64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
27<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mipsel.deb">minicom_2.7-1.1_mipsel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
28<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_ppc64el.deb">minicom_2.7-1.1_ppc64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">253K</td></tr>
29<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_s390x.deb">minicom_2.7-1.1_s390x.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
30<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_amd64.deb">minicom_2.7.1-1+b1_amd64.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">262K</td></tr>
31<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_arm64.deb">minicom_2.7.1-1+b1_arm64.deb</a></td><td align="right">2018-05-06 07:58 </td><td align="right">250K</td></tr>
32<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armel.deb">minicom_2.7.1-1+b1_armel.deb</a></td><td align="right">2018-05-06 08:45 </td><td align="right">253K</td></tr>
33<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armhf.deb">minicom_2.7.1-1+b1_armhf.deb</a></td><td align="right">2018-05-06 10:42 </td><td align="right">253K</td></tr>
34<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_i386.deb">minicom_2.7.1-1+b1_i386.deb</a></td><td align="right">2018-05-06 08:55 </td><td align="right">266K</td></tr>
35<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mips.deb">minicom_2.7.1-1+b1_mips.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">258K</td></tr>
36<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mipsel.deb">minicom_2.7.1-1+b1_mipsel.deb</a></td><td align="right">2018-05-06 12:13 </td><td align="right">259K</td></tr>
37<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_ppc64el.deb">minicom_2.7.1-1+b1_ppc64el.deb</a></td><td align="right">2018-05-06 09:10 </td><td align="right">260K</td></tr>
38<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_s390x.deb">minicom_2.7.1-1+b1_s390x.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">257K</td></tr>
39<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b2_mips64el.deb">minicom_2.7.1-1+b2_mips64el.deb</a></td><td align="right">2018-05-06 09:41 </td><td align="right">260K</td></tr>
40<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.debian.tar.xz">minicom_2.7.1-1.debian.tar.xz</a></td><td align="right">2017-08-13 15:40 </td><td align="right"> 14K</td></tr>
41<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.dsc">minicom_2.7.1-1.dsc</a></td><td align="right">2017-08-13 15:40 </td><td align="right">1.8K</td></tr>
42<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.1.orig.tar.gz">minicom_2.7.1.orig.tar.gz</a></td><td align="right">2017-08-13 15:40 </td><td align="right">855K</td></tr>
43<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.orig.tar.gz">minicom_2.7.orig.tar.gz</a></td><td align="right">2014-01-01 09:36 </td><td align="right">843K</td></tr>
44<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.debian.tar.xz">minicom_2.8-2.debian.tar.xz</a></td><td align="right">2021-06-15 03:47 </td><td align="right"> 14K</td></tr>
45<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.dsc">minicom_2.8-2.dsc</a></td><td align="right">2021-06-15 03:47 </td><td align="right">1.8K</td></tr>
46<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_amd64.deb">minicom_2.8-2_amd64.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">280K</td></tr>
47<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_arm64.deb">minicom_2.8-2_arm64.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">275K</td></tr>
48<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armel.deb">minicom_2.8-2_armel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">271K</td></tr>
49<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armhf.deb">minicom_2.8-2_armhf.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">272K</td></tr>
50<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_i386.deb">minicom_2.8-2_i386.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">285K</td></tr>
51<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mips64el.deb">minicom_2.8-2_mips64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">277K</td></tr>
52<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mipsel.deb">minicom_2.8-2_mipsel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">278K</td></tr>
53<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_ppc64el.deb">minicom_2.8-2_ppc64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">286K</td></tr>
54<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_s390x.deb">minicom_2.8-2_s390x.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">275K</td></tr>
55<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8.orig.tar.bz2">minicom_2.8.orig.tar.bz2</a></td><td align="right">2021-01-03 12:44 </td><td align="right">598K</td></tr>
56 <tr><th colspan="4"><hr></th></tr>
57</table>
58<address>Apache Server at ftp.debian.org Port 80</address>
59</body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
new file mode 100644
index 0000000000..4e41af6d6a
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
@@ -0,0 +1,20 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/2.10/</title>
4</head><body><h1>Index of /sources/libxml2/2.10/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="LATEST-IS-2.10.3" title="LATEST-IS-2.10.3">LATEST-IS-2.10.3</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
8<tr><td class="link"><a href="libxml2-2.10.0.news" title="libxml2-2.10.0.news">libxml2-2.10.0.news</a></td><td class="size">7.1 KiB</td><td class="date">2022-Aug-17 11:55</td></tr>
9<tr><td class="link"><a href="libxml2-2.10.0.sha256sum" title="libxml2-2.10.0.sha256sum">libxml2-2.10.0.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-17 11:55</td></tr>
10<tr><td class="link"><a href="libxml2-2.10.0.tar.xz" title="libxml2-2.10.0.tar.xz">libxml2-2.10.0.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-17 11:55</td></tr>
11<tr><td class="link"><a href="libxml2-2.10.1.news" title="libxml2-2.10.1.news">libxml2-2.10.1.news</a></td><td class="size">455 B</td><td class="date">2022-Aug-25 11:33</td></tr>
12<tr><td class="link"><a href="libxml2-2.10.1.sha256sum" title="libxml2-2.10.1.sha256sum">libxml2-2.10.1.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-25 11:33</td></tr>
13<tr><td class="link"><a href="libxml2-2.10.1.tar.xz" title="libxml2-2.10.1.tar.xz">libxml2-2.10.1.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-25 11:33</td></tr>
14<tr><td class="link"><a href="libxml2-2.10.2.news" title="libxml2-2.10.2.news">libxml2-2.10.2.news</a></td><td class="size">309 B</td><td class="date">2022-Aug-29 14:56</td></tr>
15<tr><td class="link"><a href="libxml2-2.10.2.sha256sum" title="libxml2-2.10.2.sha256sum">libxml2-2.10.2.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-29 14:56</td></tr>
16<tr><td class="link"><a href="libxml2-2.10.2.tar.xz" title="libxml2-2.10.2.tar.xz">libxml2-2.10.2.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Aug-29 14:56</td></tr>
17<tr><td class="link"><a href="libxml2-2.10.3.news" title="libxml2-2.10.3.news">libxml2-2.10.3.news</a></td><td class="size">294 B</td><td class="date">2022-Oct-14 12:55</td></tr>
18<tr><td class="link"><a href="libxml2-2.10.3.sha256sum" title="libxml2-2.10.3.sha256sum">libxml2-2.10.3.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Oct-14 12:55</td></tr>
19<tr><td class="link"><a href="libxml2-2.10.3.tar.xz" title="libxml2-2.10.3.tar.xz">libxml2-2.10.3.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
20</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
new file mode 100644
index 0000000000..abdfdd0fa2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
@@ -0,0 +1,40 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/2.9/</title>
4</head><body><h1>Index of /sources/libxml2/2.9/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="LATEST-IS-2.9.14" title="LATEST-IS-2.9.14">LATEST-IS-2.9.14</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
8<tr><td class="link"><a href="libxml2-2.9.0.sha256sum" title="libxml2-2.9.0.sha256sum">libxml2-2.9.0.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:27</td></tr>
9<tr><td class="link"><a href="libxml2-2.9.0.tar.xz" title="libxml2-2.9.0.tar.xz">libxml2-2.9.0.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:27</td></tr>
10<tr><td class="link"><a href="libxml2-2.9.1.sha256sum" title="libxml2-2.9.1.sha256sum">libxml2-2.9.1.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:28</td></tr>
11<tr><td class="link"><a href="libxml2-2.9.1.tar.xz" title="libxml2-2.9.1.tar.xz">libxml2-2.9.1.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:28</td></tr>
12<tr><td class="link"><a href="libxml2-2.9.10.sha256sum" title="libxml2-2.9.10.sha256sum">libxml2-2.9.10.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:42</td></tr>
13<tr><td class="link"><a href="libxml2-2.9.10.tar.xz" title="libxml2-2.9.10.tar.xz">libxml2-2.9.10.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:42</td></tr>
14<tr><td class="link"><a href="libxml2-2.9.11.sha256sum" title="libxml2-2.9.11.sha256sum">libxml2-2.9.11.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:43</td></tr>
15<tr><td class="link"><a href="libxml2-2.9.11.tar.xz" title="libxml2-2.9.11.tar.xz">libxml2-2.9.11.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:43</td></tr>
16<tr><td class="link"><a href="libxml2-2.9.12.sha256sum" title="libxml2-2.9.12.sha256sum">libxml2-2.9.12.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:45</td></tr>
17<tr><td class="link"><a href="libxml2-2.9.12.tar.xz" title="libxml2-2.9.12.tar.xz">libxml2-2.9.12.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:45</td></tr>
18<tr><td class="link"><a href="libxml2-2.9.13.news" title="libxml2-2.9.13.news">libxml2-2.9.13.news</a></td><td class="size">26.6 KiB</td><td class="date">2022-Feb-20 12:42</td></tr>
19<tr><td class="link"><a href="libxml2-2.9.13.sha256sum" title="libxml2-2.9.13.sha256sum">libxml2-2.9.13.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Feb-20 12:42</td></tr>
20<tr><td class="link"><a href="libxml2-2.9.13.tar.xz" title="libxml2-2.9.13.tar.xz">libxml2-2.9.13.tar.xz</a></td><td class="size">3.1 MiB</td><td class="date">2022-Feb-20 12:42</td></tr>
21<tr><td class="link"><a href="libxml2-2.9.14.news" title="libxml2-2.9.14.news">libxml2-2.9.14.news</a></td><td class="size">1.0 KiB</td><td class="date">2022-May-02 12:03</td></tr>
22<tr><td class="link"><a href="libxml2-2.9.14.sha256sum" title="libxml2-2.9.14.sha256sum">libxml2-2.9.14.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-May-02 12:03</td></tr>
23<tr><td class="link"><a href="libxml2-2.9.14.tar.xz" title="libxml2-2.9.14.tar.xz">libxml2-2.9.14.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
24<tr><td class="link"><a href="libxml2-2.9.2.sha256sum" title="libxml2-2.9.2.sha256sum">libxml2-2.9.2.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:30</td></tr>
25<tr><td class="link"><a href="libxml2-2.9.2.tar.xz" title="libxml2-2.9.2.tar.xz">libxml2-2.9.2.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:30</td></tr>
26<tr><td class="link"><a href="libxml2-2.9.3.sha256sum" title="libxml2-2.9.3.sha256sum">libxml2-2.9.3.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:31</td></tr>
27<tr><td class="link"><a href="libxml2-2.9.3.tar.xz" title="libxml2-2.9.3.tar.xz">libxml2-2.9.3.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:31</td></tr>
28<tr><td class="link"><a href="libxml2-2.9.4.sha256sum" title="libxml2-2.9.4.sha256sum">libxml2-2.9.4.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:33</td></tr>
29<tr><td class="link"><a href="libxml2-2.9.4.tar.xz" title="libxml2-2.9.4.tar.xz">libxml2-2.9.4.tar.xz</a></td><td class="size">2.9 MiB</td><td class="date">2022-Feb-14 18:33</td></tr>
30<tr><td class="link"><a href="libxml2-2.9.5.sha256sum" title="libxml2-2.9.5.sha256sum">libxml2-2.9.5.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:35</td></tr>
31<tr><td class="link"><a href="libxml2-2.9.5.tar.xz" title="libxml2-2.9.5.tar.xz">libxml2-2.9.5.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:35</td></tr>
32<tr><td class="link"><a href="libxml2-2.9.6.sha256sum" title="libxml2-2.9.6.sha256sum">libxml2-2.9.6.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:36</td></tr>
33<tr><td class="link"><a href="libxml2-2.9.6.tar.xz" title="libxml2-2.9.6.tar.xz">libxml2-2.9.6.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:36</td></tr>
34<tr><td class="link"><a href="libxml2-2.9.7.sha256sum" title="libxml2-2.9.7.sha256sum">libxml2-2.9.7.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:37</td></tr>
35<tr><td class="link"><a href="libxml2-2.9.7.tar.xz" title="libxml2-2.9.7.tar.xz">libxml2-2.9.7.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:37</td></tr>
36<tr><td class="link"><a href="libxml2-2.9.8.sha256sum" title="libxml2-2.9.8.sha256sum">libxml2-2.9.8.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:39</td></tr>
37<tr><td class="link"><a href="libxml2-2.9.8.tar.xz" title="libxml2-2.9.8.tar.xz">libxml2-2.9.8.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:39</td></tr>
38<tr><td class="link"><a href="libxml2-2.9.9.sha256sum" title="libxml2-2.9.9.sha256sum">libxml2-2.9.9.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:40</td></tr>
39<tr><td class="link"><a href="libxml2-2.9.9.tar.xz" title="libxml2-2.9.9.tar.xz">libxml2-2.9.9.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:40</td></tr>
40</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
new file mode 100644
index 0000000000..c183e06a55
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
@@ -0,0 +1,19 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/</title>
4</head><body><h1>Index of /sources/libxml2/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="2.0/" title="2.0">2.0/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
8<tr><td class="link"><a href="2.1/" title="2.1">2.1/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
9<tr><td class="link"><a href="2.10/" title="2.10">2.10/</a></td><td class="size">-</td><td class="date">2022-Oct-14 12:55</td></tr>
10<tr><td class="link"><a href="2.2/" title="2.2">2.2/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
11<tr><td class="link"><a href="2.3/" title="2.3">2.3/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
12<tr><td class="link"><a href="2.4/" title="2.4">2.4/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
13<tr><td class="link"><a href="2.5/" title="2.5">2.5/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
14<tr><td class="link"><a href="2.6/" title="2.6">2.6/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
15<tr><td class="link"><a href="2.7/" title="2.7">2.7/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:24</td></tr>
16<tr><td class="link"><a href="2.8/" title="2.8">2.8/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:26</td></tr>
17<tr><td class="link"><a href="2.9/" title="2.9">2.9/</a></td><td class="size">-</td><td class="date">2022-May-02 12:04</td></tr>
18<tr><td class="link"><a href="cache.json" title="cache.json">cache.json</a></td><td class="size">22.8 KiB</td><td class="date">2022-Oct-14 12:55</td></tr>
19</tbody></table></body></html>
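These saved index pages let the fetcher's upstream-version checks run offline:
the tests scan listings like the ones above for release tarballs such as
libxml2-2.9.14.tar.xz. As a loose illustration (not the fetcher's actual code),
candidate versions can be pulled from such a page with a regex:

    import re

    def versions_from_index(html, project="libxml2"):
        # Match e.g. "libxml2-2.9.14.tar.xz" and collect the version strings.
        pattern = r'%s-(\d+(?:\.\d+)+)\.tar\.(?:gz|xz|bz2)' % re.escape(project)
        versions = set(re.findall(pattern, html))
        # Sort numerically so that "2.10" orders after "2.9".
        return sorted(versions, key=lambda v: tuple(map(int, v.split("."))))

    with open("index.html") as f:  # one of the fixture pages above
        print(versions_from_index(f.read()))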
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
new file mode 100644
index 0000000000..e27ee134f2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
@@ -0,0 +1,3528 @@
1<?xml version="1.0" encoding="UTF-8"?>
2<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
3 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
4<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
5<head>
6<title>MiniUPnP download zone</title>
7<link href="../css/miniupnp.css" rel="stylesheet" type="text/css"/>
8<meta name="description" content="files download of the miniupnp project"/>
9<meta name="keywords" content="upnp,download,openbsd,freebsd,linux,windows"/>
10<meta name="viewport" content="width=device-width" />
11<link href="rss.php" title="MiniUPnPd, MiniUPnPc and MiniSSDPd Files" type="application/rss+xml" rel="alternate" />
12<link rel="canonical" href="http://miniupnp.free.fr/files/" />
13<link rel="alternate" hreflang="fr" href="/files/index_fr.php" />
14<script async="async" src="//pagead2.googlesyndication.com/pagead/js/adsbygoogle.js" type="text/javascript"></script>
15<script type="text/javascript">
16 (adsbygoogle = window.adsbygoogle || []).push({
17 google_ad_client: "ca-pub-6883148866513192",
18 enable_page_level_ads: true
19 });
20</script>
21</head>
22<body>
23<h2>MiniUPnP Project</h2>
24
25<p align="center">
26<a href="../">Home</a> |
27<b>Downloads</b> |
28<a href="../devicelist.php">Compatibility list</a> |
29<a href="../libnatpmp.html">libnatpmp</a> |
30<a href="../minissdpd.html">MiniSSDPd</a> |
31<a href="../xchat-upnp.html">xchat upnp patch</a> |
32<a href="../search.html">Search</a> |
33<a href="https://miniupnp.tuxfamily.org/forum/">Forum</a>
34</p>
35<p align="center">
36<b>English</b> | <a href="/files/index_fr.php">Fran&ccedil;ais</a>
37</p>
38
39<div align="center">
40<script type="text/javascript"><!--
41google_ad_client = "pub-6883148866513192";
42/* 728x90, created 7/10/08 */
43google_ad_slot = "0774293141";
44google_ad_width = 728;
45google_ad_height = 90;
46//-->
47</script>
48<script type="text/javascript"
49src="https://pagead2.googlesyndication.com/pagead/show_ads.js">
50</script>
51</div>
52
53<h2>MiniUPnP download zone</h2>
54<p>
55Find on this page the source of miniupnp and
56some related files. You will also find precompiled binaries
57of the UPnP client sample program for windows compiled using
58<a href="https://mingw.osdn.io/">MinGW</a>. There are also Windows
59binaries (including python module) automatically built using
60<a href="https://ci.appveyor.com/project/miniupnp/miniupnp/build/artifacts">AppVeyor</a>.
61</p>
62<p>If you just need one of the software installed on your machine,
63you probably don't need to download and compile the source files.
64It is very likely that a package/port already exists for
65your system/distribution. Refer to your system documentation
66to find how to search and install a package/port.
67Mac OS X does have port systems too : see
68<a href="http://www.macports.org/">MacPorts</a> or
69<a href="http://mxcl.github.com/homebrew/">Homebrew</a> or
70<a href="http://www.finkproject.org/">Fink</a>.
71</p>
72<p>
73The miniupnpc (client) sources have been successfully compiled
74under Windows XP/vista/7/10/etc. (using
75<a href="https://mingw.osdn.io/">MinGW</a>,
76<a href="https://www.mingw-w64.org/">Mingw-w64</a>
77or <a href="http://www.cygwin.com/">Cygwin</a>),
78Linux, OpenBSD, FreeBSD, NetBSD, DragonFlyBSD,
79Solaris, MacOS X and AmigaOS. <br/>
80The Makefile of the client is made for GNU make :
81check which version your system have
82with the command "make --version". On some systems, such as OpenBSD,
83you have to use "gmake". Under Windows with MinGW, GNU make is
84called "mingw32-make" and a sligthly modified version of the Makefile
85should be used : Makefile.mingw. Run "mingw32make.bat" to compile. <br/>
86If you have any compatibility problem, please post on the
87<a href="https://miniupnp.tuxfamily.org/forum/">forum</a>
88or contact me by email.
89</p>
90<!--
91<p>A devoted user compiled miniupnp<strong>c</strong> for
92Openwrt (currently Kamikaze 7.09)
93and his work is available here :
94<a href="http://replay.waybackmachine.org/20081120030628/http://www.myantihero.net/pub/openwrt/packages/">http://myantihero.net/pub/openwrt/packages/</a>.</p>
95-->
96<p>Get miniupnpc under AmigaOS 4 on
97<a href="http://os4depot.net/index.php?function=showfile&amp;file=network/misc/miniupnpc.lha">OS4Depot</a>.
98</p>
99<p>
100Dario Meloni has made a Ruby Gem embedding miniupnpc :
101<a href="https://rubygems.org/gems/mupnp">https://rubygems.org/gems/mupnp</a>.
102</p>
103<p>
104The python module is available on pypi.org :
105<a href="https://pypi.org/project/miniupnpc/">pip install miniupnpc</a>.
106</p>
107<p>
108The daemon (starting in November 2006) compiles with BSD make under BSD
109and Solaris.<br/>
110To compile the daemon under linux, use "make -f Makefile.linux"<br/>
111To compile for <a href="http://openwrt.org/">OpenWRT</a>
112please read the README.openwrt file, or use the packages
113<a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpc</a> and
114<a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpd</a>.
115<!-- The
116<a href="http://www.x-wrt.org/">X-Wrt</a> project is providing
117precompiled ipkg packages for OpenWrt for both OpenWrt
118<a href="ftp://ftp.berlios.de/pub/xwrt/packages/">White Russian</a>
119and OpenWrt
120<a href="ftp://ftp.berlios.de/pub/xwrt/kamikaze/packages">kamikaze</a>.
121Check
122<a href="ftp://ftp.berlios.de/pub/xwrt/">ftp://ftp.berlios.de/pub/xwrt/</a>.
123For White Russian, take a look at
124<a href="http://jackassofalltrades.com/openwrt/">this</a>. -->
125<br/>
126<a href="http://pfsense.com">pfSense</a> users are advised to use the
127miniupnpd port available for their system. Recent versions of
128pfSense include MiniUPnPd in the base system.
129<br/>
130For <a href="http://en.wikipedia.org/wiki/WRT54G">Linksys WRT54G</a>
131and WRT54GL owners,
132<a href="http://sourceforge.net/projects/tarifa/">Tarifa firmware</a>
133is another alternative to get miniUPnPd running on the router.
134</p>
135<p>
136Please read README and
137LICENCE files included with the distribution for further informations.
138</p>
139<p>
140The MiniUPnP daemon (miniupnpd) is working under
141<a href="http://www.openbsd.org/">OpenBSD</a>,
142<a href="http://www.netbsd.org/">NetBSD</a>,
143<a href="http://www.freebsd.org/">FreeBSD</a>,
144<a href="http://www.dragonflybsd.org/">DragonFlyBSD</a>,
145<a href="http://www.apple.com/macosx/">Mac OS X</a> and
146(<a href="https://en.wikipedia.org/wiki/OpenSolaris">Open</a>)<a href="http://www.oracle.com/us/products/servers-storage/solaris/solaris11/overview/index.html">Solaris</a>
147with <a href="http://www.openbsd.org/faq/pf/">pf</a>,
148with <a href="https://en.wikipedia.org/wiki/IPFilter">IP Filter</a> or
149with <a href="http://en.wikipedia.org/wiki/Ipfirewall">ipfw</a>.
150The linux version uses either libiptc which permits to access
151<a href="http://netfilter.org/">netfilter</a>
152rules inside the kernel the same way as
153<a href="https://www.netfilter.org/projects/iptables/index.html">iptables</a>, or
154<a href="https://www.netfilter.org/projects/libnftnl/index.html">libnftnl</a>
155which is the equivalent for
156<a href="https://www.netfilter.org/projects/nftables/index.html">nftables</a>.
157</p>
158
159<p>Releases are now GPG signed with the key <a href="../A31ACAAF.asc">A31ACAAF</a>.
160Previous signing key was <a href="../A5C0863C.asc">A5C0863C</a>.
161Get it from your favorite
162<a href="https://pgp.mit.edu/pks/lookup?search=0xA31ACAAF&amp;op=index&amp;fingerprint=on">key server</a>.</p>
163
164<h4>REST API</h4>
165<p>You can use the REST API to get the latest releases available:</p>
166<ul>
167<li><a href="rest.php/tags/miniupnpd?count=1">rest.php/tags/miniupnpd?count=1</a>: latest miniupnpd.</li>
168<li><a href="rest.php/tags?count=1">rest.php/tags?count=1</a>: miniupnpc, miniupnpd and minissdpd.</li>
169</ul>
170
171<h4>You can help !</h4>
172<p>If you make a package/port for your favorite OS distribution,
173inform me so I can upload the package here or add a link to your
174repository.
175</p>
176
177<h4>Latest files</h4>
178<table>
179<tr><th>name</th>
180<th>size</th>
181<th>date</th>
182<th>comment</th>
183<th><!-- Changelog --></th>
184<th><!-- Signature --></th>
185</tr>
186<tr>
187 <td class="filename"><a href='miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
188 <td class="filesize">140137</td>
189 <td class="filedate">05/03/2025 10:31</td>
190 <td class="comment">MiniUPnP client release source code</td>
191 <td><a href="changelog.php?file=miniupnpc-2.3.2.tar.gz">changelog</a></td>
192 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
193</tr>
194<tr>
195 <td class="filename"><a href='miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
196 <td class="filesize">265329</td>
197 <td class="filedate">22/06/2024 22:31</td>
198 <td class="comment">MiniUPnP daemon release source code</td>
199 <td><a href="changelog.php?file=miniupnpd-2.3.7.tar.gz">changelog</a></td>
200 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
201</tr>
202<tr>
203 <td class="filename"><a href='libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
204 <td class="filesize">26506</td>
205 <td class="filedate">23/04/2023 11:02</td>
206 <td class="comment">latest libnatpmp source code</td>
207 <td><a href="changelog.php?file=libnatpmp-20230423.tar.gz">changelog</a></td>
208 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
209</tr>
210<tr>
211 <td class="filename"><a href='minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
212 <td class="filesize">39077</td>
213 <td class="filedate">22/10/2022 18:41</td>
214 <td class="comment">MiniSSDPd release source code</td>
215 <td><a href="changelog.php?file=minissdpd-1.6.0.tar.gz">changelog</a></td>
216 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
217</tr>
218<tr>
219 <td class="filename"><a href='upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
220 <td class="filesize">69503</td>
221 <td class="filedate">15/05/2022 14:31</td>
222 <td class="comment">Windows executable</td>
223 <td><a href="changelog.php?file=upnpc-exe-win32-20220515.zip">changelog</a></td>
224 <td></td>
225</tr>
226<tr>
227 <td class="filename"><a href='minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
228 <td class="filesize">38870</td>
229 <td class="filedate">04/11/2021 23:34</td>
230 <td class="comment">latest MiniSSDPd source code</td>
231 <td><a href="changelog.php?file=minissdpd-1.5.20211105.tar.gz">changelog</a></td>
232 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
233</tr>
234<tr>
235 <td class="filename"><a href='miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
236 <td class="filesize">97682</td>
237 <td class="filedate">15/10/2020 22:31</td>
238 <td class="comment">latest MiniUPnP client source code</td>
239 <td><a href="changelog.php?file=miniupnpc-2.1.20201016.tar.gz">changelog</a></td>
240 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
241</tr>
242<tr>
243 <td class="filename"><a href='miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
244 <td class="filesize">245426</td>
245 <td class="filedate">10/05/2020 18:23</td>
246 <td class="comment">latest MiniUPnP daemon source code</td>
247 <td><a href="changelog.php?file=miniupnpd-2.1.20200510.tar.gz">changelog</a></td>
248 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
249</tr>
250<tr>
251 <td class="filename"><a href='xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
252 <td class="filesize">10329</td>
253 <td class="filedate">11/08/2011 15:18</td>
254 <td class="comment">Patch to add UPnP capabilities to xchat</td>
255 <td><a href="changelog.php?file=xchat-upnp20110811.patch">changelog</a></td>
256 <td></td>
257</tr>
258<tr>
259 <td class="filename"><a href='minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
260 <td class="filesize">7598</td>
261 <td class="filedate">25/07/2011 14:57</td>
262 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
263 <td><a href="changelog.php?file=minidlna_1.0.21.minissdp1.patch">changelog</a></td>
264 <td></td>
265</tr>
266<tr>
267 <td class="filename"><a href='miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
268 <td class="filesize">14840</td>
269 <td class="filedate">04/11/2006 18:16</td>
270 <td class="comment">Jo&atilde;o Paulo Barraca version of the upnp client</td>
271 <td><a href="changelog.php?file=miniupnpc-new20060630.tar.gz">changelog</a></td>
272 <td></td>
273</tr>
274</table>
275
276<h4>All files</h4>
277<table>
278<tr><th>name</th>
279<th>size</th>
280<th>date</th>
281<th>comment</th>
282<th><!-- signature --></th>
283</tr>
284<tr>
285 <td class="filename"><a href='download.php?file=miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
286 <td class="filesize">140137</td>
287 <td class="filedate">05/03/2025 10:31:36 +0000</td>
288 <td class="comment">MiniUPnP client release source code</td>
289 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
290</tr>
291<tr>
292 <td class="filename"><a href='download.php?file=miniupnpc-2.3.1.tar.gz'>miniupnpc-2.3.1.tar.gz</a></td>
293 <td class="filesize">139499</td>
294 <td class="filedate">23/02/2025 16:44:16 +0000</td>
295 <td class="comment">MiniUPnP client release source code</td>
296 <td><a href="miniupnpc-2.3.1.tar.gz.sig">Signature</a></td>
297</tr>
298<tr>
299 <td class="filename"><a href='download.php?file=miniupnpc-2.3.0.tar.gz'>miniupnpc-2.3.0.tar.gz</a></td>
300 <td class="filesize">105071</td>
301 <td class="filedate">10/01/2025 23:16:45 +0000</td>
302 <td class="comment">MiniUPnP client release source code</td>
303 <td><a href="miniupnpc-2.3.0.tar.gz.sig">Signature</a></td>
304</tr>
305<tr>
306 <td class="filename"><a href='download.php?file=miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
307 <td class="filesize">265329</td>
308 <td class="filedate">22/06/2024 22:31:38 +0000</td>
309 <td class="comment">MiniUPnP daemon release source code</td>
310 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
311</tr>
312<tr>
313 <td class="filename"><a href='download.php?file=miniupnpc-2.2.8.tar.gz'>miniupnpc-2.2.8.tar.gz</a></td>
314 <td class="filesize">104603</td>
315 <td class="filedate">08/06/2024 22:13:39 +0000</td>
316 <td class="comment">MiniUPnP client release source code</td>
317 <td><a href="miniupnpc-2.2.8.tar.gz.sig">Signature</a></td>
318</tr>
319<tr>
320 <td class="filename"><a href='download.php?file=miniupnpd-2.3.6.tar.gz'>miniupnpd-2.3.6.tar.gz</a></td>
321 <td class="filesize">263018</td>
322 <td class="filedate">19/03/2024 23:39:51 +0000</td>
323 <td class="comment">MiniUPnP daemon release source code</td>
324 <td><a href="miniupnpd-2.3.6.tar.gz.sig">Signature</a></td>
325</tr>
326<tr>
327 <td class="filename"><a href='download.php?file=miniupnpc-2.2.7.tar.gz'>miniupnpc-2.2.7.tar.gz</a></td>
328 <td class="filesize">104258</td>
329 <td class="filedate">19/03/2024 23:25:18 +0000</td>
330 <td class="comment">MiniUPnP client release source code</td>
331 <td><a href="miniupnpc-2.2.7.tar.gz.sig">Signature</a></td>
332</tr>
333<tr>
334 <td class="filename"><a href='download.php?file=miniupnpd-2.3.5.tar.gz'>miniupnpd-2.3.5.tar.gz</a></td>
335 <td class="filesize">261952</td>
336 <td class="filedate">02/03/2024 11:04:07 +0000</td>
337 <td class="comment">MiniUPnP daemon release source code</td>
338 <td><a href="miniupnpd-2.3.5.tar.gz.sig">Signature</a></td>
339</tr>
340<tr>
341 <td class="filename"><a href='download.php?file=miniupnpd-2.3.4.tar.gz'>miniupnpd-2.3.4.tar.gz</a></td>
342 <td class="filesize">260810</td>
343 <td class="filedate">04/01/2024 00:53:17 +0000</td>
344 <td class="comment">MiniUPnP daemon release source code</td>
345 <td><a href="miniupnpd-2.3.4.tar.gz.sig">Signature</a></td>
346</tr>
347<tr>
348 <td class="filename"><a href='download.php?file=miniupnpc-2.2.6.tar.gz'>miniupnpc-2.2.6.tar.gz</a></td>
349 <td class="filesize">103949</td>
350 <td class="filedate">04/01/2024 00:27:14 +0000</td>
351 <td class="comment">MiniUPnP client release source code</td>
352 <td><a href="miniupnpc-2.2.6.tar.gz.sig">Signature</a></td>
353</tr>
354<tr>
355 <td class="filename"><a href='download.php?file=miniupnpc-2.2.5.tar.gz'>miniupnpc-2.2.5.tar.gz</a></td>
356 <td class="filesize">103654</td>
357 <td class="filedate">11/06/2023 23:14:56 +0000</td>
358 <td class="comment">MiniUPnP client release source code</td>
359 <td><a href="miniupnpc-2.2.5.tar.gz.sig">Signature</a></td>
360</tr>
361<tr>
362 <td class="filename"><a href='download.php?file=libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
363 <td class="filesize">26506</td>
364 <td class="filedate">23/04/2023 11:02:09 +0000</td>
365 <td class="comment">libnatpmp source code</td>
366 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
367</tr>
368<tr>
369 <td class="filename"><a href='download.php?file=miniupnpd-2.3.3.tar.gz'>miniupnpd-2.3.3.tar.gz</a></td>
370 <td class="filesize">260079</td>
371 <td class="filedate">17/02/2023 03:07:46 +0000</td>
372 <td class="comment">MiniUPnP daemon release source code</td>
373 <td><a href="miniupnpd-2.3.3.tar.gz.sig">Signature</a></td>
374</tr>
375<tr>
376 <td class="filename"><a href='download.php?file=miniupnpd-2.3.2.tar.gz'>miniupnpd-2.3.2.tar.gz</a></td>
377 <td class="filesize">259686</td>
378 <td class="filedate">19/01/2023 23:18:08 +0000</td>
379 <td class="comment">MiniUPnP daemon release source code</td>
380 <td><a href="miniupnpd-2.3.2.tar.gz.sig">Signature</a></td>
381</tr>
382<tr>
383 <td class="filename"><a href='download.php?file=minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
384 <td class="filesize">39077</td>
385 <td class="filedate">22/10/2022 18:41:54 +0000</td>
386 <td class="comment">MiniSSDPd release source code</td>
387 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
388</tr>
389<tr>
390 <td class="filename"><a href='download.php?file=miniupnpc-2.2.4.tar.gz'>miniupnpc-2.2.4.tar.gz</a></td>
391 <td class="filesize">102932</td>
392 <td class="filedate">21/10/2022 21:01:01 +0000</td>
393 <td class="comment">MiniUPnP client release source code</td>
394 <td><a href="miniupnpc-2.2.4.tar.gz.sig">Signature</a></td>
395</tr>
396<tr>
397 <td class="filename"><a href='download.php?file=miniupnpd-2.3.1.tar.gz'>miniupnpd-2.3.1.tar.gz</a></td>
398 <td class="filesize">258050</td>
399 <td class="filedate">16/10/2022 05:58:44 +0000</td>
400 <td class="comment">MiniUPnP daemon release source code</td>
401 <td><a href="miniupnpd-2.3.1.tar.gz.sig">Signature</a></td>
402</tr>
403<tr>
404 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
405 <td class="filesize">69503</td>
406 <td class="filedate">15/05/2022 14:31:25 +0000</td>
407 <td class="comment">Windows executable</td>
408 <td></td>
409</tr>
410<tr>
411 <td class="filename"><a href='download.php?file=hexchat-2.16.patch'>hexchat-2.16.patch</a></td>
412 <td class="filesize">8147</td>
413 <td class="filedate">19/03/2022 16:52:05 +0000</td>
414 <td class="comment"></td>
415 <td></td>
416</tr>
417<tr>
418 <td class="filename"><a href='download.php?file=miniupnpd-2.3.0.tar.gz'>miniupnpd-2.3.0.tar.gz</a></td>
419 <td class="filesize">256069</td>
420 <td class="filedate">23/01/2022 00:23:32 +0000</td>
421 <td class="comment">MiniUPnP daemon release source code</td>
422 <td><a href="miniupnpd-2.3.0.tar.gz.sig">Signature</a></td>
423</tr>
424<tr>
425 <td class="filename"><a href='download.php?file=minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
426 <td class="filesize">38870</td>
427 <td class="filedate">04/11/2021 23:34:49 +0000</td>
428 <td class="comment">MiniSSDPd source code</td>
429 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
430</tr>
431<tr>
432 <td class="filename"><a href='download.php?file=miniupnpc-2.2.3.tar.gz'>miniupnpc-2.2.3.tar.gz</a></td>
433 <td class="filesize">101360</td>
434 <td class="filedate">28/09/2021 21:43:32 +0000</td>
435 <td class="comment">MiniUPnP client release source code</td>
436 <td><a href="miniupnpc-2.2.3.tar.gz.sig">Signature</a></td>
437</tr>
438<tr>
439 <td class="filename"><a href='download.php?file=miniupnpd-2.2.3.tar.gz'>miniupnpd-2.2.3.tar.gz</a></td>
440 <td class="filesize">254752</td>
441 <td class="filedate">21/08/2021 08:35:13 +0000</td>
442 <td class="comment">MiniUPnP daemon release source code</td>
443 <td><a href="miniupnpd-2.2.3.tar.gz.sig">Signature</a></td>
444</tr>
445<tr>
446 <td class="filename"><a href='download.php?file=miniupnpd-2.2.2.tar.gz'>miniupnpd-2.2.2.tar.gz</a></td>
447 <td class="filesize">250649</td>
448 <td class="filedate">13/05/2021 11:30:11 +0000</td>
449 <td class="comment">MiniUPnP daemon release source code</td>
450 <td><a href="miniupnpd-2.2.2.tar.gz.sig">Signature</a></td>
451</tr>
452<tr>
453 <td class="filename"><a href='download.php?file=miniupnpc-2.2.2.tar.gz'>miniupnpc-2.2.2.tar.gz</a></td>
454 <td class="filesize">100008</td>
455 <td class="filedate">02/03/2021 23:44:52 +0000</td>
456 <td class="comment">MiniUPnP client release source code</td>
457 <td><a href="miniupnpc-2.2.2.tar.gz.sig">Signature</a></td>
458</tr>
459<tr>
460 <td class="filename"><a href='download.php?file=miniupnpd-2.2.1.tar.gz'>miniupnpd-2.2.1.tar.gz</a></td>
461 <td class="filesize">250023</td>
462 <td class="filedate">20/12/2020 18:08:08 +0000</td>
463 <td class="comment">MiniUPnP daemon release source code</td>
464 <td><a href="miniupnpd-2.2.1.tar.gz.sig">Signature</a></td>
465</tr>
466<tr>
467 <td class="filename"><a href='download.php?file=miniupnpc-2.2.1.tar.gz'>miniupnpc-2.2.1.tar.gz</a></td>
468 <td class="filesize">99595</td>
469 <td class="filedate">20/12/2020 18:08:02 +0000</td>
470 <td class="comment">MiniUPnP client release source code</td>
471 <td><a href="miniupnpc-2.2.1.tar.gz.sig">Signature</a></td>
472</tr>
473<tr>
474 <td class="filename"><a href='download.php?file=miniupnpc-2.2.0.tar.gz'>miniupnpc-2.2.0.tar.gz</a></td>
475 <td class="filesize">98348</td>
476 <td class="filedate">09/11/2020 19:51:24 +0000</td>
477 <td class="comment">MiniUPnP client release source code</td>
478 <td><a href="miniupnpc-2.2.0.tar.gz.sig">Signature</a></td>
479</tr>
480<tr>
481 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0.tar.gz'>miniupnpd-2.2.0.tar.gz</a></td>
482 <td class="filesize">249858</td>
483 <td class="filedate">31/10/2020 09:20:59 +0000</td>
484 <td class="comment">MiniUPnP daemon release source code</td>
485 <td><a href="miniupnpd-2.2.0.tar.gz.sig">Signature</a></td>
486</tr>
487<tr>
488 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC3.tar.gz'>miniupnpd-2.2.0-RC3.tar.gz</a></td>
489 <td class="filesize">249879</td>
490 <td class="filedate">30/10/2020 21:49:49 +0000</td>
491 <td class="comment">MiniUPnP daemon release source code</td>
492 <td><a href="miniupnpd-2.2.0-RC3.tar.gz.sig">Signature</a></td>
493</tr>
494<tr>
495 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
496 <td class="filesize">97682</td>
497 <td class="filedate">15/10/2020 22:31:09 +0000</td>
498 <td class="comment">MiniUPnP client source code</td>
499 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
500</tr>
501<tr>
502 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC2.tar.gz'>miniupnpd-2.2.0-RC2.tar.gz</a></td>
503 <td class="filesize">248756</td>
504 <td class="filedate">28/09/2020 21:57:22 +0000</td>
505 <td class="comment">MiniUPnP daemon release source code</td>
506 <td><a href="miniupnpd-2.2.0-RC2.tar.gz.sig">Signature</a></td>
507</tr>
508<tr>
509 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20200928.tar.gz'>miniupnpc-2.1.20200928.tar.gz</a></td>
510 <td class="filesize">96508</td>
511 <td class="filedate">28/09/2020 21:56:09 +0000</td>
512 <td class="comment">MiniUPnP client source code</td>
513 <td><a href="miniupnpc-2.1.20200928.tar.gz.sig">Signature</a></td>
514</tr>
515<tr>
516 <td class="filename"><a href='download.php?file=minissdpd-1.5.20200928.tar.gz'>minissdpd-1.5.20200928.tar.gz</a></td>
517 <td class="filesize">37860</td>
518 <td class="filedate">28/09/2020 21:55:40 +0000</td>
519 <td class="comment">MiniSSDPd source code</td>
520 <td><a href="minissdpd-1.5.20200928.tar.gz.sig">Signature</a></td>
521</tr>
522<tr>
523 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC1.tar.gz'>miniupnpd-2.2.0-RC1.tar.gz</a></td>
524 <td class="filesize">247772</td>
525 <td class="filedate">06/06/2020 18:34:50 +0000</td>
526 <td class="comment">MiniUPnP daemon release source code</td>
527 <td><a href="miniupnpd-2.2.0-RC1.tar.gz.sig">Signature</a></td>
528</tr>
529<tr>
530 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC0.tar.gz'>miniupnpd-2.2.0-RC0.tar.gz</a></td>
531 <td class="filesize">245507</td>
532 <td class="filedate">16/05/2020 18:03:17 +0000</td>
533 <td class="comment">MiniUPnP daemon release source code</td>
534 <td><a href="miniupnpd-2.2.0-RC0.tar.gz.sig">Signature</a></td>
535</tr>
536<tr>
537 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
538 <td class="filesize">245426</td>
539 <td class="filedate">10/05/2020 18:23:13 +0000</td>
540 <td class="comment">MiniUPnP daemon source code</td>
541 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
542</tr>
543<tr>
544 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200329.tar.gz'>miniupnpd-2.1.20200329.tar.gz</a></td>
545 <td class="filesize">243725</td>
546 <td class="filedate">29/03/2020 09:11:02 +0000</td>
547 <td class="comment">MiniUPnP daemon source code</td>
548 <td><a href="miniupnpd-2.1.20200329.tar.gz.sig">Signature</a></td>
549</tr>
550<tr>
551 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20191224.tar.gz'>miniupnpc-2.1.20191224.tar.gz</a></td>
552 <td class="filesize">94740</td>
553 <td class="filedate">23/12/2019 23:37:32 +0000</td>
554 <td class="comment">MiniUPnP client source code</td>
555 <td><a href="miniupnpc-2.1.20191224.tar.gz.sig">Signature</a></td>
556</tr>
557<tr>
558 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191006.tar.gz'>miniupnpd-2.1.20191006.tar.gz</a></td>
559 <td class="filesize">243255</td>
560 <td class="filedate">06/10/2019 21:02:31 +0000</td>
561 <td class="comment">MiniUPnP daemon source code</td>
562 <td><a href="miniupnpd-2.1.20191006.tar.gz.sig">Signature</a></td>
563</tr>
564<tr>
565 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191005.tar.gz'>miniupnpd-2.1.20191005.tar.gz</a></td>
566 <td class="filesize">244100</td>
567 <td class="filedate">05/10/2019 21:33:08 +0000</td>
568 <td class="comment">MiniUPnP daemon source code</td>
569 <td><a href="miniupnpd-2.1.20191005.tar.gz.sig">Signature</a></td>
570</tr>
571<tr>
572 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191003.tar.gz'>miniupnpd-2.1.20191003.tar.gz</a></td>
573 <td class="filesize">243287</td>
574 <td class="filedate">02/10/2019 22:23:51 +0000</td>
575 <td class="comment">MiniUPnP daemon source code</td>
576 <td><a href="miniupnpd-2.1.20191003.tar.gz.sig">Signature</a></td>
577</tr>
578<tr>
579 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190924.tar.gz'>miniupnpd-2.1.20190924.tar.gz</a></td>
580 <td class="filesize">241008</td>
581 <td class="filedate">24/09/2019 11:58:15 +0000</td>
582 <td class="comment">MiniUPnP daemon source code</td>
583 <td><a href="miniupnpd-2.1.20190924.tar.gz.sig">Signature</a></td>
584</tr>
585<tr>
586 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190902.tar.gz'>miniupnpd-2.1.20190902.tar.gz</a></td>
587 <td class="filesize">240742</td>
588 <td class="filedate">01/09/2019 23:03:03 +0000</td>
589 <td class="comment">MiniUPnP daemon source code</td>
590 <td><a href="miniupnpd-2.1.20190902.tar.gz.sig">Signature</a></td>
591</tr>
592<tr>
593 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190824.tar.gz'>miniupnpd-2.1.20190824.tar.gz</a></td>
594 <td class="filesize">240490</td>
595 <td class="filedate">24/08/2019 09:21:52 +0000</td>
596 <td class="comment">MiniUPnP daemon source code</td>
597 <td><a href="miniupnpd-2.1.20190824.tar.gz.sig">Signature</a></td>
598</tr>
599<tr>
600 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190824.tar.gz'>minissdpd-1.5.20190824.tar.gz</a></td>
601 <td class="filesize">37300</td>
602 <td class="filedate">24/08/2019 09:17:32 +0000</td>
603 <td class="comment">MiniSSDPd source code</td>
604 <td><a href="minissdpd-1.5.20190824.tar.gz.sig">Signature</a></td>
605</tr>
606<tr>
607 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190824.tar.gz'>miniupnpc-2.1.20190824.tar.gz</a></td>
608 <td class="filesize">94564</td>
609 <td class="filedate">24/08/2019 09:12:50 +0000</td>
610 <td class="comment">MiniUPnP client source code</td>
611 <td><a href="miniupnpc-2.1.20190824.tar.gz.sig">Signature</a></td>
612</tr>
613<tr>
614 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190630.tar.gz'>miniupnpd-2.1.20190630.tar.gz</a></td>
615 <td class="filesize">240466</td>
616 <td class="filedate">30/06/2019 20:27:38 +0000</td>
617 <td class="comment">MiniUPnP daemon source code</td>
618 <td><a href="miniupnpd-2.1.20190630.tar.gz.sig">Signature</a></td>
619</tr>
620<tr>
621 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190625.tar.gz'>miniupnpd-2.1.20190625.tar.gz</a></td>
622 <td class="filesize">240120</td>
623 <td class="filedate">25/06/2019 21:33:49 +0000</td>
624 <td class="comment">MiniUPnP daemon source code</td>
625 <td><a href="miniupnpd-2.1.20190625.tar.gz.sig">Signature</a></td>
626</tr>
627<tr>
628 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190625.tar.gz'>miniupnpc-2.1.20190625.tar.gz</a></td>
629 <td class="filesize">94461</td>
630 <td class="filedate">25/06/2019 21:33:26 +0000</td>
631 <td class="comment">MiniUPnP client source code</td>
632 <td><a href="miniupnpc-2.1.20190625.tar.gz.sig">Signature</a></td>
633</tr>
634<tr>
635 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190502.tar.gz'>miniupnpd-2.1.20190502.tar.gz</a></td>
636 <td class="filesize">236052</td>
637 <td class="filedate">02/05/2019 17:22:23 +0000</td>
638 <td class="comment">MiniUPnP daemon source code</td>
639 <td><a href="miniupnpd-2.1.20190502.tar.gz.sig">Signature</a></td>
640</tr>
641<tr>
642 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190408.tar.gz'>miniupnpc-2.1.20190408.tar.gz</a></td>
643 <td class="filesize">94216</td>
644 <td class="filedate">08/04/2019 12:50:21 +0000</td>
645 <td class="comment">MiniUPnP client source code</td>
646 <td><a href="miniupnpc-2.1.20190408.tar.gz.sig">Signature</a></td>
647</tr>
648<tr>
649 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190408.tar.gz'>miniupnpd-2.1.20190408.tar.gz</a></td>
650 <td class="filesize">235989</td>
651 <td class="filedate">08/04/2019 12:50:01 +0000</td>
652 <td class="comment">MiniUPnP daemon source code</td>
653 <td><a href="miniupnpd-2.1.20190408.tar.gz.sig">Signature</a></td>
654</tr>
655<tr>
656 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190403.tar.gz'>miniupnpc-2.1.20190403.tar.gz</a></td>
657 <td class="filesize">94204</td>
658 <td class="filedate">03/04/2019 15:41:36 +0000</td>
659 <td class="comment">MiniUPnP client source code</td>
660 <td><a href="miniupnpc-2.1.20190403.tar.gz.sig">Signature</a></td>
661</tr>
662<tr>
663 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190403.tar.gz'>miniupnpd-2.1.20190403.tar.gz</a></td>
664 <td class="filesize">235909</td>
665 <td class="filedate">03/04/2019 15:41:17 +0000</td>
666 <td class="comment">MiniUPnP daemon source code</td>
667 <td><a href="miniupnpd-2.1.20190403.tar.gz.sig">Signature</a></td>
668</tr>
669<tr>
670 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190210.tar.gz'>minissdpd-1.5.20190210.tar.gz</a></td>
671 <td class="filesize">37227</td>
672 <td class="filedate">10/02/2019 15:21:49 +0000</td>
673 <td class="comment">MiniSSDPd source code</td>
674 <td><a href="minissdpd-1.5.20190210.tar.gz.sig">Signature</a></td>
675</tr>
676<tr>
677 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190210.tar.gz'>miniupnpc-2.1.20190210.tar.gz</a></td>
678 <td class="filesize">94125</td>
679 <td class="filedate">10/02/2019 12:46:09 +0000</td>
680 <td class="comment">MiniUPnP client source code</td>
681 <td><a href="miniupnpc-2.1.20190210.tar.gz.sig">Signature</a></td>
682</tr>
683<tr>
684 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190210.tar.gz'>miniupnpd-2.1.20190210.tar.gz</a></td>
685 <td class="filesize">235093</td>
686 <td class="filedate">10/02/2019 11:20:11 +0000</td>
687 <td class="comment">MiniUPnP daemon source code</td>
688 <td><a href="miniupnpd-2.1.20190210.tar.gz.sig">Signature</a></td>
689</tr>
690<tr>
691 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20180706.tar.gz'>miniupnpd-2.1.20180706.tar.gz</a></td>
692 <td class="filesize">233675</td>
693 <td class="filedate">06/07/2018 12:44:24 +0000</td>
694 <td class="comment">MiniUPnP daemon source code</td>
695 <td><a href="miniupnpd-2.1.20180706.tar.gz.sig">Signature</a></td>
696</tr>
697<tr>
698 <td class="filename"><a href='download.php?file=miniupnpd-2.1.tar.gz'>miniupnpd-2.1.tar.gz</a></td>
699 <td class="filesize">225458</td>
700 <td class="filedate">08/05/2018 21:50:32 +0000</td>
701 <td class="comment">MiniUPnP daemon release source code</td>
702 <td><a href="miniupnpd-2.1.tar.gz.sig">Signature</a></td>
703</tr>
704<tr>
705 <td class="filename"><a href='download.php?file=miniupnpc-2.1.tar.gz'>miniupnpc-2.1.tar.gz</a></td>
706 <td class="filesize">91914</td>
707 <td class="filedate">07/05/2018 11:10:59 +0000</td>
708 <td class="comment">MiniUPnP client release source code</td>
709 <td><a href="miniupnpc-2.1.tar.gz.sig">Signature</a></td>
710</tr>
711<tr>
712 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180503.tar.gz'>miniupnpd-2.0.20180503.tar.gz</a></td>
713 <td class="filesize">225454</td>
714 <td class="filedate">03/05/2018 08:33:10 +0000</td>
715 <td class="comment">MiniUPnP daemon source code</td>
716 <td></td>
717</tr>
718<tr>
719 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180503.tar.gz'>miniupnpc-2.0.20180503.tar.gz</a></td>
720 <td class="filesize">88207</td>
721 <td class="filedate">03/05/2018 08:31:22 +0000</td>
722 <td class="comment">MiniUPnP client source code</td>
723 <td></td>
724</tr>
725<tr>
726 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180422.tar.gz'>miniupnpd-2.0.20180422.tar.gz</a></td>
727 <td class="filesize">224942</td>
728 <td class="filedate">22/04/2018 19:48:54 +0000</td>
729 <td class="comment">MiniUPnP daemon source code</td>
730 <td></td>
731</tr>
732<tr>
733 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180412.tar.gz'>miniupnpd-2.0.20180412.tar.gz</a></td>
734 <td class="filesize">224831</td>
735 <td class="filedate">12/04/2018 08:16:25 +0000</td>
736 <td class="comment">MiniUPnP daemon source code</td>
737 <td></td>
738</tr>
739<tr>
740 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180410.tar.gz'>miniupnpd-2.0.20180410.tar.gz</a></td>
741 <td class="filesize">224736</td>
742 <td class="filedate">10/04/2018 07:58:28 +0000</td>
743 <td class="comment">MiniUPnP daemon source code</td>
744 <td></td>
745</tr>
746<tr>
747 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180410.tar.gz'>miniupnpc-2.0.20180410.tar.gz</a></td>
748 <td class="filesize">87363</td>
749 <td class="filedate">10/04/2018 07:52:55 +0000</td>
750 <td class="comment">MiniUPnP client source code</td>
751 <td></td>
752</tr>
753<tr>
754 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180406.tar.gz'>miniupnpc-2.0.20180406.tar.gz</a></td>
755 <td class="filesize">87374</td>
756 <td class="filedate">06/04/2018 10:55:21 +0000</td>
757 <td class="comment">MiniUPnP client source code</td>
758 <td></td>
759</tr>
760<tr>
761 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180223.tar.gz'>minissdpd-1.5.20180223.tar.gz</a></td>
762 <td class="filesize">36179</td>
763 <td class="filedate">23/02/2018 14:24:07 +0000</td>
764 <td class="comment">MiniSSDPd source code</td>
765 <td></td>
766</tr>
767<tr>
768 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180222.tar.gz'>miniupnpc-2.0.20180222.tar.gz</a></td>
769 <td class="filesize">87018</td>
770 <td class="filedate">22/02/2018 15:09:24 +0000</td>
771 <td class="comment">MiniUPnP client source code</td>
772 <td></td>
773</tr>
774<tr>
775 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180222.tar.gz'>miniupnpd-2.0.20180222.tar.gz</a></td>
776 <td class="filesize">223697</td>
777 <td class="filedate">22/02/2018 15:09:14 +0000</td>
778 <td class="comment">MiniUPnP daemon source code</td>
779 <td></td>
780</tr>
781<tr>
782 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180203.tar.gz'>miniupnpd-2.0.20180203.tar.gz</a></td>
783 <td class="filesize">223084</td>
784 <td class="filedate">03/02/2018 22:34:46 +0000</td>
785 <td class="comment">MiniUPnP daemon source code</td>
786 <td></td>
787</tr>
788<tr>
789 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180203.tar.gz'>miniupnpc-2.0.20180203.tar.gz</a></td>
790 <td class="filesize">86772</td>
791 <td class="filedate">03/02/2018 22:34:32 +0000</td>
792 <td class="comment">MiniUPnP client source code</td>
793 <td></td>
794</tr>
795<tr>
796 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180203.tar.gz'>minissdpd-1.5.20180203.tar.gz</a></td>
797 <td class="filesize">35848</td>
798 <td class="filedate">03/02/2018 22:33:08 +0000</td>
799 <td class="comment">MiniSSDPd source code</td>
800 <td></td>
801</tr>
802<tr>
803 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171212.tar.gz'>miniupnpc-2.0.20171212.tar.gz</a></td>
804 <td class="filesize">86607</td>
805 <td class="filedate">12/12/2017 12:03:38 +0000</td>
806 <td class="comment">MiniUPnP client source code</td>
807 <td></td>
808</tr>
809<tr>
810 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20171212.tar.gz'>miniupnpd-2.0.20171212.tar.gz</a></td>
811 <td class="filesize">222617</td>
812 <td class="filedate">12/12/2017 12:03:32 +0000</td>
813 <td class="comment">MiniUPnP daemon source code</td>
814 <td></td>
815</tr>
816<tr>
817 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171102.tar.gz'>miniupnpc-2.0.20171102.tar.gz</a></td>
818 <td class="filesize">86363</td>
819 <td class="filedate">02/11/2017 17:58:34 +0000</td>
820 <td class="comment">MiniUPnP client source code</td>
821 <td></td>
822</tr>
823<tr>
824 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170509.tar.gz'>miniupnpc-2.0.20170509.tar.gz</a></td>
825 <td class="filesize">86055</td>
826 <td class="filedate">09/05/2017 10:14:56 +0000</td>
827 <td class="comment">MiniUPnP client source code</td>
828 <td></td>
829</tr>
830<tr>
831 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170421.tar.gz'>miniupnpc-2.0.20170421.tar.gz</a></td>
832 <td class="filesize">85984</td>
833 <td class="filedate">21/04/2017 12:02:26 +0000</td>
834 <td class="comment">MiniUPnP client source code</td>
835 <td></td>
836</tr>
837<tr>
838 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20170421.tar.gz'>miniupnpd-2.0.20170421.tar.gz</a></td>
839 <td class="filesize">219191</td>
840 <td class="filedate">21/04/2017 12:02:06 +0000</td>
841 <td class="comment">MiniUPnP daemon source code</td>
842 <td></td>
843</tr>
844<tr>
845 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20161216.tar.gz'>miniupnpd-2.0.20161216.tar.gz</a></td>
846 <td class="filesize">218119</td>
847 <td class="filedate">16/12/2016 09:34:08 +0000</td>
848 <td class="comment">MiniUPnP daemon source code</td>
849 <td></td>
850</tr>
851<tr>
852 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20161216.tar.gz'>miniupnpc-2.0.20161216.tar.gz</a></td>
853 <td class="filesize">85780</td>
854 <td class="filedate">16/12/2016 09:34:03 +0000</td>
855 <td class="comment">MiniUPnP client source code</td>
856 <td></td>
857</tr>
858<tr>
859 <td class="filename"><a href='download.php?file=minissdpd-1.5.20161216.tar.gz'>minissdpd-1.5.20161216.tar.gz</a></td>
860 <td class="filesize">35078</td>
861 <td class="filedate">16/12/2016 09:33:59 +0000</td>
862 <td class="comment">MiniSSDPd source code</td>
863 <td></td>
864</tr>
865<tr>
866 <td class="filename"><a href='download.php?file=miniupnpd-2.0.tar.gz'>miniupnpd-2.0.tar.gz</a></td>
867 <td class="filesize">217802</td>
868 <td class="filedate">19/04/2016 21:12:01 +0000</td>
869 <td class="comment">MiniUPnP daemon release source code</td>
870 <td><a href="miniupnpd-2.0.tar.gz.sig">Signature</a></td>
871</tr>
872<tr>
873 <td class="filename"><a href='download.php?file=miniupnpc-2.0.tar.gz'>miniupnpc-2.0.tar.gz</a></td>
874 <td class="filesize">85287</td>
875 <td class="filedate">19/04/2016 21:07:52 +0000</td>
876 <td class="comment">MiniUPnP client release source code</td>
877 <td></td>
878</tr>
879<tr>
880 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160301.tar.gz'>minissdpd-1.5.20160301.tar.gz</a></td>
881 <td class="filesize">34827</td>
882 <td class="filedate">01/03/2016 18:08:23 +0000</td>
883 <td class="comment">MiniSSDPd source code</td>
884 <td></td>
885</tr>
886<tr>
887 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160222.tar.gz'>miniupnpd-1.9.20160222.tar.gz</a></td>
888 <td class="filesize">217541</td>
889 <td class="filedate">22/02/2016 10:21:40 +0000</td>
890 <td class="comment">MiniUPnP daemon source code</td>
891 <td></td>
892</tr>
893<tr>
894 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160216.tar.gz'>miniupnpd-1.9.20160216.tar.gz</a></td>
895 <td class="filesize">217007</td>
896 <td class="filedate">16/02/2016 12:41:44 +0000</td>
897 <td class="comment">MiniUPnP daemon source code</td>
898 <td></td>
899</tr>
900<tr>
901 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160212.tar.gz'>miniupnpd-1.9.20160212.tar.gz</a></td>
902 <td class="filesize">215866</td>
903 <td class="filedate">12/02/2016 15:22:04 +0000</td>
904 <td class="comment">MiniUPnP daemon source code</td>
905 <td></td>
906</tr>
907<tr>
908 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160209.tar.gz'>miniupnpd-1.9.20160209.tar.gz</a></td>
909 <td class="filesize">213416</td>
910 <td class="filedate">09/02/2016 09:47:03 +0000</td>
911 <td class="comment">MiniUPnP daemon source code</td>
912 <td></td>
913</tr>
914<tr>
915 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20160209.tar.gz'>miniupnpc-1.9.20160209.tar.gz</a></td>
916 <td class="filesize">85268</td>
917 <td class="filedate">09/02/2016 09:44:50 +0000</td>
918 <td class="comment">MiniUPnP client source code</td>
919 <td></td>
920</tr>
921<tr>
922 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160119.tar.gz'>minissdpd-1.5.20160119.tar.gz</a></td>
923 <td class="filesize">34711</td>
924 <td class="filedate">19/01/2016 13:39:51 +0000</td>
925 <td class="comment">MiniSSDPd source code</td>
926 <td></td>
927</tr>
928<tr>
929 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160113.tar.gz'>miniupnpd-1.9.20160113.tar.gz</a></td>
930 <td class="filesize">211437</td>
931 <td class="filedate">13/01/2016 16:03:14 +0000</td>
932 <td class="comment">MiniUPnP daemon source code</td>
933 <td></td>
934</tr>
935<tr>
936 <td class="filename"><a href='download.php?file=minissdpd-1.5.tar.gz'>minissdpd-1.5.tar.gz</a></td>
937 <td class="filesize">34404</td>
938 <td class="filedate">13/01/2016 15:26:53 +0000</td>
939 <td class="comment">MiniSSDPd release source code</td>
940 <td></td>
941</tr>
942<tr>
943 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151212.tar.gz'>miniupnpd-1.9.20151212.tar.gz</a></td>
944 <td class="filesize">210912</td>
945 <td class="filedate">12/12/2015 10:06:07 +0000</td>
946 <td class="comment">MiniUPnP daemon source code</td>
947 <td></td>
948</tr>
949<tr>
950 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151118.tar.gz'>miniupnpd-1.9.20151118.tar.gz</a></td>
951 <td class="filesize">210322</td>
952 <td class="filedate">18/11/2015 08:59:46 +0000</td>
953 <td class="comment">MiniUPnP daemon source code</td>
954 <td></td>
955</tr>
956<tr>
957 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151026.tar.gz'>miniupnpc-1.9.20151026.tar.gz</a></td>
958 <td class="filesize">84208</td>
959 <td class="filedate">26/10/2015 17:07:34 +0000</td>
960 <td class="comment">MiniUPnP client source code</td>
961 <td></td>
962</tr>
963<tr>
964 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151008.tar.gz'>miniupnpc-1.9.20151008.tar.gz</a></td>
965 <td class="filesize">83538</td>
966 <td class="filedate">08/10/2015 16:22:28 +0000</td>
967 <td class="comment">MiniUPnP client source code</td>
968 <td></td>
969</tr>
970<tr>
971 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150922.tar.gz'>miniupnpd-1.9.20150922.tar.gz</a></td>
972 <td class="filesize">208700</td>
973 <td class="filedate">22/09/2015 10:21:50 +0000</td>
974 <td class="comment">MiniUPnP daemon source code</td>
975 <td></td>
976</tr>
977<tr>
978 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150918.zip'>upnpc-exe-win32-20150918.zip</a></td>
979 <td class="filesize">100004</td>
980 <td class="filedate">18/09/2015 12:50:51 +0000</td>
981 <td class="comment">Windows executable</td>
982 <td></td>
983</tr>
984<tr>
985 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150917.tar.gz'>miniupnpc-1.9.20150917.tar.gz</a></td>
986 <td class="filesize">82609</td>
987 <td class="filedate">17/09/2015 14:09:14 +0000</td>
988 <td class="comment">MiniUPnP client source code</td>
989 <td></td>
990</tr>
991<tr>
992 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150824.zip'>upnpc-exe-win32-20150824.zip</a></td>
993 <td class="filesize">99520</td>
994 <td class="filedate">24/08/2015 15:25:18 +0000</td>
995 <td class="comment">Windows executable</td>
996 <td></td>
997</tr>
998<tr>
999 <td class="filename"><a href='download.php?file=minissdpd-1.4.tar.gz'>minissdpd-1.4.tar.gz</a></td>
1000 <td class="filesize">32017</td>
1001 <td class="filedate">06/08/2015 13:38:37 +0000</td>
1002 <td class="comment">MiniSSDPd release source code</td>
1003 <td></td>
1004</tr>
1005<tr>
1006 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150730.tar.gz'>miniupnpc-1.9.20150730.tar.gz</a></td>
1007 <td class="filesize">81431</td>
1008 <td class="filedate">29/07/2015 22:10:00 +0000</td>
1009 <td class="comment">MiniUPnP client source code</td>
1010 <td></td>
1011</tr>
1012<tr>
1013 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150721.tar.gz'>miniupnpd-1.9.20150721.tar.gz</a></td>
1014 <td class="filesize">207562</td>
1015 <td class="filedate">21/07/2015 13:35:51 +0000</td>
1016 <td class="comment">MiniUPnP daemon source code</td>
1017 <td></td>
1018</tr>
1019<tr>
1020 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150721.tar.gz'>miniupnpc-1.9.20150721.tar.gz</a></td>
1021 <td class="filesize">80521</td>
1022 <td class="filedate">21/07/2015 13:27:00 +0000</td>
1023 <td class="comment">MiniUPnP client source code</td>
1024 <td></td>
1025</tr>
1026<tr>
1027 <td class="filename"><a href='download.php?file=libnatpmp-20150609.tar.gz'>libnatpmp-20150609.tar.gz</a></td>
1028 <td class="filesize">24392</td>
1029 <td class="filedate">09/06/2015 15:40:28 +0000</td>
1030 <td class="comment">libnatpmp source code</td>
1031 <td></td>
1032</tr>
1033<tr>
1034 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150609.tar.gz'>miniupnpc-1.9.20150609.tar.gz</a></td>
1035 <td class="filesize">79311</td>
1036 <td class="filedate">09/06/2015 15:39:48 +0000</td>
1037 <td class="comment">MiniUPnP client source code</td>
1038 <td></td>
1039</tr>
1040<tr>
1041 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150609.tar.gz'>miniupnpd-1.9.20150609.tar.gz</a></td>
1042 <td class="filesize">207088</td>
1043 <td class="filedate">09/06/2015 15:39:36 +0000</td>
1044 <td class="comment">MiniUPnP daemon source code</td>
1045 <td></td>
1046</tr>
1047<tr>
1048 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150527.tar.gz'>minissdpd-1.3.20150527.tar.gz</a></td>
1049 <td class="filesize">31025</td>
1050 <td class="filedate">27/05/2015 09:17:15 +0000</td>
1051 <td class="comment">MiniSSDPd source code</td>
1052 <td></td>
1053</tr>
1054<tr>
1055 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150522.tar.gz'>miniupnpc-1.9.20150522.tar.gz</a></td>
1056 <td class="filesize">79080</td>
1057 <td class="filedate">22/05/2015 11:02:27 +0000</td>
1058 <td class="comment">MiniUPnP client source code</td>
1059 <td></td>
1060</tr>
1061<tr>
1062 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150522.tar.gz'>minissdpd-1.3.20150522.tar.gz</a></td>
1063 <td class="filesize">30334</td>
1064 <td class="filedate">22/05/2015 11:02:04 +0000</td>
1065 <td class="comment">MiniSSDPd source code</td>
1066 <td></td>
1067</tr>
1068<tr>
1069 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150430.tar.gz'>miniupnpd-1.9.20150430.tar.gz</a></td>
1070 <td class="filesize">205930</td>
1071 <td class="filedate">30/04/2015 09:09:27 +0000</td>
1072 <td class="comment">MiniUPnP daemon source code</td>
1073 <td></td>
1074</tr>
1075<tr>
1076 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150430.tar.gz'>miniupnpc-1.9.20150430.tar.gz</a></td>
1077 <td class="filesize">78459</td>
1078 <td class="filedate">30/04/2015 08:39:31 +0000</td>
1079 <td class="comment">MiniUPnP client source code</td>
1080 <td></td>
1081</tr>
1082<tr>
1083 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150427.tar.gz'>miniupnpc-1.9.20150427.tar.gz</a></td>
1084 <td class="filesize">78424</td>
1085 <td class="filedate">27/04/2015 16:08:42 +0000</td>
1086 <td class="comment">MiniUPnP client source code</td>
1087 <td></td>
1088</tr>
1089<tr>
1090 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150427.tar.gz'>miniupnpd-1.9.20150427.tar.gz</a></td>
1091 <td class="filesize">191157</td>
1092 <td class="filedate">27/04/2015 16:08:27 +0000</td>
1093 <td class="comment">MiniUPnP daemon source code</td>
1094 <td></td>
1095</tr>
1096<tr>
1097 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150307.tar.gz'>miniupnpd-1.9.20150307.tar.gz</a></td>
1098 <td class="filesize">190913</td>
1099 <td class="filedate">07/03/2015 16:11:51 +0000</td>
1100 <td class="comment">MiniUPnP daemon source code</td>
1101 <td></td>
1102</tr>
1103<tr>
1104 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150206.tar.gz'>miniupnpc-1.9.20150206.tar.gz</a></td>
1105 <td class="filesize">76864</td>
1106 <td class="filedate">06/02/2015 14:38:00 +0000</td>
1107 <td class="comment">MiniUPnP client source code</td>
1108 <td></td>
1109</tr>
1110<tr>
1111 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141209.tar.gz'>miniupnpd-1.9.20141209.tar.gz</a></td>
1112 <td class="filesize">193183</td>
1113 <td class="filedate">09/12/2014 09:58:34 +0000</td>
1114 <td class="comment">MiniUPnP daemon source code</td>
1115 <td></td>
1116</tr>
1117<tr>
1118 <td class="filename"><a href='download.php?file=minissdpd-1.3.tar.gz'>minissdpd-1.3.tar.gz</a></td>
1119 <td class="filesize">30326</td>
1120 <td class="filedate">09/12/2014 09:57:30 +0000</td>
1121 <td class="comment">MiniSSDPd release source code</td>
1122 <td></td>
1123</tr>
1124<tr>
1125 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141204.tar.gz'>minissdpd-1.2.20141204.tar.gz</a></td>
1126 <td class="filesize">26978</td>
1127 <td class="filedate">04/12/2014 10:55:26 +0000</td>
1128 <td class="comment">MiniSSDPd source code</td>
1129 <td></td>
1130</tr>
1131<tr>
1132 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141204.tar.gz'>miniupnpd-1.9.20141204.tar.gz</a></td>
1133 <td class="filesize">192597</td>
1134 <td class="filedate">04/12/2014 10:55:03 +0000</td>
1135 <td class="comment">MiniUPnP daemon source code</td>
1136 <td></td>
1137</tr>
1138<tr>
1139 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141128.tar.gz'>minissdpd-1.2.20141128.tar.gz</a></td>
1140 <td class="filesize">26795</td>
1141 <td class="filedate">28/11/2014 16:33:10 +0000</td>
1142 <td class="comment">MiniSSDPd source code</td>
1143 <td></td>
1144</tr>
1145<tr>
1146 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141128.tar.gz'>miniupnpd-1.9.20141128.tar.gz</a></td>
1147 <td class="filesize">192558</td>
1148 <td class="filedate">28/11/2014 13:31:36 +0000</td>
1149 <td class="comment">MiniUPnP daemon source code</td>
1150 <td></td>
1151</tr>
1152<tr>
1153 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141128.tar.gz'>miniupnpc-1.9.20141128.tar.gz</a></td>
1154 <td class="filesize">76541</td>
1155 <td class="filedate">28/11/2014 13:31:15 +0000</td>
1156 <td class="comment">MiniUPnP client source code</td>
1157 <td></td>
1158</tr>
1159<tr>
1160 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141117.tar.gz'>miniupnpc-1.9.20141117.tar.gz</a></td>
1161 <td class="filesize">73865</td>
1162 <td class="filedate">17/11/2014 09:51:36 +0000</td>
1163 <td class="comment">MiniUPnP client source code</td>
1164 <td></td>
1165</tr>
1166<tr>
1167 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141113.tar.gz'>miniupnpc-1.9.20141113.tar.gz</a></td>
1168 <td class="filesize">72857</td>
1169 <td class="filedate">13/11/2014 10:36:44 +0000</td>
1170 <td class="comment">MiniUPnP client source code</td>
1171 <td></td>
1172</tr>
1173<tr>
1174 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141108.tar.gz'>minissdpd-1.2.20141108.tar.gz</a></td>
1175 <td class="filesize">22001</td>
1176 <td class="filedate">08/11/2014 13:55:41 +0000</td>
1177 <td class="comment">MiniSSDPd source code</td>
1178 <td></td>
1179</tr>
1180<tr>
1181 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141108.tar.gz'>miniupnpc-1.9.20141108.tar.gz</a></td>
1182 <td class="filesize">72781</td>
1183 <td class="filedate">08/11/2014 13:53:48 +0000</td>
1184 <td class="comment">MiniUPnP client source code</td>
1185 <td></td>
1186</tr>
1187<tr>
1188 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141108.tar.gz'>miniupnpd-1.9.20141108.tar.gz</a></td>
1189 <td class="filesize">192413</td>
1190 <td class="filedate">08/11/2014 13:53:38 +0000</td>
1191 <td class="comment">MiniUPnP daemon source code</td>
1192 <td></td>
1193</tr>
1194<tr>
1195 <td class="filename"><a href='download.php?file=miniupnpd-1.9.tar.gz'>miniupnpd-1.9.tar.gz</a></td>
1196 <td class="filesize">192183</td>
1197 <td class="filedate">27/10/2014 16:45:34 +0000</td>
1198 <td class="comment">MiniUPnP daemon release source code</td>
1199 <td></td>
1200</tr>
1201<tr>
1202 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141027.tar.gz'>miniupnpc-1.9.20141027.tar.gz</a></td>
1203 <td class="filesize">76763</td>
1204 <td class="filedate">27/10/2014 16:45:25 +0000</td>
1205 <td class="comment">MiniUPnP client source code</td>
1206 <td></td>
1207</tr>
1208<tr>
1209 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141022.tar.gz'>miniupnpd-1.8.20141022.tar.gz</a></td>
1210 <td class="filesize">191630</td>
1211 <td class="filedate">22/10/2014 09:17:41 +0000</td>
1212 <td class="comment">MiniUPnP daemon source code</td>
1213 <td></td>
1214</tr>
1215<tr>
1216 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141021.tar.gz'>miniupnpd-1.8.20141021.tar.gz</a></td>
1217 <td class="filesize">191270</td>
1218 <td class="filedate">21/10/2014 14:18:58 +0000</td>
1219 <td class="comment">MiniUPnP daemon source code</td>
1220 <td></td>
1221</tr>
1222<tr>
1223 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140911.tar.gz'>miniupnpc-1.9.20140911.tar.gz</a></td>
1224 <td class="filesize">76855</td>
1225 <td class="filedate">11/09/2014 14:15:23 +0000</td>
1226 <td class="comment">MiniUPnP client source code</td>
1227 <td></td>
1228</tr>
1229<tr>
1230 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140906.tar.gz'>minissdpd-1.2.20140906.tar.gz</a></td>
1231 <td class="filesize">21956</td>
1232 <td class="filedate">06/09/2014 08:34:10 +0000</td>
1233 <td class="comment">MiniSSDPd source code</td>
1234 <td></td>
1235</tr>
1236<tr>
1237 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140906.tar.gz'>miniupnpd-1.8.20140906.tar.gz</a></td>
1238 <td class="filesize">191183</td>
1239 <td class="filedate">06/09/2014 08:34:02 +0000</td>
1240 <td class="comment">MiniUPnP daemon source code</td>
1241 <td></td>
1242</tr>
1243<tr>
1244 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140906.tar.gz'>miniupnpc-1.9.20140906.tar.gz</a></td>
1245 <td class="filesize">76791</td>
1246 <td class="filedate">06/09/2014 08:33:45 +0000</td>
1247 <td class="comment">MiniUPnP client source code</td>
1248 <td></td>
1249</tr>
1250<tr>
1251 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140701.tar.gz'>miniupnpc-1.9.20140701.tar.gz</a></td>
1252 <td class="filesize">76735</td>
1253 <td class="filedate">01/07/2014 13:06:51 +0000</td>
1254 <td class="comment">MiniUPnP client source code</td>
1255 <td></td>
1256</tr>
1257<tr>
1258 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140610.tar.gz'>miniupnpc-1.9.20140610.tar.gz</a></td>
1259 <td class="filesize">76674</td>
1260 <td class="filedate">10/06/2014 10:28:27 +0000</td>
1261 <td class="comment">MiniUPnP client source code</td>
1262 <td></td>
1263</tr>
1264<tr>
1265 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140610.tar.gz'>minissdpd-1.2.20140610.tar.gz</a></td>
1266 <td class="filesize">21909</td>
1267 <td class="filedate">10/06/2014 10:03:29 +0000</td>
1268 <td class="comment">MiniSSDPd source code</td>
1269 <td></td>
1270</tr>
1271<tr>
1272 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140523.tar.gz'>miniupnpd-1.8.20140523.tar.gz</a></td>
1273 <td class="filesize">190936</td>
1274 <td class="filedate">23/05/2014 15:48:03 +0000</td>
1275 <td class="comment">MiniUPnP daemon source code</td>
1276 <td></td>
1277</tr>
1278<tr>
1279 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140422.zip'>upnpc-exe-win32-20140422.zip</a></td>
1280 <td class="filesize">97505</td>
1281 <td class="filedate">22/04/2014 10:10:07 +0000</td>
1282 <td class="comment">Windows executable</td>
1283 <td></td>
1284</tr>
1285<tr>
1286 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140422.tar.gz'>miniupnpd-1.8.20140422.tar.gz</a></td>
1287 <td class="filesize">187225</td>
1288 <td class="filedate">22/04/2014 08:58:56 +0000</td>
1289 <td class="comment">MiniUPnP daemon source code</td>
1290 <td></td>
1291</tr>
1292<tr>
1293 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140401.tar.gz'>miniupnpd-1.8.20140401.tar.gz</a></td>
1294 <td class="filesize">183131</td>
1295 <td class="filedate">01/04/2014 10:07:20 +0000</td>
1296 <td class="comment">MiniUPnP daemon source code</td>
1297 <td></td>
1298</tr>
1299<tr>
1300 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140401.tar.gz'>miniupnpc-1.9.20140401.tar.gz</a></td>
1301 <td class="filesize">74703</td>
1302 <td class="filedate">01/04/2014 09:49:46 +0000</td>
1303 <td class="comment">MiniUPnP client source code</td>
1304 <td></td>
1305</tr>
1306<tr>
1307 <td class="filename"><a href='download.php?file=libnatpmp-20140401.tar.gz'>libnatpmp-20140401.tar.gz</a></td>
1308 <td class="filesize">23302</td>
1309 <td class="filedate">01/04/2014 09:49:44 +0000</td>
1310 <td class="comment">libnatpmp source code</td>
1311 <td></td>
1312</tr>
1313<tr>
1314 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140313.tar.gz'>miniupnpd-1.8.20140313.tar.gz</a></td>
1315 <td class="filesize">177120</td>
1316 <td class="filedate">13/03/2014 10:39:11 +0000</td>
1317 <td class="comment">MiniUPnP daemon source code</td>
1318 <td></td>
1319</tr>
1320<tr>
1321 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140310.tar.gz'>miniupnpd-1.8.20140310.tar.gz</a></td>
1322 <td class="filesize">176585</td>
1323 <td class="filedate">09/03/2014 23:16:49 +0000</td>
1324 <td class="comment">MiniUPnP daemon source code</td>
1325 <td></td>
1326</tr>
1327<tr>
1328 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140225.tar.gz'>miniupnpd-1.8.20140225.tar.gz</a></td>
1329 <td class="filesize">175183</td>
1330 <td class="filedate">25/02/2014 11:01:29 +0000</td>
1331 <td class="comment">MiniUPnP daemon source code</td>
1332 <td></td>
1333</tr>
1334<tr>
1335 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140203.tar.gz'>miniupnpd-1.8.20140203.tar.gz</a></td>
1336 <td class="filesize">170112</td>
1337 <td class="filedate">03/02/2014 09:56:05 +0000</td>
1338 <td class="comment">MiniUPnP daemon source code</td>
1339 <td></td>
1340</tr>
1341<tr>
1342 <td class="filename"><a href='download.php?file=miniupnpc-1.9.tar.gz'>miniupnpc-1.9.tar.gz</a></td>
1343 <td class="filesize">74230</td>
1344 <td class="filedate">31/01/2014 13:57:40 +0000</td>
1345 <td class="comment">MiniUPnP client release source code</td>
1346 <td></td>
1347</tr>
1348<tr>
1349 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140127.tar.gz'>miniupnpd-1.8.20140127.tar.gz</a></td>
1350 <td class="filesize">170467</td>
1351 <td class="filedate">27/01/2014 11:25:34 +0000</td>
1352 <td class="comment">MiniUPnP daemon source code</td>
1353 <td></td>
1354</tr>
1355<tr>
1356 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140117.zip'>upnpc-exe-win32-20140117.zip</a></td>
1357 <td class="filesize">97270</td>
1358 <td class="filedate">17/01/2014 11:37:53 +0000</td>
1359 <td class="comment">Windows executable</td>
1360 <td></td>
1361</tr>
1362<tr>
1363 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131216.tar.gz'>miniupnpd-1.8.20131216.tar.gz</a></td>
1364 <td class="filesize">170277</td>
1365 <td class="filedate">16/12/2013 16:15:40 +0000</td>
1366 <td class="comment">MiniUPnP daemon source code</td>
1367 <td></td>
1368</tr>
1369<tr>
1370 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131213.tar.gz'>miniupnpd-1.8.20131213.tar.gz</a></td>
1371 <td class="filesize">169753</td>
1372 <td class="filedate">13/12/2013 16:18:10 +0000</td>
1373 <td class="comment">MiniUPnP daemon source code</td>
1374 <td></td>
1375</tr>
1376<tr>
1377 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131209.tar.gz'>miniupnpc-1.8.20131209.tar.gz</a></td>
1378 <td class="filesize">73900</td>
1379 <td class="filedate">09/12/2013 20:52:54 +0000</td>
1380 <td class="comment">MiniUPnP client source code</td>
1381 <td></td>
1382</tr>
1383<tr>
1384 <td class="filename"><a href='download.php?file=libnatpmp-20131126.tar.gz'>libnatpmp-20131126.tar.gz</a></td>
1385 <td class="filesize">22972</td>
1386 <td class="filedate">26/11/2013 08:51:36 +0000</td>
1387 <td class="comment">libnatpmp source code</td>
1388 <td></td>
1389</tr>
1390<tr>
1391 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131007.tar.gz'>miniupnpc-1.8.20131007.tar.gz</a></td>
1392 <td class="filesize">73750</td>
1393 <td class="filedate">07/10/2013 10:10:25 +0000</td>
1394 <td class="comment">MiniUPnP client source code</td>
1395 <td></td>
1396</tr>
1397<tr>
1398 <td class="filename"><a href='download.php?file=libnatpmp-20130911.tar.gz'>libnatpmp-20130911.tar.gz</a></td>
1399 <td class="filesize">18744</td>
1400 <td class="filedate">11/09/2013 07:35:51 +0000</td>
1401 <td class="comment">libnatpmp source code</td>
1402 <td></td>
1403</tr>
1404<tr>
1405 <td class="filename"><a href='download.php?file=libnatpmp-20130910.tar.gz'>libnatpmp-20130910.tar.gz</a></td>
1406 <td class="filesize">18734</td>
1407 <td class="filedate">10/09/2013 20:15:34 +0000</td>
1408 <td class="comment">libnatpmp source code</td>
1409 <td></td>
1410</tr>
1411<tr>
1412 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130907.tar.gz'>minissdpd-1.2.20130907.tar.gz</a></td>
1413 <td class="filesize">20237</td>
1414 <td class="filedate">07/09/2013 06:46:31 +0000</td>
1415 <td class="comment">MiniSSDPd source code</td>
1416 <td></td>
1417</tr>
1418<tr>
1419 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130819.tar.gz'>minissdpd-1.2.20130819.tar.gz</a></td>
1420 <td class="filesize">20772</td>
1421 <td class="filedate">19/08/2013 16:50:29 +0000</td>
1422 <td class="comment">MiniSSDPd source code</td>
1423 <td></td>
1424</tr>
1425<tr>
1426 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130801.tar.gz'>miniupnpc-1.8.20130801.tar.gz</a></td>
1427 <td class="filesize">73426</td>
1428 <td class="filedate">01/08/2013 21:38:05 +0000</td>
1429 <td class="comment">MiniUPnP client source code</td>
1430 <td></td>
1431</tr>
1432<tr>
1433 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130730.tar.gz'>miniupnpd-1.8.20130730.tar.gz</a></td>
1434 <td class="filesize">149904</td>
1435 <td class="filedate">30/07/2013 11:37:48 +0000</td>
1436 <td class="comment">MiniUPnP daemon source code</td>
1437 <td></td>
1438</tr>
1439<tr>
1440 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130607.tar.gz'>miniupnpd-1.8.20130607.tar.gz</a></td>
1441 <td class="filesize">149521</td>
1442 <td class="filedate">07/06/2013 08:46:17 +0000</td>
1443 <td class="comment">MiniUPnP daemon source code</td>
1444 <td></td>
1445</tr>
1446<tr>
1447 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130521.tar.gz'>miniupnpd-1.8.20130521.tar.gz</a></td>
1448 <td class="filesize">149276</td>
1449 <td class="filedate">21/05/2013 09:01:33 +0000</td>
1450 <td class="comment">MiniUPnP daemon source code</td>
1451 <td></td>
1452</tr>
1453<tr>
1454 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130503.tar.gz'>miniupnpd-1.8.20130503.tar.gz</a></td>
1455 <td class="filesize">148420</td>
1456 <td class="filedate">03/05/2013 19:27:16 +0000</td>
1457 <td class="comment">MiniUPnP daemon source code</td>
1458 <td></td>
1459</tr>
1460<tr>
1461 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130503.tar.gz'>miniupnpc-1.8.20130503.tar.gz</a></td>
1462 <td class="filesize">71858</td>
1463 <td class="filedate">03/05/2013 19:27:07 +0000</td>
1464 <td class="comment">MiniUPnP client source code</td>
1465 <td></td>
1466</tr>
1467<tr>
1468 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130426.tar.gz'>miniupnpd-1.8.20130426.tar.gz</a></td>
1469 <td class="filesize">147890</td>
1470 <td class="filedate">26/04/2013 16:57:20 +0000</td>
1471 <td class="comment">MiniUPnP daemon source code</td>
1472 <td></td>
1473</tr>
1474<tr>
1475 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130211.tar.gz'>miniupnpc-1.8.20130211.tar.gz</a></td>
1476 <td class="filesize">70723</td>
1477 <td class="filedate">11/02/2013 10:32:44 +0000</td>
1478 <td class="comment">MiniUPnP client source code</td>
1479 <td></td>
1480</tr>
1481<tr>
1482 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130207.tar.gz'>miniupnpd-1.8.20130207.tar.gz</a></td>
1483 <td class="filesize">147325</td>
1484 <td class="filedate">07/02/2013 12:29:32 +0000</td>
1485 <td class="comment">MiniUPnP daemon source code</td>
1486 <td></td>
1487</tr>
1488<tr>
1489 <td class="filename"><a href='download.php?file=miniupnpc-1.8.tar.gz'>miniupnpc-1.8.tar.gz</a></td>
1490 <td class="filesize">70624</td>
1491 <td class="filedate">06/02/2013 14:31:06 +0000</td>
1492 <td class="comment">MiniUPnP client release source code</td>
1493 <td></td>
1494</tr>
1495<tr>
1496 <td class="filename"><a href='download.php?file=miniupnpd-1.8.tar.gz'>miniupnpd-1.8.tar.gz</a></td>
1497 <td class="filesize">146679</td>
1498 <td class="filedate">06/02/2013 14:30:59 +0000</td>
1499 <td class="comment">MiniUPnP daemon release source code</td>
1500 <td></td>
1501</tr>
1502<tr>
1503 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20121009.zip'>upnpc-exe-win32-20121009.zip</a></td>
1504 <td class="filesize">96513</td>
1505 <td class="filedate">09/10/2012 17:54:12 +0000</td>
1506 <td class="comment">Windows executable</td>
1507 <td></td>
1508</tr>
1509<tr>
1510 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20121005.tar.gz'>miniupnpd-1.7.20121005.tar.gz</a></td>
1511 <td class="filesize">144393</td>
1512 <td class="filedate">04/10/2012 22:39:05 +0000</td>
1513 <td class="comment">MiniUPnP daemon source code</td>
1514 <td></td>
1515</tr>
1516<tr>
1517 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120830.tar.gz'>miniupnpc-1.7.20120830.tar.gz</a></td>
1518 <td class="filesize">70074</td>
1519 <td class="filedate">30/08/2012 08:41:51 +0000</td>
1520 <td class="comment">MiniUPnP client source code</td>
1521 <td></td>
1522</tr>
1523<tr>
1524 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120824.tar.gz'>miniupnpd-1.7.20120824.tar.gz</a></td>
1525 <td class="filesize">141960</td>
1526 <td class="filedate">24/08/2012 18:15:01 +0000</td>
1527 <td class="comment">MiniUPnP daemon source code</td>
1528 <td></td>
1529</tr>
1530<tr>
1531 <td class="filename"><a href='download.php?file=libnatpmp-20120821.tar.gz'>libnatpmp-20120821.tar.gz</a></td>
1532 <td class="filesize">17832</td>
1533 <td class="filedate">21/08/2012 17:24:46 +0000</td>
1534 <td class="comment">libnatpmp source code</td>
1535 <td></td>
1536</tr>
1537<tr>
1538 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120714.tar.gz'>miniupnpc-1.7.20120714.tar.gz</a></td>
1539 <td class="filesize">69570</td>
1540 <td class="filedate">14/07/2012 14:40:47 +0000</td>
1541 <td class="comment">MiniUPnP client source code</td>
1542 <td></td>
1543</tr>
1544<tr>
1545 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120711.tar.gz'>miniupnpc-1.7.20120711.tar.gz</a></td>
1546 <td class="filesize">69580</td>
1547 <td class="filedate">10/07/2012 22:27:05 +0000</td>
1548 <td class="comment">MiniUPnP client source code</td>
1549 <td></td>
1550</tr>
1551<tr>
1552 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120711.tar.gz'>miniupnpd-1.7.20120711.tar.gz</a></td>
1553 <td class="filesize">141380</td>
1554 <td class="filedate">10/07/2012 22:26:58 +0000</td>
1555 <td class="comment">MiniUPnP daemon source code</td>
1556 <td></td>
1557</tr>
1558<tr>
1559 <td class="filename"><a href='download.php?file=miniupnpd-1.7.tar.gz'>miniupnpd-1.7.tar.gz</a></td>
1560 <td class="filesize">138047</td>
1561 <td class="filedate">27/05/2012 23:13:30 +0000</td>
1562 <td class="comment">MiniUPnP daemon release source code</td>
1563 <td></td>
1564</tr>
1565<tr>
1566 <td class="filename"><a href='download.php?file=miniupnpc-1.7.tar.gz'>miniupnpc-1.7.tar.gz</a></td>
1567 <td class="filesize">68327</td>
1568 <td class="filedate">24/05/2012 18:17:48 +0000</td>
1569 <td class="comment">MiniUPnP client release source code</td>
1570 <td></td>
1571</tr>
1572<tr>
1573 <td class="filename"><a href='download.php?file=minissdpd-1.2.tar.gz'>minissdpd-1.2.tar.gz</a></td>
1574 <td class="filesize">19874</td>
1575 <td class="filedate">24/05/2012 18:06:24 +0000</td>
1576 <td class="comment">MiniSSDPd release source code</td>
1577 <td></td>
1578</tr>
1579<tr>
1580 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120509.tar.gz'>miniupnpd-1.6.20120509.tar.gz</a></td>
1581 <td class="filesize">137147</td>
1582 <td class="filedate">09/05/2012 10:45:44 +0000</td>
1583 <td class="comment">MiniUPnP daemon source code</td>
1584 <td></td>
1585</tr>
1586<tr>
1587 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120509.tar.gz'>miniupnpc-1.6.20120509.tar.gz</a></td>
1588 <td class="filesize">68205</td>
1589 <td class="filedate">09/05/2012 10:45:41 +0000</td>
1590 <td class="comment">MiniUPnP client source code</td>
1591 <td></td>
1592</tr>
1593<tr>
1594 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120509.tar.gz'>minissdpd-1.1.20120509.tar.gz</a></td>
1595 <td class="filesize">18123</td>
1596 <td class="filedate">09/05/2012 10:45:39 +0000</td>
1597 <td class="comment">MiniSSDPd source code</td>
1598 <td></td>
1599</tr>
1600<tr>
1601 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120502.tar.gz'>miniupnpd-1.6.20120502.tar.gz</a></td>
1602 <td class="filesize">136688</td>
1603 <td class="filedate">01/05/2012 22:51:18 +0000</td>
1604 <td class="comment">MiniUPnP daemon source code</td>
1605 <td></td>
1606</tr>
1607<tr>
1608 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120502.tar.gz'>miniupnpc-1.6.20120502.tar.gz</a></td>
1609 <td class="filesize">68170</td>
1610 <td class="filedate">01/05/2012 22:51:11 +0000</td>
1611 <td class="comment">MiniUPnP client source code</td>
1612 <td></td>
1613</tr>
1614<tr>
1615 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120426.tar.gz'>miniupnpd-1.6.20120426.tar.gz</a></td>
1616 <td class="filesize">134764</td>
1617 <td class="filedate">26/04/2012 16:24:29 +0000</td>
1618 <td class="comment">MiniUPnP daemon source code</td>
1619 <td></td>
1620</tr>
1621<tr>
1622 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120424.tar.gz'>miniupnpd-1.6.20120424.tar.gz</a></td>
1623 <td class="filesize">132522</td>
1624 <td class="filedate">23/04/2012 22:43:17 +0000</td>
1625 <td class="comment">MiniUPnP daemon source code</td>
1626 <td></td>
1627</tr>
1628<tr>
1629 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120424.tar.gz'>miniupnpc-1.6.20120424.tar.gz</a></td>
1630 <td class="filesize">68067</td>
1631 <td class="filedate">23/04/2012 22:43:10 +0000</td>
1632 <td class="comment">MiniUPnP client source code</td>
1633 <td></td>
1634</tr>
1635<tr>
1636 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120420.tar.gz'>miniupnpd-1.6.20120420.tar.gz</a></td>
1637 <td class="filesize">131972</td>
1638 <td class="filedate">20/04/2012 14:58:57 +0000</td>
1639 <td class="comment">MiniUPnP daemon source code</td>
1640 <td></td>
1641</tr>
1642<tr>
1643 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120420.tar.gz'>miniupnpc-1.6.20120420.tar.gz</a></td>
1644 <td class="filesize">68068</td>
1645 <td class="filedate">20/04/2012 14:58:39 +0000</td>
1646 <td class="comment">MiniUPnP client source code</td>
1647 <td></td>
1648</tr>
1649<tr>
1650 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120419.tar.gz'>miniupnpd-1.6.20120419.tar.gz</a></td>
1651 <td class="filesize">131088</td>
1652 <td class="filedate">18/04/2012 23:41:36 +0000</td>
1653 <td class="comment">MiniUPnP daemon source code</td>
1654 <td></td>
1655</tr>
1656<tr>
1657 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120418.tar.gz'>miniupnpd-1.6.20120418.tar.gz</a></td>
1658 <td class="filesize">130879</td>
1659 <td class="filedate">18/04/2012 21:01:10 +0000</td>
1660 <td class="comment">MiniUPnP daemon source code</td>
1661 <td></td>
1662</tr>
1663<tr>
1664 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120410.tar.gz'>minissdpd-1.1.20120410.tar.gz</a></td>
1665 <td class="filesize">18059</td>
1666 <td class="filedate">09/04/2012 22:45:38 +0000</td>
1667 <td class="comment">MiniSSDPd source code</td>
1668 <td></td>
1669</tr>
1670<tr>
1671 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120410.tar.gz'>miniupnpc-1.6.20120410.tar.gz</a></td>
1672 <td class="filesize">67934</td>
1673 <td class="filedate">09/04/2012 22:45:10 +0000</td>
1674 <td class="comment">MiniUPnP client source code</td>
1675 <td></td>
1676</tr>
1677<tr>
1678 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120406.tar.gz'>miniupnpd-1.6.20120406.tar.gz</a></td>
1679 <td class="filesize">128992</td>
1680 <td class="filedate">06/04/2012 17:52:57 +0000</td>
1681 <td class="comment">MiniUPnP daemon source code</td>
1682 <td></td>
1683</tr>
1684<tr>
1685 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120320.tar.gz'>miniupnpc-1.6.20120320.tar.gz</a></td>
1686 <td class="filesize">67374</td>
1687 <td class="filedate">20/03/2012 16:55:48 +0000</td>
1688 <td class="comment">MiniUPnP client source code</td>
1689 <td></td>
1690</tr>
1691<tr>
1692 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120320.tar.gz'>miniupnpd-1.6.20120320.tar.gz</a></td>
1693 <td class="filesize">127968</td>
1694 <td class="filedate">20/03/2012 16:46:07 +0000</td>
1695 <td class="comment">MiniUPnP daemon source code</td>
1696 <td></td>
1697</tr>
1698<tr>
1699 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120305.tar.gz'>miniupnpd-1.6.20120305.tar.gz</a></td>
1700 <td class="filesize">126985</td>
1701 <td class="filedate">05/03/2012 20:42:01 +0000</td>
1702 <td class="comment">MiniUPnP daemon source code</td>
1703 <td></td>
1704</tr>
1705<tr>
1706 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120207.tar.gz'>miniupnpd-1.6.20120207.tar.gz</a></td>
1707 <td class="filesize">127425</td>
1708 <td class="filedate">07/02/2012 10:21:16 +0000</td>
1709 <td class="comment">MiniUPnP daemon source code</td>
1710 <td></td>
1711</tr>
1712<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120203.tar.gz'>miniupnpd-1.6.20120203.tar.gz</a></td>
 <td class="filesize">126599</td>
 <td class="filedate">03/02/2012 15:14:13 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120125.tar.gz'>miniupnpc-1.6.20120125.tar.gz</a></td>
 <td class="filesize">67354</td>
 <td class="filedate">25/01/2012 21:12:28 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120121.tar.gz'>miniupnpc-1.6.20120121.tar.gz</a></td>
 <td class="filesize">67347</td>
 <td class="filedate">21/01/2012 14:07:41 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120121.tar.gz'>miniupnpd-1.6.20120121.tar.gz</a></td>
 <td class="filesize">126021</td>
 <td class="filedate">21/01/2012 14:07:33 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120121.tar.gz'>minissdpd-1.1.20120121.tar.gz</a></td>
 <td class="filesize">17762</td>
 <td class="filedate">21/01/2012 14:07:16 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20120121.zip'>upnpc-exe-win32-20120121.zip</a></td>
 <td class="filesize">94575</td>
 <td class="filedate">21/01/2012 13:59:11 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20111212.zip'>upnpc-exe-win32-20111212.zip</a></td>
 <td class="filesize">94507</td>
 <td class="filedate">12/12/2011 12:33:48 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20111118.tar.gz'>miniupnpd-1.6.20111118.tar.gz</a></td>
 <td class="filesize">125683</td>
 <td class="filedate">18/11/2011 11:26:12 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.1.20111007.tar.gz'>minissdpd-1.1.20111007.tar.gz</a></td>
 <td class="filesize">17611</td>
 <td class="filedate">07/10/2011 09:47:51 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
 <td class="filesize">10329</td>
 <td class="filedate">11/08/2011 15:18:25 +0000</td>
 <td class="comment">Patch to add UPnP capabilities to xchat</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=xchat-upnp20110811-2.8.8.patch'>xchat-upnp20110811-2.8.8.patch</a></td>
 <td class="filesize">11529</td>
 <td class="filedate">11/08/2011 15:18:23 +0000</td>
 <td class="comment">Patch to add UPnP capabilities to xchat</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20110808.tar.gz'>libnatpmp-20110808.tar.gz</a></td>
 <td class="filesize">17762</td>
 <td class="filedate">08/08/2011 21:21:34 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20110730.tar.gz'>libnatpmp-20110730.tar.gz</a></td>
 <td class="filesize">17687</td>
 <td class="filedate">30/07/2011 13:19:31 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.1.tar.gz'>minissdpd-1.1.tar.gz</a></td>
 <td class="filesize">17481</td>
 <td class="filedate">30/07/2011 13:17:30 +0000</td>
 <td class="comment">MiniSSDPd release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20110730.tar.gz'>miniupnpd-1.6.20110730.tar.gz</a></td>
 <td class="filesize">125583</td>
 <td class="filedate">30/07/2011 13:17:09 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110729.tar.gz'>minissdpd-1.0.20110729.tar.gz</a></td>
 <td class="filesize">15898</td>
 <td class="filedate">29/07/2011 08:47:26 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.6.tar.gz'>miniupnpc-1.6.tar.gz</a></td>
 <td class="filesize">66454</td>
 <td class="filedate">25/07/2011 18:03:09 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.6.tar.gz'>miniupnpd-1.6.tar.gz</a></td>
 <td class="filesize">124917</td>
 <td class="filedate">25/07/2011 16:37:57 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
 <td class="filesize">7598</td>
 <td class="filedate">25/07/2011 14:57:50 +0000</td>
 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20110715.tar.gz'>libnatpmp-20110715.tar.gz</a></td>
 <td class="filesize">17943</td>
 <td class="filedate">15/07/2011 08:31:40 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110715.tar.gz'>miniupnpd-1.5.20110715.tar.gz</a></td>
 <td class="filesize">124519</td>
 <td class="filedate">15/07/2011 07:55:17 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110714.zip'>upnpc-exe-win32-20110714.zip</a></td>
 <td class="filesize">94236</td>
 <td class="filedate">13/07/2011 23:16:01 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110623.tar.gz'>miniupnpd-1.5.20110623.tar.gz</a></td>
 <td class="filesize">123529</td>
 <td class="filedate">22/06/2011 22:29:15 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110620.tar.gz'>miniupnpd-1.5.20110620.tar.gz</a></td>
 <td class="filesize">123221</td>
 <td class="filedate">20/06/2011 14:11:11 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110618.tar.gz'>miniupnpd-1.5.20110618.tar.gz</a></td>
 <td class="filesize">123176</td>
 <td class="filedate">17/06/2011 23:29:18 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110618.tar.gz'>miniupnpc-1.5.20110618.tar.gz</a></td>
 <td class="filesize">66401</td>
 <td class="filedate">17/06/2011 23:29:17 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20110618.tar.gz'>libnatpmp-20110618.tar.gz</a></td>
 <td class="filesize">17901</td>
 <td class="filedate">17/06/2011 23:29:16 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110618.tar.gz'>minissdpd-1.0.20110618.tar.gz</a></td>
 <td class="filesize">15193</td>
 <td class="filedate">17/06/2011 23:29:16 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename" colspan="2"><a href='download.php?file=minidlna_cvs20110529_minissdp1.patch'>minidlna_cvs20110529_minissdp1.patch</a></td>
 <td class="filedate">29/05/2011 21:19:09 +0000</td>
 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110528.tar.gz'>miniupnpd-1.5.20110528.tar.gz</a></td>
 <td class="filesize">121985</td>
 <td class="filedate">28/05/2011 09:39:04 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minidlna_1.0.19_minissdp1.patch'>minidlna_1.0.19_minissdp1.patch</a></td>
 <td class="filesize">9080</td>
 <td class="filedate">27/05/2011 09:55:04 +0000</td>
 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110527.tar.gz'>miniupnpd-1.5.20110527.tar.gz</a></td>
 <td class="filesize">120896</td>
 <td class="filedate">27/05/2011 08:28:35 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110527.tar.gz'>miniupnpc-1.5.20110527.tar.gz</a></td>
 <td class="filesize">66279</td>
 <td class="filedate">27/05/2011 08:28:34 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20110527.tar.gz'>libnatpmp-20110527.tar.gz</a></td>
 <td class="filesize">17627</td>
 <td class="filedate">27/05/2011 08:28:33 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110523.tar.gz'>minissdpd-1.0.20110523.tar.gz</a></td>
 <td class="filesize">15024</td>
 <td class="filedate">23/05/2011 12:55:31 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110520.tar.gz'>miniupnpd-1.5.20110520.tar.gz</a></td>
 <td class="filesize">119227</td>
 <td class="filedate">20/05/2011 18:00:41 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110519.tar.gz'>miniupnpd-1.5.20110519.tar.gz</a></td>
 <td class="filesize">114735</td>
 <td class="filedate">18/05/2011 22:29:06 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110516.tar.gz'>miniupnpd-1.5.20110516.tar.gz</a></td>
 <td class="filesize">113348</td>
 <td class="filedate">16/05/2011 09:32:51 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110515.tar.gz'>miniupnpd-1.5.20110515.tar.gz</a></td>
 <td class="filesize">113135</td>
 <td class="filedate">15/05/2011 21:51:29 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110515.tar.gz'>miniupnpc-1.5.20110515.tar.gz</a></td>
 <td class="filesize">66112</td>
 <td class="filedate">15/05/2011 21:51:28 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110513.tar.gz'>miniupnpd-1.5.20110513.tar.gz</a></td>
 <td class="filesize">111029</td>
 <td class="filedate">13/05/2011 14:03:12 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110506.tar.gz'>miniupnpc-1.5.20110506.tar.gz</a></td>
 <td class="filesize">65536</td>
 <td class="filedate">06/05/2011 16:35:38 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4-v6.20100505.zip'>miniupnpc-1.4-v6.20100505.zip</a></td>
 <td class="filesize">91833</td>
 <td class="filedate">18/04/2011 20:14:11 +0000</td>
 <td class="comment"></td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.4-v6.20100823.zip'>miniupnpd-1.4-v6.20100823.zip</a></td>
 <td class="filesize">222235</td>
 <td class="filedate">18/04/2011 20:14:07 +0000</td>
 <td class="comment"></td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110418.tar.gz'>miniupnpc-1.5.20110418.tar.gz</a></td>
 <td class="filesize">61820</td>
 <td class="filedate">18/04/2011 20:09:22 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110418.zip'>upnpc-exe-win32-20110418.zip</a></td>
 <td class="filesize">94183</td>
 <td class="filedate">18/04/2011 17:53:26 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110314.tar.gz'>miniupnpc-1.5.20110314.tar.gz</a></td>
 <td class="filesize">57210</td>
 <td class="filedate">14/03/2011 14:27:29 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110309.tar.gz'>miniupnpd-1.5.20110309.tar.gz</a></td>
 <td class="filesize">100073</td>
 <td class="filedate">09/03/2011 15:36:12 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110302.tar.gz'>miniupnpd-1.5.20110302.tar.gz</a></td>
 <td class="filesize">100756</td>
 <td class="filedate">02/03/2011 16:17:44 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110221.tar.gz'>miniupnpd-1.5.20110221.tar.gz</a></td>
 <td class="filesize">100092</td>
 <td class="filedate">20/02/2011 23:48:17 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110215.zip'>upnpc-exe-win32-20110215.zip</a></td>
 <td class="filesize">55409</td>
 <td class="filedate">15/02/2011 23:05:00 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110215.tar.gz'>miniupnpc-1.5.20110215.tar.gz</a></td>
 <td class="filesize">54880</td>
 <td class="filedate">15/02/2011 11:16:04 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110214.tar.gz'>miniupnpd-1.5.20110214.tar.gz</a></td>
 <td class="filesize">99629</td>
 <td class="filedate">14/02/2011 18:00:43 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minidlna_1.0.18_minissdp1.patch'>minidlna_1.0.18_minissdp1.patch</a></td>
 <td class="filesize">9747</td>
 <td class="filedate">02/02/2011 15:12:19 +0000</td>
 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110127.tar.gz'>miniupnpd-1.5.20110127.tar.gz</a></td>
 <td class="filesize">97421</td>
 <td class="filedate">27/01/2011 17:51:25 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.5.tar.gz'>miniupnpd-1.5.tar.gz</a></td>
 <td class="filesize">98993</td>
 <td class="filedate">04/01/2011 09:45:10 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.5.tar.gz'>miniupnpc-1.5.tar.gz</a></td>
 <td class="filesize">53309</td>
 <td class="filedate">04/01/2011 09:45:06 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20110103.tar.gz'>libnatpmp-20110103.tar.gz</a></td>
 <td class="filesize">17529</td>
 <td class="filedate">03/01/2011 17:33:16 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101221.tar.gz'>miniupnpc-1.4.20101221.tar.gz</a></td>
 <td class="filesize">52342</td>
 <td class="filedate">21/12/2010 16:15:38 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20101213.zip'>upnpc-exe-win32-20101213.zip</a></td>
 <td class="filesize">52359</td>
 <td class="filedate">12/12/2010 23:44:01 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20101211.tar.gz'>libnatpmp-20101211.tar.gz</a></td>
 <td class="filesize">17324</td>
 <td class="filedate">11/12/2010 17:20:36 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101209.tar.gz'>miniupnpc-1.4.20101209.tar.gz</a></td>
 <td class="filesize">51900</td>
 <td class="filedate">09/12/2010 16:17:30 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100921.tar.gz'>miniupnpd-1.4.20100921.tar.gz</a></td>
 <td class="filesize">95483</td>
 <td class="filedate">21/09/2010 15:50:00 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100825.zip'>upnpc-exe-win32-20100825.zip</a></td>
 <td class="filesize">50636</td>
 <td class="filedate">25/08/2010 08:42:59 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100609.tar.gz'>miniupnpc-1.4.20100609.tar.gz</a></td>
 <td class="filesize">50390</td>
 <td class="filedate">09/06/2010 11:03:11 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100513.zip'>upnpc-exe-win32-20100513.zip</a></td>
 <td class="filesize">50950</td>
 <td class="filedate">13/05/2010 16:54:33 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100511.tar.gz'>miniupnpd-1.4.20100511.tar.gz</a></td>
 <td class="filesize">93281</td>
 <td class="filedate">11/05/2010 16:22:33 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100418.zip'>upnpc-exe-win32-20100418.zip</a></td>
 <td class="filesize">40758</td>
 <td class="filedate">17/04/2010 23:00:37 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100418.tar.gz'>miniupnpc-1.4.20100418.tar.gz</a></td>
 <td class="filesize">50245</td>
 <td class="filedate">17/04/2010 22:18:31 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100412.tar.gz'>miniupnpc-1.4.20100412.tar.gz</a></td>
 <td class="filesize">50145</td>
 <td class="filedate">12/04/2010 20:42:53 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100407.tar.gz'>miniupnpc-1.4.20100407.tar.gz</a></td>
 <td class="filesize">49756</td>
 <td class="filedate">07/04/2010 10:05:08 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100405.tar.gz'>miniupnpc-1.4.20100405.tar.gz</a></td>
 <td class="filesize">49549</td>
 <td class="filedate">05/04/2010 14:34:38 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100308.tar.gz'>miniupnpd-1.4.20100308.tar.gz</a></td>
 <td class="filesize">92889</td>
 <td class="filedate">08/03/2010 17:18:00 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20100202.tar.gz'>libnatpmp-20100202.tar.gz</a></td>
 <td class="filesize">17231</td>
 <td class="filedate">02/02/2010 18:41:13 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100202.tar.gz'>miniupnpc-1.4.20100202.tar.gz</a></td>
 <td class="filesize">46710</td>
 <td class="filedate">02/02/2010 18:41:13 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100106.tar.gz'>miniupnpc-1.4.20100106.tar.gz</a></td>
 <td class="filesize">46659</td>
 <td class="filedate">06/01/2010 10:08:21 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091222.tar.gz'>miniupnpd-1.4.20091222.tar.gz</a></td>
 <td class="filesize">90993</td>
 <td class="filedate">22/12/2009 17:23:48 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20091219.tar.gz'>libnatpmp-20091219.tar.gz</a></td>
 <td class="filesize">16839</td>
 <td class="filedate">19/12/2009 14:35:22 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091213.tar.gz'>miniupnpc-1.4.20091213.tar.gz</a></td>
 <td class="filesize">46510</td>
 <td class="filedate">12/12/2009 23:05:40 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091211.tar.gz'>miniupnpc-1.4.20091211.tar.gz</a></td>
 <td class="filesize">45852</td>
 <td class="filedate">11/12/2009 16:43:01 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20091210.zip'>upnpc-exe-win32-20091210.zip</a></td>
 <td class="filesize">38666</td>
 <td class="filedate">10/12/2009 18:50:27 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091208.tar.gz'>miniupnpc-1.4.20091208.tar.gz</a></td>
 <td class="filesize">43392</td>
 <td class="filedate">08/12/2009 10:58:26 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091203.tar.gz'>miniupnpc-1.4.20091203.tar.gz</a></td>
 <td class="filesize">42040</td>
 <td class="filedate">03/12/2009 13:56:28 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091106.tar.gz'>miniupnpd-1.4.20091106.tar.gz</a></td>
 <td class="filesize">90787</td>
 <td class="filedate">06/11/2009 21:18:50 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.4.tar.gz'>miniupnpd-1.4.tar.gz</a></td>
 <td class="filesize">90071</td>
 <td class="filedate">30/10/2009 09:20:05 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.4.tar.gz'>miniupnpc-1.4.tar.gz</a></td>
 <td class="filesize">41790</td>
 <td class="filedate">30/10/2009 09:20:04 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20091016.tar.gz'>miniupnpc-20091016.tar.gz</a></td>
 <td class="filesize">41792</td>
 <td class="filedate">16/10/2009 09:04:35 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20091010.tar.gz'>miniupnpd-20091010.tar.gz</a></td>
 <td class="filesize">90043</td>
 <td class="filedate">10/10/2009 19:21:30 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20091010.tar.gz'>miniupnpc-20091010.tar.gz</a></td>
 <td class="filesize">41671</td>
 <td class="filedate">10/10/2009 19:21:28 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090921.tar.gz'>miniupnpd-20090921.tar.gz</a></td>
 <td class="filesize">89476</td>
 <td class="filedate">21/09/2009 13:00:04 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20090921.tar.gz'>miniupnpc-20090921.tar.gz</a></td>
 <td class="filesize">41630</td>
 <td class="filedate">21/09/2009 13:00:03 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090904.tar.gz'>miniupnpd-20090904.tar.gz</a></td>
 <td class="filesize">89344</td>
 <td class="filedate">04/09/2009 16:24:26 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090820.tar.gz'>miniupnpd-20090820.tar.gz</a></td>
 <td class="filesize">89149</td>
 <td class="filedate">20/08/2009 09:35:58 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20090807.tar.gz'>miniupnpc-20090807.tar.gz</a></td>
 <td class="filesize">41288</td>
 <td class="filedate">07/08/2009 14:46:11 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20090729.tar.gz'>miniupnpc-20090729.tar.gz</a></td>
 <td class="filesize">40588</td>
 <td class="filedate">29/07/2009 08:47:43 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=xchat-upnp20061022.patch'>xchat-upnp20061022.patch</a></td>
 <td class="filesize">10258</td>
 <td class="filedate">17/07/2009 15:49:46 +0000</td>
 <td class="comment">Patch to add UPnP capabilities to xchat</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20090713.tar.gz'>miniupnpc-20090713.tar.gz</a></td>
 <td class="filesize">40206</td>
 <td class="filedate">13/07/2009 08:53:49 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20090713.tar.gz'>libnatpmp-20090713.tar.gz</a></td>
 <td class="filesize">14262</td>
 <td class="filedate">13/07/2009 08:53:49 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090605.tar.gz'>miniupnpd-20090605.tar.gz</a></td>
 <td class="filesize">83774</td>
 <td class="filedate">04/06/2009 23:32:20 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20090605.tar.gz'>miniupnpc-20090605.tar.gz</a></td>
 <td class="filesize">40077</td>
 <td class="filedate">04/06/2009 23:32:16 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20090605.tar.gz'>libnatpmp-20090605.tar.gz</a></td>
 <td class="filesize">13817</td>
 <td class="filedate">04/06/2009 23:32:15 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090516.tar.gz'>miniupnpd-20090516.tar.gz</a></td>
 <td class="filesize">83689</td>
 <td class="filedate">16/05/2009 08:47:31 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.3.tar.gz'>miniupnpc-1.3.tar.gz</a></td>
 <td class="filesize">40058</td>
 <td class="filedate">17/04/2009 21:27:55 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.3.tar.gz'>miniupnpd-1.3.tar.gz</a></td>
 <td class="filesize">83464</td>
 <td class="filedate">17/04/2009 20:11:21 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20090310.tar.gz'>libnatpmp-20090310.tar.gz</a></td>
 <td class="filesize">11847</td>
 <td class="filedate">10/03/2009 10:19:45 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090214.tar.gz'>miniupnpd-20090214.tar.gz</a></td>
 <td class="filesize">82921</td>
 <td class="filedate">14/02/2009 11:27:03 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090213.tar.gz'>miniupnpd-20090213.tar.gz</a></td>
 <td class="filesize">82594</td>
 <td class="filedate">13/02/2009 19:48:01 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20090129.tar.gz'>libnatpmp-20090129.tar.gz</a></td>
 <td class="filesize">11748</td>
 <td class="filedate">29/01/2009 21:50:31 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20090129.tar.gz'>miniupnpc-20090129.tar.gz</a></td>
 <td class="filesize">39976</td>
 <td class="filedate">29/01/2009 21:50:30 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20090129.tar.gz'>miniupnpd-20090129.tar.gz</a></td>
 <td class="filesize">82487</td>
 <td class="filedate">29/01/2009 21:50:27 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20081009.tar.gz'>miniupnpd-20081009.tar.gz</a></td>
 <td class="filesize">81732</td>
 <td class="filedate">09/10/2008 12:53:02 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.0.tar.gz'>minissdpd-1.0.tar.gz</a></td>
 <td class="filesize">12996</td>
 <td class="filedate">07/10/2008 14:03:49 +0000</td>
 <td class="comment">MiniSSDPd release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.2.tar.gz'>miniupnpc-1.2.tar.gz</a></td>
 <td class="filesize">38787</td>
 <td class="filedate">07/10/2008 14:03:47 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.2.tar.gz'>miniupnpd-1.2.tar.gz</a></td>
 <td class="filesize">81025</td>
 <td class="filedate">07/10/2008 14:03:45 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20081006.tar.gz'>miniupnpd-20081006.tar.gz</a></td>
 <td class="filesize">80510</td>
 <td class="filedate">06/10/2008 15:50:34 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-20081006.tar.gz'>minissdpd-20081006.tar.gz</a></td>
 <td class="filesize">12230</td>
 <td class="filedate">06/10/2008 15:50:33 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20081006.tar.gz'>libnatpmp-20081006.tar.gz</a></td>
 <td class="filesize">11710</td>
 <td class="filedate">06/10/2008 15:50:31 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename" colspan="2"><a href='download.php?file=mediatomb_minissdp-20081006.patch'>mediatomb_minissdp-20081006.patch</a></td>
 <td class="filedate">06/10/2008 15:48:18 +0000</td>
 <td class="comment"></td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20081002.tar.gz'>miniupnpc-20081002.tar.gz</a></td>
 <td class="filesize">38291</td>
 <td class="filedate">02/10/2008 09:20:18 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20081001.tar.gz'>miniupnpd-20081001.tar.gz</a></td>
 <td class="filesize">79696</td>
 <td class="filedate">01/10/2008 13:11:20 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080925.zip'>upnpc-exe-win32-20080925.zip</a></td>
 <td class="filesize">36602</td>
 <td class="filedate">25/09/2008 06:59:33 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20080710.tar.gz'>miniupnpd-20080710.tar.gz</a></td>
 <td class="filesize">78898</td>
 <td class="filedate">10/07/2008 09:38:41 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20080707.tar.gz'>libnatpmp-20080707.tar.gz</a></td>
 <td class="filesize">11679</td>
 <td class="filedate">06/07/2008 22:05:23 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.1.tar.gz'>miniupnpc-1.1.tar.gz</a></td>
 <td class="filesize">38235</td>
 <td class="filedate">04/07/2008 16:45:24 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20080703.tar.gz'>miniupnpc-20080703.tar.gz</a></td>
 <td class="filesize">38204</td>
 <td class="filedate">03/07/2008 15:47:37 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20080703.tar.gz'>libnatpmp-20080703.tar.gz</a></td>
 <td class="filesize">11570</td>
 <td class="filedate">03/07/2008 15:47:25 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080703.zip'>upnpc-exe-win32-20080703.zip</a></td>
 <td class="filesize">36137</td>
 <td class="filedate">02/07/2008 23:35:14 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20080702.tar.gz'>libnatpmp-20080702.tar.gz</a></td>
 <td class="filesize">8873</td>
 <td class="filedate">02/07/2008 17:32:35 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20080630.tar.gz'>libnatpmp-20080630.tar.gz</a></td>
 <td class="filesize">8864</td>
 <td class="filedate">30/06/2008 14:20:16 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20080529.tar.gz'>libnatpmp-20080529.tar.gz</a></td>
 <td class="filesize">7397</td>
 <td class="filedate">29/05/2008 09:06:25 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080514.zip'>upnpc-exe-win32-20080514.zip</a></td>
 <td class="filesize">14227</td>
 <td class="filedate">14/05/2008 20:23:19 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20080428.tar.gz'>libnatpmp-20080428.tar.gz</a></td>
 <td class="filesize">7295</td>
 <td class="filedate">28/04/2008 03:09:14 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20080427.tar.gz'>miniupnpd-20080427.tar.gz</a></td>
 <td class="filesize">78765</td>
 <td class="filedate">27/04/2008 18:16:36 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20080427.tar.gz'>miniupnpc-20080427.tar.gz</a></td>
 <td class="filesize">37610</td>
 <td class="filedate">27/04/2008 18:16:35 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.1.tar.gz'>miniupnpd-1.1.tar.gz</a></td>
 <td class="filesize">78594</td>
 <td class="filedate">25/04/2008 17:38:05 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20080423.tar.gz'>miniupnpc-20080423.tar.gz</a></td>
 <td class="filesize">36818</td>
 <td class="filedate">23/04/2008 11:57:36 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20080308.tar.gz'>miniupnpd-20080308.tar.gz</a></td>
 <td class="filesize">75679</td>
 <td class="filedate">08/03/2008 11:13:29 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20080303.tar.gz'>miniupnpd-20080303.tar.gz</a></td>
 <td class="filesize">74202</td>
 <td class="filedate">03/03/2008 01:43:16 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20080224.tar.gz'>miniupnpd-20080224.tar.gz</a></td>
 <td class="filesize">72773</td>
 <td class="filedate">24/02/2008 11:23:17 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0.tar.gz'>miniupnpc-1.0.tar.gz</a></td>
 <td class="filesize">36223</td>
 <td class="filedate">21/02/2008 13:26:46 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20080221.tar.gz'>miniupnpd-20080221.tar.gz</a></td>
 <td class="filesize">70823</td>
 <td class="filedate">21/02/2008 10:23:46 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20080217.tar.gz'>miniupnpc-20080217.tar.gz</a></td>
 <td class="filesize">35243</td>
 <td class="filedate">16/02/2008 23:47:59 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20080207.tar.gz'>miniupnpd-20080207.tar.gz</a></td>
 <td class="filesize">70647</td>
 <td class="filedate">07/02/2008 21:21:00 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20080203.tar.gz'>miniupnpc-20080203.tar.gz</a></td>
 <td class="filesize">34921</td>
 <td class="filedate">03/02/2008 22:28:11 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0.tar.gz'>miniupnpd-1.0.tar.gz</a></td>
 <td class="filesize">69427</td>
 <td class="filedate">27/01/2008 22:41:25 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080118.zip'>upnpc-exe-win32-20080118.zip</a></td>
 <td class="filesize">13582</td>
 <td class="filedate">18/01/2008 11:42:16 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC13.tar.gz'>miniupnpd-1.0-RC13.tar.gz</a></td>
 <td class="filesize">67892</td>
 <td class="filedate">03/01/2008 16:50:21 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC13.tar.gz'>miniupnpc-1.0-RC13.tar.gz</a></td>
 <td class="filesize">34820</td>
 <td class="filedate">03/01/2008 16:50:20 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20071220.tar.gz'>miniupnpd-20071220.tar.gz</a></td>
 <td class="filesize">67211</td>
 <td class="filedate">20/12/2007 12:08:34 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20071219.tar.gz'>miniupnpc-20071219.tar.gz</a></td>
 <td class="filesize">34290</td>
 <td class="filedate">19/12/2007 18:31:47 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC12.tar.gz'>minissdpd-1.0-RC12.tar.gz</a></td>
 <td class="filesize">9956</td>
 <td class="filedate">19/12/2007 18:30:12 +0000</td>
 <td class="comment">MiniSSDPd release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC12.tar.gz'>miniupnpd-1.0-RC12.tar.gz</a></td>
 <td class="filesize">66911</td>
 <td class="filedate">14/12/2007 17:39:20 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC12.tar.gz'>miniupnpc-1.0-RC12.tar.gz</a></td>
 <td class="filesize">32543</td>
 <td class="filedate">14/12/2007 17:39:19 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20071213.tar.gz'>miniupnpc-20071213.tar.gz</a></td>
 <td class="filesize">32541</td>
 <td class="filedate">13/12/2007 17:09:51 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20071213.tar.gz'>miniupnpd-20071213.tar.gz</a></td>
 <td class="filesize">66826</td>
 <td class="filedate">13/12/2007 16:42:50 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20071213.tar.gz'>libnatpmp-20071213.tar.gz</a></td>
 <td class="filesize">5997</td>
 <td class="filedate">13/12/2007 14:56:30 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=libnatpmp-20071202.tar.gz'>libnatpmp-20071202.tar.gz</a></td>
 <td class="filesize">5664</td>
 <td class="filedate">02/12/2007 00:15:28 +0000</td>
 <td class="comment">libnatpmp source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20071103.tar.gz'>miniupnpd-20071103.tar.gz</a></td>
 <td class="filesize">65740</td>
 <td class="filedate">02/11/2007 23:58:38 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20071102.tar.gz'>miniupnpd-20071102.tar.gz</a></td>
 <td class="filesize">65733</td>
 <td class="filedate">02/11/2007 23:05:44 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20071103.tar.gz'>miniupnpc-20071103.tar.gz</a></td>
 <td class="filesize">32239</td>
 <td class="filedate">02/11/2007 23:05:34 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC11.tar.gz'>miniupnpd-1.0-RC11.tar.gz</a></td>
 <td class="filesize">64828</td>
 <td class="filedate">25/10/2007 13:27:18 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC11.tar.gz'>miniupnpc-1.0-RC11.tar.gz</a></td>
 <td class="filesize">32161</td>
 <td class="filedate">25/10/2007 13:27:17 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071025.zip'>upnpc-exe-win32-20071025.zip</a></td>
 <td class="filesize">12809</td>
 <td class="filedate">24/10/2007 23:15:55 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC10.tar.gz'>miniupnpd-1.0-RC10.tar.gz</a></td>
 <td class="filesize">62674</td>
 <td class="filedate">12/10/2007 08:38:33 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC10.tar.gz'>miniupnpc-1.0-RC10.tar.gz</a></td>
 <td class="filesize">31962</td>
 <td class="filedate">12/10/2007 08:38:31 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC10.tar.gz'>minissdpd-1.0-RC10.tar.gz</a></td>
 <td class="filesize">9517</td>
 <td class="filedate">12/10/2007 08:38:30 +0000</td>
 <td class="comment">MiniSSDPd release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20071003.tar.gz'>miniupnpc-20071003.tar.gz</a></td>
 <td class="filesize">31199</td>
 <td class="filedate">03/10/2007 15:30:13 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071001.zip'>upnpc-exe-win32-20071001.zip</a></td>
 <td class="filesize">12604</td>
 <td class="filedate">01/10/2007 17:09:22 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC9.tar.gz'>miniupnpd-1.0-RC9.tar.gz</a></td>
 <td class="filesize">54778</td>
 <td class="filedate">27/09/2007 19:38:36 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC9.tar.gz'>minissdpd-1.0-RC9.tar.gz</a></td>
 <td class="filesize">9163</td>
 <td class="filedate">27/09/2007 17:00:03 +0000</td>
 <td class="comment">MiniSSDPd release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC9.tar.gz'>miniupnpc-1.0-RC9.tar.gz</a></td>
 <td class="filesize">30538</td>
 <td class="filedate">27/09/2007 17:00:03 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070924.tar.gz'>miniupnpd-20070924.tar.gz</a></td>
 <td class="filesize">52338</td>
 <td class="filedate">24/09/2007 20:26:05 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070923.tar.gz'>miniupnpd-20070923.tar.gz</a></td>
 <td class="filesize">51060</td>
 <td class="filedate">23/09/2007 21:13:34 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20070923.tar.gz'>miniupnpc-20070923.tar.gz</a></td>
 <td class="filesize">30246</td>
 <td class="filedate">23/09/2007 21:13:33 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-20070923.tar.gz'>minissdpd-20070923.tar.gz</a></td>
 <td class="filesize">8978</td>
 <td class="filedate">23/09/2007 21:13:32 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20070902.tar.gz'>miniupnpc-20070902.tar.gz</a></td>
 <td class="filesize">30205</td>
 <td class="filedate">01/09/2007 23:47:23 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=minissdpd-20070902.tar.gz'>minissdpd-20070902.tar.gz</a></td>
 <td class="filesize">6539</td>
 <td class="filedate">01/09/2007 23:47:20 +0000</td>
 <td class="comment">MiniSSDPd source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC8.tar.gz'>miniupnpd-1.0-RC8.tar.gz</a></td>
 <td class="filesize">50952</td>
 <td class="filedate">29/08/2007 10:56:09 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC8.tar.gz'>miniupnpc-1.0-RC8.tar.gz</a></td>
 <td class="filesize">29312</td>
 <td class="filedate">29/08/2007 10:56:08 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC7.tar.gz'>miniupnpd-1.0-RC7.tar.gz</a></td>
 <td class="filesize">50613</td>
 <td class="filedate">20/07/2007 00:15:45 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC6.tar.gz'>miniupnpd-1.0-RC6.tar.gz</a></td>
 <td class="filesize">49986</td>
 <td class="filedate">12/06/2007 17:12:07 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC6.tar.gz'>miniupnpc-1.0-RC6.tar.gz</a></td>
 <td class="filesize">29032</td>
 <td class="filedate">12/06/2007 17:12:06 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070607.tar.gz'>miniupnpd-20070607.tar.gz</a></td>
 <td class="filesize">49768</td>
 <td class="filedate">06/06/2007 23:12:00 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070605.tar.gz'>miniupnpd-20070605.tar.gz</a></td>
 <td class="filesize">49710</td>
 <td class="filedate">05/06/2007 21:01:53 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070521.tar.gz'>miniupnpd-20070521.tar.gz</a></td>
 <td class="filesize">48374</td>
 <td class="filedate">21/05/2007 13:07:43 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20070519.zip'>upnpc-exe-win32-20070519.zip</a></td>
 <td class="filesize">10836</td>
 <td class="filedate">19/05/2007 13:14:15 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20070515.tar.gz'>miniupnpc-20070515.tar.gz</a></td>
 <td class="filesize">25802</td>
 <td class="filedate">15/05/2007 18:15:25 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC5.tar.gz'>miniupnpd-1.0-RC5.tar.gz</a></td>
 <td class="filesize">48064</td>
 <td class="filedate">10/05/2007 20:22:48 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC5.tar.gz'>miniupnpc-1.0-RC5.tar.gz</a></td>
 <td class="filesize">25242</td>
 <td class="filedate">10/05/2007 20:22:46 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070412.tar.gz'>miniupnpd-20070412.tar.gz</a></td>
 <td class="filesize">47807</td>
 <td class="filedate">12/04/2007 20:21:48 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC4.tar.gz'>miniupnpd-1.0-RC4.tar.gz</a></td>
 <td class="filesize">47687</td>
 <td class="filedate">17/03/2007 11:43:13 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC4.tar.gz'>miniupnpc-1.0-RC4.tar.gz</a></td>
 <td class="filesize">25085</td>
 <td class="filedate">17/03/2007 11:43:10 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070311.tar.gz'>miniupnpd-20070311.tar.gz</a></td>
 <td class="filesize">47599</td>
 <td class="filedate">11/03/2007 00:25:26 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070208.tar.gz'>miniupnpd-20070208.tar.gz</a></td>
 <td class="filesize">45084</td>
 <td class="filedate">07/02/2007 23:04:06 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC3.tar.gz'>miniupnpd-1.0-RC3.tar.gz</a></td>
 <td class="filesize">44683</td>
 <td class="filedate">30/01/2007 23:00:44 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC3.tar.gz'>miniupnpc-1.0-RC3.tar.gz</a></td>
 <td class="filesize">25055</td>
 <td class="filedate">30/01/2007 23:00:42 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070130.tar.gz'>miniupnpd-20070130.tar.gz</a></td>
 <td class="filesize">43735</td>
 <td class="filedate">29/01/2007 23:26:16 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20070130.tar.gz'>miniupnpc-20070130.tar.gz</a></td>
 <td class="filesize">24466</td>
 <td class="filedate">29/01/2007 23:26:13 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070127.tar.gz'>miniupnpd-20070127.tar.gz</a></td>
 <td class="filesize">42643</td>
 <td class="filedate">27/01/2007 16:02:35 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20070127.tar.gz'>miniupnpc-20070127.tar.gz</a></td>
 <td class="filesize">24241</td>
 <td class="filedate">27/01/2007 16:02:33 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC2.tar.gz'>miniupnpd-1.0-RC2.tar.gz</a></td>
 <td class="filesize">40424</td>
 <td class="filedate">17/01/2007 16:13:05 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070112.tar.gz'>miniupnpd-20070112.tar.gz</a></td>
 <td class="filesize">40708</td>
 <td class="filedate">12/01/2007 13:40:50 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070111.tar.gz'>miniupnpd-20070111.tar.gz</a></td>
 <td class="filesize">40651</td>
 <td class="filedate">11/01/2007 18:50:21 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070108.tar.gz'>miniupnpd-20070108.tar.gz</a></td>
 <td class="filesize">40025</td>
 <td class="filedate">08/01/2007 10:02:14 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20070103.tar.gz'>miniupnpd-20070103.tar.gz</a></td>
 <td class="filesize">40065</td>
 <td class="filedate">03/01/2007 14:39:11 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-20061214.tar.gz'>miniupnpc-20061214.tar.gz</a></td>
 <td class="filesize">24106</td>
 <td class="filedate">14/12/2006 15:43:54 +0000</td>
 <td class="comment">MiniUPnP client source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-20061214.tar.gz'>miniupnpd-20061214.tar.gz</a></td>
 <td class="filesize">39750</td>
 <td class="filedate">14/12/2006 13:44:51 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC1.tar.gz'>miniupnpd-1.0-RC1.tar.gz</a></td>
 <td class="filesize">39572</td>
 <td class="filedate">07/12/2006 10:55:31 +0000</td>
 <td class="comment">MiniUPnP daemon release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC1.tar.gz'>miniupnpc-1.0-RC1.tar.gz</a></td>
 <td class="filesize">23582</td>
 <td class="filedate">07/12/2006 10:55:30 +0000</td>
 <td class="comment">MiniUPnP client release source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061201.zip'>upnpc-exe-win32-20061201.zip</a></td>
 <td class="filesize">10378</td>
 <td class="filedate">01/12/2006 00:33:08 +0000</td>
 <td class="comment">Windows executable</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd20061130.tar.gz'>miniupnpd20061130.tar.gz</a></td>
 <td class="filesize">37184</td>
 <td class="filedate">30/11/2006 12:25:25 +0000</td>
 <td class="comment">MiniUPnP daemon source code</td>
 <td></td>
</tr>
<tr>
 <td class="filename"><a href='download.php?file=miniupnpd20061129.tar.gz'>miniupnpd20061129.tar.gz</a></td>
 <td class="filesize">36045</td>
 <td class="filedate">29/11/2006 00:10:49 +0000</td>
3191 <td class="comment">MiniUPnP daemon source code</td>
3192 <td></td>
3193</tr>
3194<tr>
3195 <td class="filename"><a href='download.php?file=miniupnpd20061127.tar.gz'>miniupnpd20061127.tar.gz</a></td>
3196 <td class="filesize">34155</td>
3197 <td class="filedate">26/11/2006 23:15:28 +0000</td>
3198 <td class="comment">MiniUPnP daemon source code</td>
3199 <td></td>
3200</tr>
3201<tr>
3202 <td class="filename"><a href='download.php?file=miniupnpc20061123.tar.gz'>miniupnpc20061123.tar.gz</a></td>
3203 <td class="filesize">21004</td>
3204 <td class="filedate">23/11/2006 22:41:46 +0000</td>
3205 <td class="comment">MiniUPnP client source code</td>
3206 <td></td>
3207</tr>
3208<tr>
3209 <td class="filename" colspan="2"><a href='download.php?file=miniupnpd-bin-openwrt20061123.tar.gz'>miniupnpd-bin-openwrt20061123.tar.gz</a></td>
3210 <td class="filedate">23/11/2006 22:41:44 +0000</td>
3211 <td class="comment">Precompiled binaries for openwrt</td>
3212 <td></td>
3213</tr>
3214<tr>
3215 <td class="filename"><a href='download.php?file=miniupnpd20061123.tar.gz'>miniupnpd20061123.tar.gz</a></td>
3216 <td class="filesize">33809</td>
3217 <td class="filedate">23/11/2006 22:28:29 +0000</td>
3218 <td class="comment">MiniUPnP daemon source code</td>
3219 <td></td>
3220</tr>
3221<tr>
3222 <td class="filename"><a href='download.php?file=miniupnpc20061119.tar.gz'>miniupnpc20061119.tar.gz</a></td>
3223 <td class="filesize">20897</td>
3224 <td class="filedate">19/11/2006 22:50:37 +0000</td>
3225 <td class="comment">MiniUPnP client source code</td>
3226 <td></td>
3227</tr>
3228<tr>
3229 <td class="filename"><a href='download.php?file=miniupnpd20061119.tar.gz'>miniupnpd20061119.tar.gz</a></td>
3230 <td class="filesize">32580</td>
3231 <td class="filedate">19/11/2006 22:50:36 +0000</td>
3232 <td class="comment">MiniUPnP daemon source code</td>
3233 <td></td>
3234</tr>
3235<tr>
3236 <td class="filename"><a href='download.php?file=miniupnpd20061117.tar.gz'>miniupnpd20061117.tar.gz</a></td>
3237 <td class="filesize">32646</td>
3238 <td class="filedate">17/11/2006 13:29:33 +0000</td>
3239 <td class="comment">MiniUPnP daemon source code</td>
3240 <td></td>
3241</tr>
3242<tr>
3243 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061112.zip'>upnpc-exe-win32-20061112.zip</a></td>
3244 <td class="filesize">10262</td>
3245 <td class="filedate">12/11/2006 22:41:25 +0000</td>
3246 <td class="comment">Windows executable</td>
3247 <td></td>
3248</tr>
3249<tr>
3250 <td class="filename"><a href='download.php?file=miniupnpd20061112.tar.gz'>miniupnpd20061112.tar.gz</a></td>
3251 <td class="filesize">32023</td>
3252 <td class="filedate">12/11/2006 21:30:32 +0000</td>
3253 <td class="comment">MiniUPnP daemon source code</td>
3254 <td></td>
3255</tr>
3256<tr>
3257 <td class="filename"><a href='download.php?file=miniupnpc20061112.tar.gz'>miniupnpc20061112.tar.gz</a></td>
3258 <td class="filesize">21047</td>
3259 <td class="filedate">12/11/2006 21:30:31 +0000</td>
3260 <td class="comment">MiniUPnP client source code</td>
3261 <td></td>
3262</tr>
3263<tr>
3264 <td class="filename"><a href='download.php?file=miniupnpd20061110.tar.gz'>miniupnpd20061110.tar.gz</a></td>
3265 <td class="filesize">27926</td>
3266 <td class="filedate">09/11/2006 23:35:02 +0000</td>
3267 <td class="comment">MiniUPnP daemon source code</td>
3268 <td></td>
3269</tr>
3270<tr>
3271 <td class="filename"><a href='download.php?file=miniupnpc20061110.tar.gz'>miniupnpc20061110.tar.gz</a></td>
3272 <td class="filesize">21009</td>
3273 <td class="filedate">09/11/2006 23:32:19 +0000</td>
3274 <td class="comment">MiniUPnP client source code</td>
3275 <td></td>
3276</tr>
3277<tr>
3278 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061101.zip'>upnpc-exe-win32-20061101.zip</a></td>
3279 <td class="filesize">10089</td>
3280 <td class="filedate">08/11/2006 20:35:09 +0000</td>
3281 <td class="comment">Windows executable</td>
3282 <td></td>
3283</tr>
3284<tr>
3285 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061020.zip'>upnpc-exe-win32-20061020.zip</a></td>
3286 <td class="filesize">9183</td>
3287 <td class="filedate">08/11/2006 20:35:08 +0000</td>
3288 <td class="comment">Windows executable</td>
3289 <td></td>
3290</tr>
3291<tr>
3292 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060909.zip'>upnpc-exe-win32-20060909.zip</a></td>
3293 <td class="filesize">9994</td>
3294 <td class="filedate">08/11/2006 20:35:07 +0000</td>
3295 <td class="comment">Windows executable</td>
3296 <td></td>
3297</tr>
3298<tr>
3299 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060801.zip'>upnpc-exe-win32-20060801.zip</a></td>
3300 <td class="filesize">10002</td>
3301 <td class="filedate">08/11/2006 20:35:06 +0000</td>
3302 <td class="comment">Windows executable</td>
3303 <td></td>
3304</tr>
3305<tr>
3306 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060711.zip'>upnpc-exe-win32-20060711.zip</a></td>
3307 <td class="filesize">13733</td>
3308 <td class="filedate">08/11/2006 20:35:05 +0000</td>
3309 <td class="comment">Windows executable</td>
3310 <td></td>
3311</tr>
3312<tr>
3313 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060709.zip'>upnpc-exe-win32-20060709.zip</a></td>
3314 <td class="filesize">13713</td>
3315 <td class="filedate">08/11/2006 20:35:04 +0000</td>
3316 <td class="comment">Windows executable</td>
3317 <td></td>
3318</tr>
3319<tr>
3320 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060704.zip'>upnpc-exe-win32-20060704.zip</a></td>
3321 <td class="filesize">13297</td>
3322 <td class="filedate">08/11/2006 20:35:03 +0000</td>
3323 <td class="comment">Windows executable</td>
3324 <td></td>
3325</tr>
3326<tr>
3327 <td class="filename"><a href='download.php?file=miniupnpc20061107.tar.gz'>miniupnpc20061107.tar.gz</a></td>
3328 <td class="filesize">20708</td>
3329 <td class="filedate">06/11/2006 23:36:57 +0000</td>
3330 <td class="comment">MiniUPnP client source code</td>
3331 <td></td>
3332</tr>
3333<tr>
3334 <td class="filename"><a href='download.php?file=miniupnpd20061107.tar.gz'>miniupnpd20061107.tar.gz</a></td>
3335 <td class="filesize">26992</td>
3336 <td class="filedate">06/11/2006 23:35:06 +0000</td>
3337 <td class="comment">MiniUPnP daemon source code</td>
3338 <td></td>
3339</tr>
3340<tr>
3341 <td class="filename"><a href='download.php?file=miniupnpc20061106.tar.gz'>miniupnpc20061106.tar.gz</a></td>
3342 <td class="filesize">20575</td>
3343 <td class="filedate">06/11/2006 17:02:15 +0000</td>
3344 <td class="comment">MiniUPnP client source code</td>
3345 <td></td>
3346</tr>
3347<tr>
3348 <td class="filename"><a href='download.php?file=miniupnpd20061106.tar.gz'>miniupnpd20061106.tar.gz</a></td>
3349 <td class="filesize">26597</td>
3350 <td class="filedate">06/11/2006 15:39:10 +0000</td>
3351 <td class="comment">MiniUPnP daemon source code</td>
3352 <td></td>
3353</tr>
3354<tr>
3355 <td class="filename"><a href='download.php?file=miniupnpc20061101.tar.gz'>miniupnpc20061101.tar.gz</a></td>
3356 <td class="filesize">20395</td>
3357 <td class="filedate">04/11/2006 18:16:15 +0000</td>
3358 <td class="comment">MiniUPnP client source code</td>
3359 <td></td>
3360</tr>
3361<tr>
3362 <td class="filename"><a href='download.php?file=miniupnpc20061031.tar.gz'>miniupnpc20061031.tar.gz</a></td>
3363 <td class="filesize">20396</td>
3364 <td class="filedate">04/11/2006 18:16:13 +0000</td>
3365 <td class="comment">MiniUPnP client source code</td>
3366 <td></td>
3367</tr>
3368<tr>
3369 <td class="filename"><a href='download.php?file=miniupnpc20061023.tar.gz'>miniupnpc20061023.tar.gz</a></td>
3370 <td class="filesize">20109</td>
3371 <td class="filedate">04/11/2006 18:16:12 +0000</td>
3372 <td class="comment">MiniUPnP client source code</td>
3373 <td></td>
3374</tr>
3375<tr>
3376 <td class="filename"><a href='download.php?file=miniupnpc20061020.tar.gz'>miniupnpc20061020.tar.gz</a></td>
3377 <td class="filesize">19739</td>
3378 <td class="filedate">04/11/2006 18:16:10 +0000</td>
3379 <td class="comment">MiniUPnP client source code</td>
3380 <td></td>
3381</tr>
3382<tr>
3383 <td class="filename"><a href='download.php?file=miniupnpc20060909.tar.gz'>miniupnpc20060909.tar.gz</a></td>
3384 <td class="filesize">19285</td>
3385 <td class="filedate">04/11/2006 18:16:09 +0000</td>
3386 <td class="comment">MiniUPnP client source code</td>
3387 <td></td>
3388</tr>
3389<tr>
3390 <td class="filename"><a href='download.php?file=miniupnpc20060731.tar.gz'>miniupnpc20060731.tar.gz</a></td>
3391 <td class="filesize">19032</td>
3392 <td class="filedate">04/11/2006 18:16:07 +0000</td>
3393 <td class="comment">MiniUPnP client source code</td>
3394 <td></td>
3395</tr>
3396<tr>
3397 <td class="filename"><a href='download.php?file=miniupnpc20060711.tar.gz'>miniupnpc20060711.tar.gz</a></td>
3398 <td class="filesize">19151</td>
3399 <td class="filedate">04/11/2006 18:16:06 +0000</td>
3400 <td class="comment">MiniUPnP client source code</td>
3401 <td></td>
3402</tr>
3403<tr>
3404 <td class="filename"><a href='download.php?file=miniupnpc20060709.tar.gz'>miniupnpc20060709.tar.gz</a></td>
3405 <td class="filesize">19080</td>
3406 <td class="filedate">04/11/2006 18:16:04 +0000</td>
3407 <td class="comment">MiniUPnP client source code</td>
3408 <td></td>
3409</tr>
3410<tr>
3411 <td class="filename"><a href='download.php?file=miniupnpc20060703.tar.gz'>miniupnpc20060703.tar.gz</a></td>
3412 <td class="filesize">17906</td>
3413 <td class="filedate">04/11/2006 18:16:03 +0000</td>
3414 <td class="comment">MiniUPnP client source code</td>
3415 <td></td>
3416</tr>
3417<tr>
3418 <td class="filename"><a href='download.php?file=miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
3419 <td class="filesize">14840</td>
3420 <td class="filedate">04/11/2006 18:16:01 +0000</td>
3421 <td class="comment">Jo&atilde;o Paulo Barraca version of the upnp client</td>
3422 <td></td>
3423</tr>
3424<tr>
3425 <td class="filename"><a href='download.php?file=miniupnpd20061029.tar.gz'>miniupnpd20061029.tar.gz</a></td>
3426 <td class="filesize">24197</td>
3427 <td class="filedate">03/11/2006 13:40:30 +0000</td>
3428 <td class="comment">MiniUPnP daemon source code</td>
3429 <td></td>
3430</tr>
3431<tr>
3432 <td class="filename"><a href='download.php?file=miniupnpd20061027.tar.gz'>miniupnpd20061027.tar.gz</a></td>
3433 <td class="filesize">23904</td>
3434 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3435 <td class="comment">MiniUPnP daemon source code</td>
3436 <td></td>
3437</tr>
3438<tr>
3439 <td class="filename"><a href='download.php?file=miniupnpd20061028.tar.gz'>miniupnpd20061028.tar.gz</a></td>
3440 <td class="filesize">24383</td>
3441 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3442 <td class="comment">MiniUPnP daemon source code</td>
3443 <td></td>
3444</tr>
3445<tr>
3446 <td class="filename"><a href='download.php?file=miniupnpd20061018.tar.gz'>miniupnpd20061018.tar.gz</a></td>
3447 <td class="filesize">23051</td>
3448 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3449 <td class="comment">MiniUPnP daemon source code</td>
3450 <td></td>
3451</tr>
3452<tr>
3453 <td class="filename"><a href='download.php?file=miniupnpd20061023.tar.gz'>miniupnpd20061023.tar.gz</a></td>
3454 <td class="filesize">23478</td>
3455 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3456 <td class="comment">MiniUPnP daemon source code</td>
3457 <td></td>
3458</tr>
3459<tr>
3460 <td class="filename"><a href='download.php?file=miniupnpd20060930.tar.gz'>miniupnpd20060930.tar.gz</a></td>
3461 <td class="filesize">22832</td>
3462 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3463 <td class="comment">MiniUPnP daemon source code</td>
3464 <td></td>
3465</tr>
3466<tr>
3467 <td class="filename"><a href='download.php?file=miniupnpd20060924.tar.gz'>miniupnpd20060924.tar.gz</a></td>
3468 <td class="filesize">22038</td>
3469 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3470 <td class="comment">MiniUPnP daemon source code</td>
3471 <td></td>
3472</tr>
3473<tr>
3474 <td class="filename"><a href='download.php?file=miniupnpd20060919.tar.gz'>miniupnpd20060919.tar.gz</a></td>
3475 <td class="filesize">21566</td>
3476 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3477 <td class="comment">MiniUPnP daemon source code</td>
3478 <td></td>
3479</tr>
3480<tr>
3481 <td class="filename"><a href='download.php?file=miniupnpd20060729.tar.gz'>miniupnpd20060729.tar.gz</a></td>
3482 <td class="filesize">19202</td>
3483 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3484 <td class="comment">MiniUPnP daemon source code</td>
3485 <td></td>
3486</tr>
3487<tr>
3488 <td class="filename"><a href='download.php?file=miniupnpd20060909.tar.gz'>miniupnpd20060909.tar.gz</a></td>
3489 <td class="filesize">19952</td>
3490 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3491 <td class="comment">MiniUPnP daemon source code</td>
3492 <td></td>
3493</tr>
3494</table>
3495
3496<p><a href="..">Home</a></p>
3497<p>Contact: miniupnp _AT_ free _DOT_ fr</p>
3498<p align="center">
3499<a href="https://validator.w3.org/check?uri=referer"><img src="https://www.w3.org/Icons/valid-xhtml10" alt="Valid XHTML 1.0 Transitional" height="31" width="88" /></a>
3500<a href="https://jigsaw.w3.org/css-validator/check/referer"><img style="border:0;width:88px;height:31px" src="https://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!" /></a>
3501<!--
3502 <a href="https://freshmeat.net/projects/miniupnp"><img src="https://s3.amazonaws.com/entp-tender-production/assets/bc5be96f147ec8db3c10fc017f1f53889904ef5b/fm_logo_white_150_normal.png" border="0" alt="freshmeat.net" /></a>
3503-->
3504<!-- https://futuresimple.github.com/images/github_logo.png -->
3505<!-- <a href="https://github.com/miniupnp/miniupnp"><img src="https://assets-cdn.github.com/images/modules/logos_page/GitHub-Logo.png" alt="github.com" height="31" /></a> -->
3506<a href="https://github.com/miniupnp/miniupnp"><img style="position: absolute; top: 0; left: 0; border: 0;" src="https://github.blog/wp-content/uploads/2008/12/forkme_left_green_007200.png" alt="Fork me on GitHub" /></a>
3507</p>
3508
3509<script type="text/javascript">
3510var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
3511document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
3512</script>
3513<script type="text/javascript">
3514try {
3515 var ua = 'UA-10295521';
3516 if(window.location.hostname == 'miniupnp.free.fr')
3517 ua += '-1';
3518 else if(window.location.hostname == 'miniupnp.tuxfamily.org')
3519 ua += '-2';
3520 else ua = '';
3521 if(ua != '') {
3522 var pageTracker = _gat._getTracker(ua);
3523 pageTracker._trackPageview();
3524 }
3525} catch(err) {}</script>
3526</body>
3527</html>
3528
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 7b2dac7b86..077472b8b3 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -6,21 +6,59 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
+import contextlib
+import shutil
 import unittest
+import unittest.mock
+import urllib.parse
 import hashlib
 import tempfile
 import collections
 import os
+import signal
+import tarfile
 from bb.fetch2 import URI
 from bb.fetch2 import FetchMethod
 import bb
+import bb.utils
 from bb.tests.support.httpserver import HTTPService
 
 def skipIfNoNetwork():
     if os.environ.get("BB_SKIP_NETTESTS") == "yes":
-        return unittest.skip("Network tests being skipped")
+        return unittest.skip("network test")
     return lambda f: f
 
+
+@contextlib.contextmanager
+def hide_directory(directory):
+    """Hide the given directory and restore it after the context is left"""
+    temp_name = directory + ".bak"
+    os.rename(directory, temp_name)
+    try:
+        yield
+    finally:
+        os.rename(temp_name, directory)
+
+
+class TestTimeout(Exception):
+    # Indicate to pytest that this is not a test suite
+    __test__ = False
+
+class Timeout():
+
+    def __init__(self, seconds):
+        self.seconds = seconds
+
+    def handle_timeout(self, signum, frame):
+        raise TestTimeout("Test failed: timeout reached")
+
+    def __enter__(self):
+        signal.signal(signal.SIGALRM, self.handle_timeout)
+        signal.alarm(self.seconds)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        signal.alarm(0)
+
 class URITest(unittest.TestCase):
     test_uris = {
         "http://www.google.com/index.html" : {
@@ -286,6 +324,36 @@ class URITest(unittest.TestCase):
             'params': {"someparam" : "1"},
             'query': {},
             'relative': True
+        },
+        "https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip": {
+            'uri': 'https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip',
+            'scheme': 'https',
+            'hostname': 'www.innodisk.com',
+            'port': None,
+            'hostport': 'www.innodisk.com',
+            'path': '/Download_file',
+            'userinfo': '',
+            'userinfo': '',
+            'username': '',
+            'password': '',
+            'params': {"downloadfilename" : "EGPL-T101.zip"},
+            'query': {"9BE0BF6657": None},
+            'relative': False
+        },
+        "file://example@.service": {
+            'uri': 'file:example%40.service',
+            'scheme': 'file',
+            'hostname': '',
+            'port': None,
+            'hostport': '',
+            'path': 'example@.service',
+            'userinfo': '',
+            'userinfo': '',
+            'username': '',
+            'password': '',
+            'params': {},
+            'query': {},
+            'relative': True
         }
 
     }
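Each expected dictionary above mirrors the attributes of bb.fetch2.URI, including the SRC_URI convention that ;key=value pairs become params while ?-style tokens land in query. A quick sketch, assuming a bitbake checkout is on PYTHONPATH:

    from bb.fetch2 import URI

    uri = URI("https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip")
    print(uri.scheme)   # 'https'
    print(uri.path)     # '/Download_file'
    print(uri.params)   # {'downloadfilename': 'EGPL-T101.zip'}
    print(uri.query)    # {'9BE0BF6657': None}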
@@ -376,7 +444,7 @@ class FetcherTest(unittest.TestCase):
     def setUp(self):
         self.origdir = os.getcwd()
         self.d = bb.data.init()
-        self.tempdir = tempfile.mkdtemp()
+        self.tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
         self.dldir = os.path.join(self.tempdir, "download")
         os.mkdir(self.dldir)
         self.d.setVar("DL_DIR", self.dldir)
@@ -390,63 +458,104 @@ class FetcherTest(unittest.TestCase):
         if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
             print("Not cleaning up %s. Please remove manually." % self.tempdir)
         else:
+            bb.process.run('chmod u+rw -R %s' % self.tempdir)
             bb.utils.prunedir(self.tempdir)
 
+    def git(self, cmd, cwd=None):
+        if isinstance(cmd, str):
+            cmd = 'git -c safe.bareRepository=all ' + cmd
+        else:
+            cmd = ['git', '-c', 'safe.bareRepository=all'] + cmd
+        if cwd is None:
+            cwd = self.gitdir
+        return bb.process.run(cmd, cwd=cwd)[0]
+
+    def git_init(self, cwd=None):
+        self.git('init', cwd=cwd)
+        # Explicitly set initial branch to master as
+        # a common setup is to use other default
+        # branch than master.
+        self.git(['checkout', '-b', 'master'], cwd=cwd)
+
+        try:
+            self.git(['config', 'user.email'], cwd=cwd)
+        except bb.process.ExecutionError:
+            self.git(['config', 'user.email', 'you@example.com'], cwd=cwd)
+
+        try:
+            self.git(['config', 'user.name'], cwd=cwd)
+        except bb.process.ExecutionError:
+            self.git(['config', 'user.name', 'Your Name'], cwd=cwd)
+
 class MirrorUriTest(FetcherTest):
 
     replaceuris = {
-        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "http://somewhere.org/somedir/")
             : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
-        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
-            : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
-            : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
+            : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
+            : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
         ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
             : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
         ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
             : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
         ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
             : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
         ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
             : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
         ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
             : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
         ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
             : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
-        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
-            : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
+            : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
         ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org")
             : "http://somewhere2.org/somefile_1.2.3.tar.gz",
         ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/")
             : "http://somewhere2.org/somefile_1.2.3.tar.gz",
         ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http")
             : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://user2@git.openembedded.org/bitbake;protocol=http")
+            : "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=git;branch=master", "git://someserver.org/bitbake", "git://someotherserver.org/bitbake;protocol=https")
+            : "git://someotherserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=https;branch=master",
+        ("gitsm://git.qemu.org/git/seabios.git/;protocol=https;name=roms/seabios;subpath=roms/seabios;bareclone=1;nobranch=1;rev=1234567890123456789012345678901234567890", "gitsm://.*/.*", "http://petalinux.xilinx.com/sswreleases/rel-v${XILINX_VER_MAIN}/downloads") : "http://petalinux.xilinx.com/sswreleases/rel-v%24%7BXILINX_VER_MAIN%7D/downloads/git2_git.qemu.org.git.seabios.git..tar.gz",
+        ("https://somewhere.org/example/1.0.0/example;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/PATH")
+            : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
+        ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz")
+            : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
+            : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
+        ("git://internal.git.server.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
+            : None,
 
         #Renaming files doesn't work
         #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
         #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
     }
 
-    mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
-                "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
-                "https://.*/.* file:///someotherpath/downloads/ \n" \
-                "http://.*/.* file:///someotherpath/downloads/ \n"
+    mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
+                "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
+                "https?://.*/.* file:///someotherpath/downloads/ " \
+                "svn://svn.server1.com/ svn://svn.server2.com/"
 
     def test_urireplace(self):
+        self.d.setVar("FILESPATH", ".")
         for k, v in self.replaceuris.items():
             ud = bb.fetch.FetchData(k[0], self.d)
             ud.setup_localpath(self.d)
             mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
             newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
-            self.assertEqual([v], newuris)
+            self.assertEqual([v] if v else [], newuris)
 
     def test_urilist1(self):
         fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
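The replaceuris table drives bb.fetch2's mirror rewriting: each key is (source URI, match regex, replacement) and the value is the expected rewritten URI, with None meaning the negative-lookahead pattern must not produce a mirror at all. A standalone sketch of the same call sequence test_urireplace exercises, assuming bitbake is importable and using an illustrative DL_DIR:

    import bb.data
    import bb.fetch2

    d = bb.data.init()
    d.setVar("DL_DIR", "/tmp/downloads")   # illustrative path

    ud = bb.fetch2.FetchData("git://git.invalid.infradead.org/mtd-utils.git"
                             ";tag=1234567890123456789012345678901234567890;branch=master", d)
    ud.setup_localpath(d)
    mirrors = bb.fetch2.mirror_from_string("git://.*/.* http://somewhere.org/somedir/")
    newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, d)
    # newuris -> ['http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz']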
@@ -461,10 +570,17 @@ class MirrorUriTest(FetcherTest):
         uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
         self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
 
+    def test_urilistsvn(self):
+        # Catch svn:// -> svn:// bug
+        fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d)
+        mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
+        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
+        self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2'])
+
     def test_mirror_of_mirror(self):
         # Test if mirror of a mirror works
-        mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
-        mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
+        mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
+        mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/"
         fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
         mirrors = bb.fetch2.mirror_from_string(mirrorvar)
         uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
@@ -473,30 +589,30 @@ class MirrorUriTest(FetcherTest):
                 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
                 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
 
-    recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \
-                   "https://.*/[^/]* https://BBBB/B/B/B/ \n"
+    recmirrorvar = "https://.*/[^/]* http://aaaa/A/A/A/ " \
+                   "https://.*/[^/]* https://bbbb/B/B/B/"
 
     def test_recursive(self):
         fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
         mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
         uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
-        self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz',
-                'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz',
-                'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
+        self.assertEqual(uris, ['http://aaaa/A/A/A/bitbake/bitbake-1.0.tar.gz',
+                'https://bbbb/B/B/B/bitbake/bitbake-1.0.tar.gz',
+                'http://aaaa/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
 
 
 class GitDownloadDirectoryNamingTest(FetcherTest):
     def setUp(self):
         super(GitDownloadDirectoryNamingTest, self).setUp()
-        self.recipe_url = "git://git.openembedded.org/bitbake"
+        self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
         self.recipe_dir = "git.openembedded.org.bitbake"
-        self.mirror_url = "git://github.com/openembedded/bitbake.git"
+        self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
         self.mirror_dir = "github.com.openembedded.bitbake.git"
 
         self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
 
     def setup_mirror_rewrite(self):
-        self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+        self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
 
     @skipIfNoNetwork()
     def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self):
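It is worth spelling out why the trailing " \n" separators could be dropped from mirrorvar, PREMIRRORS and MIRRORS throughout this commit: mirror_from_string() tokenizes the whole value on whitespace and pairs the tokens up, so spaces and newlines are interchangeable separators. A sketch, with illustrative hosts:

    spec = ("http://.*/.* file:///somepath/downloads/ "
            "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
    mirrors = bb.fetch2.mirror_from_string(spec)
    # mirrors is a list of (match regex, replacement) pairs, roughly:
    # [['http://.*/.*', 'file:///somepath/downloads/'],
    #  ['git://someserver.org/bitbake', 'git://git.openembedded.org/bitbake']]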
@@ -536,16 +652,16 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
 class TarballNamingTest(FetcherTest):
     def setUp(self):
         super(TarballNamingTest, self).setUp()
-        self.recipe_url = "git://git.openembedded.org/bitbake"
+        self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
         self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
-        self.mirror_url = "git://github.com/openembedded/bitbake.git"
+        self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
         self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
 
         self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
         self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
 
     def setup_mirror_rewrite(self):
-        self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+        self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
 
     @skipIfNoNetwork()
     def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self):
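The expected tarball names in these tests follow a flat naming convention: a "git2_" prefix, then host and path with every "/" mapped to ".". A hypothetical helper that reproduces it for the cases asserted here (not bitbake's actual implementation):

    def mirror_tarball_name(host, path):
        # e.g. ("git.openembedded.org", "/bitbake")
        #   -> "git2_git.openembedded.org.bitbake.tar.gz"
        return "git2_%s.tar.gz" % (host + path).replace("/", ".")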
@@ -570,9 +686,9 @@ class TarballNamingTest(FetcherTest):
 class GitShallowTarballNamingTest(FetcherTest):
     def setUp(self):
         super(GitShallowTarballNamingTest, self).setUp()
-        self.recipe_url = "git://git.openembedded.org/bitbake"
+        self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
         self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
-        self.mirror_url = "git://github.com/openembedded/bitbake.git"
+        self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
         self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
 
         self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -580,7 +696,7 @@ class GitShallowTarballNamingTest(FetcherTest):
         self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
 
     def setup_mirror_rewrite(self):
-        self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+        self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
 
     @skipIfNoNetwork()
     def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self):
@@ -602,6 +718,39 @@ class GitShallowTarballNamingTest(FetcherTest):
         self.assertIn(self.mirror_tarball, dir)
 
 
+class CleanTarballTest(FetcherTest):
+    def setUp(self):
+        super(CleanTarballTest, self).setUp()
+        self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https;branch=master"
+        self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
+
+        self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
+        self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+
+    @skipIfNoNetwork()
+    def test_that_the_tarball_contents_does_not_leak_info(self):
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+        fetcher.download()
+
+        fetcher.unpack(self.unpackdir)
+        mtime = bb.process.run('git log --all -1 --format=%ct',
+                               cwd=os.path.join(self.unpackdir, 'git'))
+        self.assertEqual(len(mtime), 2)
+        mtime = int(mtime[0])
+
+        archive = tarfile.open(os.path.join(self.dldir, self.recipe_tarball))
+        self.assertNotEqual(len(archive.members), 0)
+        for member in archive.members:
+            if member.name == ".":
+                continue
+            self.assertEqual(member.uname, 'oe', "user name for %s differs" % member.name)
+            self.assertEqual(member.uid, 0, "uid for %s differs" % member.name)
+            self.assertEqual(member.gname, 'oe', "group name for %s differs" % member.name)
+            self.assertEqual(member.gid, 0, "gid for %s differs" % member.name)
+            self.assertEqual(member.mtime, mtime, "mtime for %s differs" % member.name)
+
+
 class FetcherLocalTest(FetcherTest):
     def setUp(self):
         def touch(fn):
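CleanTarballTest pins down reproducibility: every member of the generated mirror tarball must carry neutral ownership (uid/gid 0, user/group "oe") and a deterministic mtime taken from the newest commit, so the archive does not leak the build host's account names or build time. On the writing side, the usual stdlib technique is a tarfile filter; a sketch under those assumptions (hypothetical paths, not the fetcher's own code):

    import tarfile

    def scrub(member):
        # Normalize host-specific metadata so the archive is
        # byte-for-byte reproducible across build machines.
        member.uid = member.gid = 0
        member.uname = member.gname = "oe"
        member.mtime = 1672531200   # fixed timestamp, e.g. last commit time
        return member

    with tarfile.open("repro.tar.gz", "w:gz") as tar:
        tar.add("git", arcname=".", filter=scrub)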
@@ -613,12 +762,16 @@ class FetcherLocalTest(FetcherTest):
         os.makedirs(self.localsrcdir)
         touch(os.path.join(self.localsrcdir, 'a'))
         touch(os.path.join(self.localsrcdir, 'b'))
+        touch(os.path.join(self.localsrcdir, 'c@d'))
         os.makedirs(os.path.join(self.localsrcdir, 'dir'))
         touch(os.path.join(self.localsrcdir, 'dir', 'c'))
         touch(os.path.join(self.localsrcdir, 'dir', 'd'))
         os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
         touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
         touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device'))
+        bb.process.run('tar cf archive.tar -C dir .', cwd=self.localsrcdir)
+        bb.process.run('tar czf archive.tar.gz -C dir .', cwd=self.localsrcdir)
+        bb.process.run('tar cjf archive.tar.bz2 -C dir .', cwd=self.localsrcdir)
         self.d.setVar("FILESPATH", self.localsrcdir)
 
     def fetchUnpack(self, uris):
@@ -632,10 +785,19 @@ class FetcherLocalTest(FetcherTest):
         flst.sort()
         return flst
 
+    def test_local_checksum_fails_no_file(self):
+        self.d.setVar("SRC_URI", "file://404")
+        with self.assertRaises(bb.BBHandledException):
+            bb.fetch.get_checksum_file_list(self.d)
+
     def test_local(self):
         tree = self.fetchUnpack(['file://a', 'file://dir/c'])
         self.assertEqual(tree, ['a', 'dir/c'])
 
+    def test_local_at(self):
+        tree = self.fetchUnpack(['file://c@d'])
+        self.assertEqual(tree, ['c@d'])
+
     def test_local_backslash(self):
         tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device'])
         self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device'])
@@ -673,57 +835,58 @@ class FetcherLocalTest(FetcherTest):
         with self.assertRaises(bb.fetch2.UnpackError):
             self.fetchUnpack(['file://a;subdir=/bin/sh'])
 
-    def test_local_gitfetch_usehead(self):
+    def test_local_striplevel(self):
+        tree = self.fetchUnpack(['file://archive.tar;subdir=bar;striplevel=1'])
+        self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
+    def test_local_striplevel_gzip(self):
+        tree = self.fetchUnpack(['file://archive.tar.gz;subdir=bar;striplevel=1'])
+        self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
+    def test_local_striplevel_bzip2(self):
+        tree = self.fetchUnpack(['file://archive.tar.bz2;subdir=bar;striplevel=1'])
+        self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
+    def dummyGitTest(self, suffix):
         # Create dummy local Git repo
         src_dir = tempfile.mkdtemp(dir=self.tempdir,
                                    prefix='gitfetch_localusehead_')
-        src_dir = os.path.abspath(src_dir)
-        bb.process.run("git init", cwd=src_dir)
-        bb.process.run("git commit --allow-empty -m'Dummy commit'",
-                       cwd=src_dir)
+        self.gitdir = os.path.abspath(src_dir)
+        self.git_init()
+        self.git(['commit', '--allow-empty', '-m', 'Dummy commit'])
         # Use other branch than master
-        bb.process.run("git checkout -b my-devel", cwd=src_dir)
-        bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
-                       cwd=src_dir)
-        stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
-        orig_rev = stdout[0].strip()
+        self.git(['checkout', '-b', 'my-devel'])
+        self.git(['commit', '--allow-empty', '-m', 'Dummy commit 2'])
+        orig_rev = self.git(['rev-parse', 'HEAD']).strip()
 
         # Fetch and check revision
         self.d.setVar("SRCREV", "AUTOINC")
-        url = "git://" + src_dir + ";protocol=file;usehead=1"
+        self.d.setVar("__BBSRCREV_SEEN", "1")
+        url = "git://" + self.gitdir + ";branch=master;protocol=file;" + suffix
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         fetcher.unpack(self.unpackdir)
-        stdout = bb.process.run("git rev-parse HEAD",
-                                cwd=os.path.join(self.unpackdir, 'git'))
-        unpack_rev = stdout[0].strip()
+        unpack_rev = self.git(['rev-parse', 'HEAD'],
+                              cwd=os.path.join(self.unpackdir, 'git')).strip()
         self.assertEqual(orig_rev, unpack_rev)
 
+    def test_local_gitfetch_usehead(self):
+        self.dummyGitTest("usehead=1")
+
     def test_local_gitfetch_usehead_withname(self):
-        # Create dummy local Git repo
-        src_dir = tempfile.mkdtemp(dir=self.tempdir,
-                                   prefix='gitfetch_localusehead_')
-        src_dir = os.path.abspath(src_dir)
-        bb.process.run("git init", cwd=src_dir)
-        bb.process.run("git commit --allow-empty -m'Dummy commit'",
-                       cwd=src_dir)
-        # Use other branch than master
-        bb.process.run("git checkout -b my-devel", cwd=src_dir)
-        bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
-                       cwd=src_dir)
-        stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
-        orig_rev = stdout[0].strip()
+        self.dummyGitTest("usehead=1;name=newName")
 
-        # Fetch and check revision
-        self.d.setVar("SRCREV", "AUTOINC")
-        url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName"
-        fetcher = bb.fetch.Fetch([url], self.d)
-        fetcher.download()
-        fetcher.unpack(self.unpackdir)
-        stdout = bb.process.run("git rev-parse HEAD",
-                                cwd=os.path.join(self.unpackdir, 'git'))
-        unpack_rev = stdout[0].strip()
-        self.assertEqual(orig_rev, unpack_rev)
+    def test_local_gitfetch_shared(self):
+        self.dummyGitTest("usehead=1;name=sharedName")
+        alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
+        self.assertTrue(os.path.exists(alt))
+
+    def test_local_gitfetch_noshared(self):
+        self.d.setVar('BB_GIT_NOSHARED', '1')
+        self.unpackdir += '_noshared'
+        self.dummyGitTest("usehead=1;name=noSharedName")
+        alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
+        self.assertFalse(os.path.exists(alt))
 
 class FetcherNoNetworkTest(FetcherTest):
     def setUp(self):
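The striplevel=1 cases above mirror tar's --strip-components: the archives built in setUp contain ./c, ./d and ./subdir/e, and unpacking with striplevel=1 drops the leading component so the files land directly under the subdir= target. A stdlib sketch of the same semantics (not the fetcher's actual unpack path):

    import tarfile

    def unpack_striplevel(archive, dest, striplevel=1):
        # Drop the first 'striplevel' path components, like
        # "tar --strip-components=N".
        with tarfile.open(archive) as tar:
            for member in tar.getmembers():
                parts = member.name.split("/")[striplevel:]
                if not parts:
                    continue   # the stripped root itself
                member.name = "/".join(parts)
                tar.extract(member, dest)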
@@ -831,12 +994,12 @@ class FetcherNoNetworkTest(FetcherTest):
 class FetcherNetworkTest(FetcherTest):
     @skipIfNoNetwork()
     def test_fetch(self):
-        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
+        fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
         self.d.setVar("BB_NO_NETWORK", "1")
-        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
+        fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
         fetcher.download()
         fetcher.unpack(self.unpackdir)
         self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
@@ -844,21 +1007,22 @@ class FetcherNetworkTest(FetcherTest):
 
     @skipIfNoNetwork()
     def test_fetch_mirror(self):
-        self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("MIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
 
     @skipIfNoNetwork()
     def test_fetch_mirror_of_mirror(self):
-        self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ http://invalid2.yoctoproject.org/.* https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
 
     @skipIfNoNetwork()
     def test_fetch_file_mirror_of_mirror(self):
-        self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("FILESPATH", ".")
+        self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ file:///some1where/.* file://some2where/ file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
         os.mkdir(self.dldir + "/some2where")
         fetcher.download()
@@ -866,16 +1030,46 @@ class FetcherNetworkTest(FetcherTest):
866 1030
867 @skipIfNoNetwork() 1031 @skipIfNoNetwork()
868 def test_fetch_premirror(self): 1032 def test_fetch_premirror(self):
869 self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") 1033 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
870 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 1034 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
871 fetcher.download() 1035 fetcher.download()
872 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 1036 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
873 1037
874 @skipIfNoNetwork() 1038 @skipIfNoNetwork()
1039 def test_fetch_specify_downloadfilename(self):
1040 fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
1041 fetcher.download()
1042 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
1043
1044 @skipIfNoNetwork()
1045 def test_fetch_premirror_specify_downloadfilename_regex_uri(self):
1046 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/")
1047 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1048 fetcher.download()
1049 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1050
1051 @skipIfNoNetwork()
1052 # BZ13039
1053 def test_fetch_premirror_specify_downloadfilename_specific_uri(self):
1054 self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake")
1055 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1056 fetcher.download()
1057 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1058
1059 @skipIfNoNetwork()
1060 def test_fetch_premirror_use_downloadfilename_to_fetch(self):
1061 # Ensure downloadfilename is used when fetching from premirror.
1062 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
1063 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1064 fetcher.download()
1065 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1066
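The four tests above pin down downloadfilename= behaviour: it names the artefact stored in DL_DIR independently of the URL path, and it is honoured both for direct downloads and when the URI is rewritten against PREMIRRORS. A sketch with a hypothetical download URL:

    import bb.data
    import bb.fetch

    d = bb.data.init()
    d.setVar("DL_DIR", "/tmp/downloads")
    # Useful when the URL path is not a usable filename (URL is hypothetical):
    url = "https://example.com/download.php?file=foo-1.0.tar.gz;downloadfilename=foo-1.0.tar.gz"
    fetcher = bb.fetch.Fetch([url], d)
    fetcher.download()
    # Stored as DL_DIR/foo-1.0.tar.gz rather than under the raw query string.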
1067 @skipIfNoNetwork()
875 def gitfetcher(self, url1, url2): 1068 def gitfetcher(self, url1, url2):
876 def checkrevision(self, fetcher): 1069 def checkrevision(self, fetcher):
877 fetcher.unpack(self.unpackdir) 1070 fetcher.unpack(self.unpackdir)
878 revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip() 1071 revision = self.git(['rev-parse', 'HEAD'],
1072 cwd=os.path.join(self.unpackdir, 'git')).strip()
879 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5") 1073 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
880 1074
881 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1") 1075 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
@@ -893,25 +1087,19 @@ class FetcherNetworkTest(FetcherTest):
893 1087
894 @skipIfNoNetwork() 1088 @skipIfNoNetwork()
895 def test_gitfetch(self): 1089 def test_gitfetch(self):
896 url1 = url2 = "git://git.openembedded.org/bitbake" 1090 url1 = url2 = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
897 self.gitfetcher(url1, url2) 1091 self.gitfetcher(url1, url2)
898 1092
899 @skipIfNoNetwork() 1093 @skipIfNoNetwork()
900 def test_gitfetch_goodsrcrev(self): 1094 def test_gitfetch_goodsrcrev(self):
901 # SRCREV is set but matches rev= parameter 1095 # SRCREV is set but matches rev= parameter
902 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5" 1096 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
903 self.gitfetcher(url1, url2) 1097 self.gitfetcher(url1, url2)
904 1098
905 @skipIfNoNetwork() 1099 @skipIfNoNetwork()
906 def test_gitfetch_badsrcrev(self): 1100 def test_gitfetch_badsrcrev(self):
907 # SRCREV is set but does not match rev= parameter 1101 # SRCREV is set but does not match rev= parameter
908 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5" 1102 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
909 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
910
911 @skipIfNoNetwork()
912 def test_gitfetch_tagandrev(self):
913 # Specifying both rev= and tag= parameters should fail
914 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
915 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) 1103 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
916 1104
917 @skipIfNoNetwork() 1105 @skipIfNoNetwork()
@@ -920,7 +1108,7 @@ class FetcherNetworkTest(FetcherTest):
920 # `usehead=1' and instead fetch the specified SRCREV. See 1108 # `usehead=1' and instead fetch the specified SRCREV. See
921 # test_local_gitfetch_usehead() for a positive use of the usehead 1109 # test_local_gitfetch_usehead() for a positive use of the usehead
922 # feature. 1110 # feature.
923 url = "git://git.openembedded.org/bitbake;usehead=1" 1111 url = "git://git.openembedded.org/bitbake;usehead=1;branch=master;protocol=https"
924 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) 1112 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
925 1113
926 @skipIfNoNetwork() 1114 @skipIfNoNetwork()
@@ -929,38 +1117,38 @@ class FetcherNetworkTest(FetcherTest):
929 # `usehead=1' and instead fetch the specified SRCREV. See 1117 # `usehead=1' and instead fetch the specified SRCREV. See
930 # test_local_gitfetch_usehead() for a positive use of the usehead 1118 # test_local_gitfetch_usehead() for a positive use of the usehead
931 # feature. 1119 # feature.
932 url = "git://git.openembedded.org/bitbake;usehead=1;name=newName" 1120 url = "git://git.openembedded.org/bitbake;usehead=1;name=newName;branch=master;protocol=https"
933 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) 1121 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
934 1122
935 @skipIfNoNetwork() 1123 @skipIfNoNetwork()
936 def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self): 1124 def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self):
937 recipeurl = "git://git.openembedded.org/bitbake" 1125 recipeurl = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
938 mirrorurl = "git://someserver.org/bitbake" 1126 mirrorurl = "git://someserver.org/bitbake;branch=master;protocol=https"
939 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") 1127 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
940 self.gitfetcher(recipeurl, mirrorurl) 1128 self.gitfetcher(recipeurl, mirrorurl)
941 1129
942 @skipIfNoNetwork() 1130 @skipIfNoNetwork()
943 def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self): 1131 def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self):
944 recipeurl = "git://someserver.org/bitbake" 1132 recipeurl = "git://someserver.org/bitbake;branch=master;protocol=https"
945 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") 1133 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
946 self.gitfetcher(recipeurl, recipeurl) 1134 self.gitfetcher(recipeurl, recipeurl)
947 1135
948 @skipIfNoNetwork() 1136 @skipIfNoNetwork()
949 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): 1137 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
950 realurl = "git://git.openembedded.org/bitbake" 1138 realurl = "https://git.openembedded.org/bitbake"
951 recipeurl = "git://someserver.org/bitbake" 1139 recipeurl = "git://someserver.org/bitbake;protocol=https;branch=master"
952 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") 1140 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
953 os.chdir(self.tempdir) 1141 os.chdir(self.tempdir)
954 bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True) 1142 self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir)
955 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (recipeurl, self.sourcedir)) 1143 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file" % (recipeurl, self.sourcedir))
956 self.gitfetcher(recipeurl, recipeurl) 1144 self.gitfetcher(recipeurl, recipeurl)
957 1145
958 @skipIfNoNetwork() 1146 @skipIfNoNetwork()
959 def test_git_submodule(self): 1147 def test_git_submodule(self):
960 # URL with ssh submodules 1148 # URL with ssh submodules
961 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7" 1149 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7;branch=master;protocol=https"
962 # Original URL (comment this if you have ssh access to git.yoctoproject.org) 1150 # Original URL (comment this if you have ssh access to git.yoctoproject.org)
963 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee" 1151 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee;branch=master;protocol=https"
964 fetcher = bb.fetch.Fetch([url], self.d) 1152 fetcher = bb.fetch.Fetch([url], self.d)
965 fetcher.download() 1153 fetcher.download()
966 # Previous cwd has been deleted 1154 # Previous cwd has been deleted
@@ -977,10 +1165,29 @@ class FetcherNetworkTest(FetcherTest):
977 self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing') 1165 self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing')
978 1166
979 @skipIfNoNetwork() 1167 @skipIfNoNetwork()
1168 def test_git_submodule_restricted_network_premirrors(self):
1169 # This test ensures that premirrors are tried in a restricted network,
1170 # i.e. one where BB_ALLOWED_NETWORKS does not contain the domain the url uses
1171 url = "gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231"
1172 # create a download directory to be used as premirror later
1173 tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
1174 dl_premirror = os.path.join(tempdir, "download-premirror")
1175 os.mkdir(dl_premirror)
1176 self.d.setVar("DL_DIR", dl_premirror)
1177 fetcher = bb.fetch.Fetch([url], self.d)
1178 fetcher.download()
1179 # now use the premirror in the restricted network
1180 self.d.setVar("DL_DIR", self.dldir)
1181 self.d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror)
1182 self.d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")
1183 fetcher = bb.fetch.Fetch([url], self.d)
1184 fetcher.download()
1185
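The premise of the test above is that a file:// premirror needs no network access, so the fetcher still consults it when BB_ALLOWED_NETWORKS excludes the upstream host. Condensed into a standalone sketch (the premirror path is hypothetical; MIRRORNAME is expanded by the fetcher to the mirror's repository name):

    import bb.data
    import bb.fetch

    d = bb.data.init()
    d.setVar("DL_DIR", "/tmp/downloads")
    d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")   # github.com is NOT allowed
    # A local file:// premirror is still usable despite the restriction:
    d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm:///srv/premirror/git2/MIRRORNAME;protocol=file")
    fetcher = bb.fetch.Fetch(["gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231"], d)
    fetcher.download()   # resolved via the local premirror, no network needed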
1186 @skipIfNoNetwork()
980 def test_git_submodule_dbus_broker(self): 1187 def test_git_submodule_dbus_broker(self):
981 # The following external repositories have shown failures in fetch and unpack operations 1188 # The following external repositories have shown failures in fetch and unpack operations
982 # We want to avoid regressions! 1189 # We want to avoid regressions!
983 url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main" 1190 url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
984 fetcher = bb.fetch.Fetch([url], self.d) 1191 fetcher = bb.fetch.Fetch([url], self.d)
985 fetcher.download() 1192 fetcher.download()
986 # Previous cwd has been deleted 1193 # Previous cwd has been deleted
@@ -996,7 +1203,7 @@ class FetcherNetworkTest(FetcherTest):
996 1203
997 @skipIfNoNetwork() 1204 @skipIfNoNetwork()
998 def test_git_submodule_CLI11(self): 1205 def test_git_submodule_CLI11(self):
999 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf" 1206 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
1000 fetcher = bb.fetch.Fetch([url], self.d) 1207 fetcher = bb.fetch.Fetch([url], self.d)
1001 fetcher.download() 1208 fetcher.download()
1002 # Previous cwd has been deleted 1209 # Previous cwd has been deleted
@@ -1011,12 +1218,12 @@ class FetcherNetworkTest(FetcherTest):
1011 @skipIfNoNetwork() 1218 @skipIfNoNetwork()
1012 def test_git_submodule_update_CLI11(self): 1219 def test_git_submodule_update_CLI11(self):
1013 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """ 1220 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
1014 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714" 1221 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
1015 fetcher = bb.fetch.Fetch([url], self.d) 1222 fetcher = bb.fetch.Fetch([url], self.d)
1016 fetcher.download() 1223 fetcher.download()
1017 1224
1018 # CLI11 that pulls in a newer nlohmann-json 1225 # CLI11 that pulls in a newer nlohmann-json
1019 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca" 1226 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
1020 fetcher = bb.fetch.Fetch([url], self.d) 1227 fetcher = bb.fetch.Fetch([url], self.d)
1021 fetcher.download() 1228 fetcher.download()
1022 # Previous cwd has been deleted 1229 # Previous cwd has been deleted
@@ -1030,7 +1237,7 @@ class FetcherNetworkTest(FetcherTest):
1030 1237
1031 @skipIfNoNetwork() 1238 @skipIfNoNetwork()
1032 def test_git_submodule_aktualizr(self): 1239 def test_git_submodule_aktualizr(self):
1033 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44" 1240 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
1034 fetcher = bb.fetch.Fetch([url], self.d) 1241 fetcher = bb.fetch.Fetch([url], self.d)
1035 fetcher.download() 1242 fetcher.download()
1036 # Previous cwd has been deleted 1243 # Previous cwd has been deleted
@@ -1050,7 +1257,7 @@ class FetcherNetworkTest(FetcherTest):
1050 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """ 1257 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
1051 1258
1052 # This repository also has submodules where the module (name), path and url do not align 1259 # This repository also has submodules where the module (name), path and url do not align
1053 url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699" 1260 url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
1054 fetcher = bb.fetch.Fetch([url], self.d) 1261 fetcher = bb.fetch.Fetch([url], self.d)
1055 fetcher.download() 1262 fetcher.download()
1056 # Previous cwd has been deleted 1263 # Previous cwd has been deleted
@@ -1073,9 +1280,17 @@ class FetcherNetworkTest(FetcherTest):
1073 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout') 1280 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout')
1074 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout') 1281 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout')
1075 1282
1283 @skipIfNoNetwork()
1284 def test_git_submodule_reference_to_parent(self):
1285 self.recipe_url = "gitsm://github.com/gflags/gflags.git;protocol=https;branch=master"
1286 self.d.setVar("SRCREV", "14e1138441bbbb584160cb1c0a0426ec1bac35f1")
1287 with Timeout(60):
1288 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
1289 with self.assertRaises(bb.fetch2.FetchError):
1290 fetcher.download()
1291
1076 class SVNTest(FetcherTest): 1292 class SVNTest(FetcherTest):
1077 def skipIfNoSvn(): 1293 def skipIfNoSvn():
1078 import shutil
1079 if not shutil.which("svn"): 1294 if not shutil.which("svn"):
1080 return unittest.skip("svn not installed, tests being skipped") 1295 return unittest.skip("svn not installed, tests being skipped")
1081 1296
@@ -1107,8 +1322,9 @@ class SVNTest(FetcherTest):
1107 cwd=repo_dir) 1322 cwd=repo_dir)
1108 1323
1109 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir) 1324 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
1110 # Github will emulate SVN. Use this to check if we're downloading... 1325 # Github won't emulate SVN anymore (see https://github.blog/2023-01-20-sunsetting-subversion-support/)
1111 bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .", 1326 # Use a still-accessible svn repo (only trunk, to avoid longer downloads)
1327 bb.process.run("svn propset svn:externals 'bitbake https://svn.apache.org/repos/asf/serf/trunk' .",
1112 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1328 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
1113 bb.process.run("svn commit --non-interactive -m 'Add external'", 1329 bb.process.run("svn commit --non-interactive -m 'Add external'",
1114 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1330 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1136,8 +1352,8 @@ class SVNTest(FetcherTest):
1136 1352
1137 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") 1353 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
1138 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") 1354 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
1139 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist") 1355 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should NOT exist")
1140 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exist") 1356 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should NOT exist")
1141 1357
1142 @skipIfNoSvn() 1358 @skipIfNoSvn()
1143 def test_external_svn(self): 1359 def test_external_svn(self):
@@ -1150,66 +1366,71 @@ class SVNTest(FetcherTest):
1150 1366
1151 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") 1367 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
1152 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") 1368 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
1153 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist") 1369 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should exist")
1154 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exist") 1370 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should exist")
1155 1371
1156 class TrustedNetworksTest(FetcherTest): 1372 class TrustedNetworksTest(FetcherTest):
1157 def test_trusted_network(self): 1373 def test_trusted_network(self):
1158 # Ensure trusted_network returns False when the host IS in the list. 1374 # Ensure trusted_network returns False when the host IS in the list.
1159 url = "git://Someserver.org/foo;rev=1" 1375 url = "git://Someserver.org/foo;rev=1;branch=master"
1160 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org") 1376 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
1161 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1377 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1162 1378
1163 def test_wild_trusted_network(self): 1379 def test_wild_trusted_network(self):
1164 # Ensure trusted_network returns true when the *.host IS in the list. 1380 # Ensure trusted_network returns true when the *.host IS in the list.
1165 url = "git://Someserver.org/foo;rev=1" 1381 url = "git://Someserver.org/foo;rev=1;branch=master"
1166 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1382 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1167 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1383 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1168 1384
1169 def test_prefix_wild_trusted_network(self): 1385 def test_prefix_wild_trusted_network(self):
1170 # Ensure trusted_network returns true when the prefix matches *.host. 1386 # Ensure trusted_network returns true when the prefix matches *.host.
1171 url = "git://git.Someserver.org/foo;rev=1" 1387 url = "git://git.Someserver.org/foo;rev=1;branch=master"
1172 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1388 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1173 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1389 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1174 1390
1175 def test_two_prefix_wild_trusted_network(self): 1391 def test_two_prefix_wild_trusted_network(self):
1176 # Ensure trusted_network returns true when the prefix matches *.host. 1392 # Ensure trusted_network returns true when the prefix matches *.host.
1177 url = "git://something.git.Someserver.org/foo;rev=1" 1393 url = "git://something.git.Someserver.org/foo;rev=1;branch=master"
1178 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1394 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1179 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1395 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1180 1396
1181 def test_port_trusted_network(self): 1397 def test_port_trusted_network(self):
1182 # Ensure trusted_network returns True, even if the url specifies a port. 1398 # Ensure trusted_network returns True, even if the url specifies a port.
1183 url = "git://someserver.org:8080/foo;rev=1" 1399 url = "git://someserver.org:8080/foo;rev=1;branch=master"
1184 self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org") 1400 self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org")
1185 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1401 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1186 1402
1187 def test_untrusted_network(self): 1403 def test_untrusted_network(self):
1188 # Ensure trusted_network returns False when the host is NOT in the list. 1404 # Ensure trusted_network returns False when the host is NOT in the list.
1189 url = "git://someserver.org/foo;rev=1" 1405 url = "git://someserver.org/foo;rev=1;branch=master"
1190 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") 1406 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
1191 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1407 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1192 1408
1193 def test_wild_untrusted_network(self): 1409 def test_wild_untrusted_network(self):
1194 # Ensure trusted_network returns False when the host is NOT in the list. 1410 # Ensure trusted_network returns False when the host is NOT in the list.
1195 url = "git://*.someserver.org/foo;rev=1" 1411 url = "git://*.someserver.org/foo;rev=1;branch=master"
1196 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") 1412 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
1197 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1413 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1198 1414
1199 class URLHandle(unittest.TestCase): 1415 class URLHandle(unittest.TestCase):
1200 1416 # Quote password as per RFC3986
1417 password = urllib.parse.quote(r"!#$%^&*()-_={}[]\|:?,.<>~`", r"!$&'/()*+,;=")
1201 datatable = { 1418 datatable = {
1202 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), 1419 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
1203 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), 1420 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
1204 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), 1421 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
1205 "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}), 1422 "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}),
1206 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), 1423 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
1424 "file://example@.service": ('file', '', 'example@.service', '', '', {}),
1425 "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}),
1426 'git://s.o-me_ONE:%s@git.openembedded.org/bitbake;branch=main;protocol=https' % password: ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', password, {'branch': 'main', 'protocol' : 'https'}),
1207 } 1427 }
1208 # we require a pathname to encodeurl but users can still pass such urls to 1428 # we require a pathname to encodeurl but users can still pass such urls to
1209 # decodeurl and we need to handle them 1429 # decodeurl and we need to handle them
1210 decodedata = datatable.copy() 1430 decodedata = datatable.copy()
1211 decodedata.update({ 1431 decodedata.update({
1212 "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}), 1432 "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}),
1433 "npmsw://some.registry.url;package=@pkg;version=latest": ('npmsw', 'some.registry.url', '/', '', '', {'package': '@pkg', 'version': 'latest'}),
1213 }) 1434 })
1214 1435
1215 def test_decodeurl(self): 1436 def test_decodeurl(self):
@@ -1220,138 +1441,179 @@ class URLHandle(unittest.TestCase):
1220 def test_encodeurl(self): 1441 def test_encodeurl(self):
1221 for k, v in self.datatable.items(): 1442 for k, v in self.datatable.items():
1222 result = bb.fetch.encodeurl(v) 1443 result = bb.fetch.encodeurl(v)
1444 if result.startswith("file:"):
1445 result = urllib.parse.unquote(result)
1223 self.assertEqual(result, k) 1446 self.assertEqual(result, k)
1224 1447
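For reference, the six-tuple these tables encode is (scheme, host, path, user, password, params): decodeurl() produces it and encodeurl() is its inverse, modulo the file: quoting special-cased above. A short round-trip using entries from the datatable:

    import bb.fetch

    print(bb.fetch.decodeurl("git://git.openembedded.org/bitbake;branch=@foo;protocol=https"))
    # -> ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol': 'https'})

    print(bb.fetch.encodeurl(('http', 'www.google.com', '/index.html', '', '', {})))
    # -> 'http://www.google.com/index.html'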
1225 class FetchLatestVersionTest(FetcherTest): 1448 class FetchLatestVersionTest(FetcherTest):
1226 1449
1227 test_git_uris = { 1450 test_git_uris = {
1228 # version pattern "X.Y.Z" 1451 # version pattern "X.Y.Z"
1229 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "") 1452 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "", "")
1230 : "1.99.4", 1453 : "1.99.4",
1231 # version pattern "vX.Y" 1454 # version pattern "vX.Y"
1232 # mirror of git.infradead.org since network issues interfered with testing 1455 # mirror of git.infradead.org since network issues interfered with testing
1233 ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "") 1456 ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "", "")
1234 : "1.5.0", 1457 : "1.5.0",
1235 # version pattern "pkg_name-X.Y" 1458 # version pattern "pkg_name-X.Y"
1236 # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing 1459 # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing
1237 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "") 1460 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "", "")
1238 : "1.0", 1461 : "1.0",
1239 # version pattern "pkg_name-vX.Y.Z" 1462 # version pattern "pkg_name-vX.Y.Z"
1240 ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "") 1463 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
1241 : "1.4.0", 1464 : "1.4.0",
1242 # combination version pattern 1465 # combination version pattern
1243 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "") 1466 ("sysprof", "git://git.yoctoproject.org/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
1244 : "1.2.0", 1467 : "1.2.0",
1245 ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "") 1468 ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
1246 : "2014.01", 1469 : "2014.01",
1247 # version pattern "yyyymmdd" 1470 # version pattern "yyyymmdd"
1248 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "") 1471 ("mobile-broadband-provider-info", "git://git.yoctoproject.org/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
1249 : "20120614", 1472 : "20120614",
1250 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX 1473 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
1251 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing 1474 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
1252 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))") 1475 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))", "")
1253 : "0.4.3", 1476 : "0.4.3",
1254 ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))") 1477 ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))", "")
1255 : "11.0.0", 1478 : "11.0.0",
1256 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))") 1479 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))", "")
1257 : "1.3.59", 1480 : "1.3.59",
1258 ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))") 1481 ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))", "")
1259 : "3.82+dbg0.9", 1482 : "3.82+dbg0.9",
1483 ("sysdig", "git://github.com/draios/sysdig.git;branch=dev;protocol=https", "4fb6288275f567f63515df0ff0a6518043ecfa9b", r"^(?P<pver>\d+(\.\d+)+)", "10.0.0")
1484 : "0.28.0",
1260 } 1485 }
1261 1486
1487 WgetTestData = collections.namedtuple("WgetTestData", ["pn", "path", "pv", "check_uri", "check_regex"], defaults=[None, None, None])
1262 test_wget_uris = { 1488 test_wget_uris = {
1263 # 1489 #
1264 # packages with versions inside directory name 1490 # packages with versions inside directory name
1265 # 1491 #
1266 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2 1492 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2
1267 ("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "") 1493 WgetTestData("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2")
1268 : "2.24.2", 1494 : "2.24.2",
1269 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz 1495 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz
1270 ("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "") 1496 WgetTestData("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz")
1271 : "1.6.0", 1497 : "1.6.0",
1272 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz 1498 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
1273 ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") 1499 WgetTestData("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz")
1274 : "2.8.12.1", 1500 : "2.8.12.1",
1501 # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz
1502 WgetTestData("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz")
1503 : "2.10.3",
1275 # 1504 #
1276 # packages with versions only in current directory 1505 # packages with versions only in current directory
1277 # 1506 #
1278 # http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 1507 # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
1279 ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") 1508 WgetTestData("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2")
1280 : "2.19", 1509 : "2.19",
1281 # http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 1510 # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
1282 ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") 1511 WgetTestData("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2")
1283 : "20120814", 1512 : "20120814",
1284 # 1513 #
1285 # packages with "99" in the name of possible version 1514 # packages with "99" in the name of possible version
1286 # 1515 #
1287 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz 1516 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz
1288 ("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "") 1517 WgetTestData("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz")
1289 : "5.0", 1518 : "5.0",
1290 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2 1519 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2
1291 ("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "") 1520 WgetTestData("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2")
1292 : "1.15.1", 1521 : "1.15.1",
1293 # 1522 #
1294 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX 1523 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
1295 # 1524 #
1296 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 1525 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
1297 # https://github.com/apple/cups/releases 1526 # https://github.com/apple/cups/releases
1298 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") 1527 WgetTestData("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", check_uri="/apple/cups/releases", check_regex=r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
1299 : "2.0.0", 1528 : "2.0.0",
1300 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz 1529 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
1301 # http://ftp.debian.org/debian/pool/main/d/db5.3/ 1530 # http://ftp.debian.org/debian/pool/main/d/db5.3/
1302 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") 1531 WgetTestData("db", "/berkeley-db/db-5.3.21.tar.gz", check_uri="/debian/pool/main/d/db5.3/", check_regex=r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
1303 : "5.3.10", 1532 : "5.3.10",
1533 #
1534 # packages where the tarball compression changed in the new version
1535 #
1536 # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz
1537 WgetTestData("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz")
1538 : "2.8",
1539
1540 #
1541 # packages where the path doesn't actually contain the filename, so downloadfilename should be respected
1542 #
1543 WgetTestData("miniupnpd", "/software/miniupnp/download.php?file=miniupnpd_2.1.20191006.tar.gz;downloadfilename=miniupnpd_2.1.20191006.tar.gz", pv="2.1.20191006", check_uri="/software/miniupnp/download.php", check_regex=r"miniupnpd-(?P<pver>\d+(\.\d+)+)\.tar")
1544 : "2.3.7",
1304 } 1545 }
1305 1546
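The check_uri and check_regex fields above map onto UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX, which point the upstream version probe at a page other than the SRC_URI location. A sketch of how latest_versionstring() consumes them, reusing the db entry's values:

    import bb.data
    import bb.fetch2

    d = bb.data.init()
    d.setVar("PN", "db")
    d.setVar("UPSTREAM_CHECK_URI", "http://ftp.debian.org/debian/pool/main/d/db5.3/")
    d.setVar("UPSTREAM_CHECK_REGEX", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
    ud = bb.fetch2.FetchData("http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", d)
    pupver = ud.method.latest_versionstring(ud, d)
    print(pupver[0])   # newest version matched on the check page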
1547 test_crate_uris = {
1548 # basic example; version pattern "A.B.C+cargo-D.E.F"
1549 ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69")
1550 : "0.9.29"
1551 }
1552
1306 @skipIfNoNetwork() 1553 @skipIfNoNetwork()
1307 def test_git_latest_versionstring(self): 1554 def test_git_latest_versionstring(self):
1308 for k, v in self.test_git_uris.items(): 1555 for k, v in self.test_git_uris.items():
1309 self.d.setVar("PN", k[0]) 1556 with self.subTest(pn=k[0]):
1310 self.d.setVar("SRCREV", k[2]) 1557 self.d.setVar("PN", k[0])
1311 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3]) 1558 self.d.setVar("SRCREV", k[2])
1312 ud = bb.fetch2.FetchData(k[1], self.d) 1559 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
1313 pupver= ud.method.latest_versionstring(ud, self.d) 1560 ud = bb.fetch2.FetchData(k[1], self.d)
1314 verstring = pupver[0] 1561 pupver= ud.method.latest_versionstring(ud, self.d)
1315 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1562 verstring = pupver[0]
1316 r = bb.utils.vercmp_string(v, verstring) 1563 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1317 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1564 r = bb.utils.vercmp_string(v, verstring)
1565 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1566 if k[4]:
1567 r = bb.utils.vercmp_string(verstring, k[4])
1568 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
1318 1569
1319 def test_wget_latest_versionstring(self): 1570 def test_wget_latest_versionstring(self):
1320 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" 1571 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
1321 server = HTTPService(testdata) 1572 server = HTTPService(testdata, host="127.0.0.1")
1322 server.start() 1573 server.start()
1323 port = server.port 1574 port = server.port
1324 try: 1575 try:
1325 for k, v in self.test_wget_uris.items(): 1576 for data, v in self.test_wget_uris.items():
1577 with self.subTest(pn=data.pn):
1578 self.d.setVar("PN", data.pn)
1579 self.d.setVar("PV", data.pv)
1580 if data.check_uri:
1581 checkuri = "http://127.0.0.1:%s/%s" % (port, data.check_uri)
1582 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1583 if data.check_regex:
1584 self.d.setVar("UPSTREAM_CHECK_REGEX", data.check_regex)
1585
1586 url = "http://127.0.0.1:%s/%s" % (port, data.path)
1587 ud = bb.fetch2.FetchData(url, self.d)
1588 pupver = ud.method.latest_versionstring(ud, self.d)
1589 verstring = pupver[0]
1590 self.assertTrue(verstring, msg="Could not find upstream version for %s" % data.pn)
1591 r = bb.utils.vercmp_string(v, verstring)
1592 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (data.pn, v, verstring))
1593 finally:
1594 server.stop()
1595
1596 @skipIfNoNetwork()
1597 def test_crate_latest_versionstring(self):
1598 for k, v in self.test_crate_uris.items():
1599 with self.subTest(pn=k[0]):
1326 self.d.setVar("PN", k[0]) 1600 self.d.setVar("PN", k[0])
1327 checkuri = "" 1601 ud = bb.fetch2.FetchData(k[1], self.d)
1328 if k[2]:
1329 checkuri = "http://localhost:%s/" % port + k[2]
1330 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1331 self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
1332 url = "http://localhost:%s/" % port + k[1]
1333 ud = bb.fetch2.FetchData(url, self.d)
1334 pupver = ud.method.latest_versionstring(ud, self.d) 1602 pupver = ud.method.latest_versionstring(ud, self.d)
1335 verstring = pupver[0] 1603 verstring = pupver[0]
1336 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1604 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1337 r = bb.utils.vercmp_string(v, verstring) 1605 r = bb.utils.vercmp_string(v, verstring)
1338 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1606 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1339 finally:
1340 server.stop()
1341
1342 1607
1343 class FetchCheckStatusTest(FetcherTest): 1608 class FetchCheckStatusTest(FetcherTest):
1344 test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", 1609 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
1345 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", 1610 "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
1346 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", 1611 "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
1347 "https://yoctoproject.org/", 1612 "https://yoctoproject.org/",
1348 "https://yoctoproject.org/documentation", 1613 "https://docs.yoctoproject.org",
1349 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", 1614 "https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
1350 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", 1615 "https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
1351 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz", 1616 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
1352 "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
1353 "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
1354 "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
1355 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD 1617 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD
1356 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz" 1618 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
1357 ] 1619 ]
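FetchCheckStatusTest's URI list feeds checkstatus(), which probes each URI for reachability without downloading the artefact; the S3 note above exists because the probe must cope with servers that reject HEAD requests. A minimal sketch:

    import bb.data
    import bb.fetch

    d = bb.data.init()
    fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz"], d)
    fetcher.checkstatus()   # raises a fetch error if any URI is unreachable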
@@ -1389,7 +1651,7 @@ class GitMakeShallowTest(FetcherTest):
1389 FetcherTest.setUp(self) 1651 FetcherTest.setUp(self)
1390 self.gitdir = os.path.join(self.tempdir, 'gitshallow') 1652 self.gitdir = os.path.join(self.tempdir, 'gitshallow')
1391 bb.utils.mkdirhier(self.gitdir) 1653 bb.utils.mkdirhier(self.gitdir)
1392 bb.process.run('git init', cwd=self.gitdir) 1654 self.git_init()
1393 1655
1394 def assertRefs(self, expected_refs): 1656 def assertRefs(self, expected_refs):
1395 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines() 1657 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines()
@@ -1403,13 +1665,6 @@ class GitMakeShallowTest(FetcherTest):
1403 actual_count = len(revs.splitlines()) 1665 actual_count = len(revs.splitlines())
1404 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) 1666 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
1405 1667
1406 def git(self, cmd):
1407 if isinstance(cmd, str):
1408 cmd = 'git ' + cmd
1409 else:
1410 cmd = ['git'] + cmd
1411 return bb.process.run(cmd, cwd=self.gitdir)[0]
1412
1413 def make_shallow(self, args=None): 1668 def make_shallow(self, args=None):
1414 if args is None: 1669 if args is None:
1415 args = ['HEAD'] 1670 args = ['HEAD']
@@ -1512,13 +1767,13 @@ class GitShallowTest(FetcherTest):
1512 self.srcdir = os.path.join(self.tempdir, 'gitsource') 1767 self.srcdir = os.path.join(self.tempdir, 'gitsource')
1513 1768
1514 bb.utils.mkdirhier(self.srcdir) 1769 bb.utils.mkdirhier(self.srcdir)
1515 self.git('init', cwd=self.srcdir) 1770 self.git_init(cwd=self.srcdir)
1516 self.d.setVar('WORKDIR', self.tempdir) 1771 self.d.setVar('WORKDIR', self.tempdir)
1517 self.d.setVar('S', self.gitdir) 1772 self.d.setVar('S', self.gitdir)
1518 self.d.delVar('PREMIRRORS') 1773 self.d.delVar('PREMIRRORS')
1519 self.d.delVar('MIRRORS') 1774 self.d.delVar('MIRRORS')
1520 1775
1521 uri = 'git://%s;protocol=file;subdir=${S}' % self.srcdir 1776 uri = 'git://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1522 self.d.setVar('SRC_URI', uri) 1777 self.d.setVar('SRC_URI', uri)
1523 self.d.setVar('SRCREV', '${AUTOREV}') 1778 self.d.setVar('SRCREV', '${AUTOREV}')
1524 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') 1779 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
@@ -1526,11 +1781,14 @@ class GitShallowTest(FetcherTest):
1526 self.d.setVar('BB_GIT_SHALLOW', '1') 1781 self.d.setVar('BB_GIT_SHALLOW', '1')
1527 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0') 1782 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
1528 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') 1783 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
1784 self.d.setVar("__BBSRCREV_SEEN", "1")
1529 1785
1530 def assertRefs(self, expected_refs, cwd=None): 1786 def assertRefs(self, expected_refs, cwd=None):
1531 if cwd is None: 1787 if cwd is None:
1532 cwd = self.gitdir 1788 cwd = self.gitdir
1533 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines() 1789 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines()
1790 # Resolve references into the same format as the comparison (needed by git 2.48 onwards)
1791 actual_refs = self.git(['rev-parse', '--symbolic-full-name'] + actual_refs, cwd=cwd).splitlines()
1534 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines() 1792 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines()
1535 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs))) 1793 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs)))
1536 1794
@@ -1543,15 +1801,6 @@ class GitShallowTest(FetcherTest):
1543 actual_count = len(revs.splitlines()) 1801 actual_count = len(revs.splitlines())
1544 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) 1802 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
1545 1803
1546 def git(self, cmd, cwd=None):
1547 if isinstance(cmd, str):
1548 cmd = 'git ' + cmd
1549 else:
1550 cmd = ['git'] + cmd
1551 if cwd is None:
1552 cwd = self.gitdir
1553 return bb.process.run(cmd, cwd=cwd)[0]
1554
1555 def add_empty_file(self, path, cwd=None, msg=None): 1804 def add_empty_file(self, path, cwd=None, msg=None):
1556 if msg is None: 1805 if msg is None:
1557 msg = path 1806 msg = path
@@ -1586,7 +1835,6 @@ class GitShallowTest(FetcherTest):
1586 def fetch_shallow(self, uri=None, disabled=False, keepclone=False): 1835 def fetch_shallow(self, uri=None, disabled=False, keepclone=False):
1587 """Fetch a uri, generating a shallow tarball, then unpack using it""" 1836 """Fetch a uri, generating a shallow tarball, then unpack using it"""
1588 fetcher, ud = self.fetch_and_unpack(uri) 1837 fetcher, ud = self.fetch_and_unpack(uri)
1589 assert os.path.exists(ud.clonedir), 'Git clone in DLDIR (%s) does not exist for uri %s' % (ud.clonedir, uri)
1590 1838
1591 # Confirm that the unpacked repo is unshallow 1839 # Confirm that the unpacked repo is unshallow
1592 if not disabled: 1840 if not disabled:
@@ -1594,8 +1842,10 @@ class GitShallowTest(FetcherTest):
1594 1842
1595 # fetch and unpack, from the shallow tarball 1843 # fetch and unpack, from the shallow tarball
1596 bb.utils.remove(self.gitdir, recurse=True) 1844 bb.utils.remove(self.gitdir, recurse=True)
1597 bb.utils.remove(ud.clonedir, recurse=True) 1845 if os.path.exists(ud.clonedir):
1598 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) 1846 bb.process.run('chmod u+w -R "%s"' % ud.clonedir)
1847 bb.utils.remove(ud.clonedir, recurse=True)
1848 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
1599 1849
1600 # confirm that the unpacked repo is used when no git clone or git 1850 # confirm that the unpacked repo is used when no git clone or git
1601 # mirror tarball is available 1851 # mirror tarball is available
@@ -1678,7 +1928,12 @@ class GitShallowTest(FetcherTest):
1678 self.add_empty_file('c') 1928 self.add_empty_file('c')
1679 self.assertRevCount(3, cwd=self.srcdir) 1929 self.assertRevCount(3, cwd=self.srcdir)
1680 1930
1931 # Clone without tarball
1932 self.d.setVar('BB_GIT_SHALLOW', '0')
1933 fetcher, ud = self.fetch()
1934
1681 # Clone and generate mirror tarball 1935 # Clone and generate mirror tarball
1936 self.d.setVar('BB_GIT_SHALLOW', '1')
1682 fetcher, ud = self.fetch() 1937 fetcher, ud = self.fetch()
1683 1938
1684 # Ensure we have a current mirror tarball, but an out of date clone 1939 # Ensure we have a current mirror tarball, but an out of date clone
@@ -1690,6 +1945,7 @@ class GitShallowTest(FetcherTest):
1690 fetcher, ud = self.fetch() 1945 fetcher, ud = self.fetch()
1691 fetcher.unpack(self.d.getVar('WORKDIR')) 1946 fetcher.unpack(self.d.getVar('WORKDIR'))
1692 self.assertRevCount(1) 1947 self.assertRevCount(1)
1948 assert os.path.exists(os.path.join(self.d.getVar('WORKDIR'), 'git', 'c'))
1693 1949
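The shallow tests are all driven by the variables set in GitShallowTest.setUp() further up the diff. A condensed sketch of that configuration against a hypothetical repository, showing the knobs that control history depth and tarball generation:

    import bb.data
    import bb.fetch

    d = bb.data.init()
    d.setVar("DL_DIR", "/tmp/downloads")
    d.setVar("BB_GIT_SHALLOW", "1")                 # unpack from shallow history
    d.setVar("BB_GENERATE_SHALLOW_TARBALLS", "1")   # write a reusable shallow tarball
    d.setVar("BB_GIT_SHALLOW_DEPTH", "1")           # keep a single commit of history
    d.setVar("SRCREV", "${AUTOREV}")
    d.setVar("AUTOREV", "${@bb.fetch2.get_autorev(d)}")
    d.setVar("__BBSRCREV_SEEN", "1")
    fetcher = bb.fetch.Fetch(["git://git.example.com/repo;protocol=https;branch=master"], d)
    fetcher.download()
    fetcher.unpack("/tmp/workdir")   # checkout carries depth-1 history only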
1694 def test_shallow_single_branch_no_merge(self): 1950 def test_shallow_single_branch_no_merge(self):
1695 self.add_empty_file('a') 1951 self.add_empty_file('a')
@@ -1745,7 +2001,7 @@ class GitShallowTest(FetcherTest):
1745 2001
1746 smdir = os.path.join(self.tempdir, 'gitsubmodule') 2002 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1747 bb.utils.mkdirhier(smdir) 2003 bb.utils.mkdirhier(smdir)
1748 self.git('init', cwd=smdir) 2004 self.git_init(cwd=smdir)
1749 # Make this look like it was cloned from a remote... 2005 # Make this look like it was cloned from a remote...
1750 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 2006 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1751 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 2007 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1753,11 +2009,11 @@ class GitShallowTest(FetcherTest):
1753 self.add_empty_file('bsub', cwd=smdir) 2009 self.add_empty_file('bsub', cwd=smdir)
1754 2010
1755 self.git('submodule init', cwd=self.srcdir) 2011 self.git('submodule init', cwd=self.srcdir)
1756 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 2012 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1757 self.git('submodule update', cwd=self.srcdir) 2013 self.git('submodule update', cwd=self.srcdir)
1758 self.git('commit -m submodule -a', cwd=self.srcdir) 2014 self.git('commit -m submodule -a', cwd=self.srcdir)
1759 2015
1760 uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir 2016 uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1761 fetcher, ud = self.fetch_shallow(uri) 2017 fetcher, ud = self.fetch_shallow(uri)
1762 2018
1763 # Verify the main repository is shallow 2019 # Verify the main repository is shallow
@@ -1775,7 +2031,7 @@ class GitShallowTest(FetcherTest):
1775 2031
1776 smdir = os.path.join(self.tempdir, 'gitsubmodule') 2032 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1777 bb.utils.mkdirhier(smdir) 2033 bb.utils.mkdirhier(smdir)
1778 self.git('init', cwd=smdir) 2034 self.git_init(cwd=smdir)
1779 # Make this look like it was cloned from a remote... 2035 # Make this look like it was cloned from a remote...
1780 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 2036 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1781 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 2037 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1783,19 +2039,19 @@ class GitShallowTest(FetcherTest):
1783 self.add_empty_file('bsub', cwd=smdir) 2039 self.add_empty_file('bsub', cwd=smdir)
1784 2040
1785 self.git('submodule init', cwd=self.srcdir) 2041 self.git('submodule init', cwd=self.srcdir)
1786 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 2042 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1787 self.git('submodule update', cwd=self.srcdir) 2043 self.git('submodule update', cwd=self.srcdir)
1788 self.git('commit -m submodule -a', cwd=self.srcdir) 2044 self.git('commit -m submodule -a', cwd=self.srcdir)
1789 2045
1790 uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir 2046 uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1791 2047
1792 # Fetch once to generate the shallow tarball 2048 # Fetch once to generate the shallow tarball
1793 fetcher, ud = self.fetch(uri) 2049 fetcher, ud = self.fetch(uri)
1794 2050
1795 # Set up the mirror 2051 # Set up the mirror
1796 mirrordir = os.path.join(self.tempdir, 'mirror') 2052 mirrordir = os.path.join(self.tempdir, 'mirror')
1797 os.rename(self.dldir, mirrordir) 2053 bb.utils.rename(self.dldir, mirrordir)
1798 self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/\n' % mirrordir) 2054 self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/' % mirrordir)
1799 2055
1800 # Fetch from the mirror 2056 # Fetch from the mirror
1801 bb.utils.remove(self.dldir, recurse=True) 2057 bb.utils.remove(self.dldir, recurse=True)
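The hunk above swaps os.rename for bb.utils.rename (which can fall back when source and destination sit on different filesystems) and drops the trailing '\n' from the PREMIRRORS value; mirror specifications are whitespace-separated <regex> <replacement> pairs, so the explicit newline was never needed. A minimal sketch of that pairing, assuming a simplified matcher rather than BitBake's real uri_replace() logic:

    import re

    # Simplified sketch: PREMIRRORS entries are whitespace-separated
    # <regex> <replacement> pairs; the first matching regex picks the mirror.
    # (Assumption: this approximates, not reproduces, bb.fetch2's logic.)
    def first_mirror(url, premirrors):
        entries = premirrors.split()
        for pattern, replacement in zip(entries[0::2], entries[1::2]):
            if re.match(pattern, url):
                return replacement
        return None

    mirrordir = '/tmp/mirror'
    premirrors = 'gitsm://.*/.* file://%s/' % mirrordir
    print(first_mirror('gitsm://example.org/repo.git;protocol=file', premirrors))
    # -> file:///tmp/mirror/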
@@ -1818,80 +2074,27 @@ class GitShallowTest(FetcherTest):
1818 self.git('annex init', cwd=self.srcdir) 2074 self.git('annex init', cwd=self.srcdir)
1819 open(os.path.join(self.srcdir, 'c'), 'w').close() 2075 open(os.path.join(self.srcdir, 'c'), 'w').close()
1820 self.git('annex add c', cwd=self.srcdir) 2076 self.git('annex add c', cwd=self.srcdir)
1821 self.git('commit -m annex-c -a', cwd=self.srcdir) 2077 self.git('commit --author "Foo Bar <foo@bar>" -m annex-c -a', cwd=self.srcdir)
1822 bb.process.run('chmod u+w -R %s' % os.path.join(self.srcdir, '.git', 'annex')) 2078 bb.process.run('chmod u+w -R %s' % self.srcdir)
1823 2079
1824 uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir 2080 uri = 'gitannex://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1825 fetcher, ud = self.fetch_shallow(uri) 2081 fetcher, ud = self.fetch_shallow(uri)
1826 2082
1827 self.assertRevCount(1) 2083 self.assertRevCount(1)
1828 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0] 2084 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0]
1829 assert os.path.exists(os.path.join(self.gitdir, 'c')) 2085 assert os.path.exists(os.path.join(self.gitdir, 'c'))
1830 2086
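The annex test asserts tarball contents by shelling out to tar -tzf. An equivalent membership check, as a hedged sketch using only the standard library (illustrative; the test itself keeps the shell form):

    import tarfile

    def tarball_contains(tarball_path, member_prefix):
        # List the gzip'd tarball's members and look for the prefix,
        # mirroring the "./.git/annex/" assertion above.
        with tarfile.open(tarball_path, "r:gz") as tf:
            return any(name.startswith(member_prefix) for name in tf.getnames())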
1831 def test_shallow_multi_one_uri(self):
1832 # Create initial git repo
1833 self.add_empty_file('a')
1834 self.add_empty_file('b')
1835 self.git('checkout -b a_branch', cwd=self.srcdir)
1836 self.add_empty_file('c')
1837 self.add_empty_file('d')
1838 self.git('checkout master', cwd=self.srcdir)
1839 self.git('tag v0.0 a_branch', cwd=self.srcdir)
1840 self.add_empty_file('e')
1841 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
1842 self.add_empty_file('f')
1843 self.assertRevCount(7, cwd=self.srcdir)
1844
1845 uri = self.d.getVar('SRC_URI').split()[0]
1846 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
1847
1848 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
1849 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
1850 self.d.setVar('SRCREV_master', '${AUTOREV}')
1851 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
1852
1853 self.fetch_shallow(uri)
1854
1855 self.assertRevCount(5)
1856 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
1857
1858 def test_shallow_multi_one_uri_depths(self):
1859 # Create initial git repo
1860 self.add_empty_file('a')
1861 self.add_empty_file('b')
1862 self.git('checkout -b a_branch', cwd=self.srcdir)
1863 self.add_empty_file('c')
1864 self.add_empty_file('d')
1865 self.git('checkout master', cwd=self.srcdir)
1866 self.add_empty_file('e')
1867 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
1868 self.add_empty_file('f')
1869 self.assertRevCount(7, cwd=self.srcdir)
1870
1871 uri = self.d.getVar('SRC_URI').split()[0]
1872 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
1873
1874 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
1875 self.d.setVar('BB_GIT_SHALLOW_DEPTH_master', '3')
1876 self.d.setVar('BB_GIT_SHALLOW_DEPTH_a_branch', '1')
1877 self.d.setVar('SRCREV_master', '${AUTOREV}')
1878 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
1879
1880 self.fetch_shallow(uri)
1881
1882 self.assertRevCount(4, ['--all'])
1883 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
1884
1885 def test_shallow_clone_preferred_over_shallow(self): 2087 def test_shallow_clone_preferred_over_shallow(self):
1886 self.add_empty_file('a') 2088 self.add_empty_file('a')
1887 self.add_empty_file('b') 2089 self.add_empty_file('b')
1888 2090
1889 # Fetch once to generate the shallow tarball 2091 # Fetch once to generate the shallow tarball
2092 self.d.setVar('BB_GIT_SHALLOW', '0')
1890 fetcher, ud = self.fetch() 2093 fetcher, ud = self.fetch()
1891 assert os.path.exists(os.path.join(self.dldir, ud.mirrortarballs[0]))
1892 2094
1893 # Fetch and unpack with both the clonedir and shallow tarball available 2095 # Fetch and unpack with both the clonedir and shallow tarball available
1894 bb.utils.remove(self.gitdir, recurse=True) 2096 bb.utils.remove(self.gitdir, recurse=True)
2097 self.d.setVar('BB_GIT_SHALLOW', '1')
1895 fetcher, ud = self.fetch_and_unpack() 2098 fetcher, ud = self.fetch_and_unpack()
1896 2099
1897 # The unpacked tree should *not* be shallow 2100 # The unpacked tree should *not* be shallow
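The shallow/non-shallow assertions in these tests hinge on the presence of .git/shallow in the unpacked tree. A small helper sketch, assuming a plain (non-worktree) layout; git 2.15 and later can also answer directly:

    import os
    import subprocess

    def is_shallow(gitdir):
        # git records shallow graft points in .git/shallow; a full clone has none
        if os.path.exists(os.path.join(gitdir, ".git", "shallow")):
            return True
        out = subprocess.run(["git", "rev-parse", "--is-shallow-repository"],
                             cwd=gitdir, capture_output=True, text=True)
        return out.stdout.strip() == "true"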
@@ -1910,9 +2113,9 @@ class GitShallowTest(FetcherTest):
1910 # Set up the mirror 2113 # Set up the mirror
1911 mirrordir = os.path.join(self.tempdir, 'mirror') 2114 mirrordir = os.path.join(self.tempdir, 'mirror')
1912 bb.utils.mkdirhier(mirrordir) 2115 bb.utils.mkdirhier(mirrordir)
1913 self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/\n' % mirrordir) 2116 self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/' % mirrordir)
1914 2117
1915 os.rename(os.path.join(self.dldir, mirrortarball), 2118 bb.utils.rename(os.path.join(self.dldir, mirrortarball),
1916 os.path.join(mirrordir, mirrortarball)) 2119 os.path.join(mirrordir, mirrortarball))
1917 2120
1918 # Fetch from the mirror 2121 # Fetch from the mirror
@@ -1999,7 +2202,7 @@ class GitShallowTest(FetcherTest):
1999 2202
2000 self.fetch_shallow() 2203 self.fetch_shallow()
2001 2204
2002 self.assertRevCount(5) 2205 self.assertRevCount(2)
2003 2206
2004 def test_shallow_invalid_revs(self): 2207 def test_shallow_invalid_revs(self):
2005 self.add_empty_file('a') 2208 self.add_empty_file('a')
@@ -2018,7 +2221,10 @@ class GitShallowTest(FetcherTest):
2018 self.git('tag v0.0 master', cwd=self.srcdir) 2221 self.git('tag v0.0 master', cwd=self.srcdir)
2019 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') 2222 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2020 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0') 2223 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
2021 self.fetch_shallow() 2224
2225 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs("BitBake.Fetcher", level="ERROR") as cm:
2226 self.fetch_shallow()
2227 self.assertIn("fatal: no commits selected for shallow requests", cm.output[0])
2022 2228
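The new expectation above stacks assertRaises and assertLogs on one with statement so the failing fetch must both raise and log. A self-contained sketch of the idiom (logger name and messages are illustrative):

    import logging
    import unittest

    class Example(unittest.TestCase):
        def test_error_is_raised_and_logged(self):
            log = logging.getLogger("BitBake.Fetcher")
            with self.assertRaises(RuntimeError), \
                 self.assertLogs(log, level="ERROR") as cm:
                log.error("fatal: no commits selected for shallow requests")
                raise RuntimeError("shallow fetch failed")
            # cm.output holds "LEVEL:logger:message" strings
            self.assertIn("no commits selected", cm.output[0])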
2023 def test_shallow_fetch_missing_revs_fails(self): 2229 def test_shallow_fetch_missing_revs_fails(self):
2024 self.add_empty_file('a') 2230 self.add_empty_file('a')
@@ -2032,8 +2238,35 @@ class GitShallowTest(FetcherTest):
2032 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0]) 2238 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0])
2033 2239
2034 @skipIfNoNetwork() 2240 @skipIfNoNetwork()
2241 def test_git_shallow_fetch_premirrors(self):
2242 url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
2243
2244 # Create a separate premirror directory within tempdir
2245 premirror = os.path.join(self.tempdir, "premirror")
2246 os.mkdir(premirror)
2247
2248 # Fetch a non-shallow clone into the premirror subdir
2249 self.d.setVar('BB_GIT_SHALLOW', '0')
2250 self.d.setVar("DL_DIR", premirror)
2251 fetcher, ud = self.fetch(url)
2252
2253 # Fetch a shallow clone from the premirror subdir with unpacking
2254 # using the original recipe URL and the premirror mapping
2255 self.d.setVar('BB_GIT_SHALLOW', '1')
2256 self.d.setVar("DL_DIR", self.dldir)
2257 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
2258 self.d.setVar('BB_NO_NETWORK', '1')
2259 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
2260 self.d.setVar("PREMIRRORS", "git://.*/.* git://{0};protocol=file".format(premirror + "/git2/" + ud.host + ud.path.replace("/", ".")))
2261 fetcher = self.fetch_and_unpack(url)
2262
2263 # Verify that the unpacked sources are shallow clones
2264 self.assertRevCount(1)
2265 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2266
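The PREMIRRORS value in test_git_shallow_fetch_premirrors points straight at the non-shallow clone directory under the premirror's git2/ tree, whose name is the host plus the path with slashes replaced by dots, exactly as the format() call above computes:

    # Sketch of the clone-directory naming the premirror URL relies on
    # (restates the expression used in the test; host/path values are
    # illustrative).
    def git2_clonedir_name(host, path):
        return host + path.replace("/", ".")

    name = git2_clonedir_name("git.openembedded.org", "/bitbake")
    print(name)  # git.openembedded.org.bitbake
    premirror_url = "git://%s;protocol=file" % ("/srv/premirror/git2/" + name)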
2267 @skipIfNoNetwork()
2035 def test_bitbake(self): 2268 def test_bitbake(self):
2036 self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir) 2269 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
2037 self.git('config core.bare true', cwd=self.srcdir) 2270 self.git('config core.bare true', cwd=self.srcdir)
2038 self.git('fetch', cwd=self.srcdir) 2271 self.git('fetch', cwd=self.srcdir)
2039 2272
@@ -2049,7 +2282,7 @@ class GitShallowTest(FetcherTest):
2049 revs = len(self.git('rev-list master').splitlines()) 2282 revs = len(self.git('rev-list master').splitlines())
2050 self.assertNotEqual(orig_revs, revs) 2283 self.assertNotEqual(orig_revs, revs)
2051 self.assertRefs(['master', 'origin/master']) 2284 self.assertRefs(['master', 'origin/master'])
2052 self.assertRevCount(orig_revs - 1758) 2285 self.assertRevCount(orig_revs - 1760)
2053 2286
2054 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self): 2287 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self):
2055 self.add_empty_file('a') 2288 self.add_empty_file('a')
@@ -2063,27 +2296,43 @@ class GitShallowTest(FetcherTest):
2063 self.assertIn("No up to date source found", context.exception.msg) 2296 self.assertIn("No up to date source found", context.exception.msg)
2064 self.assertIn("clone directory not available or not up to date", context.exception.msg) 2297 self.assertIn("clone directory not available or not up to date", context.exception.msg)
2065 2298
2066 @skipIfNoNetwork() 2299 def test_shallow_check_is_shallow(self):
2067 def test_that_unpack_does_work_when_using_git_shallow_tarball_but_tarball_is_not_available(self): 2300 self.add_empty_file('a')
2068 self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') 2301 self.add_empty_file('b')
2069 self.d.setVar('BB_GIT_SHALLOW', '1')
2070 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
2071 fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests"], self.d)
2072 fetcher.download()
2073 2302
2074 bb.utils.remove(self.dldir + "/*.tar.gz") 2303 # Fetch and unpack without the clonedir and *only* shallow tarball available
2075 fetcher.unpack(self.unpackdir) 2304 bb.utils.remove(self.gitdir, recurse=True)
2305 fetcher, ud = self.fetch_and_unpack()
2076 2306
2077 dir = os.listdir(self.unpackdir + "/git/") 2307 # The unpacked tree *should* be shallow
2078 self.assertIn("fstests.doap", dir) 2308 self.assertRevCount(1)
2309 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2310
2311 def test_shallow_succeeds_with_tag_containing_slash(self):
2312 self.add_empty_file('a')
2313 self.add_empty_file('b')
2314 self.git('tag t1/t2/t3', cwd=self.srcdir)
2315 self.assertRevCount(2, cwd=self.srcdir)
2316
2317 srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
2318 self.d.setVar('SRCREV', srcrev)
2319 uri = self.d.getVar('SRC_URI').split()[0]
2320 uri = '%s;tag=t1/t2/t3' % uri
2321 self.fetch_shallow(uri)
2322 self.assertRevCount(1)
2079 2323
2080class GitLfsTest(FetcherTest): 2324class GitLfsTest(FetcherTest):
2325 def skipIfNoGitLFS():
2326 if not shutil.which('git-lfs'):
2327 return unittest.skip('git-lfs not installed')
2328 return lambda f: f
2329
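skipIfNoGitLFS() follows the same decorator-factory shape as skipIfNoNetwork() and skipIfNoNpm(): when the tool is absent it returns unittest.skip(...), otherwise an identity decorator. Generalized sketch:

    import shutil
    import unittest

    def skip_if_missing(tool):
        # Return a skipping decorator when `tool` is not on PATH,
        # otherwise a no-op decorator.
        if not shutil.which(tool):
            return unittest.skip('%s not installed' % tool)
        return lambda f: f

    # Usage:
    # @skip_if_missing('git-lfs')
    # def test_something(self): ...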
2081 def setUp(self): 2330 def setUp(self):
2082 FetcherTest.setUp(self) 2331 FetcherTest.setUp(self)
2083 2332
2084 self.gitdir = os.path.join(self.tempdir, 'git') 2333 self.gitdir = os.path.join(self.tempdir, 'git')
2085 self.srcdir = os.path.join(self.tempdir, 'gitsource') 2334 self.srcdir = os.path.join(self.tempdir, 'gitsource')
2086 2335
2087 self.d.setVar('WORKDIR', self.tempdir) 2336 self.d.setVar('WORKDIR', self.tempdir)
2088 self.d.setVar('S', self.gitdir) 2337 self.d.setVar('S', self.gitdir)
2089 self.d.delVar('PREMIRRORS') 2338 self.d.delVar('PREMIRRORS')
@@ -2091,22 +2340,24 @@ class GitLfsTest(FetcherTest):
2091 2340
2092 self.d.setVar('SRCREV', '${AUTOREV}') 2341 self.d.setVar('SRCREV', '${AUTOREV}')
2093 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') 2342 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
2343 self.d.setVar("__BBSRCREV_SEEN", "1")
2094 2344
2095 bb.utils.mkdirhier(self.srcdir) 2345 bb.utils.mkdirhier(self.srcdir)
2096 self.git('init', cwd=self.srcdir) 2346 self.git_init(cwd=self.srcdir)
2097 with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs: 2347 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text')
2098 attrs.write('*.mp3 filter=lfs -text')
2099 self.git(['add', '.gitattributes'], cwd=self.srcdir)
2100 self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir)
2101 2348
2102 def git(self, cmd, cwd=None): 2349 def commit(self, *, cwd=None):
2103 if isinstance(cmd, str): 2350 cwd = cwd or self.srcdir
2104 cmd = 'git ' + cmd 2351 self.git(["commit", "-m", "Change"], cwd=cwd)
2105 else: 2352 return self.git(["rev-parse", "HEAD"], cwd=cwd).strip()
2106 cmd = ['git'] + cmd 2353
2107 if cwd is None: 2354 def commit_file(self, filename, content, *, cwd=None):
2108 cwd = self.gitdir 2355 cwd = cwd or self.srcdir
2109 return bb.process.run(cmd, cwd=cwd)[0] 2356
2357 with open(os.path.join(cwd, filename), "w") as f:
2358 f.write(content)
2359 self.git(["add", filename], cwd=cwd)
2360 return self.commit(cwd=cwd)
2110 2361
2111 def fetch(self, uri=None, download=True): 2362 def fetch(self, uri=None, download=True):
2112 uris = self.d.getVar('SRC_URI').split() 2363 uris = self.d.getVar('SRC_URI').split()
@@ -2119,65 +2370,259 @@ class GitLfsTest(FetcherTest):
2119 ud = fetcher.ud[uri] 2370 ud = fetcher.ud[uri]
2120 return fetcher, ud 2371 return fetcher, ud
2121 2372
2373 def get_real_git_lfs_file(self):
2374 self.d.setVar('PATH', os.environ.get('PATH'))
2375 fetcher, ud = self.fetch()
2376 fetcher.unpack(self.d.getVar('WORKDIR'))
2377 unpacked_lfs_file = os.path.join(self.d.getVar('WORKDIR'), 'git', "Cat_poster_1.jpg")
2378 return unpacked_lfs_file
2379
2380 @skipIfNoGitLFS()
2381 def test_gitsm_lfs(self):
2382 """Test that the gitsm fetcher caches objects stored via LFS"""
2383 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2384
2385 def fetch_and_verify(revision, filename, content):
2386 self.d.setVar('SRCREV', revision)
2387 fetcher, ud = self.fetch()
2388
2389 with hide_directory(submoduledir), hide_directory(self.srcdir):
2390 workdir = self.d.getVar('WORKDIR')
2391 fetcher.unpack(workdir)
2392
2393 with open(os.path.join(workdir, "git", filename)) as f:
2394 self.assertEqual(f.read(), content)
2395
2396 # Create the git repository that will later be used as a submodule
2397 submoduledir = self.tempdir + "/submodule"
2398 bb.utils.mkdirhier(submoduledir)
2399 self.git_init(submoduledir)
2400 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2401 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2402
2403 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2404 _ = self.commit_file("a.mp3", "submodule version 2", cwd=submoduledir)
2405
2406 # Add the submodule to the repository at its current HEAD revision
2407 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2408 cwd=self.srcdir)
2409 base_commit_1 = self.commit()
2410
2411 # Let the submodule point at a different revision
2412 self.git(["checkout", submodule_commit_1], self.srcdir + "/submodule")
2413 self.git(["add", "submodule"], cwd=self.srcdir)
2414 base_commit_2 = self.commit()
2415
2416 # Add a LFS file to the repository
2417 base_commit_3 = self.commit_file("a.mp3", "version 1")
2418 # Update the added LFS file
2419 base_commit_4 = self.commit_file("a.mp3", "version 2")
2420
2421 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
2422
2423 # Verify that LFS objects referenced from submodules are fetched and checked out
2424 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 2")
 2425 # Verify that the repository inside the download cache of a submodule is extended with any
2426 # additional LFS objects needed when checking out a different revision.
2427 fetch_and_verify(base_commit_2, "submodule/a.mp3", "submodule version 1")
2428 # Verify that LFS objects referenced from the base repository are fetched and checked out
2429 fetch_and_verify(base_commit_3, "a.mp3", "version 1")
2430 # Verify that the cached repository is extended with any additional LFS objects required
2431 # when checking out a different revision.
2432 fetch_and_verify(base_commit_4, "a.mp3", "version 2")
2433
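fetch_and_verify() wraps the unpack in hide_directory() so the checkout cannot silently fall back to the original source repositories. The helper is defined elsewhere in this test module; a minimal sketch of such a context manager, assuming it works by temporarily renaming the directory:

    import contextlib
    import os

    @contextlib.contextmanager
    def hide_directory(directory):
        # Rename the directory away for the duration of the block and
        # restore it afterwards, even if the body raises.
        hidden = directory + ".hidden"
        os.rename(directory, hidden)
        try:
            yield
        finally:
            os.rename(hidden, directory)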
2434 @skipIfNoGitLFS()
2435 def test_gitsm_lfs_disabled(self):
2436 """Test that the gitsm fetcher does not use LFS when explicitly disabled"""
2437 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2438
2439 def fetch_and_verify(revision, filename, content):
2440 self.d.setVar('SRCREV', revision)
2441 fetcher, ud = self.fetch()
2442
2443 with hide_directory(submoduledir), hide_directory(self.srcdir):
2444 workdir = self.d.getVar('WORKDIR')
2445 fetcher.unpack(workdir)
2446
2447 with open(os.path.join(workdir, "git", filename)) as f:
2448 # Assume that LFS did not perform smudging when the expected content is
2449 # missing.
2450 self.assertNotEqual(f.read(), content)
2451
2452 # Create the git repository that will later be used as a submodule
2453 submoduledir = self.tempdir + "/submodule"
2454 bb.utils.mkdirhier(submoduledir)
2455 self.git_init(submoduledir)
2456 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2457 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2458
2459 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2460
2461 # Add the submodule to the repository at its current HEAD revision
2462 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2463 cwd=self.srcdir)
2464 base_commit_1 = self.commit()
2465
2466 # Add a LFS file to the repository
2467 base_commit_2 = self.commit_file("a.mp3", "version 1")
2468
 2469 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;branch=master;lfs=0" % self.srcdir)
2470
2471 # Verify that LFS objects referenced from submodules are not fetched nor checked out
2472 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 1")
2473 # Verify that the LFS objects referenced from the base repository are not fetched nor
2474 # checked out
2475 fetch_and_verify(base_commit_2, "a.mp3", "version 1")
2476
2477 @skipIfNoGitLFS()
2478 def test_fetch_lfs_on_srcrev_change(self):
2479 """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested"""
2480 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2481
2482 def fetch_and_verify(revision, filename, content):
2483 self.d.setVar('SRCREV', revision)
2484 fetcher, ud = self.fetch()
2485
2486 with hide_directory(self.srcdir):
2487 workdir = self.d.getVar('WORKDIR')
2488 fetcher.unpack(workdir)
2489
2490 with open(os.path.join(workdir, "git", filename)) as f:
2491 self.assertEqual(f.read(), content)
2492
2493 commit_1 = self.commit_file("a.mp3", "version 1")
2494 commit_2 = self.commit_file("a.mp3", "version 2")
2495
2496 self.d.setVar('SRC_URI', "git://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
2497
2498 # Seed the local download folder by fetching the latest commit and verifying that the LFS contents are
2499 # available even when the upstream repository disappears.
2500 fetch_and_verify(commit_2, "a.mp3", "version 2")
2501 # Verify that even when an older revision is fetched, the needed LFS objects are fetched into the download
2502 # folder.
2503 fetch_and_verify(commit_1, "a.mp3", "version 1")
2504
2505 @skipIfNoGitLFS()
2506 @skipIfNoNetwork()
2507 def test_real_git_lfs_repo_succeeds_without_lfs_param(self):
2508 self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master")
2509 f = self.get_real_git_lfs_file()
2510 self.assertTrue(os.path.exists(f))
2511 self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
2512
2513 @skipIfNoGitLFS()
2514 @skipIfNoNetwork()
2515 def test_real_git_lfs_repo_succeeds(self):
2516 self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=1")
2517 f = self.get_real_git_lfs_file()
2518 self.assertTrue(os.path.exists(f))
2519 self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
2520
2521 @skipIfNoGitLFS()
2522 @skipIfNoNetwork()
2523 def test_real_git_lfs_repo_skips(self):
2524 self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0")
2525 f = self.get_real_git_lfs_file()
2526 # This is the actual non-smudged placeholder file on the repo if git-lfs does not run
2527 lfs_file = (
2528 'version https://git-lfs.github.com/spec/v1\n'
2529 'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
2530 'size 11423554\n'
2531 )
2532
2533 with open(f) as fh:
2534 self.assertEqual(lfs_file, fh.read())
2535
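The lfs_file literal in test_real_git_lfs_repo_skips is a Git LFS pointer: three "key value" lines naming the spec version, the object id, and the size. A tiny illustrative parser (not part of the test suite):

    def parse_lfs_pointer(text):
        # Each pointer line is "<key> <value>"
        fields = {}
        for line in text.strip().splitlines():
            key, _, value = line.partition(" ")
            fields[key] = value
        return fields

    pointer = ('version https://git-lfs.github.com/spec/v1\n'
               'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
               'size 11423554\n')
    info = parse_lfs_pointer(pointer)
    assert info["oid"].startswith("sha256:")
    assert info["size"] == "11423554"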
2536 @skipIfNoGitLFS()
2122 def test_lfs_enabled(self): 2537 def test_lfs_enabled(self):
2123 import shutil 2538 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2539 self.d.setVar('SRC_URI', uri)
2540
2541 # With git-lfs installed, test that we can fetch and unpack
2542 fetcher, ud = self.fetch()
2543 shutil.rmtree(self.gitdir, ignore_errors=True)
2544 fetcher.unpack(self.d.getVar('WORKDIR'))
2124 2545
2125 uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir 2546 @skipIfNoGitLFS()
2547 def test_lfs_disabled(self):
2548 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2126 self.d.setVar('SRC_URI', uri) 2549 self.d.setVar('SRC_URI', uri)
2127 2550
2128 # Careful: suppress initial attempt at downloading until 2551 # Verify that the fetcher can survive even if the source
2129 # we know whether git-lfs is installed. 2552 # repository has Git LFS usage configured.
2130 fetcher, ud = self.fetch(uri=None, download=False) 2553 fetcher, ud = self.fetch()
2131 self.assertIsNotNone(ud.method._find_git_lfs) 2554 fetcher.unpack(self.d.getVar('WORKDIR'))
2132 2555
2133 # If git-lfs can be found, the unpack should be successful. Only 2556 @skipIfNoGitLFS()
2134 # attempt this with the real live copy of git-lfs installed. 2557 def test_lfs_enabled_not_installed_during_unpack(self):
2135 if ud.method._find_git_lfs(self.d): 2558 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2136 fetcher.download() 2559 self.d.setVar('SRC_URI', uri)
2137 shutil.rmtree(self.gitdir, ignore_errors=True)
2138 fetcher.unpack(self.d.getVar('WORKDIR'))
2139 2560
2561 # Careful: suppress initial attempt at downloading
2562 fetcher, ud = self.fetch(uri=None, download=False)
2563
2564 fetcher.download()
2140 # If git-lfs cannot be found, the unpack should throw an error 2565 # If git-lfs cannot be found, the unpack should throw an error
2141 with self.assertRaises(bb.fetch2.FetchError): 2566 with self.assertRaises(bb.fetch2.FetchError):
2567 with unittest.mock.patch("shutil.which", return_value=None):
2568 shutil.rmtree(self.gitdir, ignore_errors=True)
2569 fetcher.unpack(self.d.getVar('WORKDIR'))
2570
2571 def test_lfs_enabled_not_installed(self):
2572 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2573 self.d.setVar('SRC_URI', uri)
2574
2575 # Careful: suppress initial attempt at downloading
2576 fetcher, ud = self.fetch(uri=None, download=False)
2577
2578 # If git-lfs cannot be found, the download should throw an error
2579 with unittest.mock.patch("shutil.which", return_value=None):
2580 with self.assertRaises(bb.fetch2.FetchError):
2581 fetcher.download()
2582
2583 def test_lfs_disabled_not_installed(self):
2584 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2585 self.d.setVar('SRC_URI', uri)
2586
2587 # Careful: suppress initial attempt at downloading
2588 fetcher, ud = self.fetch(uri=None, download=False)
2589
2590 # Even if git-lfs cannot be found, the download / unpack should be successful
2591 with unittest.mock.patch("shutil.which", return_value=None):
2142 fetcher.download() 2592 fetcher.download()
2143 ud.method._find_git_lfs = lambda d: False
2144 shutil.rmtree(self.gitdir, ignore_errors=True) 2593 shutil.rmtree(self.gitdir, ignore_errors=True)
2145 fetcher.unpack(self.d.getVar('WORKDIR')) 2594 fetcher.unpack(self.d.getVar('WORKDIR'))
2146 2595
2147 def test_lfs_disabled(self): 2596 def test_lfs_enabled_not_installed_but_not_needed(self):
2148 import shutil 2597 srcdir = os.path.join(self.tempdir, "emptygit")
2598 bb.utils.mkdirhier(srcdir)
2599 self.git_init(srcdir)
2600 self.commit_file("test", "test content", cwd=srcdir)
2149 2601
2150 uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir 2602 uri = 'git://%s;protocol=file;lfs=1;branch=master' % srcdir
2151 self.d.setVar('SRC_URI', uri) 2603 self.d.setVar('SRC_URI', uri)
2152 2604
2153 # In contrast to test_lfs_enabled(), allow the implicit download 2605 # Careful: suppress initial attempt at downloading
2154 # done by self.fetch() to occur here. The point of this test case 2606 fetcher, ud = self.fetch(uri=None, download=False)
2155 # is to verify that the fetcher can survive even if the source
2156 # repository has Git LFS usage configured.
2157 fetcher, ud = self.fetch()
2158 self.assertIsNotNone(ud.method._find_git_lfs)
2159
2160 # If git-lfs can be found, the unpack should be successful. A
2161 # live copy of git-lfs is not required for this case, so
2162 # unconditionally forge its presence.
2163 ud.method._find_git_lfs = lambda d: True
2164 shutil.rmtree(self.gitdir, ignore_errors=True)
2165 fetcher.unpack(self.d.getVar('WORKDIR'))
2166 2607
2167 # If git-lfs cannot be found, the unpack should be successful 2608 # It shouldn't matter that git-lfs cannot be found as the repository configuration does not
2168 ud.method._find_git_lfs = lambda d: False 2609 # specify any LFS filters.
2169 shutil.rmtree(self.gitdir, ignore_errors=True) 2610 with unittest.mock.patch("shutil.which", return_value=None):
2170 fetcher.unpack(self.d.getVar('WORKDIR')) 2611 fetcher.download()
2612 shutil.rmtree(self.gitdir, ignore_errors=True)
2613 fetcher.unpack(self.d.getVar('WORKDIR'))
2171 2614
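All of the "not installed" cases above share one idiom: patching shutil.which to return None so every executable lookup reports the tool as missing, without touching PATH. Standalone sketch:

    import shutil
    import unittest.mock

    with unittest.mock.patch("shutil.which", return_value=None):
        assert shutil.which("git-lfs") is None  # looks "not installed"
    # Outside the block the real lookup is restored.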
2172class GitURLWithSpacesTest(FetcherTest): 2615class GitURLWithSpacesTest(FetcherTest):
2173 test_git_urls = { 2616 test_git_urls = {
2174 "git://tfs-example.org:22/tfs/example%20path/example.git" : { 2617 "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : {
2175 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git', 2618 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master',
2619 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example.git',
2176 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', 2620 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git',
2177 'path': '/tfs/example path/example.git' 2621 'path': '/tfs/example path/example.git'
2178 }, 2622 },
2179 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git" : { 2623 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : {
2180 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git', 2624 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master',
2625 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git',
2181 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', 2626 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git',
2182 'path': '/tfs/example path/example repo.git' 2627 'path': '/tfs/example path/example repo.git'
2183 } 2628 }
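The expectations in test_git_urls encode two transformations: %20 escapes are decoded to spaces in ud.path, while the cache name flattens host, port, and path into a filesystem-safe string. An approximation of that mangling (the authoritative logic lives in the git fetcher's urldata_init):

    import urllib.parse

    host, port = "tfs-example.org", "22"
    path = urllib.parse.unquote("/tfs/example%20path/example.git")
    print(path)  # /tfs/example path/example.git
    gitsrcname = (host + "." + port + path.replace("/", ".")).replace(" ", "_")
    print(gitsrcname)  # tfs-example.org.22.tfs.example_path.example.git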
@@ -2200,19 +2645,137 @@ class GitURLWithSpacesTest(FetcherTest):
2200 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock')) 2645 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock'))
2201 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) 2646 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
2202 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) 2647 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
2648 self.assertEqual(ud.method._get_repo_url(ud), ref['repo_url'])
2649
2650class CrateTest(FetcherTest):
2651 @skipIfNoNetwork()
2652 def test_crate_url(self):
2653
2654 uri = "crate://crates.io/glob/0.2.11"
2655 self.d.setVar('SRC_URI', uri)
2656
2657 uris = self.d.getVar('SRC_URI').split()
2658 d = self.d
2659
2660 fetcher = bb.fetch2.Fetch(uris, self.d)
2661 ud = fetcher.ud[fetcher.urls[0]]
2662
2663 self.assertIn("name", ud.parm)
2664 self.assertEqual(ud.parm["name"], "glob-0.2.11")
2665 self.assertIn("downloadfilename", ud.parm)
2666 self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
2667
2668 fetcher.download()
2669 fetcher.unpack(self.tempdir)
 2670 self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
2671 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
2672 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
2673 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
2674
2675 @skipIfNoNetwork()
2676 def test_crate_url_matching_recipe(self):
2677
2678 self.d.setVar('BP', 'glob-0.2.11')
2679
2680 uri = "crate://crates.io/glob/0.2.11"
2681 self.d.setVar('SRC_URI', uri)
2682
2683 uris = self.d.getVar('SRC_URI').split()
2684 d = self.d
2685
2686 fetcher = bb.fetch2.Fetch(uris, self.d)
2687 ud = fetcher.ud[fetcher.urls[0]]
2688
2689 self.assertIn("name", ud.parm)
2690 self.assertEqual(ud.parm["name"], "glob-0.2.11")
2691 self.assertIn("downloadfilename", ud.parm)
2692 self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
2693
2694 fetcher.download()
2695 fetcher.unpack(self.tempdir)
2696 self.assertEqual(sorted(os.listdir(self.tempdir)), ['download', 'glob-0.2.11', 'unpacked'])
2697 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
2698 self.assertTrue(os.path.exists(self.tempdir + "/glob-0.2.11/src/lib.rs"))
2699
2700 @skipIfNoNetwork()
2701 def test_crate_url_params(self):
2702
2703 uri = "crate://crates.io/aho-corasick/0.7.20;name=aho-corasick-renamed"
2704 self.d.setVar('SRC_URI', uri)
2705
2706 uris = self.d.getVar('SRC_URI').split()
2707 d = self.d
2708
2709 fetcher = bb.fetch2.Fetch(uris, self.d)
2710 ud = fetcher.ud[fetcher.urls[0]]
2711
2712 self.assertIn("name", ud.parm)
2713 self.assertEqual(ud.parm["name"], "aho-corasick-renamed")
2714 self.assertIn("downloadfilename", ud.parm)
2715 self.assertEqual(ud.parm["downloadfilename"], "aho-corasick-0.7.20.crate")
2716
2717 fetcher.download()
2718 fetcher.unpack(self.tempdir)
 2719 self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
2720 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['aho-corasick-0.7.20.crate', 'aho-corasick-0.7.20.crate.done'])
2721 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/.cargo-checksum.json"))
2722 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/src/lib.rs"))
2723
2724 @skipIfNoNetwork()
2725 def test_crate_url_multi(self):
2726
2727 uri = "crate://crates.io/glob/0.2.11 crate://crates.io/time/0.1.35"
2728 self.d.setVar('SRC_URI', uri)
2729
2730 uris = self.d.getVar('SRC_URI').split()
2731 d = self.d
2732
2733 fetcher = bb.fetch2.Fetch(uris, self.d)
2734 ud = fetcher.ud[fetcher.urls[0]]
2735
2736 self.assertIn("name", ud.parm)
2737 self.assertEqual(ud.parm["name"], "glob-0.2.11")
2738 self.assertIn("downloadfilename", ud.parm)
2739 self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
2740
2741 ud = fetcher.ud[fetcher.urls[1]]
2742 self.assertIn("name", ud.parm)
2743 self.assertEqual(ud.parm["name"], "time-0.1.35")
2744 self.assertIn("downloadfilename", ud.parm)
2745 self.assertEqual(ud.parm["downloadfilename"], "time-0.1.35.crate")
2746
2747 fetcher.download()
2748 fetcher.unpack(self.tempdir)
 2749 self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
2750 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done', 'time-0.1.35.crate', 'time-0.1.35.crate.done'])
2751 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
2752 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
2753 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/.cargo-checksum.json"))
2754 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/src/lib.rs"))
2755
2756 @skipIfNoNetwork()
2757 def test_crate_incorrect_cksum(self):
2758 uri = "crate://crates.io/aho-corasick/0.7.20"
2759 self.d.setVar('SRC_URI', uri)
2760 self.d.setVarFlag("SRC_URI", "aho-corasick-0.7.20.sha256sum", hashlib.sha256("Invalid".encode("utf-8")).hexdigest())
2761
2762 uris = self.d.getVar('SRC_URI').split()
2763
2764 fetcher = bb.fetch2.Fetch(uris, self.d)
2765 with self.assertRaisesRegex(bb.fetch2.FetchError, "Fetcher failure for URL"):
2766 fetcher.download()
2203 2767
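test_crate_incorrect_cksum seeds a deliberately wrong digest through a varflag keyed on the download file name, so the post-download verification must fail. The comparison itself is plain SHA-256 over the fetched file; a simplified sketch (BitBake's real implementation lives in bb.utils/bb.fetch2):

    import hashlib

    def sha256_file(path):
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(65536), b""):
                h.update(chunk)
        return h.hexdigest()

    # A mismatch against the expected digest is what raises the
    # "Fetcher failure for URL" FetchError asserted above.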
2204class NPMTest(FetcherTest): 2768class NPMTest(FetcherTest):
2205 def skipIfNoNpm(): 2769 def skipIfNoNpm():
2206 import shutil
2207 if not shutil.which('npm'): 2770 if not shutil.which('npm'):
2208 return unittest.skip('npm not installed, tests being skipped') 2771 return unittest.skip('npm not installed')
2209 return lambda f: f 2772 return lambda f: f
2210 2773
2211 @skipIfNoNpm() 2774 @skipIfNoNpm()
2212 @skipIfNoNetwork() 2775 @skipIfNoNetwork()
2213 def test_npm(self): 2776 def test_npm(self):
2214 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2777 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2215 fetcher = bb.fetch.Fetch([url], self.d) 2778 fetcher = bb.fetch.Fetch(urls, self.d)
2216 ud = fetcher.ud[fetcher.urls[0]] 2779 ud = fetcher.ud[fetcher.urls[0]]
2217 fetcher.download() 2780 fetcher.download()
2218 self.assertTrue(os.path.exists(ud.localpath)) 2781 self.assertTrue(os.path.exists(ud.localpath))
@@ -2225,9 +2788,9 @@ class NPMTest(FetcherTest):
2225 @skipIfNoNpm() 2788 @skipIfNoNpm()
2226 @skipIfNoNetwork() 2789 @skipIfNoNetwork()
2227 def test_npm_bad_checksum(self): 2790 def test_npm_bad_checksum(self):
2228 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2791 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2229 # Fetch once to get a tarball 2792 # Fetch once to get a tarball
2230 fetcher = bb.fetch.Fetch([url], self.d) 2793 fetcher = bb.fetch.Fetch(urls, self.d)
2231 ud = fetcher.ud[fetcher.urls[0]] 2794 ud = fetcher.ud[fetcher.urls[0]]
2232 fetcher.download() 2795 fetcher.download()
2233 self.assertTrue(os.path.exists(ud.localpath)) 2796 self.assertTrue(os.path.exists(ud.localpath))
@@ -2244,17 +2807,48 @@ class NPMTest(FetcherTest):
2244 @skipIfNoNpm() 2807 @skipIfNoNpm()
2245 @skipIfNoNetwork() 2808 @skipIfNoNetwork()
2246 def test_npm_premirrors(self): 2809 def test_npm_premirrors(self):
2247 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2810 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2248 # Fetch once to get a tarball 2811 # Fetch once to get a tarball
2249 fetcher = bb.fetch.Fetch([url], self.d) 2812 fetcher = bb.fetch.Fetch(urls, self.d)
2813 ud = fetcher.ud[fetcher.urls[0]]
2814 fetcher.download()
2815 self.assertTrue(os.path.exists(ud.localpath))
2816
2817 # Setup the mirror by renaming the download directory
2818 mirrordir = os.path.join(self.tempdir, 'mirror')
2819 bb.utils.rename(self.dldir, mirrordir)
2820 os.mkdir(self.dldir)
2821
2822 # Configure the premirror to be used
2823 self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/npm2' % mirrordir)
2824 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
2825
2826 # Fetch again
2827 self.assertFalse(os.path.exists(ud.localpath))
 2828 # The npm fetcher cannot cope with the .resolved file disappearing
 2829 # while the fetcher object exists, which is what happens when we rename
 2830 # the download directory to "mirror" above. Thus we need a new fetcher
 2831 # to go with the now empty download directory.
2832 fetcher = bb.fetch.Fetch(urls, self.d)
2833 ud = fetcher.ud[fetcher.urls[0]]
2834 fetcher.download()
2835 self.assertTrue(os.path.exists(ud.localpath))
2836
2837 @skipIfNoNpm()
2838 @skipIfNoNetwork()
2839 def test_npm_premirrors_with_specified_filename(self):
2840 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2841 # Fetch once to get a tarball
2842 fetcher = bb.fetch.Fetch(urls, self.d)
2250 ud = fetcher.ud[fetcher.urls[0]] 2843 ud = fetcher.ud[fetcher.urls[0]]
2251 fetcher.download() 2844 fetcher.download()
2252 self.assertTrue(os.path.exists(ud.localpath)) 2845 self.assertTrue(os.path.exists(ud.localpath))
2253 # Setup the mirror 2846 # Setup the mirror
2254 mirrordir = os.path.join(self.tempdir, 'mirror') 2847 mirrordir = os.path.join(self.tempdir, 'mirror')
2255 bb.utils.mkdirhier(mirrordir) 2848 bb.utils.mkdirhier(mirrordir)
2256 os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) 2849 mirrorfilename = os.path.join(mirrordir, os.path.basename(ud.localpath))
2257 self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) 2850 os.replace(ud.localpath, mirrorfilename)
2851 self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s' % mirrorfilename)
2258 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') 2852 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
2259 # Fetch again 2853 # Fetch again
2260 self.assertFalse(os.path.exists(ud.localpath)) 2854 self.assertFalse(os.path.exists(ud.localpath))
@@ -2265,8 +2859,8 @@ class NPMTest(FetcherTest):
2265 @skipIfNoNetwork() 2859 @skipIfNoNetwork()
2266 def test_npm_mirrors(self): 2860 def test_npm_mirrors(self):
2267 # Fetch once to get a tarball 2861 # Fetch once to get a tarball
2268 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2862 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2269 fetcher = bb.fetch.Fetch([url], self.d) 2863 fetcher = bb.fetch.Fetch(urls, self.d)
2270 ud = fetcher.ud[fetcher.urls[0]] 2864 ud = fetcher.ud[fetcher.urls[0]]
2271 fetcher.download() 2865 fetcher.download()
2272 self.assertTrue(os.path.exists(ud.localpath)) 2866 self.assertTrue(os.path.exists(ud.localpath))
@@ -2274,7 +2868,7 @@ class NPMTest(FetcherTest):
2274 mirrordir = os.path.join(self.tempdir, 'mirror') 2868 mirrordir = os.path.join(self.tempdir, 'mirror')
2275 bb.utils.mkdirhier(mirrordir) 2869 bb.utils.mkdirhier(mirrordir)
2276 os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) 2870 os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
2277 self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) 2871 self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
2278 # Update the resolved url to an invalid url 2872 # Update the resolved url to an invalid url
2279 with open(ud.resolvefile, 'r') as f: 2873 with open(ud.resolvefile, 'r') as f:
2280 url = f.read() 2874 url = f.read()
@@ -2290,27 +2884,27 @@ class NPMTest(FetcherTest):
2290 @skipIfNoNpm() 2884 @skipIfNoNpm()
2291 @skipIfNoNetwork() 2885 @skipIfNoNetwork()
2292 def test_npm_destsuffix_downloadfilename(self): 2886 def test_npm_destsuffix_downloadfilename(self):
2293 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz' 2887 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz']
2294 fetcher = bb.fetch.Fetch([url], self.d) 2888 fetcher = bb.fetch.Fetch(urls, self.d)
2295 fetcher.download() 2889 fetcher.download()
2296 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'foo-bar.tgz'))) 2890 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz')))
2297 fetcher.unpack(self.unpackdir) 2891 fetcher.unpack(self.unpackdir)
2298 unpackdir = os.path.join(self.unpackdir, 'foo', 'bar') 2892 unpackdir = os.path.join(self.unpackdir, 'foo', 'bar')
2299 self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json'))) 2893 self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json')))
2300 2894
2301 def test_npm_no_network_no_tarball(self): 2895 def test_npm_no_network_no_tarball(self):
2302 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2896 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2303 self.d.setVar('BB_NO_NETWORK', '1') 2897 self.d.setVar('BB_NO_NETWORK', '1')
2304 fetcher = bb.fetch.Fetch([url], self.d) 2898 fetcher = bb.fetch.Fetch(urls, self.d)
2305 with self.assertRaises(bb.fetch2.NetworkAccess): 2899 with self.assertRaises(bb.fetch2.NetworkAccess):
2306 fetcher.download() 2900 fetcher.download()
2307 2901
2308 @skipIfNoNpm() 2902 @skipIfNoNpm()
2309 @skipIfNoNetwork() 2903 @skipIfNoNetwork()
2310 def test_npm_no_network_with_tarball(self): 2904 def test_npm_no_network_with_tarball(self):
2311 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2905 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2312 # Fetch once to get a tarball 2906 # Fetch once to get a tarball
2313 fetcher = bb.fetch.Fetch([url], self.d) 2907 fetcher = bb.fetch.Fetch(urls, self.d)
2314 fetcher.download() 2908 fetcher.download()
2315 # Disable network access 2909 # Disable network access
2316 self.d.setVar('BB_NO_NETWORK', '1') 2910 self.d.setVar('BB_NO_NETWORK', '1')
@@ -2323,8 +2917,8 @@ class NPMTest(FetcherTest):
2323 @skipIfNoNpm() 2917 @skipIfNoNpm()
2324 @skipIfNoNetwork() 2918 @skipIfNoNetwork()
2325 def test_npm_registry_alternate(self): 2919 def test_npm_registry_alternate(self):
2326 url = 'npm://registry.freajs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2920 urls = ['npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0']
2327 fetcher = bb.fetch.Fetch([url], self.d) 2921 fetcher = bb.fetch.Fetch(urls, self.d)
2328 fetcher.download() 2922 fetcher.download()
2329 fetcher.unpack(self.unpackdir) 2923 fetcher.unpack(self.unpackdir)
2330 unpackdir = os.path.join(self.unpackdir, 'npm') 2924 unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2333,8 +2927,8 @@ class NPMTest(FetcherTest):
2333 @skipIfNoNpm() 2927 @skipIfNoNpm()
2334 @skipIfNoNetwork() 2928 @skipIfNoNetwork()
2335 def test_npm_version_latest(self): 2929 def test_npm_version_latest(self):
2336 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest' 2930 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest']
2337 fetcher = bb.fetch.Fetch([url], self.d) 2931 fetcher = bb.fetch.Fetch(urls, self.d)
2338 fetcher.download() 2932 fetcher.download()
2339 fetcher.unpack(self.unpackdir) 2933 fetcher.unpack(self.unpackdir)
2340 unpackdir = os.path.join(self.unpackdir, 'npm') 2934 unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2343,46 +2937,46 @@ class NPMTest(FetcherTest):
2343 @skipIfNoNpm() 2937 @skipIfNoNpm()
2344 @skipIfNoNetwork() 2938 @skipIfNoNetwork()
2345 def test_npm_registry_invalid(self): 2939 def test_npm_registry_invalid(self):
2346 url = 'npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2940 urls = ['npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2347 fetcher = bb.fetch.Fetch([url], self.d) 2941 fetcher = bb.fetch.Fetch(urls, self.d)
2348 with self.assertRaises(bb.fetch2.FetchError): 2942 with self.assertRaises(bb.fetch2.FetchError):
2349 fetcher.download() 2943 fetcher.download()
2350 2944
2351 @skipIfNoNpm() 2945 @skipIfNoNpm()
2352 @skipIfNoNetwork() 2946 @skipIfNoNetwork()
2353 def test_npm_package_invalid(self): 2947 def test_npm_package_invalid(self):
2354 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0' 2948 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0']
2355 fetcher = bb.fetch.Fetch([url], self.d) 2949 fetcher = bb.fetch.Fetch(urls, self.d)
2356 with self.assertRaises(bb.fetch2.FetchError): 2950 with self.assertRaises(bb.fetch2.FetchError):
2357 fetcher.download() 2951 fetcher.download()
2358 2952
2359 @skipIfNoNpm() 2953 @skipIfNoNpm()
2360 @skipIfNoNetwork() 2954 @skipIfNoNetwork()
2361 def test_npm_version_invalid(self): 2955 def test_npm_version_invalid(self):
2362 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid' 2956 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid']
2363 with self.assertRaises(bb.fetch2.ParameterError): 2957 with self.assertRaises(bb.fetch2.ParameterError):
2364 fetcher = bb.fetch.Fetch([url], self.d) 2958 fetcher = bb.fetch.Fetch(urls, self.d)
2365 2959
2366 @skipIfNoNpm() 2960 @skipIfNoNpm()
2367 @skipIfNoNetwork() 2961 @skipIfNoNetwork()
2368 def test_npm_registry_none(self): 2962 def test_npm_registry_none(self):
2369 url = 'npm://;package=@savoirfairelinux/node-server-example;version=1.0.0' 2963 urls = ['npm://;package=@savoirfairelinux/node-server-example;version=1.0.0']
2370 with self.assertRaises(bb.fetch2.MalformedUrl): 2964 with self.assertRaises(bb.fetch2.MalformedUrl):
2371 fetcher = bb.fetch.Fetch([url], self.d) 2965 fetcher = bb.fetch.Fetch(urls, self.d)
2372 2966
2373 @skipIfNoNpm() 2967 @skipIfNoNpm()
2374 @skipIfNoNetwork() 2968 @skipIfNoNetwork()
2375 def test_npm_package_none(self): 2969 def test_npm_package_none(self):
2376 url = 'npm://registry.npmjs.org;version=1.0.0' 2970 urls = ['npm://registry.npmjs.org;version=1.0.0']
2377 with self.assertRaises(bb.fetch2.MissingParameterError): 2971 with self.assertRaises(bb.fetch2.MissingParameterError):
2378 fetcher = bb.fetch.Fetch([url], self.d) 2972 fetcher = bb.fetch.Fetch(urls, self.d)
2379 2973
2380 @skipIfNoNpm() 2974 @skipIfNoNpm()
2381 @skipIfNoNetwork() 2975 @skipIfNoNetwork()
2382 def test_npm_version_none(self): 2976 def test_npm_version_none(self):
2383 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example' 2977 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example']
2384 with self.assertRaises(bb.fetch2.MissingParameterError): 2978 with self.assertRaises(bb.fetch2.MissingParameterError):
2385 fetcher = bb.fetch.Fetch([url], self.d) 2979 fetcher = bb.fetch.Fetch(urls, self.d)
2386 2980
2387 def create_shrinkwrap_file(self, data): 2981 def create_shrinkwrap_file(self, data):
2388 import json 2982 import json
@@ -2391,32 +2985,30 @@ class NPMTest(FetcherTest):
2391 bb.utils.mkdirhier(datadir) 2985 bb.utils.mkdirhier(datadir)
2392 with open(swfile, 'w') as f: 2986 with open(swfile, 'w') as f:
2393 json.dump(data, f) 2987 json.dump(data, f)
2394 # Also configure the S directory
2395 self.sdir = os.path.join(self.unpackdir, 'S')
2396 self.d.setVar('S', self.sdir)
2397 return swfile 2988 return swfile
2398 2989
2399 @skipIfNoNpm()
2400 @skipIfNoNetwork() 2990 @skipIfNoNetwork()
2401 def test_npmsw(self): 2991 def test_npmsw(self):
2402 swfile = self.create_shrinkwrap_file({ 2992 swfile = self.create_shrinkwrap_file({
2403 'dependencies': { 2993 'packages': {
2404 'array-flatten': { 2994 'node_modules/array-flatten': {
2405 'version': '1.1.1', 2995 'version': '1.1.1',
2406 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 2996 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2407 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=', 2997 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=',
2408 'dependencies': { 2998 'dependencies': {
2409 'content-type': { 2999 'content-type': "1.0.4"
2410 'version': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
2411 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
2412 'dependencies': {
2413 'cookie': {
2414 'version': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
2415 'from': 'git+https://github.com/jshttp/cookie.git'
2416 }
2417 }
2418 }
2419 } 3000 }
3001 },
3002 'node_modules/array-flatten/node_modules/content-type': {
3003 'version': '1.0.4',
3004 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
3005 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
3006 'dependencies': {
3007 'cookie': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
3008 }
3009 },
3010 'node_modules/array-flatten/node_modules/content-type/node_modules/cookie': {
3011 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
2420 } 3012 }
2421 } 3013 }
2422 }) 3014 })
@@ -2426,22 +3018,34 @@ class NPMTest(FetcherTest):
2426 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) 3018 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
2427 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) 3019 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2428 fetcher.unpack(self.unpackdir) 3020 fetcher.unpack(self.unpackdir)
2429 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'npm-shrinkwrap.json'))) 3021 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm-shrinkwrap.json')))
2430 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) 3022 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))
2431 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json'))) 3023 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json')))
2432 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json'))) 3024 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json')))
3025
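These shrinkwrap updates move from the recursive lockfileVersion 1 "dependencies" tree to the flat lockfileVersion >= 2 "packages" table, where nesting is expressed in the node_modules/... key itself. Sketch of walking such a table (data abbreviated from the test above):

    shrinkwrap = {
        "packages": {
            "node_modules/array-flatten": {"version": "1.1.1"},
            "node_modules/array-flatten/node_modules/content-type": {"version": "1.0.4"},
        }
    }
    for path, meta in shrinkwrap["packages"].items():
        depth = path.count("node_modules")  # nesting is encoded in the key
        print("  " * (depth - 1) + path.rsplit("/", 1)[-1], meta.get("version"))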
3026 @skipIfNoNetwork()
3027 def test_npmsw_git(self):
3028 swfile = self.create_shrinkwrap_file({
3029 'packages': {
3030 'node_modules/cookie': {
3031 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
3032 }
3033 }
3034 })
3035 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
3036 fetcher.download()
3037 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2433 3038
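For git dependencies the "resolved" field is a git+<url>#<rev> reference; the npmsw fetcher maps it onto a git fetch, which is why the assertion above looks under DL_DIR/git2. The split itself, sketched (parsing only; the mapping to a SRC_URI happens inside the fetcher):

    resolved = ("git+https://github.com/jshttp/cookie.git"
                "#aec1177c7da67e3b3273df96cf476824dbc9ae09")
    url, _, rev = resolved.partition("#")
    url = url[len("git+"):]
    print(url)  # https://github.com/jshttp/cookie.git
    print(rev)  # aec1177c7da67e3b3273df96cf476824dbc9ae09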
2434 @skipIfNoNpm()
2435 @skipIfNoNetwork() 3039 @skipIfNoNetwork()
2436 def test_npmsw_dev(self): 3040 def test_npmsw_dev(self):
2437 swfile = self.create_shrinkwrap_file({ 3041 swfile = self.create_shrinkwrap_file({
2438 'dependencies': { 3042 'packages': {
2439 'array-flatten': { 3043 'node_modules/array-flatten': {
2440 'version': '1.1.1', 3044 'version': '1.1.1',
2441 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3045 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2442 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3046 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
2443 }, 3047 },
2444 'content-type': { 3048 'node_modules/content-type': {
2445 'version': '1.0.4', 3049 'version': '1.0.4',
2446 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', 3050 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
2447 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', 3051 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
@@ -2460,12 +3064,11 @@ class NPMTest(FetcherTest):
2460 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz'))) 3064 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
2461 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) 3065 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
2462 3066
2463 @skipIfNoNpm()
2464 @skipIfNoNetwork() 3067 @skipIfNoNetwork()
2465 def test_npmsw_destsuffix(self): 3068 def test_npmsw_destsuffix(self):
2466 swfile = self.create_shrinkwrap_file({ 3069 swfile = self.create_shrinkwrap_file({
2467 'dependencies': { 3070 'packages': {
2468 'array-flatten': { 3071 'node_modules/array-flatten': {
2469 'version': '1.1.1', 3072 'version': '1.1.1',
2470 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3073 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2471 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3074 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2479,8 +3082,8 @@ class NPMTest(FetcherTest):
2479 3082
2480 def test_npmsw_no_network_no_tarball(self): 3083 def test_npmsw_no_network_no_tarball(self):
2481 swfile = self.create_shrinkwrap_file({ 3084 swfile = self.create_shrinkwrap_file({
2482 'dependencies': { 3085 'packages': {
2483 'array-flatten': { 3086 'node_modules/array-flatten': {
2484 'version': '1.1.1', 3087 'version': '1.1.1',
2485 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3088 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2486 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3089 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2502,8 +3105,8 @@ class NPMTest(FetcherTest):
2502 self.d.setVar('BB_NO_NETWORK', '1') 3105 self.d.setVar('BB_NO_NETWORK', '1')
2503 # Fetch again 3106 # Fetch again
2504 swfile = self.create_shrinkwrap_file({ 3107 swfile = self.create_shrinkwrap_file({
2505 'dependencies': { 3108 'packages': {
2506 'array-flatten': { 3109 'node_modules/array-flatten': {
2507 'version': '1.1.1', 3110 'version': '1.1.1',
2508 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3111 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2509 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3112 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2513,15 +3116,14 @@ class NPMTest(FetcherTest):
2513 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) 3116 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2514 fetcher.download() 3117 fetcher.download()
2515 fetcher.unpack(self.unpackdir) 3118 fetcher.unpack(self.unpackdir)
2516 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) 3119 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))
2517 3120
2518 @skipIfNoNpm()
2519 @skipIfNoNetwork() 3121 @skipIfNoNetwork()
2520 def test_npmsw_npm_reusability(self): 3122 def test_npmsw_npm_reusability(self):
2521 # Fetch once with npmsw 3123 # Fetch once with npmsw
2522 swfile = self.create_shrinkwrap_file({ 3124 swfile = self.create_shrinkwrap_file({
2523 'dependencies': { 3125 'packages': {
2524 'array-flatten': { 3126 'node_modules/array-flatten': {
2525 'version': '1.1.1', 3127 'version': '1.1.1',
2526 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3128 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2527 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3129 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2538,13 +3140,12 @@ class NPMTest(FetcherTest):
2538 fetcher.unpack(self.unpackdir) 3140 fetcher.unpack(self.unpackdir)
2539 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json'))) 3141 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json')))
2540 3142
2541 @skipIfNoNpm()
2542 @skipIfNoNetwork() 3143 @skipIfNoNetwork()
2543 def test_npmsw_bad_checksum(self): 3144 def test_npmsw_bad_checksum(self):
2544 # Try to fetch with bad checksum 3145 # Try to fetch with bad checksum
2545 swfile = self.create_shrinkwrap_file({ 3146 swfile = self.create_shrinkwrap_file({
2546 'dependencies': { 3147 'packages': {
2547 'array-flatten': { 3148 'node_modules/array-flatten': {
2548 'version': '1.1.1', 3149 'version': '1.1.1',
2549 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3150 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2550 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg=' 3151 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg='
@@ -2556,8 +3157,8 @@ class NPMTest(FetcherTest):
2556 fetcher.download() 3157 fetcher.download()
2557 # Fetch correctly to get a tarball 3158 # Fetch correctly to get a tarball
2558 swfile = self.create_shrinkwrap_file({ 3159 swfile = self.create_shrinkwrap_file({
2559 'dependencies': { 3160 'packages': {
2560 'array-flatten': { 3161 'node_modules/array-flatten': {
2561 'version': '1.1.1', 3162 'version': '1.1.1',
2562 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3163 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2563 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3164 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2590,13 +3191,13 @@ class NPMTest(FetcherTest):
2590 mirrordir = os.path.join(self.tempdir, 'mirror') 3191 mirrordir = os.path.join(self.tempdir, 'mirror')
2591 bb.utils.mkdirhier(mirrordir) 3192 bb.utils.mkdirhier(mirrordir)
2592 os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) 3193 os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
2593 self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) 3194 self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
2594 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') 3195 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
2595 # Fetch again 3196 # Fetch again
2596 self.assertFalse(os.path.exists(ud.localpath)) 3197 self.assertFalse(os.path.exists(ud.localpath))
2597 swfile = self.create_shrinkwrap_file({ 3198 swfile = self.create_shrinkwrap_file({
2598 'dependencies': { 3199 'packages': {
2599 'array-flatten': { 3200 'node_modules/array-flatten': {
2600 'version': '1.1.1', 3201 'version': '1.1.1',
2601 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3202 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2602 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3203 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2619,12 +3220,12 @@ class NPMTest(FetcherTest):
2619 mirrordir = os.path.join(self.tempdir, 'mirror') 3220 mirrordir = os.path.join(self.tempdir, 'mirror')
2620 bb.utils.mkdirhier(mirrordir) 3221 bb.utils.mkdirhier(mirrordir)
2621 os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) 3222 os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
2622 self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) 3223 self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
2623 # Fetch again with invalid url 3224 # Fetch again with invalid url
2624 self.assertFalse(os.path.exists(ud.localpath)) 3225 self.assertFalse(os.path.exists(ud.localpath))
2625 swfile = self.create_shrinkwrap_file({ 3226 swfile = self.create_shrinkwrap_file({
2626 'dependencies': { 3227 'packages': {
2627 'array-flatten': { 3228 'node_modules/array-flatten': {
2628 'version': '1.1.1', 3229 'version': '1.1.1',
2629 'resolved': 'https://invalid', 3230 'resolved': 'https://invalid',
2630 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3231 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2634,3 +3235,521 @@ class NPMTest(FetcherTest):
2634 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) 3235 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2635 fetcher.download() 3236 fetcher.download()
2636 self.assertTrue(os.path.exists(ud.localpath)) 3237 self.assertTrue(os.path.exists(ud.localpath))
3238
3239 @skipIfNoNetwork()
3240 def test_npmsw_bundled(self):
3241 swfile = self.create_shrinkwrap_file({
3242 'packages': {
3243 'node_modules/array-flatten': {
3244 'version': '1.1.1',
3245 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3246 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
3247 },
3248 'node_modules/content-type': {
3249 'version': '1.0.4',
3250 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
3251 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
3252 'inBundle': True
3253 }
3254 }
3255 })
3256 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
3257 fetcher.download()
3258 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
3259 self.assertFalse(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
3260
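The hunks above track the npm shrinkwrap format change: lockfile v2/v3 keeps its entries under 'packages', keyed by their node_modules/ install path, instead of the older nested 'dependencies' tree. Entries flagged 'inBundle' ship inside their parent's tarball, which is why test_npmsw_bundled expects no separate download for content-type. A minimal sketch of that selection rule (illustrative only, not the npmsw fetcher's actual code):

    def to_fetch(shrinkwrap):
        # Yield (url, integrity) for every package that needs its own
        # download; the root entry (empty key) and bundled entries are
        # provided elsewhere, so they are skipped.
        for path, meta in shrinkwrap.get('packages', {}).items():
            if path and not meta.get('inBundle'):
                yield meta['resolved'], meta.get('integrity')
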
3261class GitSharedTest(FetcherTest):
3262 def setUp(self):
3263 super(GitSharedTest, self).setUp()
3264 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
3265 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
3266 self.d.setVar("__BBSRCREV_SEEN", "1")
3267
3268 @skipIfNoNetwork()
3269 def test_shared_unpack(self):
3270 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3271
3272 fetcher.download()
3273 fetcher.unpack(self.unpackdir)
3274 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
3275 self.assertTrue(os.path.exists(alt))
3276
3277 @skipIfNoNetwork()
3278 def test_noshared_unpack(self):
3279 self.d.setVar('BB_GIT_NOSHARED', '1')
3280 self.unpackdir += '_noshared'
3281 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3282
3283 fetcher.download()
3284 fetcher.unpack(self.unpackdir)
3285 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
3286 self.assertFalse(os.path.exists(alt))
3287
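GitSharedTest checks the clone-sharing behaviour: by default the unpacked tree borrows objects from the fetcher's local clone through .git/objects/info/alternates, and setting BB_GIT_NOSHARED = "1" forces a standalone checkout instead. A tiny hypothetical helper expressing the property both tests assert:

    import os

    def uses_alternates(checkout):
        # A shared checkout records the donor object store in
        # .git/objects/info/alternates; a full clone has no such file.
        alt = os.path.join(checkout, '.git', 'objects', 'info', 'alternates')
        return os.path.exists(alt)
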
3288class GitTagVerificationTests(FetcherTest):
3289
3290 @skipIfNoNetwork()
3291 def test_tag_rev_match(self):
3292 # Test a url with rev= and tag= set works
3293 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3294 fetcher.download()
3295 fetcher.unpack(self.unpackdir)
3296
 @skipIfNoNetwork()
 3297 def test_annotated_tag_rev_match(self):
3298 # Test a url with rev= and tag= set works
3299 # rev is the annotated tag revision in this case
3300 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=6d363159e4b7dc566fc40d069b2615e61774a7d8;tag=2.8.7"], self.d)
3301 fetcher.download()
3302 fetcher.unpack(self.unpackdir)
3303
3304 @skipIfNoNetwork()
3305 def test_tag_rev_match2(self):
3306 # Test a url with SRCREV and tag= set works
3307 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3308 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;tag=2.8.7"], self.d)
3309 fetcher.download()
3310 fetcher.unpack(self.unpackdir)
3311
3312 @skipIfNoNetwork()
3313 def test_tag_rev_match3(self):
3314 # Test a url with SRCREV, rev= and tag= set works
3315 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3316 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3317 fetcher.download()
3318 fetcher.unpack(self.unpackdir)
3319
3320 @skipIfNoNetwork()
3321 def test_tag_rev_match4(self):
3322 # Test a url with SRCREV and rev= mismatching errors
3323 self.d.setVar('SRCREV', 'bade540fc31a1c26839efd2c7785a751ce24ebfb')
3324 with self.assertRaises(bb.fetch2.FetchError):
3325 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3326
3327 @skipIfNoNetwork()
3328 def test_tag_rev_match5(self):
3329 # Test a url with SRCREV, rev= and tag= set works when using shallow clones
3330 self.d.setVar('BB_GIT_SHALLOW', '1')
3331 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3332 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3333 fetcher.download()
3334 fetcher.unpack(self.unpackdir)
3335
3336 @skipIfNoNetwork()
3337 def test_tag_rev_match6(self):
3338 # Test a url with SRCREV, rev= and a mismatched tag= when using shallow clones
3339 self.d.setVar('BB_GIT_SHALLOW', '1')
3340 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d)
3341 fetcher.download()
3342 with self.assertRaises(bb.fetch2.FetchError):
3343 fetcher.unpack(self.unpackdir)
3344
3345 @skipIfNoNetwork()
3346 def test_tag_rev_match7(self):
3347 # Test a url with SRCREV, rev= and a mismatched tag=
3348 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3349 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d)
3350 fetcher.download()
3351 with self.assertRaises(bb.fetch2.FetchError):
3352 fetcher.unpack(self.unpackdir)
3353
3354
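The tag verification tests pin a commit with rev= (or SRCREV) and cross-check it against tag=; a mismatch raises bb.fetch2.FetchError, either at Fetch() construction time when SRCREV and rev= disagree, or at unpack time when the tag does not resolve to the pinned revision. Roughly, the check amounts to something like the following sketch (an assumption about the mechanism, not the fetcher's actual code), where rev may be either the peeled commit or the annotated tag object:

    import subprocess

    def verify_tag(repo, tag, rev):
        # 'tag^{commit}' peels an annotated tag to its commit, so both
        # the commit id and the tag object id are acceptable as rev.
        out = subprocess.check_output(
            ['git', '-C', repo, 'rev-parse', tag + '^{commit}', tag],
            text=True).split()
        if rev not in out:
            raise ValueError('tag %s does not match pinned rev %s' % (tag, rev))
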
3355class FetchPremirroronlyLocalTest(FetcherTest):
3356
3357 def setUp(self):
3358 super(FetchPremirroronlyLocalTest, self).setUp()
3359 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3360 os.mkdir(self.mirrordir)
3361 self.reponame = "bitbake"
3362 self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
3363 self.recipe_url = "git://git.fake.repo/bitbake;branch=master;protocol=https"
3364 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3365 self.d.setVar("BB_NO_NETWORK", "1")
3366 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3367 self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
3368 self.mirrorfile = os.path.join(self.mirrordir, self.mirrorname)
3369 self.testfilename = "bitbake-fetch.test"
3370
3371 def make_git_repo(self):
 3372 os.makedirs(self.gitdir)
 3373 self.git_init(cwd=self.gitdir)
 3374 # Seed the mirror tarball from an empty repository; commits are
 3375 # added later via git_new_commit(), which regenerates the tarball
 3376 # each time it is called.
 3377 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd=self.gitdir)
3378
3379 def git_new_commit(self):
3380 import random
3381 os.unlink(os.path.join(self.mirrordir, self.mirrorname))
 3382 branch = self.git("branch --show-current", self.gitdir).strip()
3383 with open(os.path.join(self.gitdir, self.testfilename), "w") as testfile:
3384 testfile.write("File {} from branch {}; Useless random data {}".format(self.testfilename, branch, random.random()))
3385 self.git("add {}".format(self.testfilename), self.gitdir)
3386 self.git("commit -a -m \"This random commit {} in branch {}. I'm useless.\"".format(random.random(), branch), self.gitdir)
 3387 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd=self.gitdir)
3388 return self.git("rev-parse HEAD", self.gitdir).strip()
3389
3390 def git_new_branch(self, name):
3391 self.git_new_commit()
3392 head = self.git("rev-parse HEAD", self.gitdir).strip()
3393 self.git("checkout -b {}".format(name), self.gitdir)
3394 newrev = self.git_new_commit()
3395 self.git("checkout {}".format(head), self.gitdir)
3396 return newrev
3397
3398 def test_mirror_multiple_fetches(self):
3399 self.make_git_repo()
3400 self.d.setVar("SRCREV", self.git_new_commit())
3401 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3402 fetcher.download()
3403 fetcher.unpack(self.unpackdir)
 3404 ## New commit in the premirror; it is not in the download dir yet
3405 self.d.setVar("SRCREV", self.git_new_commit())
3406 fetcher2 = bb.fetch.Fetch([self.recipe_url], self.d)
3407 fetcher2.download()
3408 fetcher2.unpack(self.unpackdir)
 3409 ## New commit in the premirror; it is not in the download dir yet
3410 self.d.setVar("SRCREV", self.git_new_commit())
3411 fetcher3 = bb.fetch.Fetch([self.recipe_url], self.d)
3412 fetcher3.download()
3413 fetcher3.unpack(self.unpackdir)
3414
3415
3416 def test_mirror_commit_nonexistent(self):
3417 self.make_git_repo()
3418 self.d.setVar("SRCREV", "0"*40)
3419 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3420 with self.assertRaises(bb.fetch2.NetworkAccess):
3421 fetcher.download()
3422
3423 def test_mirror_commit_exists(self):
3424 self.make_git_repo()
3425 self.d.setVar("SRCREV", self.git_new_commit())
3426 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3427 fetcher.download()
3428 fetcher.unpack(self.unpackdir)
3429
3430 def test_mirror_tarball_nonexistent(self):
3431 self.d.setVar("SRCREV", "0"*40)
3432 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3433 with self.assertRaises(bb.fetch2.NetworkAccess):
3434 fetcher.download()
3435
3436
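These premirror-only tests rely on the PREMIRRORS syntax: whitespace-separated pairs of a source-URI pattern and a replacement URI, where BitBake decodes both sides and regex-matches them component by component (which is why a scheme pattern such as https?$ can carry its own anchor). A crude sketch of the idea, assuming a simple scheme/remainder split rather than bb.fetch2's full uri_replace logic:

    import re

    def premirror_rewrite(uri, pattern, replacement):
        # Match the scheme and the remainder separately, in the spirit
        # of BitBake's component-wise mirror matching.
        p_scheme, p_rest = pattern.split('://', 1)
        u_scheme, u_rest = uri.split('://', 1)
        if re.match(p_scheme, u_scheme) and re.match(p_rest, u_rest):
            return replacement
        return None

    print(premirror_rewrite('git://git.fake.repo/bitbake',
                            'git://git.fake.repo/bitbake',
                            'file:///srv/mirrors/'))
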
3437class FetchPremirroronlyNetworkTest(FetcherTest):
3438
3439 def setUp(self):
3440 super(FetchPremirroronlyNetworkTest, self).setUp()
3441 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3442 os.mkdir(self.mirrordir)
3443 self.reponame = "fstests"
3444 self.clonedir = os.path.join(self.tempdir, "git")
3445 self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame))
3446 self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https;branch=master"
3447 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3448 self.d.setVar("BB_NO_NETWORK", "0")
3449 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3450
3451 def make_git_repo(self):
3452 self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz"
3453 os.makedirs(self.clonedir)
 3454 self.git("clone --bare {}".format(self.recipe_url.split(';')[0]), self.clonedir)
3455 self.git("update-ref HEAD 15413486df1f5a5b5af699b6f3ba5f0984e52a9f", self.gitdir)
 3456 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd=self.gitdir)
3457 shutil.rmtree(self.clonedir)
3458
3459 @skipIfNoNetwork()
3460 def test_mirror_tarball_updated(self):
3461 self.make_git_repo()
3462 ## Upstream commit is in the mirror
3463 self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f")
3464 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3465 fetcher.download()
3466
3467 @skipIfNoNetwork()
3468 def test_mirror_tarball_outdated(self):
3469 self.make_git_repo()
3470 ## Upstream commit not in the mirror
3471 self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec")
3472 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3473 with self.assertRaises(bb.fetch2.NetworkAccess):
3474 fetcher.download()
3475
3476class FetchPremirroronlyMercurialTest(FetcherTest):
3477 """ Test for premirrors with mercurial repos
3478 the test covers also basic hg:// clone (see fetch_and_create_tarball
3479 """
3480 def skipIfNoHg():
3481 if not shutil.which('hg'):
3482 return unittest.skip('Mercurial not installed')
3483 return lambda f: f
3484
3485 def setUp(self):
3486 super(FetchPremirroronlyMercurialTest, self).setUp()
3487 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3488 os.mkdir(self.mirrordir)
3489 self.reponame = "libgnt"
3490 self.clonedir = os.path.join(self.tempdir, "hg")
3491 self.recipe_url = "hg://keep.imfreedom.org/libgnt;module=libgnt"
3492 self.d.setVar("SRCREV", "53e8b422faaf")
3493 self.mirrorname = "hg_libgnt_keep.imfreedom.org_.libgnt.tar.gz"
3494
3495 def fetch_and_create_tarball(self):
3496 """
 3497 Ask bitbake to download the repo and prepare a mirror tarball for us
3498 """
3499 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
3500 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3501 fetcher.download()
3502 mirrorfile = os.path.join(self.d.getVar("DL_DIR"), self.mirrorname)
3503 self.assertTrue(os.path.exists(mirrorfile), "Mirror tarball {} has not been created".format(mirrorfile))
 3504 ## Move the tarball into the mirror directory
3505 os.rename(mirrorfile, os.path.join(self.mirrordir, self.mirrorname))
3506 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "0")
3507
3508
3509 @skipIfNoNetwork()
3510 @skipIfNoHg()
3511 def test_premirror_mercurial(self):
3512 self.fetch_and_create_tarball()
3513 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3514 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3515 self.d.setVar("BB_NO_NETWORK", "1")
3516 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3517 fetcher.download()
3518
3519class FetchPremirroronlyBrokenTarball(FetcherTest):
3520
3521 def setUp(self):
3522 super(FetchPremirroronlyBrokenTarball, self).setUp()
3523 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3524 os.mkdir(self.mirrordir)
3525 self.reponame = "bitbake"
3526 self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
3527 self.recipe_url = "git://git.fake.repo/bitbake;protocol=https;branch=master"
3528 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3529 self.d.setVar("BB_NO_NETWORK", "1")
3530 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3531 self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
3532 with open(os.path.join(self.mirrordir, self.mirrorname), 'w') as targz:
 3533 targz.write("This is not a tar.gz file!")
3534
3535 def test_mirror_broken_download(self):
3536 self.d.setVar("SRCREV", "0"*40)
3537 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3538 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs:
3539 fetcher.download()
3540 output = "".join(logs.output)
 3541 self.assertNotIn(" not a git repository (or any parent up to mount point /)", output)
3542
3543class GoModTest(FetcherTest):
3544
3545 @skipIfNoNetwork()
3546 def test_gomod_url(self):
3547 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3548 'sha256sum=9bb69aea32f1d59711701f9562d66432c9c0374205e5009d1d1a62f03fb4fdad']
3549
3550 fetcher = bb.fetch2.Fetch(urls, self.d)
3551 ud = fetcher.ud[urls[0]]
3552 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.zip')
3553 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.zip')
3554 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3555
3556 fetcher.download()
3557 fetcher.unpack(self.unpackdir)
3558 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3559 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3560 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3561 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3562 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3563
3564 @skipIfNoNetwork()
3565 def test_gomod_url_go_mod_only(self):
3566 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;mod=1;'
3567 'sha256sum=7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873']
3568
3569 fetcher = bb.fetch2.Fetch(urls, self.d)
3570 ud = fetcher.ud[urls[0]]
3571 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.mod')
3572 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.mod')
3573 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3574
3575 fetcher.download()
3576 fetcher.unpack(self.unpackdir)
3577 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3578 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3579
3580 @skipIfNoNetwork()
3581 def test_gomod_url_sha256sum_varflag(self):
3582 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0']
3583 self.d.setVarFlag('SRC_URI', 'gopkg.in/ini.v1@v1.67.0.sha256sum', 'bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6')
3584
3585 fetcher = bb.fetch2.Fetch(urls, self.d)
3586 ud = fetcher.ud[urls[0]]
3587 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3588 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3589 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3590
3591 fetcher.download()
3592 fetcher.unpack(self.unpackdir)
3593 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3594 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3595 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3596 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3597 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3598
3599 @skipIfNoNetwork()
3600 def test_gomod_url_no_go_mod_in_module(self):
3601 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0;'
3602 'sha256sum=bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6']
3603
3604 fetcher = bb.fetch2.Fetch(urls, self.d)
3605 ud = fetcher.ud[urls[0]]
3606 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3607 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3608 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3609
3610 fetcher.download()
3611 fetcher.unpack(self.unpackdir)
3612 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3613 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3614 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3615 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3616 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3617
3618 @skipIfNoNetwork()
3619 def test_gomod_url_host_only(self):
3620 urls = ['gomod://go.opencensus.io;version=v0.24.0;'
3621 'sha256sum=203a767d7f8e7c1ebe5588220ad168d1e15b14ae70a636de7ca9a4a88a7e0d0c']
3622
3623 fetcher = bb.fetch2.Fetch(urls, self.d)
3624 ud = fetcher.ud[urls[0]]
3625 self.assertEqual(ud.url, 'https://proxy.golang.org/go.opencensus.io/%40v/v0.24.0.zip')
3626 self.assertEqual(ud.parm['downloadfilename'], 'go.opencensus.io@v0.24.0.zip')
3627 self.assertEqual(ud.parm['name'], 'go.opencensus.io@v0.24.0')
3628
3629 fetcher.download()
3630 fetcher.unpack(self.unpackdir)
3631 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3632 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3633 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3634 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3635 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
3636
3637class GoModGitTest(FetcherTest):
3638
3639 @skipIfNoNetwork()
3640 def test_gomodgit_url_repo(self):
3641 urls = ['gomodgit://golang.org/x/net;version=v0.9.0;'
3642 'repo=go.googlesource.com/net;'
3643 'srcrev=694cff8668bac64e0864b552bffc280cd27f21b1']
3644
3645 fetcher = bb.fetch2.Fetch(urls, self.d)
3646 ud = fetcher.ud[urls[0]]
3647 self.assertEqual(ud.host, 'go.googlesource.com')
3648 self.assertEqual(ud.path, '/net')
3649 self.assertEqual(ud.name, 'golang.org/x/net@v0.9.0')
3650 self.assertEqual(self.d.getVar('SRCREV_golang.org/x/net@v0.9.0'), '694cff8668bac64e0864b552bffc280cd27f21b1')
3651
3652 fetcher.download()
3653 self.assertTrue(os.path.exists(ud.localpath))
3654
3655 fetcher.unpack(self.unpackdir)
3656 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3657 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'ed42bd05533fd84ae290a5d33ebd3695a0a2b06131beebd5450825bee8603aca')))
3658 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3659 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.zip')))
3660 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')))
3661 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')),
3662 'c5d6851ede50ec1c001afb763040194b68961bf06997e2605e8bf06dcd2aeb2e')
3663
3664 @skipIfNoNetwork()
3665 def test_gomodgit_url_subdir(self):
3666 urls = ['gomodgit://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3667 'repo=github.com/Azure/azure-sdk-for-go;subdir=sdk/storage/azblob;'
3668 'srcrev=ec928e0ed34db682b3f783d3739d1c538142e0c3']
3669
3670 fetcher = bb.fetch2.Fetch(urls, self.d)
3671 ud = fetcher.ud[urls[0]]
3672 self.assertEqual(ud.host, 'github.com')
3673 self.assertEqual(ud.path, '/Azure/azure-sdk-for-go')
3674 self.assertEqual(ud.parm['subpath'], 'sdk/storage/azblob')
3675 self.assertEqual(ud.name, 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3676 self.assertEqual(self.d.getVar('SRCREV_github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0'), 'ec928e0ed34db682b3f783d3739d1c538142e0c3')
3677
3678 fetcher.download()
3679 self.assertTrue(os.path.exists(ud.localpath))
3680
3681 fetcher.unpack(self.unpackdir)
3682 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3683 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'd31d6145676ed3066ce573a8198f326dea5be45a43b3d8f41ce7787fd71d66b3')))
3684 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3685 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3686 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3687 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3688 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3689
3690 @skipIfNoNetwork()
3691 def test_gomodgit_url_srcrev_var(self):
3692 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0']
3693 self.d.setVar('SRCREV_gopkg.in/ini.v1@v1.67.0', 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3694
3695 fetcher = bb.fetch2.Fetch(urls, self.d)
3696 ud = fetcher.ud[urls[0]]
3697 self.assertEqual(ud.host, 'gopkg.in')
3698 self.assertEqual(ud.path, '/ini.v1')
3699 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3700 self.assertEqual(ud.parm['srcrev'], 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3701
3702 fetcher.download()
3703 fetcher.unpack(self.unpackdir)
3704 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3705 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3706 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3707 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3708 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3709 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3710 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3711
3712 @skipIfNoNetwork()
3713 def test_gomodgit_url_no_go_mod_in_module(self):
3714 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0;'
3715 'srcrev=b2f570e5b5b844226bbefe6fb521d891f529a951']
3716
3717 fetcher = bb.fetch2.Fetch(urls, self.d)
3718 ud = fetcher.ud[urls[0]]
3719 self.assertEqual(ud.host, 'gopkg.in')
3720 self.assertEqual(ud.path, '/ini.v1')
3721 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3722 self.assertEqual(self.d.getVar('SRCREV_gopkg.in/ini.v1@v1.67.0'), 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3723
3724 fetcher.download()
3725 fetcher.unpack(self.unpackdir)
3726 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3727 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3728 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3729 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3730 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3731 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3732 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3733
3734 @skipIfNoNetwork()
3735 def test_gomodgit_url_host_only(self):
3736 urls = ['gomodgit://go.opencensus.io;version=v0.24.0;'
3737 'repo=github.com/census-instrumentation/opencensus-go;'
3738 'srcrev=b1a01ee95db0e690d91d7193d037447816fae4c5']
3739
3740 fetcher = bb.fetch2.Fetch(urls, self.d)
3741 ud = fetcher.ud[urls[0]]
3742 self.assertEqual(ud.host, 'github.com')
3743 self.assertEqual(ud.path, '/census-instrumentation/opencensus-go')
3744 self.assertEqual(ud.name, 'go.opencensus.io@v0.24.0')
3745 self.assertEqual(self.d.getVar('SRCREV_go.opencensus.io@v0.24.0'), 'b1a01ee95db0e690d91d7193d037447816fae4c5')
3746
3747 fetcher.download()
3748 fetcher.unpack(self.unpackdir)
3749 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3750 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'aae3ac7b2122ed3345654e6327855e9682f4a5350d63e93dbcfc51c4419df0e1')))
3751 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3752 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3753 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3754 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3755 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
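Both GoModTest and GoModGitTest exercise the Go module proxy's case encoding: an uppercase letter in a module path is stored as '!' followed by its lowercase form (hence github.com/!azure/... on disk and %21azure in the proxy URL). A small sketch of that escaping, assuming plain ASCII module paths:

    def escape_go_module_path(path):
        # Go's module proxy protocol escapes uppercase letters so paths
        # survive case-insensitive filesystems; '!' itself never appears
        # in a valid module path.
        return ''.join('!' + c.lower() if c.isupper() else c for c in path)

    assert escape_go_module_path('github.com/Azure/azure-sdk-for-go') \
        == 'github.com/!azure/azure-sdk-for-go'
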
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py
index 9e21e18425..e3cba67ad4 100644
--- a/bitbake/lib/bb/tests/parse.py
+++ b/bitbake/lib/bb/tests/parse.py
@@ -75,6 +75,59 @@ unset B[flag]
75 self.assertEqual(d.getVarFlag("A","flag"), None) 75 self.assertEqual(d.getVarFlag("A","flag"), None)
76 self.assertEqual(d.getVar("B"), "2") 76 self.assertEqual(d.getVar("B"), "2")
77 77
78 defaulttest = """
79A = "set value"
80A ??= "default value"
81
82A[flag_set_vs_question] = "set flag"
83A[flag_set_vs_question] ?= "question flag"
84
85A[flag_set_vs_default] = "set flag"
86A[flag_set_vs_default] ??= "default flag"
87
88A[flag_question] ?= "question flag"
89
90A[flag_default] ??= "default flag"
91
92A[flag_question_vs_default] ?= "question flag"
93A[flag_question_vs_default] ??= "default flag"
94
95A[flag_default_vs_question] ??= "default flag"
96A[flag_default_vs_question] ?= "question flag"
97
98A[flag_set_question_default] = "set flag"
99A[flag_set_question_default] ?= "question flag"
100A[flag_set_question_default] ??= "default flag"
101
102A[flag_set_default_question] = "set flag"
103A[flag_set_default_question] ??= "default flag"
104A[flag_set_default_question] ?= "question flag"
105
106A[flag_set_twice] = "set flag first"
107A[flag_set_twice] = "set flag second"
108
109A[flag_question_twice] ?= "question flag first"
110A[flag_question_twice] ?= "question flag second"
111
112A[flag_default_twice] ??= "default flag first"
113A[flag_default_twice] ??= "default flag second"
114"""
115 def test_parse_defaulttest(self):
116 f = self.parsehelper(self.defaulttest)
117 d = bb.parse.handle(f.name, self.d)['']
118 self.assertEqual(d.getVar("A"), "set value")
119 self.assertEqual(d.getVarFlag("A","flag_set_vs_question"), "set flag")
120 self.assertEqual(d.getVarFlag("A","flag_set_vs_default"), "set flag")
121 self.assertEqual(d.getVarFlag("A","flag_question"), "question flag")
122 self.assertEqual(d.getVarFlag("A","flag_default"), "default flag")
123 self.assertEqual(d.getVarFlag("A","flag_question_vs_default"), "question flag")
124 self.assertEqual(d.getVarFlag("A","flag_default_vs_question"), "question flag")
125 self.assertEqual(d.getVarFlag("A","flag_set_question_default"), "set flag")
126 self.assertEqual(d.getVarFlag("A","flag_set_default_question"), "set flag")
127 self.assertEqual(d.getVarFlag("A","flag_set_twice"), "set flag second")
128 self.assertEqual(d.getVarFlag("A","flag_question_twice"), "question flag first")
129 self.assertEqual(d.getVarFlag("A","flag_default_twice"), "default flag second")
130
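The defaulttest expectations pin down the relative strength of the three assignment operators: '=' always wins, '?=' applies only while the variable is unset (so the first '?=' sticks), and '??=' records a weak default that is consulted last, with a later '??=' replacing an earlier one and a subsequent '?=' overriding any '??='. A toy Python model reproducing those twelve assertions, purely illustrative and unrelated to data_smart's real implementation:

    class Slot:
        def __init__(self):
            self.value = None     # set by '=' or '?='
            self.default = None   # set by '??='; the last one wins

        def assign(self, v):      # '='
            self.value = v

        def weak_assign(self, v): # '?=' only takes effect while unset
            if self.value is None:
                self.value = v

        def default_assign(self, v):  # '??='
            self.default = v

        def get(self):
            return self.value if self.value is not None else self.default
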
78 exporttest = """ 131 exporttest = """
79A = "a" 132A = "a"
80export B = "b" 133export B = "b"
@@ -98,8 +151,8 @@ exportD = "d"
98 151
99 152
100 overridetest = """ 153 overridetest = """
101RRECOMMENDS_${PN} = "a" 154RRECOMMENDS:${PN} = "a"
102RRECOMMENDS_${PN}_libc = "b" 155RRECOMMENDS:${PN}:libc = "b"
103OVERRIDES = "libc:${PN}" 156OVERRIDES = "libc:${PN}"
104PN = "gtk+" 157PN = "gtk+"
105""" 158"""
@@ -110,16 +163,16 @@ PN = "gtk+"
110 self.assertEqual(d.getVar("RRECOMMENDS"), "b") 163 self.assertEqual(d.getVar("RRECOMMENDS"), "b")
111 bb.data.expandKeys(d) 164 bb.data.expandKeys(d)
112 self.assertEqual(d.getVar("RRECOMMENDS"), "b") 165 self.assertEqual(d.getVar("RRECOMMENDS"), "b")
113 d.setVar("RRECOMMENDS_gtk+", "c") 166 d.setVar("RRECOMMENDS:gtk+", "c")
114 self.assertEqual(d.getVar("RRECOMMENDS"), "c") 167 self.assertEqual(d.getVar("RRECOMMENDS"), "c")
115 168
116 overridetest2 = """ 169 overridetest2 = """
117EXTRA_OECONF = "" 170EXTRA_OECONF = ""
118EXTRA_OECONF_class-target = "b" 171EXTRA_OECONF:class-target = "b"
119EXTRA_OECONF_append = " c" 172EXTRA_OECONF:append = " c"
120""" 173"""
121 174
122 def test_parse_overrides(self): 175 def test_parse_overrides2(self):
123 f = self.parsehelper(self.overridetest2) 176 f = self.parsehelper(self.overridetest2)
124 d = bb.parse.handle(f.name, self.d)[''] 177 d = bb.parse.handle(f.name, self.d)['']
125 d.appendVar("EXTRA_OECONF", " d") 178 d.appendVar("EXTRA_OECONF", " d")
@@ -128,7 +181,7 @@ EXTRA_OECONF_append = " c"
128 181
129 overridetest3 = """ 182 overridetest3 = """
130DESCRIPTION = "A" 183DESCRIPTION = "A"
131DESCRIPTION_${PN}-dev = "${DESCRIPTION} B" 184DESCRIPTION:${PN}-dev = "${DESCRIPTION} B"
132PN = "bc" 185PN = "bc"
133""" 186"""
134 187
@@ -136,15 +189,15 @@ PN = "bc"
136 f = self.parsehelper(self.overridetest3) 189 f = self.parsehelper(self.overridetest3)
137 d = bb.parse.handle(f.name, self.d)[''] 190 d = bb.parse.handle(f.name, self.d)['']
138 bb.data.expandKeys(d) 191 bb.data.expandKeys(d)
139 self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B") 192 self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B")
140 d.setVar("DESCRIPTION", "E") 193 d.setVar("DESCRIPTION", "E")
141 d.setVar("DESCRIPTION_bc-dev", "C D") 194 d.setVar("DESCRIPTION:bc-dev", "C D")
142 d.setVar("OVERRIDES", "bc-dev") 195 d.setVar("OVERRIDES", "bc-dev")
143 self.assertEqual(d.getVar("DESCRIPTION"), "C D") 196 self.assertEqual(d.getVar("DESCRIPTION"), "C D")
144 197
145 198
146 classextend = """ 199 classextend = """
147VAR_var_override1 = "B" 200VAR_var:override1 = "B"
148EXTRA = ":override1" 201EXTRA = ":override1"
149OVERRIDES = "nothing${EXTRA}" 202OVERRIDES = "nothing${EXTRA}"
150 203
@@ -164,6 +217,7 @@ python () {
164 # become unset/disappear. 217 # become unset/disappear.
165 # 218 #
166 def test_parse_classextend_contamination(self): 219 def test_parse_classextend_contamination(self):
220 self.d.setVar("__bbclasstype", "recipe")
167 cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass") 221 cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
168 #clsname = os.path.basename(cls.name).replace(".bbclass", "") 222 #clsname = os.path.basename(cls.name).replace(".bbclass", "")
169 self.classextend = self.classextend.replace("###CLASS###", cls.name) 223 self.classextend = self.classextend.replace("###CLASS###", cls.name)
@@ -176,7 +230,19 @@ python () {
176 230
177 addtask_deltask = """ 231 addtask_deltask = """
178addtask do_patch after do_foo after do_unpack before do_configure before do_compile 232addtask do_patch after do_foo after do_unpack before do_configure before do_compile
179addtask do_fetch do_patch 233addtask do_fetch2 do_patch2
234
235addtask do_myplaintask
236addtask do_myplaintask2
237deltask do_myplaintask2
238addtask do_mytask# comment
239addtask do_mytask2 # comment2
240addtask do_mytask3
241deltask do_mytask3# comment
242deltask do_mytask4 # comment2
243
244# Ensure a missing task prefix on after works
245addtask do_mytask5 after mytask
180 246
181MYVAR = "do_patch" 247MYVAR = "do_patch"
182EMPTYVAR = "" 248EMPTYVAR = ""
@@ -184,13 +250,216 @@ deltask do_fetch ${MYVAR} ${EMPTYVAR}
184deltask ${EMPTYVAR} 250deltask ${EMPTYVAR}
185""" 251"""
186 def test_parse_addtask_deltask(self): 252 def test_parse_addtask_deltask(self):
187 import sys 253
188 f = self.parsehelper(self.addtask_deltask) 254 f = self.parsehelper(self.addtask_deltask)
189 d = bb.parse.handle(f.name, self.d)[''] 255 d = bb.parse.handle(f.name, self.d)['']
190 256
191 stdout = sys.stdout.getvalue() 257 self.assertSequenceEqual(['do_fetch2', 'do_patch2', 'do_myplaintask', 'do_mytask', 'do_mytask2', 'do_mytask5'], bb.build.listtasks(d))
192 self.assertTrue("addtask contained multiple 'before' keywords" in stdout) 258 self.assertEqual(['do_mytask'], d.getVarFlag("do_mytask5", "deps"))
193 self.assertTrue("addtask contained multiple 'after' keywords" in stdout) 259
194 self.assertTrue('addtask ignored: " do_patch"' in stdout) 260 broken_multiline_comment = """
195 #self.assertTrue('dependent task do_foo for do_patch does not exist' in stdout) 261# First line of comment \\
262# Second line of comment \\
263
264"""
265 def test_parse_broken_multiline_comment(self):
266 f = self.parsehelper(self.broken_multiline_comment)
267 with self.assertRaises(bb.BBHandledException):
268 d = bb.parse.handle(f.name, self.d)['']
269
270
271 comment_in_var = """
272VAR = " \\
273 SOMEVAL \\
274# some comment \\
275 SOMEOTHERVAL \\
276"
277"""
278 def test_parse_comment_in_var(self):
279 f = self.parsehelper(self.comment_in_var)
280 with self.assertRaises(bb.BBHandledException):
281 d = bb.parse.handle(f.name, self.d)['']
282
283
284 at_sign_in_var_flag = """
285A[flag@.service] = "nonet"
286B[flag@.target] = "ntb"
287C[f] = "flag"
196 288
289unset A[flag@.service]
290"""
291 def test_parse_at_sign_in_var_flag(self):
292 f = self.parsehelper(self.at_sign_in_var_flag)
293 d = bb.parse.handle(f.name, self.d)['']
294 self.assertEqual(d.getVar("A"), None)
295 self.assertEqual(d.getVar("B"), None)
296 self.assertEqual(d.getVarFlag("A","flag@.service"), None)
297 self.assertEqual(d.getVarFlag("B","flag@.target"), "ntb")
298 self.assertEqual(d.getVarFlag("C","f"), "flag")
299
300 def test_parse_invalid_at_sign_in_var_flag(self):
301 invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f")
302 f = self.parsehelper(invalid_at_sign)
303 with self.assertRaises(bb.parse.ParseError):
304 d = bb.parse.handle(f.name, self.d)['']
305
306 export_function_recipe = """
307inherit someclass
308"""
309
310 export_function_recipe2 = """
311inherit someclass
312
313do_compile () {
314 false
315}
316
317python do_compilepython () {
318 bb.note("Something else")
319}
320
321"""
322 export_function_class = """
323someclass_do_compile() {
324 true
325}
326
327python someclass_do_compilepython () {
328 bb.note("Something")
329}
330
331EXPORT_FUNCTIONS do_compile do_compilepython
332"""
333
334 export_function_class2 = """
335secondclass_do_compile() {
336 true
337}
338
339python secondclass_do_compilepython () {
340 bb.note("Something")
341}
342
343EXPORT_FUNCTIONS do_compile do_compilepython
344"""
345
346 def test_parse_export_functions(self):
347 def check_function_flags(d):
348 self.assertEqual(d.getVarFlag("do_compile", "func"), 1)
349 self.assertEqual(d.getVarFlag("do_compilepython", "func"), 1)
350 self.assertEqual(d.getVarFlag("do_compile", "python"), None)
351 self.assertEqual(d.getVarFlag("do_compilepython", "python"), "1")
352
353 with tempfile.TemporaryDirectory() as tempdir:
354 self.d.setVar("__bbclasstype", "recipe")
355 recipename = tempdir + "/recipe.bb"
356 os.makedirs(tempdir + "/classes")
357 with open(tempdir + "/classes/someclass.bbclass", "w") as f:
358 f.write(self.export_function_class)
359 f.flush()
360 with open(tempdir + "/classes/secondclass.bbclass", "w") as f:
361 f.write(self.export_function_class2)
362 f.flush()
363
364 with open(recipename, "w") as f:
365 f.write(self.export_function_recipe)
366 f.flush()
367 os.chdir(tempdir)
368 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
369 self.assertIn("someclass_do_compile", d.getVar("do_compile"))
370 self.assertIn("someclass_do_compilepython", d.getVar("do_compilepython"))
371 check_function_flags(d)
372
373 recipename2 = tempdir + "/recipe2.bb"
374 with open(recipename2, "w") as f:
375 f.write(self.export_function_recipe2)
376 f.flush()
377
378 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
379 self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
380 self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
381 self.assertIn("false", d.getVar("do_compile"))
382 self.assertIn("else", d.getVar("do_compilepython"))
383 check_function_flags(d)
384
385 with open(recipename, "a+") as f:
386 f.write("\ninherit secondclass\n")
387 f.flush()
388 with open(recipename2, "a+") as f:
389 f.write("\ninherit secondclass\n")
390 f.flush()
391
392 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
393 self.assertIn("secondclass_do_compile", d.getVar("do_compile"))
394 self.assertIn("secondclass_do_compilepython", d.getVar("do_compilepython"))
395 check_function_flags(d)
396
397 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
398 self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
399 self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
400 self.assertIn("false", d.getVar("do_compile"))
401 self.assertIn("else", d.getVar("do_compilepython"))
402 check_function_flags(d)
403
404 export_function_unclosed_tab = """
405do_compile () {
406 bb.note("Something")
407\t}
408"""
409 export_function_unclosed_space = """
410do_compile () {
411 bb.note("Something")
412 }
413"""
414 export_function_residue = """
415do_compile () {
416 bb.note("Something")
417}
418
419include \\
420"""
421
422 def test_unclosed_functions(self):
423 def test_helper(content, expected_error):
424 with tempfile.TemporaryDirectory() as tempdir:
425 recipename = tempdir + "/recipe_unclosed.bb"
426 with open(recipename, "w") as f:
427 f.write(content)
428 f.flush()
429 os.chdir(tempdir)
430 with self.assertRaises(bb.parse.ParseError) as error:
431 bb.parse.handle(recipename, bb.data.createCopy(self.d))
432 self.assertIn(expected_error, str(error.exception))
433
434 with tempfile.TemporaryDirectory() as tempdir:
435 test_helper(self.export_function_unclosed_tab, "Unparsed lines from unclosed function")
436 test_helper(self.export_function_unclosed_space, "Unparsed lines from unclosed function")
437 test_helper(self.export_function_residue, "Unparsed lines")
438
439 recipename_closed = tempdir + "/recipe_closed.bb"
440 with open(recipename_closed, "w") as in_file:
441 lines = self.export_function_unclosed_tab.split("\n")
442 lines[3] = "}"
443 in_file.write("\n".join(lines))
444 in_file.flush()
445 bb.parse.handle(recipename_closed, bb.data.createCopy(self.d))
446
447 special_character_assignment = """
448A+="a"
449A+ = "b"
450+ = "c"
451"""
 452 ambiguous_assignment = """
453+= "d"
454"""
455 def test_parse_special_character_assignment(self):
456 f = self.parsehelper(self.special_character_assignment)
457 d = bb.parse.handle(f.name, self.d)['']
458 self.assertEqual(d.getVar("A"), " a")
459 self.assertEqual(d.getVar("A+"), "b")
460 self.assertEqual(d.getVar("+"), "c")
461
 462 f = self.parsehelper(self.ambiguous_assignment)
463 with self.assertRaises(bb.parse.ParseError) as error:
464 bb.parse.handle(f.name, self.d)
465 self.assertIn("Empty variable name in assignment", str(error.exception))
diff --git a/bitbake/lib/bb/tests/persist_data.py b/bitbake/lib/bb/tests/persist_data.py
deleted file mode 100644
index f641b5acbc..0000000000
--- a/bitbake/lib/bb/tests/persist_data.py
+++ /dev/null
@@ -1,129 +0,0 @@
1#
2# BitBake Test for lib/bb/persist_data/
3#
4# Copyright (C) 2018 Garmin Ltd.
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import unittest
10import bb.data
11import bb.persist_data
12import tempfile
13import threading
14
15class PersistDataTest(unittest.TestCase):
16 def _create_data(self):
17 return bb.persist_data.persist('TEST_PERSIST_DATA', self.d)
18
19 def setUp(self):
20 self.d = bb.data.init()
21 self.tempdir = tempfile.TemporaryDirectory()
22 self.d['PERSISTENT_DIR'] = self.tempdir.name
23 self.data = self._create_data()
24 self.items = {
25 'A1': '1',
26 'B1': '2',
27 'C2': '3'
28 }
29 self.stress_count = 10000
30 self.thread_count = 5
31
32 for k,v in self.items.items():
33 self.data[k] = v
34
35 def tearDown(self):
36 self.tempdir.cleanup()
37
38 def _iter_helper(self, seen, iterator):
39 with iter(iterator):
40 for v in iterator:
41 self.assertTrue(v in seen)
42 seen.remove(v)
43 self.assertEqual(len(seen), 0, '%s not seen' % seen)
44
45 def test_get(self):
46 for k, v in self.items.items():
47 self.assertEqual(self.data[k], v)
48
49 self.assertIsNone(self.data.get('D'))
50 with self.assertRaises(KeyError):
51 self.data['D']
52
53 def test_set(self):
54 for k, v in self.items.items():
55 self.data[k] += '-foo'
56
57 for k, v in self.items.items():
58 self.assertEqual(self.data[k], v + '-foo')
59
60 def test_delete(self):
61 self.data['D'] = '4'
62 self.assertEqual(self.data['D'], '4')
63 del self.data['D']
64 self.assertIsNone(self.data.get('D'))
65 with self.assertRaises(KeyError):
66 self.data['D']
67
68 def test_contains(self):
69 for k in self.items:
70 self.assertTrue(k in self.data)
71 self.assertTrue(self.data.has_key(k))
72 self.assertFalse('NotFound' in self.data)
73 self.assertFalse(self.data.has_key('NotFound'))
74
75 def test_len(self):
76 self.assertEqual(len(self.data), len(self.items))
77
78 def test_iter(self):
79 self._iter_helper(set(self.items.keys()), self.data)
80
81 def test_itervalues(self):
82 self._iter_helper(set(self.items.values()), self.data.itervalues())
83
84 def test_iteritems(self):
85 self._iter_helper(set(self.items.items()), self.data.iteritems())
86
87 def test_get_by_pattern(self):
88 self._iter_helper({'1', '2'}, self.data.get_by_pattern('_1'))
89
90 def _stress_read(self, data):
91 for i in range(self.stress_count):
92 for k in self.items:
93 data[k]
94
95 def _stress_write(self, data):
96 for i in range(self.stress_count):
97 for k, v in self.items.items():
98 data[k] = v + str(i)
99
100 def _validate_stress(self):
101 for k, v in self.items.items():
102 self.assertEqual(self.data[k], v + str(self.stress_count - 1))
103
104 def test_stress(self):
105 self._stress_read(self.data)
106 self._stress_write(self.data)
107 self._validate_stress()
108
109 def test_stress_threads(self):
110 def read_thread():
111 data = self._create_data()
112 self._stress_read(data)
113
114 def write_thread():
115 data = self._create_data()
116 self._stress_write(data)
117
118 threads = []
119 for i in range(self.thread_count):
120 threads.append(threading.Thread(target=read_thread))
121 threads.append(threading.Thread(target=write_thread))
122
123 for t in threads:
124 t.start()
125 self._stress_read(self.data)
126 for t in threads:
127 t.join()
128 self._validate_stress()
129
diff --git a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
index b57650d591..80b003b2b5 100644
--- a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
+++ b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
@@ -9,7 +9,7 @@ def stamptask(d):
9 with open(stampname, "a+") as f: 9 with open(stampname, "a+") as f:
10 f.write(d.getVar("BB_UNIHASH") + "\n") 10 f.write(d.getVar("BB_UNIHASH") + "\n")
11 11
12 if d.getVar("BB_CURRENT_MC") != "default": 12 if d.getVar("BB_CURRENT_MC") != "":
13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}") 13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}")
14 if thistask in d.getVar("SLOWTASKS").split(): 14 if thistask in d.getVar("SLOWTASKS").split():
15 bb.note("Slowing task %s" % thistask) 15 bb.note("Slowing task %s" % thistask)
diff --git a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
index efebf001a9..05d7fd07dd 100644
--- a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
+++ b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
@@ -12,6 +12,6 @@ STAMP = "${TMPDIR}/stamps/${PN}"
12T = "${TMPDIR}/workdir/${PN}/temp" 12T = "${TMPDIR}/workdir/${PN}/temp"
13BB_NUMBER_THREADS = "4" 13BB_NUMBER_THREADS = "4"
14 14
15BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE" 15BB_BASEHASH_IGNORE_VARS = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE BB_CURRENTTASK"
16 16
17include conf/multiconfig/${BB_CURRENT_MC}.conf 17include conf/multiconfig/${BB_CURRENT_MC}.conf
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
new file mode 100644
index 0000000000..3c7dca0257
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
@@ -0,0 +1,2 @@
1do_build[mcdepends] = "mc::mc-1:h1:do_invalid"
2
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py
index 3d51779d6c..74f5ded2e6 100644
--- a/bitbake/lib/bb/tests/runqueue.py
+++ b/bitbake/lib/bb/tests/runqueue.py
@@ -26,20 +26,23 @@ class RunQueueTests(unittest.TestCase):
26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot" 26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot"
27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot" 27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot"
28 28
29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False): 29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False, allowfailure=False):
30 env = os.environ.copy() 30 env = os.environ.copy()
31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) 31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests"))
32 env["BB_ENV_EXTRAWHITE"] = "SSTATEVALID SLOWTASKS" 32 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR"
33 env["SSTATEVALID"] = sstatevalid 33 env["SSTATEVALID"] = sstatevalid
34 env["SLOWTASKS"] = slowtasks 34 env["SLOWTASKS"] = slowtasks
35 env["TOPDIR"] = builddir
35 if extraenv: 36 if extraenv:
36 for k in extraenv: 37 for k in extraenv:
37 env[k] = extraenv[k] 38 env[k] = extraenv[k]
38 env["BB_ENV_EXTRAWHITE"] = env["BB_ENV_EXTRAWHITE"] + " " + k 39 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = env["BB_ENV_PASSTHROUGH_ADDITIONS"] + " " + k
39 try: 40 try:
40 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) 41 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir)
41 print(output) 42 print(output)
42 except subprocess.CalledProcessError as e: 43 except subprocess.CalledProcessError as e:
44 if allowfailure:
45 return e.output
43 self.fail("Command %s failed with %s" % (cmd, e.output)) 46 self.fail("Command %s failed with %s" % (cmd, e.output))
44 tasks = [] 47 tasks = []
45 tasklog = builddir + "/task.log" 48 tasklog = builddir + "/task.log"
@@ -58,6 +61,8 @@ class RunQueueTests(unittest.TestCase):
58 expected = ['a1:' + x for x in self.alltasks] 61 expected = ['a1:' + x for x in self.alltasks]
59 self.assertEqual(set(tasks), set(expected)) 62 self.assertEqual(set(tasks), set(expected))
60 63
64 self.shutdown(tempdir)
65
61 def test_single_setscenevalid(self): 66 def test_single_setscenevalid(self):
62 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 67 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
63 cmd = ["bitbake", "a1"] 68 cmd = ["bitbake", "a1"]
@@ -68,6 +73,8 @@ class RunQueueTests(unittest.TestCase):
68 'a1:populate_sysroot', 'a1:build'] 73 'a1:populate_sysroot', 'a1:build']
69 self.assertEqual(set(tasks), set(expected)) 74 self.assertEqual(set(tasks), set(expected))
70 75
76 self.shutdown(tempdir)
77
71 def test_intermediate_setscenevalid(self): 78 def test_intermediate_setscenevalid(self):
72 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 79 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
73 cmd = ["bitbake", "a1"] 80 cmd = ["bitbake", "a1"]
@@ -77,6 +84,8 @@ class RunQueueTests(unittest.TestCase):
77 'a1:populate_sysroot_setscene', 'a1:build'] 84 'a1:populate_sysroot_setscene', 'a1:build']
78 self.assertEqual(set(tasks), set(expected)) 85 self.assertEqual(set(tasks), set(expected))
79 86
87 self.shutdown(tempdir)
88
80 def test_intermediate_notcovered(self): 89 def test_intermediate_notcovered(self):
81 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 90 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
82 cmd = ["bitbake", "a1"] 91 cmd = ["bitbake", "a1"]
@@ -86,6 +95,8 @@ class RunQueueTests(unittest.TestCase):
86 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] 95 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
87 self.assertEqual(set(tasks), set(expected)) 96 self.assertEqual(set(tasks), set(expected))
88 97
98 self.shutdown(tempdir)
99
89 def test_all_setscenevalid(self): 100 def test_all_setscenevalid(self):
90 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 101 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
91 cmd = ["bitbake", "a1"] 102 cmd = ["bitbake", "a1"]
@@ -95,6 +106,8 @@ class RunQueueTests(unittest.TestCase):
95 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] 106 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
96 self.assertEqual(set(tasks), set(expected)) 107 self.assertEqual(set(tasks), set(expected))
97 108
109 self.shutdown(tempdir)
110
98 def test_no_settasks(self): 111 def test_no_settasks(self):
99 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 112 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
100 cmd = ["bitbake", "a1", "-c", "patch"] 113 cmd = ["bitbake", "a1", "-c", "patch"]
@@ -103,6 +116,8 @@ class RunQueueTests(unittest.TestCase):
103 expected = ['a1:fetch', 'a1:unpack', 'a1:patch'] 116 expected = ['a1:fetch', 'a1:unpack', 'a1:patch']
104 self.assertEqual(set(tasks), set(expected)) 117 self.assertEqual(set(tasks), set(expected))
105 118
119 self.shutdown(tempdir)
120
106 def test_mix_covered_notcovered(self): 121 def test_mix_covered_notcovered(self):
107 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 122 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
108 cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"] 123 cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"]
@@ -111,6 +126,7 @@ class RunQueueTests(unittest.TestCase):
111 expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene'] 126 expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene']
112 self.assertEqual(set(tasks), set(expected)) 127 self.assertEqual(set(tasks), set(expected))
113 128
129 self.shutdown(tempdir)
114 130
115 # Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks 131 # Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks
116 def test_mixed_direct_tasks_setscene_tasks(self): 132 def test_mixed_direct_tasks_setscene_tasks(self):
@@ -122,6 +138,8 @@ class RunQueueTests(unittest.TestCase):
122 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] 138 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
123 self.assertEqual(set(tasks), set(expected)) 139 self.assertEqual(set(tasks), set(expected))
124 140
141 self.shutdown(tempdir)
142
125 # This test slows down the execution of do_package_setscene until after other real tasks have 143 # This test slows down the execution of do_package_setscene until after other real tasks have
126 # started running which tests for a bug where tasks were being lost from the buildable list of real 144 # started running which tests for a bug where tasks were being lost from the buildable list of real
127 # tasks if they weren't in tasks_covered or tasks_notcovered 145 # tasks if they weren't in tasks_covered or tasks_notcovered
@@ -136,12 +154,14 @@ class RunQueueTests(unittest.TestCase):
136 'a1:populate_sysroot', 'a1:build'] 154 'a1:populate_sysroot', 'a1:build']
137 self.assertEqual(set(tasks), set(expected)) 155 self.assertEqual(set(tasks), set(expected))
138 156
139 def test_setscenewhitelist(self): 157 self.shutdown(tempdir)
158
159 def test_setscene_ignore_tasks(self):
140 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 160 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
141 cmd = ["bitbake", "a1"] 161 cmd = ["bitbake", "a1"]
142 extraenv = { 162 extraenv = {
143 "BB_SETSCENE_ENFORCE" : "1", 163 "BB_SETSCENE_ENFORCE" : "1",
144 "BB_SETSCENE_ENFORCE_WHITELIST" : "a1:do_package_write_rpm a1:do_build" 164 "BB_SETSCENE_ENFORCE_IGNORE_TASKS" : "a1:do_package_write_rpm a1:do_build"
145 } 165 }
146 sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot" 166 sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot"
147 tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv) 167 tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv)
@@ -149,6 +169,8 @@ class RunQueueTests(unittest.TestCase):
149 'a1:populate_sysroot_setscene', 'a1:package_setscene'] 169 'a1:populate_sysroot_setscene', 'a1:package_setscene']
150 self.assertEqual(set(tasks), set(expected)) 170 self.assertEqual(set(tasks), set(expected))
151 171
172 self.shutdown(tempdir)
173
152 # Tests for problems with dependencies between setscene tasks 174 # Tests for problems with dependencies between setscene tasks
153 def test_no_setscenevalid_harddeps(self): 175 def test_no_setscenevalid_harddeps(self):
154 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 176 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -162,6 +184,8 @@ class RunQueueTests(unittest.TestCase):
162 'd1:populate_sysroot', 'd1:build'] 184 'd1:populate_sysroot', 'd1:build']
163 self.assertEqual(set(tasks), set(expected)) 185 self.assertEqual(set(tasks), set(expected))
164 186
187 self.shutdown(tempdir)
188
165 def test_no_setscenevalid_withdeps(self): 189 def test_no_setscenevalid_withdeps(self):
166 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 190 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
167 cmd = ["bitbake", "b1"] 191 cmd = ["bitbake", "b1"]
@@ -172,6 +196,8 @@ class RunQueueTests(unittest.TestCase):
172 expected.remove('a1:package_qa') 196 expected.remove('a1:package_qa')
173 self.assertEqual(set(tasks), set(expected)) 197 self.assertEqual(set(tasks), set(expected))
174 198
199 self.shutdown(tempdir)
200
175 def test_single_a1_setscenevalid_withdeps(self): 201 def test_single_a1_setscenevalid_withdeps(self):
176 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 202 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
177 cmd = ["bitbake", "b1"] 203 cmd = ["bitbake", "b1"]
@@ -182,6 +208,8 @@ class RunQueueTests(unittest.TestCase):
182 'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks] 208 'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks]
183 self.assertEqual(set(tasks), set(expected)) 209 self.assertEqual(set(tasks), set(expected))
184 210
211 self.shutdown(tempdir)
212
185 def test_single_b1_setscenevalid_withdeps(self): 213 def test_single_b1_setscenevalid_withdeps(self):
186 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 214 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
187 cmd = ["bitbake", "b1"] 215 cmd = ["bitbake", "b1"]
@@ -193,6 +221,8 @@ class RunQueueTests(unittest.TestCase):
193 expected.remove('b1:package') 221 expected.remove('b1:package')
194 self.assertEqual(set(tasks), set(expected)) 222 self.assertEqual(set(tasks), set(expected))
195 223
224 self.shutdown(tempdir)
225
196 def test_intermediate_setscenevalid_withdeps(self): 226 def test_intermediate_setscenevalid_withdeps(self):
197 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 227 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
198 cmd = ["bitbake", "b1"] 228 cmd = ["bitbake", "b1"]
@@ -203,6 +233,8 @@ class RunQueueTests(unittest.TestCase):
203 expected.remove('b1:package') 233 expected.remove('b1:package')
204 self.assertEqual(set(tasks), set(expected)) 234 self.assertEqual(set(tasks), set(expected))
205 235
236 self.shutdown(tempdir)
237
206 def test_all_setscenevalid_withdeps(self): 238 def test_all_setscenevalid_withdeps(self):
207 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 239 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
208 cmd = ["bitbake", "b1"] 240 cmd = ["bitbake", "b1"]
@@ -213,6 +245,8 @@ class RunQueueTests(unittest.TestCase):
213 'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene'] 245 'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene']
214 self.assertEqual(set(tasks), set(expected)) 246 self.assertEqual(set(tasks), set(expected))
215 247
248 self.shutdown(tempdir)
249
216 def test_multiconfig_setscene_optimise(self): 250 def test_multiconfig_setscene_optimise(self):
217 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 251 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
218 extraenv = { 252 extraenv = {
@@ -232,6 +266,8 @@ class RunQueueTests(unittest.TestCase):
232 expected.remove(x) 266 expected.remove(x)
233 self.assertEqual(set(tasks), set(expected)) 267 self.assertEqual(set(tasks), set(expected))
234 268
269 self.shutdown(tempdir)
270
235 def test_multiconfig_bbmask(self): 271 def test_multiconfig_bbmask(self):
236 # This test validates that multiconfigs can independently mask off 272 # This test validates that multiconfigs can independently mask off
237 # recipes they do not want with BBMASK. It works by having recipes 273 # recipes they do not want with BBMASK. It works by having recipes
@@ -248,11 +284,13 @@ class RunQueueTests(unittest.TestCase):
248 cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"] 284 cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"]
249 self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv) 285 self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv)
250 286
287 self.shutdown(tempdir)
288
251 def test_multiconfig_mcdepends(self): 289 def test_multiconfig_mcdepends(self):
252 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 290 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
253 extraenv = { 291 extraenv = {
254 "BBMULTICONFIG" : "mc-1 mc_2", 292 "BBMULTICONFIG" : "mc-1 mc_2",
255 "BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends", 293 "BB_SIGNATURE_HANDLER" : "basichash",
256 "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb", 294 "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
257 } 295 }
258 tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True) 296 tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
@@ -278,7 +316,15 @@ class RunQueueTests(unittest.TestCase):
278 ["mc_2:a1:%s" % t for t in rerun_tasks] 316 ["mc_2:a1:%s" % t for t in rerun_tasks]
279 self.assertEqual(set(tasks), set(expected)) 317 self.assertEqual(set(tasks), set(expected))
280 318
281 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') 319 # Check that a multiconfig that doesn't exist raises a correct error message
320 error_output = self.run_bitbakecmd(["bitbake", "g1"], tempdir, "", extraenv=extraenv, cleanup=True, allowfailure=True)
321 self.assertIn("non-existent task", error_output)
322 # If the word 'Traceback' or 'KeyError' is in the output we've regressed
323 self.assertNotIn("Traceback", error_output)
324 self.assertNotIn("KeyError", error_output)
325
326 self.shutdown(tempdir)
327
282 def test_hashserv_single(self): 328 def test_hashserv_single(self):
283 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 329 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
284 extraenv = { 330 extraenv = {
@@ -304,7 +350,6 @@ class RunQueueTests(unittest.TestCase):
304 350
305 self.shutdown(tempdir) 351 self.shutdown(tempdir)
306 352
307 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
308 def test_hashserv_double(self): 353 def test_hashserv_double(self):
309 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 354 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
310 extraenv = { 355 extraenv = {
@@ -329,7 +374,6 @@ class RunQueueTests(unittest.TestCase):
329 374
330 self.shutdown(tempdir) 375 self.shutdown(tempdir)
331 376
332 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
333 def test_hashserv_multiple_setscene(self): 377 def test_hashserv_multiple_setscene(self):
334 # Runs e1:do_package_setscene twice 378 # Runs e1:do_package_setscene twice
335 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 379 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -361,7 +405,6 @@ class RunQueueTests(unittest.TestCase):
361 405
362 def shutdown(self, tempdir): 406 def shutdown(self, tempdir):
363 # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup 407 # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup
364 while os.path.exists(tempdir + "/hashserve.sock"): 408 while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "/cache/hashserv.db-wal") or os.path.exists(tempdir + "/bitbake.lock")):
365 time.sleep(0.5) 409 time.sleep(0.5)
366 410
367
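The reworked shutdown() above now waits for every server artifact (the hash server socket, the sqlite WAL file and the bitbake lock) to disappear before the TemporaryDirectory cleanup runs. A minimal sketch of the same poll-until-gone pattern, with an illustrative timeout guard that the test itself does not carry:

    import os
    import time

    def wait_for_cleanup(tempdir, timeout=60):
        # Files the server still holds open; once all are gone the
        # temporary directory can be removed without racing teardown.
        leftovers = ("/hashserve.sock", "/cache/hashserv.db-wal", "/bitbake.lock")
        deadline = time.time() + timeout
        while any(os.path.exists(tempdir + name) for name in leftovers):
            if time.time() > deadline:
                raise TimeoutError("server files still present in %s" % tempdir)
            time.sleep(0.5)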
diff --git a/bitbake/lib/bb/tests/siggen.py b/bitbake/lib/bb/tests/siggen.py
index c21ab4e4fb..0dc67e6cc2 100644
--- a/bitbake/lib/bb/tests/siggen.py
+++ b/bitbake/lib/bb/tests/siggen.py
@@ -17,75 +17,12 @@ import bb.siggen
17 17
18class SiggenTest(unittest.TestCase): 18class SiggenTest(unittest.TestCase):
19 19
20 def test_clean_basepath_simple_target_basepath(self): 20 def test_build_pnid(self):
21 basepath = '/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' 21 tests = {
22 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask' 22 ('', 'helloworld', 'do_sometask') : 'helloworld:do_sometask',
23 ('XX', 'helloworld', 'do_sometask') : 'mc:XX:helloworld:do_sometask',
24 }
23 25
24 actual_cleaned = bb.siggen.clean_basepath(basepath) 26 for t in tests:
27 self.assertEqual(bb.siggen.build_pnid(*t), tests[t])
25 28
26 self.assertEqual(actual_cleaned, expected_cleaned)
27
28 def test_clean_basepath_basic_virtual_basepath(self):
29 basepath = 'virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
30 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something'
31
32 actual_cleaned = bb.siggen.clean_basepath(basepath)
33
34 self.assertEqual(actual_cleaned, expected_cleaned)
35
36 def test_clean_basepath_mc_basepath(self):
37 basepath = 'mc:somemachine:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
38 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:mc:somemachine'
39
40 actual_cleaned = bb.siggen.clean_basepath(basepath)
41
42 self.assertEqual(actual_cleaned, expected_cleaned)
43
44 def test_clean_basepath_virtual_long_prefix_basepath(self):
45 basepath = 'virtual:something:A:B:C:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
46 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:A:B:C'
47
48 actual_cleaned = bb.siggen.clean_basepath(basepath)
49
50 self.assertEqual(actual_cleaned, expected_cleaned)
51
52 def test_clean_basepath_mc_virtual_basepath(self):
53 basepath = 'mc:somemachine:virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
54 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:mc:somemachine'
55
56 actual_cleaned = bb.siggen.clean_basepath(basepath)
57
58 self.assertEqual(actual_cleaned, expected_cleaned)
59
60 def test_clean_basepath_mc_virtual_long_prefix_basepath(self):
61 basepath = 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
62 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:C:B:A:mc:X'
63
64 actual_cleaned = bb.siggen.clean_basepath(basepath)
65
66 self.assertEqual(actual_cleaned, expected_cleaned)
67
68
69 # def test_clean_basepath_performance(self):
70 # input_basepaths = [
71 # 'mc:X:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
72 # 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
73 # 'virtual:something:C:B:A:/different/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
74 # 'virtual:something:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
75 # '/this/is/most/common/input/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
76 # '/and/should/be/tested/with/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
77 # '/more/weight/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
78 # ]
79
80 # time_start = time.time()
81
82 # i = 2000000
83 # while i >= 0:
84 # for basepath in input_basepaths:
85 # bb.siggen.clean_basepath(basepath)
86 # i -= 1
87
88 # elapsed = time.time() - time_start
89 # print('{} ({}s)'.format(self.id(), round(elapsed, 3)))
90
91 # self.assertTrue(False)
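The replacement test pins down the (mc, pn, taskname) to identifier mapping that superseded clean_basepath(). A sketch consistent with the test's expectations (bb.siggen's own implementation may differ in detail):

    def build_pnid(mc, pn, taskname):
        # Prefix with "mc:<name>:" only when a multiconfig is in play.
        if mc:
            return "mc:%s:%s:%s" % (mc, pn, taskname)
        return "%s:%s" % (pn, taskname)

    assert build_pnid('', 'helloworld', 'do_sometask') == 'helloworld:do_sometask'
    assert build_pnid('XX', 'helloworld', 'do_sometask') == 'mc:XX:helloworld:do_sometask'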
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
index a7ff33db52..52b7bf85bf 100644
--- a/bitbake/lib/bb/tests/utils.py
+++ b/bitbake/lib/bb/tests/utils.py
@@ -130,6 +130,14 @@ class Checksum(unittest.TestCase):
130 checksum = bb.utils.sha256_file(f.name) 130 checksum = bb.utils.sha256_file(f.name)
131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f") 131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f")
132 132
133 def test_goh1(self):
134 import hashlib
135 with tempfile.NamedTemporaryFile() as f:
136 f.write(self.filler)
137 f.flush()
138 checksum = bb.utils.goh1_file(f.name)
139 self.assertEqual(checksum, "81191f04d4abf413e5badd234814e4202d9efa73e6f9437e9ddd6b8165b569ef")
140
133class EditMetadataFile(unittest.TestCase): 141class EditMetadataFile(unittest.TestCase):
134 _origfile = """ 142 _origfile = """
135# A comment 143# A comment
@@ -418,7 +426,7 @@ MULTILINE = " stuff \\
418 ['MULTILINE'], 426 ['MULTILINE'],
419 handle_var) 427 handle_var)
420 428
421 testvalue = re.sub('\s+', ' ', value_in_callback.strip()) 429 testvalue = re.sub(r'\s+', ' ', value_in_callback.strip())
422 self.assertEqual(expected_value, testvalue) 430 self.assertEqual(expected_value, testvalue)
423 431
424class EditBbLayersConf(unittest.TestCase): 432class EditBbLayersConf(unittest.TestCase):
@@ -666,3 +674,32 @@ class GetReferencedVars(unittest.TestCase):
666 674
667 layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}] 675 layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}]
668 self.check_referenced("${SRC_URI}", layers) 676 self.check_referenced("${SRC_URI}", layers)
677
678
679class EnvironmentTests(unittest.TestCase):
680 def test_environment(self):
681 os.environ["A"] = "this is A"
682 self.assertIn("A", os.environ)
683 self.assertEqual(os.environ["A"], "this is A")
684 self.assertNotIn("B", os.environ)
685
686 with bb.utils.environment(B="this is B"):
687 self.assertIn("A", os.environ)
688 self.assertEqual(os.environ["A"], "this is A")
689 self.assertIn("B", os.environ)
690 self.assertEqual(os.environ["B"], "this is B")
691
692 self.assertIn("A", os.environ)
693 self.assertEqual(os.environ["A"], "this is A")
694 self.assertNotIn("B", os.environ)
695
696class FilemodeTests(unittest.TestCase):
697 def test_filemode_convert(self):
698 self.assertEqual(0o775, bb.utils.to_filemode("0o775"))
699 self.assertEqual(0o775, bb.utils.to_filemode(0o775))
700 self.assertEqual(0o775, bb.utils.to_filemode("775"))
701 with self.assertRaises(ValueError):
702 bb.utils.to_filemode("xyz")
703 with self.assertRaises(ValueError):
704 bb.utils.to_filemode("999")
705
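The two new test classes fix the observable behaviour of bb.utils.environment() and bb.utils.to_filemode(). Illustrative reimplementations that satisfy exactly these tests (not the library code itself):

    import os
    from contextlib import contextmanager

    @contextmanager
    def environment(**envvars):
        # Remember prior values (None means the variable was unset),
        # apply the overrides, and restore the exact state on exit.
        backup = {k: os.environ.get(k) for k in envvars}
        os.environ.update(envvars)
        try:
            yield
        finally:
            for k, v in backup.items():
                if v is None:
                    os.environ.pop(k, None)
                else:
                    os.environ[k] = v

    def to_filemode(mode):
        # Pass ints through; parse strings as octal ("775", "0o775").
        # int(..., 8) raises ValueError for "xyz" and for digits like "999".
        if isinstance(mode, int):
            return mode
        return int(mode, 8)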
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index 763c329810..e7fbcbca0a 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -10,10 +10,12 @@
10import logging 10import logging
11import os 11import os
12import sys 12import sys
13import time
13import atexit 14import atexit
14import re 15import re
15from collections import OrderedDict, defaultdict 16from collections import OrderedDict, defaultdict
16from functools import partial 17from functools import partial, wraps
18from contextlib import contextmanager
17 19
18import bb.cache 20import bb.cache
19import bb.cooker 21import bb.cooker
@@ -25,6 +27,135 @@ import bb.remotedata
25from bb.main import setup_bitbake, BitBakeConfigParameters 27from bb.main import setup_bitbake, BitBakeConfigParameters
26import bb.fetch2 28import bb.fetch2
27 29
30def wait_for(f):
31 """
32 Wrap a function that makes an asynchronous tinfoil call using
33 self.run_command() and wait for events to say that the call has been
34 successful, or an error has occurred.
35 """
36 @wraps(f)
37 def wrapper(self, *args, handle_events=True, extra_events=None, event_callback=None, **kwargs):
38 if handle_events:
39 # A reasonable set of default events matching up with those we handle below
40 eventmask = [
41 'bb.event.BuildStarted',
42 'bb.event.BuildCompleted',
43 'logging.LogRecord',
44 'bb.event.NoProvider',
45 'bb.command.CommandCompleted',
46 'bb.command.CommandFailed',
47 'bb.build.TaskStarted',
48 'bb.build.TaskFailed',
49 'bb.build.TaskSucceeded',
50 'bb.build.TaskFailedSilent',
51 'bb.build.TaskProgress',
52 'bb.runqueue.runQueueTaskStarted',
53 'bb.runqueue.sceneQueueTaskStarted',
54 'bb.event.ProcessStarted',
55 'bb.event.ProcessProgress',
56 'bb.event.ProcessFinished',
57 ]
58 if extra_events:
59 eventmask.extend(extra_events)
60 ret = self.set_event_mask(eventmask)
61
62 includelogs = self.config_data.getVar('BBINCLUDELOGS')
63 loglines = self.config_data.getVar('BBINCLUDELOGS_LINES')
64
65 # Call actual function
66 ret = f(self, *args, **kwargs)
67
68 if handle_events:
69 lastevent = time.time()
70 result = False
71 # Borrowed from knotty, instead somewhat hackily we use the helper
72 # as the object to store "shutdown" on
73 helper = bb.ui.uihelper.BBUIHelper()
74 helper.shutdown = 0
75 parseprogress = None
76 termfilter = bb.ui.knotty.TerminalFilter(helper, helper, self.logger.handlers, quiet=self.quiet)
77 try:
78 while True:
79 try:
80 event = self.wait_event(0.25)
81 if event:
82 lastevent = time.time()
83 if event_callback and event_callback(event):
84 continue
85 if helper.eventHandler(event):
86 if isinstance(event, bb.build.TaskFailedSilent):
87 self.logger.warning("Logfile for failed setscene task is %s" % event.logfile)
88 elif isinstance(event, bb.build.TaskFailed):
89 bb.ui.knotty.print_event_log(event, includelogs, loglines, termfilter)
90 continue
91 if isinstance(event, bb.event.ProcessStarted):
92 if self.quiet > 1:
93 continue
94 parseprogress = bb.ui.knotty.new_progress(event.processname, event.total)
95 parseprogress.start(False)
96 continue
97 if isinstance(event, bb.event.ProcessProgress):
98 if self.quiet > 1:
99 continue
100 if parseprogress:
101 parseprogress.update(event.progress)
102 else:
103 bb.warn("Got ProcessProgress event for something that never started?")
104 continue
105 if isinstance(event, bb.event.ProcessFinished):
106 if self.quiet > 1:
107 continue
108 if parseprogress:
109 parseprogress.finish()
110 parseprogress = None
111 continue
112 if isinstance(event, bb.command.CommandCompleted):
113 result = True
114 break
115 if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
116 self.logger.error(str(event))
117 result = False
118 break
119 if isinstance(event, logging.LogRecord):
120 if event.taskpid == 0 or event.levelno > logging.INFO:
121 self.logger.handle(event)
122 continue
123 if isinstance(event, bb.event.NoProvider):
124 self.logger.error(str(event))
125 result = False
126 break
127 elif helper.shutdown > 1:
128 break
129 termfilter.updateFooter()
130 if time.time() > (lastevent + (3*60)):
131 if not self.run_command('ping', handle_events=False):
132 print("\nUnable to ping server and no events, closing down...\n")
133 return False
134 except KeyboardInterrupt:
135 termfilter.clearFooter()
136 if helper.shutdown == 1:
137 print("\nSecond Keyboard Interrupt, stopping...\n")
138 ret = self.run_command("stateForceShutdown")
139 if ret and ret[2]:
140 self.logger.error("Unable to cleanly stop: %s" % ret[2])
141 elif helper.shutdown == 0:
142 print("\nKeyboard Interrupt, closing down...\n")
143 interrupted = True
144 ret = self.run_command("stateShutdown")
145 if ret and ret[2]:
146 self.logger.error("Unable to cleanly shutdown: %s" % ret[2])
147 helper.shutdown = helper.shutdown + 1
148 termfilter.clearFooter()
149 finally:
150 termfilter.finish()
151 if helper.failed_tasks:
152 result = False
153 return result
154 else:
155 return ret
156
157 return wrapper
158
28 159
29# We need this in order to shut down the connection to the bitbake server, 160# We need this in order to shut down the connection to the bitbake server,
30# otherwise the process will never properly exit 161# otherwise the process will never properly exit
@@ -52,6 +183,10 @@ class TinfoilDataStoreConnectorVarHistory:
52 def remoteCommand(self, cmd, *args, **kwargs): 183 def remoteCommand(self, cmd, *args, **kwargs):
53 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs) 184 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)
54 185
186 def emit(self, var, oval, val, o, d):
187 ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex)
188 o.write(ret)
189
55 def __getattr__(self, name): 190 def __getattr__(self, name):
56 if not hasattr(bb.data_smart.VariableHistory, name): 191 if not hasattr(bb.data_smart.VariableHistory, name):
57 raise AttributeError("VariableHistory has no such method %s" % name) 192 raise AttributeError("VariableHistory has no such method %s" % name)
@@ -183,11 +318,19 @@ class TinfoilCookerAdapter:
183 self._cache[name] = attrvalue 318 self._cache[name] = attrvalue
184 return attrvalue 319 return attrvalue
185 320
321 class TinfoilSkiplistByMcAdapter:
322 def __init__(self, tinfoil):
323 self.tinfoil = tinfoil
324
325 def __getitem__(self, mc):
326 return self.tinfoil.get_skipped_recipes(mc)
327
186 def __init__(self, tinfoil): 328 def __init__(self, tinfoil):
187 self.tinfoil = tinfoil 329 self.tinfoil = tinfoil
188 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split() 330 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split()
189 self.collections = {} 331 self.collections = {}
190 self.recipecaches = {} 332 self.recipecaches = {}
333 self.skiplist_by_mc = self.TinfoilSkiplistByMcAdapter(tinfoil)
191 for mc in self.multiconfigs: 334 for mc in self.multiconfigs:
192 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc) 335 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc)
193 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc) 336 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc)
@@ -196,8 +339,6 @@ class TinfoilCookerAdapter:
196 # Grab these only when they are requested since they aren't always used 339 # Grab these only when they are requested since they aren't always used
197 if name in self._cache: 340 if name in self._cache:
198 return self._cache[name] 341 return self._cache[name]
199 elif name == 'skiplist':
200 attrvalue = self.tinfoil.get_skipped_recipes()
201 elif name == 'bbfile_config_priorities': 342 elif name == 'bbfile_config_priorities':
202 ret = self.tinfoil.run_command('getLayerPriorities') 343 ret = self.tinfoil.run_command('getLayerPriorities')
203 bbfile_config_priorities = [] 344 bbfile_config_priorities = []
@@ -320,11 +461,11 @@ class Tinfoil:
320 self.recipes_parsed = False 461 self.recipes_parsed = False
321 self.quiet = 0 462 self.quiet = 0
322 self.oldhandlers = self.logger.handlers[:] 463 self.oldhandlers = self.logger.handlers[:]
464 self.localhandlers = []
323 if setup_logging: 465 if setup_logging:
324 # This is the *client-side* logger, nothing to do with 466 # This is the *client-side* logger, nothing to do with
325 # logging messages from the server 467 # logging messages from the server
326 bb.msg.logger_create('BitBake', output) 468 bb.msg.logger_create('BitBake', output)
327 self.localhandlers = []
328 for handler in self.logger.handlers: 469 for handler in self.logger.handlers:
329 if handler not in self.oldhandlers: 470 if handler not in self.oldhandlers:
330 self.localhandlers.append(handler) 471 self.localhandlers.append(handler)
@@ -440,11 +581,17 @@ class Tinfoil:
440 to initialise Tinfoil and use it with config_only=True first and 581 to initialise Tinfoil and use it with config_only=True first and
441 then conditionally call this function to parse recipes later. 582 then conditionally call this function to parse recipes later.
442 """ 583 """
443 config_params = TinfoilConfigParameters(config_only=False) 584 config_params = TinfoilConfigParameters(config_only=False, quiet=self.quiet)
444 self.run_actions(config_params) 585 self.run_actions(config_params)
445 self.recipes_parsed = True 586 self.recipes_parsed = True
446 587
447 def run_command(self, command, *params): 588 def modified_files(self):
589 """
590 Notify the server it needs to revalidate its caches since the client has modified files
591 """
592 self.run_command("revalidateCaches")
593
594 def run_command(self, command, *params, handle_events=True):
448 """ 595 """
449 Run a command on the server (as implemented in bb.command). 596 Run a command on the server (as implemented in bb.command).
450 Note that there are two types of command - synchronous and 597 Note that there are two types of command - synchronous and
@@ -464,7 +611,7 @@ class Tinfoil:
464 try: 611 try:
465 result = self.server_connection.connection.runCommand(commandline) 612 result = self.server_connection.connection.runCommand(commandline)
466 finally: 613 finally:
467 while True: 614 while handle_events:
468 event = self.wait_event() 615 event = self.wait_event()
469 if not event: 616 if not event:
470 break 617 break
@@ -489,7 +636,7 @@ class Tinfoil:
489 Wait for an event from the server for the specified time. 636 Wait for an event from the server for the specified time.
490 A timeout of 0 means don't wait if there are no events in the queue. 637 A timeout of 0 means don't wait if there are no events in the queue.
491 Returns the next event in the queue or None if the timeout was 638 Returns the next event in the queue or None if the timeout was
492 reached. Note that in order to recieve any events you will 639 reached. Note that in order to receive any events you will
493 first need to set the internal event mask using set_event_mask() 640 first need to set the internal event mask using set_event_mask()
494 (otherwise whatever event mask the UI set up will be in effect). 641 (otherwise whatever event mask the UI set up will be in effect).
495 """ 642 """
@@ -503,12 +650,12 @@ class Tinfoil:
503 """ 650 """
504 return defaultdict(list, self.run_command('getOverlayedRecipes', mc)) 651 return defaultdict(list, self.run_command('getOverlayedRecipes', mc))
505 652
506 def get_skipped_recipes(self): 653 def get_skipped_recipes(self, mc=''):
507 """ 654 """
508 Find recipes which were skipped (i.e. SkipRecipe was raised 655 Find recipes which were skipped (i.e. SkipRecipe was raised
509 during parsing). 656 during parsing).
510 """ 657 """
511 return OrderedDict(self.run_command('getSkippedRecipes')) 658 return OrderedDict(self.run_command('getSkippedRecipes', mc))
512 659
513 def get_all_providers(self, mc=''): 660 def get_all_providers(self, mc=''):
514 return defaultdict(list, self.run_command('allProviders', mc)) 661 return defaultdict(list, self.run_command('allProviders', mc))
@@ -522,6 +669,7 @@ class Tinfoil:
522 def get_runtime_providers(self, rdep): 669 def get_runtime_providers(self, rdep):
523 return self.run_command('getRuntimeProviders', rdep) 670 return self.run_command('getRuntimeProviders', rdep)
524 671
672 # TODO: teach this method about mc
525 def get_recipe_file(self, pn): 673 def get_recipe_file(self, pn):
526 """ 674 """
527 Get the file name for the specified recipe/target. Raises 675 Get the file name for the specified recipe/target. Raises
@@ -530,6 +678,7 @@ class Tinfoil:
530 """ 678 """
531 best = self.find_best_provider(pn) 679 best = self.find_best_provider(pn)
532 if not best or (len(best) > 3 and not best[3]): 680 if not best or (len(best) > 3 and not best[3]):
681 # TODO: pass down mc
533 skiplist = self.get_skipped_recipes() 682 skiplist = self.get_skipped_recipes()
534 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist) 683 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist)
535 skipreasons = taskdata.get_reasons(pn) 684 skipreasons = taskdata.get_reasons(pn)
@@ -622,6 +771,29 @@ class Tinfoil:
622 fn = self.get_recipe_file(pn) 771 fn = self.get_recipe_file(pn)
623 return self.parse_recipe_file(fn) 772 return self.parse_recipe_file(fn)
624 773
774 @contextmanager
775 def _data_tracked_if_enabled(self):
776 """
777 A context manager to enable data tracking for a code segment if data
778 tracking was enabled for this tinfoil instance.
779 """
780 if self.tracking:
781 # Enable history tracking just for the operation
782 self.run_command('enableDataTracking')
783
784 # Here goes the operation with the optional data tracking
785 yield
786
787 if self.tracking:
788 self.run_command('disableDataTracking')
789
790 def finalizeData(self):
791 """
792 Run anonymous functions and expand keys
793 """
794 with self._data_tracked_if_enabled():
795 return self._reconvert_type(self.run_command('finalizeData'), 'DataStoreConnectionHandle')
796
625 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None): 797 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None):
626 """ 798 """
627 Parse the specified recipe file (with or without bbappends) 799 Parse the specified recipe file (with or without bbappends)
@@ -634,10 +806,7 @@ class Tinfoil:
634 appendlist: optional list of bbappend files to apply, if you 806 appendlist: optional list of bbappend files to apply, if you
635 want to filter them 807 want to filter them
636 """ 808 """
637 if self.tracking: 809 with self._data_tracked_if_enabled():
638 # Enable history tracking just for the parse operation
639 self.run_command('enableDataTracking')
640 try:
641 if appends and appendlist == []: 810 if appends and appendlist == []:
642 appends = False 811 appends = False
643 if config_data: 812 if config_data:
@@ -649,9 +818,6 @@ class Tinfoil:
649 return self._reconvert_type(dscon, 'DataStoreConnectionHandle') 818 return self._reconvert_type(dscon, 'DataStoreConnectionHandle')
650 else: 819 else:
651 return None 820 return None
652 finally:
653 if self.tracking:
654 self.run_command('disableDataTracking')
655 821
656 def build_file(self, buildfile, task, internal=True): 822 def build_file(self, buildfile, task, internal=True):
657 """ 823 """
@@ -663,6 +829,10 @@ class Tinfoil:
663 """ 829 """
664 return self.run_command('buildFile', buildfile, task, internal) 830 return self.run_command('buildFile', buildfile, task, internal)
665 831
832 @wait_for
833 def build_file_sync(self, *args):
834 self.build_file(*args)
835
666 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None): 836 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None):
667 """ 837 """
668 Builds the specified targets. This is equivalent to a normal invocation 838 Builds the specified targets. This is equivalent to a normal invocation
@@ -725,6 +895,7 @@ class Tinfoil:
725 895
726 ret = self.run_command('buildTargets', targets, task) 896 ret = self.run_command('buildTargets', targets, task)
727 if handle_events: 897 if handle_events:
898 lastevent = time.time()
728 result = False 899 result = False
729 # Borrowed from knotty, instead somewhat hackily we use the helper 900 # Borrowed from knotty, instead somewhat hackily we use the helper
730 # as the object to store "shutdown" on 901 # as the object to store "shutdown" on
@@ -737,6 +908,7 @@ class Tinfoil:
737 try: 908 try:
738 event = self.wait_event(0.25) 909 event = self.wait_event(0.25)
739 if event: 910 if event:
911 lastevent = time.time()
740 if event_callback and event_callback(event): 912 if event_callback and event_callback(event):
741 continue 913 continue
742 if helper.eventHandler(event): 914 if helper.eventHandler(event):
@@ -757,7 +929,7 @@ class Tinfoil:
757 if parseprogress: 929 if parseprogress:
758 parseprogress.update(event.progress) 930 parseprogress.update(event.progress)
759 else: 931 else:
760 bb.warn("Got ProcessProgress event for someting that never started?") 932 bb.warn("Got ProcessProgress event for something that never started?")
761 continue 933 continue
762 if isinstance(event, bb.event.ProcessFinished): 934 if isinstance(event, bb.event.ProcessFinished):
763 if self.quiet > 1: 935 if self.quiet > 1:
@@ -769,7 +941,7 @@ class Tinfoil:
769 if isinstance(event, bb.command.CommandCompleted): 941 if isinstance(event, bb.command.CommandCompleted):
770 result = True 942 result = True
771 break 943 break
772 if isinstance(event, bb.command.CommandFailed): 944 if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
773 self.logger.error(str(event)) 945 self.logger.error(str(event))
774 result = False 946 result = False
775 break 947 break
@@ -781,10 +953,13 @@ class Tinfoil:
781 self.logger.error(str(event)) 953 self.logger.error(str(event))
782 result = False 954 result = False
783 break 955 break
784
785 elif helper.shutdown > 1: 956 elif helper.shutdown > 1:
786 break 957 break
787 termfilter.updateFooter() 958 termfilter.updateFooter()
959 if time.time() > (lastevent + (3*60)):
960 if not self.run_command('ping', handle_events=False):
961 print("\nUnable to ping server and no events, closing down...\n")
962 return False
788 except KeyboardInterrupt: 963 except KeyboardInterrupt:
789 termfilter.clearFooter() 964 termfilter.clearFooter()
790 if helper.shutdown == 1: 965 if helper.shutdown == 1:
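Taken together, the @wait_for decorator and build_file_sync() give tinfoil clients a blocking variant of build_file(): the wrapper subscribes to a default event mask, drains events until bb.command.CommandCompleted or CommandFailed/CommandExit arrives, and returns True or False. A hypothetical caller (the recipe path and the internal=False flag are illustrative assumptions, not taken from the patch):

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        # Blocks until the server reports completion or failure instead
        # of returning as soon as the command has been queued.
        ok = tinfoil.build_file_sync("recipes/example/example_1.0.bb", "unpack", False)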
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 43aa592842..4ee45d67a2 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -45,7 +45,7 @@ from pprint import pformat
45import logging 45import logging
46from datetime import datetime, timedelta 46from datetime import datetime, timedelta
47 47
48from django.db import transaction, connection 48from django.db import transaction
49 49
50 50
51# pylint: disable=invalid-name 51# pylint: disable=invalid-name
@@ -227,6 +227,12 @@ class ORMWrapper(object):
227 build.completed_on = timezone.now() 227 build.completed_on = timezone.now()
228 build.outcome = outcome 228 build.outcome = outcome
229 build.save() 229 build.save()
230
231 # We force a sync point here to force the outcome status commit,
232 # which resolves a race condition with the build completion takedown
233 transaction.set_autocommit(True)
234 transaction.set_autocommit(False)
235
230 signal_runbuilds() 236 signal_runbuilds()
231 237
232 def update_target_set_license_manifest(self, target, license_manifest_path): 238 def update_target_set_license_manifest(self, target, license_manifest_path):
@@ -483,14 +489,14 @@ class ORMWrapper(object):
483 489
484 # we already created the root directory, so ignore any 490 # we already created the root directory, so ignore any
485 # entry for it 491 # entry for it
486 if len(path) == 0: 492 if not path:
487 continue 493 continue
488 494
489 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) 495 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
490 if len(parent_path) == 0: 496 if not parent_path:
491 parent_path = "/" 497 parent_path = "/"
492 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 498 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
493 tf_obj = Target_File.objects.create( 499 Target_File.objects.create(
494 target = target_obj, 500 target = target_obj,
495 path = path, 501 path = path,
496 size = size, 502 size = size,
@@ -553,9 +559,12 @@ class ORMWrapper(object):
553 # we might have an invalid link; no way to detect this. just set it to None 559 # we might have an invalid link; no way to detect this. just set it to None
554 filetarget_obj = None 560 filetarget_obj = None
555 561
556 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 562 try:
563 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
564 except Target_File.DoesNotExist:
565 parent_obj = None
557 566
558 tf_obj = Target_File.objects.create( 567 Target_File.objects.create(
559 target = target_obj, 568 target = target_obj,
560 path = path, 569 path = path,
561 size = size, 570 size = size,
@@ -571,7 +580,7 @@ class ORMWrapper(object):
571 assert isinstance(build_obj, Build) 580 assert isinstance(build_obj, Build)
572 assert isinstance(target_obj, Target) 581 assert isinstance(target_obj, Target)
573 582
574 errormsg = "" 583 errormsg = []
575 for p in packagedict: 584 for p in packagedict:
576 # Search name switches round the installed name vs package name 585 # Search name switches round the installed name vs package name
577 # by default installed name == package name 586 # by default installed name == package name
@@ -633,10 +642,10 @@ class ORMWrapper(object):
633 packagefile_objects.append(Package_File( package = packagedict[p]['object'], 642 packagefile_objects.append(Package_File( package = packagedict[p]['object'],
634 path = targetpath, 643 path = targetpath,
635 size = targetfilesize)) 644 size = targetfilesize))
636 if len(packagefile_objects): 645 if packagefile_objects:
637 Package_File.objects.bulk_create(packagefile_objects) 646 Package_File.objects.bulk_create(packagefile_objects)
638 except KeyError as e: 647 except KeyError as e:
639 errormsg += " stpi: Key error, package %s key %s \n" % ( p, e ) 648 errormsg.append(" stpi: Key error, package %s key %s \n" % (p, e))
640 649
641 # save disk installed size 650 # save disk installed size
642 packagedict[p]['object'].installed_size = packagedict[p]['size'] 651 packagedict[p]['object'].installed_size = packagedict[p]['size']
@@ -673,13 +682,13 @@ class ORMWrapper(object):
673 logger.warning("Could not add dependency to the package %s " 682 logger.warning("Could not add dependency to the package %s "
674 "because %s is an unknown package", p, px) 683 "because %s is an unknown package", p, px)
675 684
676 if len(packagedeps_objs) > 0: 685 if packagedeps_objs:
677 Package_Dependency.objects.bulk_create(packagedeps_objs) 686 Package_Dependency.objects.bulk_create(packagedeps_objs)
678 else: 687 else:
679 logger.info("No package dependencies created") 688 logger.info("No package dependencies created")
680 689
681 if len(errormsg) > 0: 690 if errormsg:
682 logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg) 691 logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))
683 692
684 def save_target_image_file_information(self, target_obj, file_name, file_size): 693 def save_target_image_file_information(self, target_obj, file_name, file_size):
685 Target_Image_File.objects.create(target=target_obj, 694 Target_Image_File.objects.create(target=target_obj,
@@ -767,7 +776,7 @@ class ORMWrapper(object):
767 packagefile_objects.append(Package_File( package = bp_object, 776 packagefile_objects.append(Package_File( package = bp_object,
768 path = path, 777 path = path,
769 size = package_info['FILES_INFO'][path] )) 778 size = package_info['FILES_INFO'][path] ))
770 if len(packagefile_objects): 779 if packagefile_objects:
771 Package_File.objects.bulk_create(packagefile_objects) 780 Package_File.objects.bulk_create(packagefile_objects)
772 781
773 def _po_byname(p): 782 def _po_byname(p):
@@ -809,7 +818,7 @@ class ORMWrapper(object):
809 packagedeps_objs.append(Package_Dependency( package = bp_object, 818 packagedeps_objs.append(Package_Dependency( package = bp_object,
810 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)) 819 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
811 820
812 if len(packagedeps_objs) > 0: 821 if packagedeps_objs:
813 Package_Dependency.objects.bulk_create(packagedeps_objs) 822 Package_Dependency.objects.bulk_create(packagedeps_objs)
814 823
815 return bp_object 824 return bp_object
@@ -826,7 +835,7 @@ class ORMWrapper(object):
826 desc = vardump[root_var]['doc'] 835 desc = vardump[root_var]['doc']
827 if desc is None: 836 if desc is None:
828 desc = '' 837 desc = ''
829 if len(desc): 838 if desc:
830 HelpText.objects.get_or_create(build=build_obj, 839 HelpText.objects.get_or_create(build=build_obj,
831 area=HelpText.VARIABLE, 840 area=HelpText.VARIABLE,
832 key=k, text=desc) 841 key=k, text=desc)
@@ -846,7 +855,7 @@ class ORMWrapper(object):
846 file_name = vh['file'], 855 file_name = vh['file'],
847 line_number = vh['line'], 856 line_number = vh['line'],
848 operation = vh['op'])) 857 operation = vh['op']))
849 if len(varhist_objects): 858 if varhist_objects:
850 VariableHistory.objects.bulk_create(varhist_objects) 859 VariableHistory.objects.bulk_create(varhist_objects)
851 860
852 861
@@ -893,9 +902,6 @@ class BuildInfoHelper(object):
893 self.task_order = 0 902 self.task_order = 0
894 self.autocommit_step = 1 903 self.autocommit_step = 1
895 self.server = server 904 self.server = server
896 # we use manual transactions if the database doesn't autocommit on us
897 if not connection.features.autocommits_when_autocommit_is_off:
898 transaction.set_autocommit(False)
899 self.orm_wrapper = ORMWrapper() 905 self.orm_wrapper = ORMWrapper()
900 self.has_build_history = has_build_history 906 self.has_build_history = has_build_history
901 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0] 907 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
@@ -1059,27 +1065,6 @@ class BuildInfoHelper(object):
1059 1065
1060 return recipe_info 1066 return recipe_info
1061 1067
1062 def _get_path_information(self, task_object):
1063 self._ensure_build()
1064
1065 assert isinstance(task_object, Task)
1066 build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
1067 build_stats_path = []
1068
1069 for t in self.internal_state['targets']:
1070 buildname = self.internal_state['build'].build_name
1071 pe, pv = task_object.recipe.version.split(":",1)
1072 if len(pe) > 0:
1073 package = task_object.recipe.name + "-" + pe + "_" + pv
1074 else:
1075 package = task_object.recipe.name + "-" + pv
1076
1077 build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
1078 buildname=buildname,
1079 package=package))
1080
1081 return build_stats_path
1082
1083 1068
1084 ################################ 1069 ################################
1085 ## external available methods to store information 1070 ## external available methods to store information
@@ -1313,12 +1298,11 @@ class BuildInfoHelper(object):
1313 task_information['outcome'] = Task.OUTCOME_FAILED 1298 task_information['outcome'] = Task.OUTCOME_FAILED
1314 del self.internal_state['taskdata'][identifier] 1299 del self.internal_state['taskdata'][identifier]
1315 1300
1316 if not connection.features.autocommits_when_autocommit_is_off: 1301 # we force a sync point here, to get the progress bar to show
1317 # we force a sync point here, to get the progress bar to show 1302 if self.autocommit_step % 3 == 0:
1318 if self.autocommit_step % 3 == 0: 1303 transaction.set_autocommit(True)
1319 transaction.set_autocommit(True) 1304 transaction.set_autocommit(False)
1320 transaction.set_autocommit(False) 1305 self.autocommit_step += 1
1321 self.autocommit_step += 1
1322 1306
1323 self.orm_wrapper.get_update_task_object(task_information, True) # must exist 1307 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1324 1308
@@ -1404,7 +1388,7 @@ class BuildInfoHelper(object):
1404 assert 'pn' in event._depgraph 1388 assert 'pn' in event._depgraph
1405 assert 'tdepends' in event._depgraph 1389 assert 'tdepends' in event._depgraph
1406 1390
1407 errormsg = "" 1391 errormsg = []
1408 1392
1409 # save layer version priorities 1393 # save layer version priorities
1410 if 'layer-priorities' in event._depgraph.keys(): 1394 if 'layer-priorities' in event._depgraph.keys():
@@ -1496,7 +1480,7 @@ class BuildInfoHelper(object):
1496 elif dep in self.internal_state['recipes']: 1480 elif dep in self.internal_state['recipes']:
1497 dependency = self.internal_state['recipes'][dep] 1481 dependency = self.internal_state['recipes'][dep]
1498 else: 1482 else:
1499 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep) 1483 errormsg.append(" stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
1500 continue 1484 continue
1501 recipe_dep = Recipe_Dependency(recipe=target, 1485 recipe_dep = Recipe_Dependency(recipe=target,
1502 depends_on=dependency, 1486 depends_on=dependency,
@@ -1537,8 +1521,8 @@ class BuildInfoHelper(object):
1537 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep )) 1521 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
1538 Task_Dependency.objects.bulk_create(taskdeps_objects) 1522 Task_Dependency.objects.bulk_create(taskdeps_objects)
1539 1523
1540 if len(errormsg) > 0: 1524 if errormsg:
1541 logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg) 1525 logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg))
1542 1526
1543 1527
1544 def store_build_package_information(self, event): 1528 def store_build_package_information(self, event):
@@ -1618,7 +1602,7 @@ class BuildInfoHelper(object):
1618 1602
1619 if 'backlog' in self.internal_state: 1603 if 'backlog' in self.internal_state:
1620 # if we have a backlog of events, do our best to save them here 1604 # if we have a backlog of events, do our best to save them here
1621 if len(self.internal_state['backlog']): 1605 if self.internal_state['backlog']:
1622 tempevent = self.internal_state['backlog'].pop() 1606 tempevent = self.internal_state['backlog'].pop()
1623 logger.debug("buildinfohelper: Saving stored event %s " 1607 logger.debug("buildinfohelper: Saving stored event %s "
1624 % tempevent) 1608 % tempevent)
@@ -1765,7 +1749,6 @@ class BuildInfoHelper(object):
1765 1749
1766 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0] 1750 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1767 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0] 1751 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1768 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1769 1752
1770 # location of the manifest files for this build; 1753 # location of the manifest files for this build;
1771 # note that this file is only produced if an image is produced 1754 # note that this file is only produced if an image is produced
@@ -1786,6 +1769,18 @@ class BuildInfoHelper(object):
1786 # filter out anything which isn't an image target 1769 # filter out anything which isn't an image target
1787 image_targets = [target for target in targets if target.is_image] 1770 image_targets = [target for target in targets if target.is_image]
1788 1771
1772 if len(image_targets) > 0:
1773 # If there are image targets, retrieve the image name
1774 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1775 if not image_name:
1776 # When the build target is an image but IMAGE_NAME is not set in the environment
1777 logger.info("IMAGE_NAME not found, extracting from bitbake command")
1778 cmd = self.server.runCommand(['getVariable','BB_CMDLINE'])[0]
1779 # Filter out tokens that are command-line options
1780 cmd = [token for token in cmd if not token.startswith('-')]
1781 image_name = cmd[1].split(':', 1)[0] # remove everything after : in image name
1782 logger.info("IMAGE_NAME found as : %s " % image_name)
1783
1789 for image_target in image_targets: 1784 for image_target in image_targets:
1790 # this is set to True if we find at least one file relating to 1785 # this is set to True if we find at least one file relating to
1791 # this target; if this remains False after the scan, we copy the 1786 # this target; if this remains False after the scan, we copy the
@@ -1990,8 +1985,6 @@ class BuildInfoHelper(object):
1990 # Do not skip command line build events 1985 # Do not skip command line build events
1991 self.store_log_event(tempevent,False) 1986 self.store_log_event(tempevent,False)
1992 1987
1993 if not connection.features.autocommits_when_autocommit_is_off:
1994 transaction.set_autocommit(True)
1995 1988
1996 # unset the brbe; this is to prevent subsequent command-line builds 1989 # unset the brbe; this is to prevent subsequent command-line builds
1997 # being incorrectly attached to the previous Toaster-triggered build; 1990 # being incorrectly attached to the previous Toaster-triggered build;
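On the autocommit toggling this patch leans on: with Django held in manual-transaction mode via set_autocommit(False), briefly re-enabling autocommit commits whatever is pending, making earlier writes visible to other connections, and disabling it again opens a fresh transaction. A minimal sketch of that sync point (an explicit transaction.commit() would be the more conventional spelling of the same thing):

    from django.db import transaction

    def force_sync_point():
        # Commit the open transaction, then drop straight back into
        # manual-transaction mode for subsequent writes.
        transaction.set_autocommit(True)
        transaction.set_autocommit(False)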
diff --git a/bitbake/lib/bb/ui/eventreplay.py b/bitbake/lib/bb/ui/eventreplay.py
new file mode 100644
index 0000000000..d62ecbfa56
--- /dev/null
+++ b/bitbake/lib/bb/ui/eventreplay.py
@@ -0,0 +1,86 @@
1#!/usr/bin/env python3
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5# This file re-uses code spread throughout other Bitbake source files.
6# As such, all other copyrights belong to their own right holders.
7#
8
9
10import os
11import sys
12import json
13import pickle
14import codecs
15
16
17class EventPlayer:
18 """Emulate a connection to a bitbake server."""
19
20 def __init__(self, eventfile, variables):
21 self.eventfile = eventfile
22 self.variables = variables
23 self.eventmask = []
24
25 def waitEvent(self, _timeout):
26 """Read event from the file."""
27 line = self.eventfile.readline().strip()
28 if not line:
29 return
30 try:
31 decodedline = json.loads(line)
32 if 'allvariables' in decodedline:
33 self.variables = decodedline['allvariables']
34 return
35 if 'vars' not in decodedline:
36 raise ValueError
37 event_str = decodedline['vars'].encode('utf-8')
38 event = pickle.loads(codecs.decode(event_str, 'base64'))
39 event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
40 if event_name not in self.eventmask:
41 return
42 return event
43 except ValueError as err:
44 print("Failed loading ", line)
45 raise err
46
47 def runCommand(self, command_line):
48 """Emulate running a command on the server."""
49 name = command_line[0]
50
51 if name == "getVariable":
52 var_name = command_line[1]
53 variable = self.variables.get(var_name)
54 if variable:
55 return variable['v'], None
56 return None, "Missing variable %s" % var_name
57
58 elif name == "getAllKeysWithFlags":
59 dump = {}
60 flaglist = command_line[1]
61 for key, val in self.variables.items():
62 try:
63 if not key.startswith("__"):
64 dump[key] = {
65 'v': val['v'],
66 'history' : val['history'],
67 }
68 for flag in flaglist:
69 dump[key][flag] = val[flag]
70 except Exception as err:
71 print(err)
72 return (dump, None)
73
74 elif name == 'setEventMask':
75 self.eventmask = command_line[-1]
76 return True, None
77
78 else:
79 raise Exception("Command %s not implemented" % command_line[0])
80
81 def getEventHandle(self):
82 """
83 This method is called by toasterui.
84 The return value is passed to self.runCommand but not used there.
85 """
86 pass
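For reference, each line waitEvent() decodes is a JSON object whose 'vars' field carries a base64-encoded pickle of the event. A sketch of the matching writer side (the real writer lives in the toaster event-logging path; this only mirrors the decoder above):

    import codecs
    import json
    import pickle

    def encode_event_line(event):
        # The base64 codec emits embedded newlines, but json.dumps()
        # escapes them, so each event still occupies exactly one line.
        payload = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
        return json.dumps({'vars': payload})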
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 0efa614dfc..9a589a5c8e 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -21,10 +21,17 @@ import fcntl
21import struct 21import struct
22import copy 22import copy
23import atexit 23import atexit
24from itertools import groupby
24 25
25from bb.ui import uihelper 26from bb.ui import uihelper
27import bb.build
28import bb.command
29import bb.cooker
30import bb.event
31import bb.runqueue
32import bb.utils
26 33
27featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS] 34featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
28 35
29logger = logging.getLogger("BitBake") 36logger = logging.getLogger("BitBake")
30interactive = sys.stdout.isatty() 37interactive = sys.stdout.isatty()
@@ -102,7 +109,7 @@ def new_progress(msg, maxval):
102 return NonInteractiveProgress(msg, maxval) 109 return NonInteractiveProgress(msg, maxval)
103 110
104def pluralise(singular, plural, qty): 111def pluralise(singular, plural, qty):
105 if(qty == 1): 112 if qty == 1:
106 return singular % qty 113 return singular % qty
107 else: 114 else:
108 return plural % qty 115 return plural % qty
@@ -111,6 +118,7 @@ def pluralise(singular, plural, qty):
111class InteractConsoleLogFilter(logging.Filter): 118class InteractConsoleLogFilter(logging.Filter):
112 def __init__(self, tf): 119 def __init__(self, tf):
113 self.tf = tf 120 self.tf = tf
121 super().__init__()
114 122
115 def filter(self, record): 123 def filter(self, record):
116 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")): 124 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
@@ -178,7 +186,7 @@ class TerminalFilter(object):
178 new[3] = new[3] & ~termios.ECHO 186 new[3] = new[3] & ~termios.ECHO
179 termios.tcsetattr(fd, termios.TCSADRAIN, new) 187 termios.tcsetattr(fd, termios.TCSADRAIN, new)
180 curses.setupterm() 188 curses.setupterm()
181 if curses.tigetnum("colors") > 2: 189 if curses.tigetnum("colors") > 2 and os.environ.get('NO_COLOR', '') == '':
182 for h in handlers: 190 for h in handlers:
183 try: 191 try:
184 h.formatter.enable_color() 192 h.formatter.enable_color()
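The colour gate above now also honours the informal NO_COLOR convention (https://no-color.org): any non-empty value disables colour even on a capable terminal. The check reduces to:

    import os

    def colour_allowed(term_colours):
        # Colour needs a capable terminal AND an unset or empty NO_COLOR.
        return term_colours > 2 and os.environ.get('NO_COLOR', '') == ''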
@@ -227,7 +235,9 @@ class TerminalFilter(object):
227 235
228 def keepAlive(self, t): 236 def keepAlive(self, t):
229 if not self.cuu: 237 if not self.cuu:
230 print("Bitbake still alive (%ds)" % t) 238 print("Bitbake still alive (no events for %ds). Active tasks:" % t)
239 for t in self.helper.running_tasks:
240 print(t)
231 sys.stdout.flush() 241 sys.stdout.flush()
232 242
233 def updateFooter(self): 243 def updateFooter(self):
@@ -249,58 +259,68 @@ class TerminalFilter(object):
249 return 259 return
250 tasks = [] 260 tasks = []
251 for t in runningpids: 261 for t in runningpids:
262 start_time = activetasks[t].get("starttime", None)
263 if start_time:
264 msg = "%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"])
265 else:
266 msg = "%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"])
252 progress = activetasks[t].get("progress", None) 267 progress = activetasks[t].get("progress", None)
253 if progress is not None: 268 if progress is not None:
254 pbar = activetasks[t].get("progressbar", None) 269 pbar = activetasks[t].get("progressbar", None)
255 rate = activetasks[t].get("rate", None) 270 rate = activetasks[t].get("rate", None)
256 start_time = activetasks[t].get("starttime", None)
257 if not pbar or pbar.bouncing != (progress < 0): 271 if not pbar or pbar.bouncing != (progress < 0):
258 if progress < 0: 272 if progress < 0:
259 pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle) 273 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle)
260 pbar.bouncing = True 274 pbar.bouncing = True
261 else: 275 else:
262 pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle) 276 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle)
263 pbar.bouncing = False 277 pbar.bouncing = False
264 activetasks[t]["progressbar"] = pbar 278 activetasks[t]["progressbar"] = pbar
265 tasks.append((pbar, progress, rate, start_time)) 279 tasks.append((pbar, msg, progress, rate, start_time))
266 else: 280 else:
267 start_time = activetasks[t].get("starttime", None) 281 tasks.append(msg)
268 if start_time:
269 tasks.append("%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"]))
270 else:
271 tasks.append("%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]))
272 282
273 if self.main.shutdown: 283 if self.main.shutdown:
274 content = "Waiting for %s running tasks to finish:" % len(activetasks) 284 content = pluralise("Waiting for %s running task to finish",
285 "Waiting for %s running tasks to finish", len(activetasks))
286 if not self.quiet:
287 content += ':'
275 print(content) 288 print(content)
276 else: 289 else:
290 scene_tasks = "%s of %s" % (self.helper.setscene_current, self.helper.setscene_total)
291 cur_tasks = "%s of %s" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
292
293 content = ''
294 if not self.quiet:
295 msg = "Setscene tasks: %s" % scene_tasks
296 content += msg + "\n"
297 print(msg)
298
277 if self.quiet: 299 if self.quiet:
278 content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) 300 msg = "Running tasks (%s, %s)" % (scene_tasks, cur_tasks)
279 elif not len(activetasks): 301 elif not len(activetasks):
280 content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) 302 msg = "No currently running tasks (%s)" % cur_tasks
281 else: 303 else:
282 content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total) 304 msg = "Currently %2s running tasks (%s)" % (len(activetasks), cur_tasks)
283 maxtask = self.helper.tasknumber_total 305 maxtask = self.helper.tasknumber_total
284 if not self.main_progress or self.main_progress.maxval != maxtask: 306 if not self.main_progress or self.main_progress.maxval != maxtask:
285 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()] 307 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
286 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle) 308 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle)
287 self.main_progress.start(False) 309 self.main_progress.start(False)
288 self.main_progress.setmessage(content) 310 self.main_progress.setmessage(msg)
289 progress = self.helper.tasknumber_current - 1 311 progress = max(0, self.helper.tasknumber_current - 1)
290 if progress < 0: 312 content += self.main_progress.update(progress)
291 progress = 0
292 content = self.main_progress.update(progress)
293 print('') 313 print('')
294 lines = 1 + int(len(content) / (self.columns + 1)) 314 lines = self.getlines(content)
295 if self.quiet == 0: 315 if not self.quiet:
296 for tasknum, task in enumerate(tasks[:(self.rows - 2)]): 316 for tasknum, task in enumerate(tasks[:(self.rows - 1 - lines)]):
297 if isinstance(task, tuple): 317 if isinstance(task, tuple):
298 pbar, progress, rate, start_time = task 318 pbar, msg, progress, rate, start_time = task
299 if not pbar.start_time: 319 if not pbar.start_time:
300 pbar.start(False) 320 pbar.start(False)
301 if start_time: 321 if start_time:
302 pbar.start_time = start_time 322 pbar.start_time = start_time
303 pbar.setmessage('%s:%s' % (tasknum, pbar.msg.split(':', 1)[1])) 323 pbar.setmessage('%s: %s' % (tasknum, msg))
304 pbar.setextra(rate) 324 pbar.setextra(rate)
305 if progress > -1: 325 if progress > -1:
306 content = pbar.update(progress) 326 content = pbar.update(progress)
@@ -310,11 +330,17 @@ class TerminalFilter(object):
310 else: 330 else:
311 content = "%s: %s" % (tasknum, task) 331 content = "%s: %s" % (tasknum, task)
312 print(content) 332 print(content)
313 lines = lines + 1 + int(len(content) / (self.columns + 1)) 333 lines = lines + self.getlines(content)
314 self.footer_present = lines 334 self.footer_present = lines
315 self.lastpids = runningpids[:] 335 self.lastpids = runningpids[:]
316 self.lastcount = self.helper.tasknumber_current 336 self.lastcount = self.helper.tasknumber_current
317 337
338 def getlines(self, content):
339 lines = 0
340 for line in content.split("\n"):
341 lines = lines + 1 + int(len(line) / (self.columns + 1))
342 return lines
343
318 def finish(self): 344 def finish(self):
319 if self.stdinbackup: 345 if self.stdinbackup:
320 fd = sys.stdin.fileno() 346 fd = sys.stdin.fileno()
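The new getlines() helper replaces the old inline footer-height arithmetic and accounts for both explicit newlines and terminal wrapping: each logical line costs one row plus one extra row per full wrap. The divisor is columns + 1 so a line of exactly `columns` characters still counts as a single row. A standalone restatement (free function for illustration only):

    def getlines(content, columns):
        # one row per logical line, plus one per full wrap
        lines = 0
        for line in content.split("\n"):
            lines += 1 + len(line) // (columns + 1)
        return lines

    assert getlines("x" * 80, 80) == 1   # exactly fits: no wrap
    assert getlines("x" * 81, 80) == 2   # one character over: wraps once
    assert getlines("a\nb", 80) == 2     # explicit newline: two rows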
@@ -327,7 +353,7 @@ def print_event_log(event, includelogs, loglines, termfilter):
327 termfilter.clearFooter() 353 termfilter.clearFooter()
328 bb.error("Logfile of failure stored in: %s" % logfile) 354 bb.error("Logfile of failure stored in: %s" % logfile)
329 if includelogs and not event.errprinted: 355 if includelogs and not event.errprinted:
330 print("Log data follows:") 356 bb.plain("Log data follows:")
331 f = open(logfile, "r") 357 f = open(logfile, "r")
332 lines = [] 358 lines = []
333 while True: 359 while True:
@@ -340,11 +366,11 @@ def print_event_log(event, includelogs, loglines, termfilter):
340 if len(lines) > int(loglines): 366 if len(lines) > int(loglines):
341 lines.pop(0) 367 lines.pop(0)
342 else: 368 else:
343 print('| %s' % l) 369 bb.plain('| %s' % l)
344 f.close() 370 f.close()
345 if lines: 371 if lines:
346 for line in lines: 372 for line in lines:
347 print(line) 373 bb.plain(line)
348 374
349def _log_settings_from_server(server, observe_only): 375def _log_settings_from_server(server, observe_only):
350 # Get values of variables which control our output 376 # Get values of variables which control our output
@@ -401,6 +427,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
401 except bb.BBHandledException: 427 except bb.BBHandledException:
402 drain_events_errorhandling(eventHandler) 428 drain_events_errorhandling(eventHandler)
403 return 1 429 return 1
430 except Exception as e:
431 # bitbake-server comms failure
432 early_logger = bb.msg.logger_create('bitbake', sys.stdout)
433 early_logger.fatal("Attempting to set server environment: %s", e)
434 return 1
404 435
405 if params.options.quiet == 0: 436 if params.options.quiet == 0:
406 console_loglevel = loglevel 437 console_loglevel = loglevel
@@ -531,13 +562,30 @@ def main(server, eventHandler, params, tf = TerminalFilter):
531 } 562 }
532 }) 563 })
533 564
534 bb.utils.mkdirhier(os.path.dirname(consolelogfile)) 565 consolelogdirname = os.path.dirname(consolelogfile)
535 loglink = os.path.join(os.path.dirname(consolelogfile), 'console-latest.log') 566 # `bb.utils.mkdirhier` has this check, but it reports failure using bb.fatal, which logs
567 # to the very logger we are trying to set up.
568 if '${' in str(consolelogdirname):
569 print(
570 "FATAL: Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(
571 consolelogdirname))
572 if '${MACHINE}' in consolelogdirname:
573 print("HINT: It looks like you forgot to set MACHINE in local.conf.")
574
575 bb.utils.mkdirhier(consolelogdirname)
576 loglink = os.path.join(consolelogdirname, 'console-latest.log')
536 bb.utils.remove(loglink) 577 bb.utils.remove(loglink)
537 try: 578 try:
538 os.symlink(os.path.basename(consolelogfile), loglink) 579 os.symlink(os.path.basename(consolelogfile), loglink)
539 except OSError: 580 except OSError:
540 pass 581 pass
582
583 # Add the logging domains specified by the user on the command line
584 for (domainarg, iterator) in groupby(params.debug_domains):
585 dlevel = len(tuple(iterator))
586 l = logconfig["loggers"].setdefault("BitBake.%s" % domainarg, {})
587 l["level"] = logging.DEBUG - dlevel + 1
588 l.setdefault("handlers", []).extend(["BitBake.verbconsole"])
541 589
542 conf = bb.msg.setLoggingConfig(logconfig, logconfigfile) 590 conf = bb.msg.setLoggingConfig(logconfig, logconfigfile)
543 591
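The groupby() loop converts repeated -D command-line options into per-domain verbosity: each extra occurrence of the same domain deepens logging one level below DEBUG, and the resulting logger is wired to the verbose console handler. Note that itertools.groupby only merges adjacent duplicates, so this relies on the option parser keeping repeats together. A restatement with hypothetical domain names, sorting first so non-adjacent repeats still group:

    import logging
    from itertools import groupby

    debug_domains = ["RunQueue", "Fetcher", "Fetcher"]   # e.g. -DRunQueue -DFetcher -DFetcher

    for domain, occurrences in groupby(sorted(debug_domains)):
        depth = len(tuple(occurrences))
        level = logging.DEBUG - depth + 1    # depth 1 -> 10 (DEBUG), depth 2 -> 9, ...
        print("BitBake.%s -> level %d" % (domain, level))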
@@ -546,6 +594,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
546 else: 594 else:
547 log_exec_tty = False 595 log_exec_tty = False
548 596
597 should_print_hyperlinks = sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == ''
598
549 helper = uihelper.BBUIHelper() 599 helper = uihelper.BBUIHelper()
550 600
551 # Look for the specially designated handlers which need to be passed to the 601 # Look for the specially designated handlers which need to be passed to the
@@ -559,7 +609,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
559 return 609 return
560 610
561 llevel, debug_domains = bb.msg.constructLogOptions() 611 llevel, debug_domains = bb.msg.constructLogOptions()
562 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list]) 612 try:
613 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
614 except (BrokenPipeError, EOFError) as e:
615 # bitbake-server comms failure
616 logger.fatal("Attempting to set event mask: %s", e)
617 return 1
563 618
564 # The logging_tree module is *extremely* helpful in debugging logging 619 # The logging_tree module is *extremely* helpful in debugging logging
565 # domains. Uncomment here to dump the logging tree when bitbake starts 620 # domains. Uncomment here to dump the logging tree when bitbake starts
@@ -568,7 +623,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
568 623
569 universe = False 624 universe = False
570 if not params.observe_only: 625 if not params.observe_only:
571 params.updateFromServer(server) 626 try:
627 params.updateFromServer(server)
628 except Exception as e:
629 logger.fatal("Fetching command line: %s", e)
630 return 1
572 cmdline = params.parseActions() 631 cmdline = params.parseActions()
573 if not cmdline: 632 if not cmdline:
574 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") 633 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -579,7 +638,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
579 if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]: 638 if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]:
580 universe = True 639 universe = True
581 640
582 ret, error = server.runCommand(cmdline['action']) 641 try:
642 ret, error = server.runCommand(cmdline['action'])
643 except (BrokenPipeError, EOFError) as e:
644 # bitbake-server comms failure
645 logger.fatal("Command '{}' failed: %s".format(cmdline), e)
646 return 1
583 if error: 647 if error:
584 logger.error("Command '%s' failed: %s" % (cmdline, error)) 648 logger.error("Command '%s' failed: %s" % (cmdline, error))
585 return 1 649 return 1
@@ -595,28 +659,42 @@ def main(server, eventHandler, params, tf = TerminalFilter):
595 return_value = 0 659 return_value = 0
596 errors = 0 660 errors = 0
597 warnings = 0 661 warnings = 0
598 taskfailures = [] 662 taskfailures = {}
599 663
600 printinterval = 5000 664 printintervaldelta = 10 * 60 # 10 minutes
601 lastprint = time.time() 665 printinterval = printintervaldelta
666 pinginterval = 1 * 60 # 1 minute
667 lastevent = lastprint = time.time()
602 668
603 termfilter = tf(main, helper, console_handlers, params.options.quiet) 669 termfilter = tf(main, helper, console_handlers, params.options.quiet)
604 atexit.register(termfilter.finish) 670 atexit.register(termfilter.finish)
605 671
606 while True: 672 # shutdown levels
673 # 0 - normal operation
674 # 1 - no new task execution, let current running tasks finish
675 # 2 - interrupting currently executing tasks
676 # 3 - we're done, exit
677 while main.shutdown < 3:
607 try: 678 try:
608 if (lastprint + printinterval) <= time.time(): 679 if (lastprint + printinterval) <= time.time():
609 termfilter.keepAlive(printinterval) 680 termfilter.keepAlive(printinterval)
610 printinterval += 5000 681 printinterval += printintervaldelta
611 event = eventHandler.waitEvent(0) 682 event = eventHandler.waitEvent(0)
612 if event is None: 683 if event is None:
613 if main.shutdown > 1: 684 if (lastevent + pinginterval) <= time.time():
614 break 685 ret, error = server.runCommand(["ping"])
686 if error or not ret:
687 termfilter.clearFooter()
688 print("No reply after pinging server (%s, %s), exiting." % (str(error), str(ret)))
689 return_value = 3
690 main.shutdown = 3
691 lastevent = time.time()
615 if not parseprogress: 692 if not parseprogress:
616 termfilter.updateFooter() 693 termfilter.updateFooter()
617 event = eventHandler.waitEvent(0.25) 694 event = eventHandler.waitEvent(0.25)
618 if event is None: 695 if event is None:
619 continue 696 continue
697 lastevent = time.time()
620 helper.eventHandler(event) 698 helper.eventHandler(event)
621 if isinstance(event, bb.runqueue.runQueueExitWait): 699 if isinstance(event, bb.runqueue.runQueueExitWait):
622 if not main.shutdown: 700 if not main.shutdown:
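Two behaviours change in the main event loop: the keep-alive message now fires only after printintervaldelta (ten minutes) of silence and grows by that delta each time, and a one-minute ping detects a dead bitbake-server so the UI exits (return value 3) instead of waiting forever. The shutdown counter itself acts as the small state ladder the comment enumerates. A self-contained model of that ladder (event names are illustrative, not BitBake API):

    NORMAL, NO_NEW_TASKS, INTERRUPTING, DONE = 0, 1, 2, 3

    def run(events):
        shutdown = NORMAL
        for ev in events:
            if shutdown >= DONE:
                break
            if ev == "ctrl-c":
                shutdown = min(shutdown + 1, DONE)   # each interrupt escalates one level
            elif ev in ("CommandExit", "CommandCompleted", "CookerExit"):
                shutdown = DONE                      # command finished: leave the loop
        return shutdown

    print(run(["LogRecord", "ctrl-c", "ctrl-c"]))   # 2: interrupting running tasks
    print(run(["CommandCompleted"]))                # 3: done, exit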
@@ -638,8 +716,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
638 716
639 if isinstance(event, logging.LogRecord): 717 if isinstance(event, logging.LogRecord):
640 lastprint = time.time() 718 lastprint = time.time()
641 printinterval = 5000 719 printinterval = printintervaldelta
642 if event.levelno >= bb.msg.BBLogFormatter.ERROR: 720 if event.levelno >= bb.msg.BBLogFormatter.ERRORONCE:
643 errors = errors + 1 721 errors = errors + 1
644 return_value = 1 722 return_value = 1
645 elif event.levelno == bb.msg.BBLogFormatter.WARNING: 723 elif event.levelno == bb.msg.BBLogFormatter.WARNING:
@@ -653,10 +731,10 @@ def main(server, eventHandler, params, tf = TerminalFilter):
653 continue 731 continue
654 732
655 # Prefix task messages with recipe/task 733 # Prefix task messages with recipe/task
656 if event.taskpid in helper.pidmap and event.levelno != bb.msg.BBLogFormatter.PLAIN: 734 if event.taskpid in helper.pidmap and event.levelno not in [bb.msg.BBLogFormatter.PLAIN, bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
657 taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]] 735 taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]]
658 event.msg = taskinfo['title'] + ': ' + event.msg 736 event.msg = taskinfo['title'] + ': ' + event.msg
659 if hasattr(event, 'fn'): 737 if hasattr(event, 'fn') and event.levelno not in [bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
660 event.msg = event.fn + ': ' + event.msg 738 event.msg = event.fn + ': ' + event.msg
661 logging.getLogger(event.name).handle(event) 739 logging.getLogger(event.name).handle(event)
662 continue 740 continue
@@ -667,6 +745,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
667 if isinstance(event, bb.build.TaskFailed): 745 if isinstance(event, bb.build.TaskFailed):
668 return_value = 1 746 return_value = 1
669 print_event_log(event, includelogs, loglines, termfilter) 747 print_event_log(event, includelogs, loglines, termfilter)
748 k = "{}:{}".format(event._fn, event._task)
749 taskfailures[k] = event.logfile
670 if isinstance(event, bb.build.TaskBase): 750 if isinstance(event, bb.build.TaskBase):
671 logger.info(event._message) 751 logger.info(event._message)
672 continue 752 continue
@@ -721,15 +801,15 @@ def main(server, eventHandler, params, tf = TerminalFilter):
721 if event.error: 801 if event.error:
722 errors = errors + 1 802 errors = errors + 1
723 logger.error(str(event)) 803 logger.error(str(event))
724 main.shutdown = 2 804 main.shutdown = 3
725 continue 805 continue
726 if isinstance(event, bb.command.CommandExit): 806 if isinstance(event, bb.command.CommandExit):
727 if not return_value: 807 if not return_value:
728 return_value = event.exitcode 808 return_value = event.exitcode
729 main.shutdown = 2 809 main.shutdown = 3
730 continue 810 continue
731 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)): 811 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
732 main.shutdown = 2 812 main.shutdown = 3
733 continue 813 continue
734 if isinstance(event, bb.event.MultipleProviders): 814 if isinstance(event, bb.event.MultipleProviders):
735 logger.info(str(event)) 815 logger.info(str(event))
@@ -745,7 +825,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
745 continue 825 continue
746 826
747 if isinstance(event, bb.runqueue.sceneQueueTaskStarted): 827 if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
748 logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring)) 828 logger.info("Running setscene task %d of %d (%s)" % (event.stats.setscene_covered + event.stats.setscene_active + event.stats.setscene_notcovered + 1, event.stats.setscene_total, event.taskstring))
749 continue 829 continue
750 830
751 if isinstance(event, bb.runqueue.runQueueTaskStarted): 831 if isinstance(event, bb.runqueue.runQueueTaskStarted):
@@ -762,7 +842,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
762 842
763 if isinstance(event, bb.runqueue.runQueueTaskFailed): 843 if isinstance(event, bb.runqueue.runQueueTaskFailed):
764 return_value = 1 844 return_value = 1
765 taskfailures.append(event.taskstring) 845 taskfailures.setdefault(event.taskstring)
766 logger.error(str(event)) 846 logger.error(str(event))
767 continue 847 continue
768 848
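taskfailures becomes a dict so the summary can attach a logfile to each failure when one exists: bb.build.TaskFailed events record their logfile, while runqueue failures are registered via setdefault() with no logfile (None). A sketch of the bookkeeping with made-up task names:

    taskfailures = {}
    taskfailures["recipe.bb:do_compile"] = "/tmp/log.do_compile.42"   # bb.build.TaskFailed
    taskfailures.setdefault("recipe.bb:do_package")                   # runQueueTaskFailed: no log

    for failure, log_file in taskfailures.items():
        print(failure, "->", log_file)   # log_file is None for the runqueue entry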
@@ -814,15 +894,26 @@ def main(server, eventHandler, params, tf = TerminalFilter):
814 894
815 logger.error("Unknown event: %s", event) 895 logger.error("Unknown event: %s", event)
816 896
897 except (BrokenPipeError, EOFError) as e:
898 # bitbake-server comms failure, don't attempt further comms and exit
899 logger.fatal("Executing event: %s", e)
900 return_value = 1
901 errors = errors + 1
902 main.shutdown = 3
817 except EnvironmentError as ioerror: 903 except EnvironmentError as ioerror:
818 termfilter.clearFooter() 904 termfilter.clearFooter()
819 # ignore interrupted io 905 # ignore interrupted io
820 if ioerror.args[0] == 4: 906 if ioerror.args[0] == 4:
821 continue 907 continue
822 sys.stderr.write(str(ioerror)) 908 sys.stderr.write(str(ioerror))
823 if not params.observe_only:
824 _, error = server.runCommand(["stateForceShutdown"])
825 main.shutdown = 2 909 main.shutdown = 2
910 if not params.observe_only:
911 try:
912 _, error = server.runCommand(["stateForceShutdown"])
913 except (BrokenPipeError, EOFError) as e:
914 # bitbake-server comms failure, don't attempt further comms and exit
915 logger.fatal("Unable to force shutdown: %s", e)
916 main.shutdown = 3
826 except KeyboardInterrupt: 917 except KeyboardInterrupt:
827 termfilter.clearFooter() 918 termfilter.clearFooter()
828 if params.observe_only: 919 if params.observe_only:
@@ -831,9 +922,13 @@ def main(server, eventHandler, params, tf = TerminalFilter):
831 922
832 def state_force_shutdown(): 923 def state_force_shutdown():
833 print("\nSecond Keyboard Interrupt, stopping...\n") 924 print("\nSecond Keyboard Interrupt, stopping...\n")
834 _, error = server.runCommand(["stateForceShutdown"]) 925 try:
835 if error: 926 _, error = server.runCommand(["stateForceShutdown"])
836 logger.error("Unable to cleanly stop: %s" % error) 927 if error:
928 logger.error("Unable to cleanly stop: %s" % error)
929 except (BrokenPipeError, EOFError) as e:
930 # bitbake-server comms failure
931 logger.fatal("Unable to cleanly stop: %s", e)
837 932
838 if not params.observe_only and main.shutdown == 1: 933 if not params.observe_only and main.shutdown == 1:
839 state_force_shutdown() 934 state_force_shutdown()
@@ -846,32 +941,49 @@ def main(server, eventHandler, params, tf = TerminalFilter):
846 _, error = server.runCommand(["stateShutdown"]) 941 _, error = server.runCommand(["stateShutdown"])
847 if error: 942 if error:
848 logger.error("Unable to cleanly shutdown: %s" % error) 943 logger.error("Unable to cleanly shutdown: %s" % error)
944 except (BrokenPipeError, EOFError) as e:
945 # bitbake-server comms failure
946 logger.fatal("Unable to cleanly shutdown: %s", e)
849 except KeyboardInterrupt: 947 except KeyboardInterrupt:
850 state_force_shutdown() 948 state_force_shutdown()
851 949
852 main.shutdown = main.shutdown + 1 950 main.shutdown = main.shutdown + 1
853 pass
854 except Exception as e: 951 except Exception as e:
855 import traceback 952 import traceback
856 sys.stderr.write(traceback.format_exc()) 953 sys.stderr.write(traceback.format_exc())
857 if not params.observe_only:
858 _, error = server.runCommand(["stateForceShutdown"])
859 main.shutdown = 2 954 main.shutdown = 2
955 if not params.observe_only:
956 try:
957 _, error = server.runCommand(["stateForceShutdown"])
958 except (BrokenPipeError, EOFError) as e:
959 # bitbake-server comms failure, don't attempt further comms and exit
960 logger.fatal("Unable to force shutdown: %s", e)
961 main.shutdown = 3
860 return_value = 1 962 return_value = 1
861 try: 963 try:
862 termfilter.clearFooter() 964 termfilter.clearFooter()
863 summary = "" 965 summary = ""
966 def format_hyperlink(url, link_text):
967 if should_print_hyperlinks:
968 start = f'\033]8;;{url}\033\\'
969 end = '\033]8;;\033\\'
970 return f'{start}{link_text}{end}'
971 return link_text
972
864 if taskfailures: 973 if taskfailures:
865 summary += pluralise("\nSummary: %s task failed:", 974 summary += pluralise("\nSummary: %s task failed:",
866 "\nSummary: %s tasks failed:", len(taskfailures)) 975 "\nSummary: %s tasks failed:", len(taskfailures))
867 for failure in taskfailures: 976 for (failure, log_file) in taskfailures.items():
868 summary += "\n %s" % failure 977 summary += "\n %s" % failure
978 if log_file:
979 hyperlink = format_hyperlink(f"file://{log_file}", log_file)
980 summary += "\n log: {}".format(hyperlink)
869 if warnings: 981 if warnings:
870 summary += pluralise("\nSummary: There was %s WARNING message shown.", 982 summary += pluralise("\nSummary: There was %s WARNING message.",
871 "\nSummary: There were %s WARNING messages shown.", warnings) 983 "\nSummary: There were %s WARNING messages.", warnings)
872 if return_value and errors: 984 if return_value and errors:
873 summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.", 985 summary += pluralise("\nSummary: There was %s ERROR message, returning a non-zero exit code.",
874 "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors) 986 "\nSummary: There were %s ERROR messages, returning a non-zero exit code.", errors)
875 if summary and params.options.quiet == 0: 987 if summary and params.options.quiet == 0:
876 print(summary) 988 print(summary)
877 989
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index cf1c876a51..18a706547a 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -227,6 +227,9 @@ class NCursesUI:
227 shutdown = 0 227 shutdown = 0
228 228
229 try: 229 try:
230 if not params.observe_only:
231 params.updateToServer(server, os.environ.copy())
232
230 params.updateFromServer(server) 233 params.updateFromServer(server)
231 cmdline = params.parseActions() 234 cmdline = params.parseActions()
232 if not cmdline: 235 if not cmdline:
diff --git a/bitbake/lib/bb/ui/taskexp.py b/bitbake/lib/bb/ui/taskexp.py
index 2b246710ca..bedfd69b09 100644
--- a/bitbake/lib/bb/ui/taskexp.py
+++ b/bitbake/lib/bb/ui/taskexp.py
@@ -8,6 +8,7 @@
8# 8#
9 9
10import sys 10import sys
11import traceback
11 12
12try: 13try:
13 import gi 14 import gi
@@ -176,7 +177,7 @@ class gtkthread(threading.Thread):
176 quit = threading.Event() 177 quit = threading.Event()
177 def __init__(self, shutdown): 178 def __init__(self, shutdown):
178 threading.Thread.__init__(self) 179 threading.Thread.__init__(self)
179 self.setDaemon(True) 180 self.daemon = True
180 self.shutdown = shutdown 181 self.shutdown = shutdown
181 if not Gtk.init_check()[0]: 182 if not Gtk.init_check()[0]:
182 sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n") 183 sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
@@ -196,6 +197,7 @@ def main(server, eventHandler, params):
196 gtkgui.start() 197 gtkgui.start()
197 198
198 try: 199 try:
200 params.updateToServer(server, os.environ.copy())
199 params.updateFromServer(server) 201 params.updateFromServer(server)
200 cmdline = params.parseActions() 202 cmdline = params.parseActions()
201 if not cmdline: 203 if not cmdline:
@@ -218,6 +220,9 @@ def main(server, eventHandler, params):
218 except client.Fault as x: 220 except client.Fault as x:
219 print("XMLRPC Fault getting commandline:\n %s" % x) 221 print("XMLRPC Fault getting commandline:\n %s" % x)
220 return 222 return
223 except Exception as e:
224 print("Exception in startup:\n %s" % traceback.format_exc())
225 return
221 226
222 if gtkthread.quit.isSet(): 227 if gtkthread.quit.isSet():
223 return 228 return
diff --git a/bitbake/lib/bb/ui/taskexp_ncurses.py b/bitbake/lib/bb/ui/taskexp_ncurses.py
new file mode 100755
index 0000000000..ea94a4987f
--- /dev/null
+++ b/bitbake/lib/bb/ui/taskexp_ncurses.py
@@ -0,0 +1,1511 @@
1#
2# BitBake Graphical ncurses-based Dependency Explorer
3# * Based on the GTK implementation
4# * Intended to run on any Linux host
5#
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 - 2008 Richard Purdie
8# Copyright (C) 2022 - 2024 David Reyna
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13#
14# Execution example:
15# $ bitbake -g -u taskexp_ncurses zlib acl
16#
17# Self-test example (executes a script of GUI actions):
18# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl
19# ...
20# $ echo $?
21# 0
22# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl foo
23# ERROR: Nothing PROVIDES 'foo'. Close matches:
24# ofono
25# $ echo $?
26# 1
27#
28# Self-test with no terminal example (only tests dependency fetch from bitbake):
29# $ TASK_EXP_UNIT_TEST_NOTERM=1 bitbake -g -u taskexp_ncurses quilt
30# $ echo $?
31# 0
32#
33# Features:
34# * Ncurses is used for the presentation layer. Only the 'curses'
35# library is used (none of the extension libraries), and only
36# one main screen is used (no sub-windows)
37# * Uses the 'generateDepTreeEvent' bitbake event to fetch the
38# dynamic dependency data based on passed recipes
39# * Computes and provides reverse dependencies
40# * Supports task sorting on:
41# (a) Task dependency order within each recipe
42# (b) Pure alphabetical order
43# (c) Provisions for third sort order (bitbake order?)
44# * The 'Filter' does a "*string*" wildcard filter on tasks in the
45# main window, dynamically re-ordering and re-centering the content
46# * A 'Print' function exports the selected task or its whole recipe
47# task set to the default file "taskdep_print.log"
48# * Supports a progress bar for bitbake loads and file printing
49# * Line art for box drawing supported, ASCII art as an alternative
50# * No horizontal scrolling support. Selected task's full name
51# shown in bottom bar
52# * Dynamically catches terminals that are (or become) too small
53# * Exception handling to ensure return to a normal terminal on errors
54# * Debugging support, self test option
55#
56
57import sys
58import traceback
59import curses
60import re
61import time
62import os
63# Bitbake server support
64import threading
65from xmlrpc import client
66import bb
67import bb.event
68
69# Dependency indexes (depends_model)
70(TYPE_DEP, TYPE_RDEP) = (0, 1)
71DEPENDS_TYPE = 0
72DEPENDS_TASK = 1
73DEPENDS_DEPS = 2
74# Task indexes (task_list)
75TASK_NAME = 0
76TASK_PRIMARY = 1
77TASK_SORT_ALPHA = 2
78TASK_SORT_DEPS = 3
79TASK_SORT_BITBAKE = 4
80# Sort options (default is SORT_DEPS)
81SORT_ALPHA = 0
82SORT_DEPS = 1
83SORT_BITBAKE_ENABLE = False # NOTE: future sort
84SORT_BITBAKE = 2
85sort_model = SORT_DEPS
86# Print options
87PRINT_MODEL_1 = 0
88PRINT_MODEL_2 = 1
89print_model = PRINT_MODEL_2
90print_file_name = "taskdep_print.log"
91print_file_backup_name = "taskdep_print_backup.log"
92is_printed = False
93is_filter = False
94
95# Standard (and backup) key mappings
96CHAR_NUL = 0 # Used as self-test nop char
97CHAR_BS_H = 8 # Alternate backspace key
98CHAR_TAB = 9
99CHAR_RETURN = 10
100CHAR_ESCAPE = 27
101CHAR_UP = ord('{') # Used as self-test ASCII char
102CHAR_DOWN = ord('}') # Used as self-test ASCII char
103
104# Color_pair IDs
105CURSES_NORMAL = 0
106CURSES_HIGHLIGHT = 1
107CURSES_WARNING = 2
108
109
110#################################################
111### Debugging support
112###
113
114verbose = False
115
116# Debug: display a message to slow-step through display update issues
117def alert(msg,screen):
118 if msg:
119 screen.addstr(0, 10, '[%-4s]' % msg)
120 screen.refresh();
121 curses.napms(2000)
122 else:
123 if do_line_art:
124 for i in range(10, 24):
125 screen.addch(0, i, curses.ACS_HLINE)
126 else:
127 screen.addstr(0, 10, '-' * 14)
128 screen.refresh();
129
130# Debug: display edge conditions on frame movements
131def debug_frame(nbox_obj):
132 if verbose:
133 nbox_obj.screen.addstr(0, 50, '[I=%2d,O=%2d,S=%3s,H=%2d,M=%4d]' % (
134 nbox_obj.cursor_index,
135 nbox_obj.cursor_offset,
136 nbox_obj.scroll_offset,
137 nbox_obj.inside_height,
138 len(nbox_obj.task_list),
139 ))
140 nbox_obj.screen.refresh()
141
142#
143# Unit test (assumes that 'quilt-native' is always present)
144#
145
146unit_test = os.environ.get('TASK_EXP_UNIT_TEST')
147unit_test_cmnds=[
148 '# Default selected task in primary box',
149 'tst_selected=<TASK>.do_recipe_qa',
150 '# Default selected task in deps',
151 'tst_entry=<TAB>',
152 'tst_selected=',
153 '# Default selected task in rdeps',
154 'tst_entry=<TAB>',
155 'tst_selected=<TASK>.do_fetch',
156 "# Test 'select' back to primary box",
157 'tst_entry=<CR>',
158 '#tst_entry=<DOWN>', # optional injected error
159 'tst_selected=<TASK>.do_fetch',
160 '# Check filter',
161 'tst_entry=/uilt-nativ/',
162 'tst_selected=quilt-native.do_recipe_qa',
163 '# Check print',
164 'tst_entry=p',
165 'tst_printed=quilt-native.do_fetch',
166 '#tst_printed=quilt-foo.do_nothing', # optional injected error
167 '# Done!',
168 'tst_entry=q',
169]
170unit_test_idx=0
171unit_test_command_chars=''
172unit_test_results=[]
173def unit_test_action(active_package):
174 global unit_test_idx
175 global unit_test_command_chars
176 global unit_test_results
177 ret = CHAR_NUL
178 if unit_test_command_chars:
179 ch = unit_test_command_chars[0]
180 unit_test_command_chars = unit_test_command_chars[1:]
181 time.sleep(0.5)
182 ret = ord(ch)
183 else:
184 line = unit_test_cmnds[unit_test_idx]
185 unit_test_idx += 1
186 line = re.sub('#.*', '', line).strip()
187 line = line.replace('<TASK>',active_package.primary[0])
188 line = line.replace('<TAB>','\t').replace('<CR>','\n')
189 line = line.replace('<UP>','{').replace('<DOWN>','}')
190 if not line: line = 'nop=nop'
191 cmnd,value = line.split('=')
192 if cmnd == 'tst_entry':
193 unit_test_command_chars = value
194 elif cmnd == 'tst_selected':
195 active_selected = active_package.get_selected()
196 if active_selected != value:
197 unit_test_results.append("ERROR:SELFTEST:expected '%s' but got '%s' (NOTE:bitbake may have changed)" % (value,active_selected))
198 ret = ord('Q')
199 else:
200 unit_test_results.append("Pass:SELFTEST:found '%s'" % (value))
201 elif cmnd == 'tst_printed':
202 result = os.system('grep %s %s' % (value,print_file_name))
203 if result:
204 unit_test_results.append("ERROR:PRINTTEST:expected '%s' in '%s'" % (value,print_file_name))
205 ret = ord('Q')
206 else:
207 unit_test_results.append("Pass:PRINTTEST:found '%s'" % (value))
208 # Return the action (CHAR_NUL for no action til next round)
209 return(ret)
210
211# Unit test without an interactive terminal (e.g. ptest)
212unit_test_noterm = os.environ.get('TASK_EXP_UNIT_TEST_NOTERM')
213
214
215#################################################
216### Window frame rendering
217###
218### By default, use the normal line art. Since
219### these extended characters are not ASCII, one
220### must use the ncursus API to render them
221### The alternate ASCII line art set is optionally
222### available via the 'do_line_art' flag
223
224# By default, render frames using line art
225do_line_art = True
226
227# ASCII render set option
228CHAR_HBAR = '-'
229CHAR_VBAR = '|'
230CHAR_UL_CORNER = '/'
231CHAR_UR_CORNER = '\\'
232CHAR_LL_CORNER = '\\'
233CHAR_LR_CORNER = '/'
234
235# Box frame drawing with line-art
236def line_art_frame(box):
237 x = box.base_x
238 y = box.base_y
239 w = box.width
240 h = box.height + 1
241
242 if do_line_art:
243 for i in range(1, w - 1):
244 box.screen.addch(y, x + i, curses.ACS_HLINE, box.color)
245 box.screen.addch(y + h - 1, x + i, curses.ACS_HLINE, box.color)
246 body_line = "%s" % (' ' * (w - 2))
247 for i in range(1, h - 1):
248 box.screen.addch(y + i, x, curses.ACS_VLINE, box.color)
249 box.screen.addstr(y + i, x + 1, body_line, box.color)
250 box.screen.addch(y + i, x + w - 1, curses.ACS_VLINE, box.color)
251 box.screen.addch(y, x, curses.ACS_ULCORNER, box.color)
252 box.screen.addch(y, x + w - 1, curses.ACS_URCORNER, box.color)
253 box.screen.addch(y + h - 1, x, curses.ACS_LLCORNER, box.color)
254 box.screen.addch(y + h - 1, x + w - 1, curses.ACS_LRCORNER, box.color)
255 else:
256 top_line = "%s%s%s" % (CHAR_UL_CORNER,CHAR_HBAR * (w - 2),CHAR_UR_CORNER)
257 body_line = "%s%s%s" % (CHAR_VBAR,' ' * (w - 2),CHAR_VBAR)
258 bot_line = "%s%s%s" % (CHAR_UR_CORNER,CHAR_HBAR * (w - 2),CHAR_UL_CORNER)
259 tag_line = "%s%s%s" % ('[',CHAR_HBAR * (w - 2),']')
260 # Top bar
261 box.screen.addstr(y, x, top_line)
262 # Middle frame
263 for i in range(1, (h - 1)):
264 box.screen.addstr(y+i, x, body_line)
265 # Bottom bar
266 box.screen.addstr(y + (h - 1), x, bot_line)
267
268# Connect the separate boxes
269def line_art_fixup(box):
270 if do_line_art:
271 box.screen.addch(box.base_y+2, box.base_x, curses.ACS_LTEE, box.color)
272 box.screen.addch(box.base_y+2, box.base_x+box.width-1, curses.ACS_RTEE, box.color)
273
274
275#################################################
276### Ncurses box object : box frame object to display
277### and manage a sub-window's display elements
278### using basic ncurses
279###
280### Supports:
281### * Frame drawing, content (re)drawing
282### * Content scrolling via ArrowUp, ArrowDn, PgUp, PgDN,
283### * Highlighting for active selected item
284### * Content sorting based on selected sort model
285###
286
287class NBox():
288 def __init__(self, screen, label, primary, base_x, base_y, width, height):
289 # Box description
290 self.screen = screen
291 self.label = label
292 self.primary = primary
293 self.color = curses.color_pair(CURSES_NORMAL) if screen else None
294 # Box boundaries
295 self.base_x = base_x
296 self.base_y = base_y
297 self.width = width
298 self.height = height
299 # Cursor/scroll management
300 self.cursor_enable = False
301 self.cursor_index = 0 # Absolute offset
302 self.cursor_offset = 0 # Frame centric offset
303 self.scroll_offset = 0 # Frame centric offset
304 # Box specific content
305 # Format of each entry is [package_name,is_primary_recipe,alpha_sort_key,deps_sort_key]
306 self.task_list = []
307
308 @property
309 def inside_width(self):
310 return(self.width-2)
311
312 @property
313 def inside_height(self):
314 return(self.height-2)
315
316 # Populate the box's content, including the sort mappings and is_primary flag
317 def task_list_append(self,task_name,dep):
318 task_sort_alpha = task_name
319 task_sort_deps = dep.get_dep_sort(task_name)
320 is_primary = False
321 for primary in self.primary:
322 if task_name.startswith(primary+'.'):
323 is_primary = True
324 if SORT_BITBAKE_ENABLE:
325 task_sort_bitbake = dep.get_bb_sort(task_name)
326 self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps,task_sort_bitbake])
327 else:
328 self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps])
329
330 def reset(self):
331 self.task_list = []
332 self.cursor_index = 0 # Absolute offset
333 self.cursor_offset = 0 # Frame centric offset
334 self.scroll_offset = 0 # Frame centric offset
335
336 # Sort the box's content based on the current sort model
337 def sort(self):
338 if SORT_ALPHA == sort_model:
339 self.task_list.sort(key = lambda x: x[TASK_SORT_ALPHA])
340 elif SORT_DEPS == sort_model:
341 self.task_list.sort(key = lambda x: x[TASK_SORT_DEPS])
342 elif SORT_BITBAKE == sort_model:
343 self.task_list.sort(key = lambda x: x[TASK_SORT_BITBAKE])
344
345 # The target package list (to highlight), from the command line
346 def set_primary(self,primary):
347 self.primary = primary
348
349 # Draw the box's outside frame
350 def draw_frame(self):
351 line_art_frame(self)
352 # Title
353 self.screen.addstr(self.base_y,
354 (self.base_x + (self.width//2))-((len(self.label)+2)//2),
355 '['+self.label+']')
356 self.screen.refresh()
357
358 # Draw the box's inside text content
359 def redraw(self):
360 task_list_len = len(self.task_list)
361 # Middle frame
362 body_line = "%s" % (' ' * (self.inside_width-1) )
363 for i in range(0,self.inside_height+1):
364 if i < (task_list_len + self.scroll_offset):
365 str_ctl = "%%-%ss" % (self.width-3)
366 # Safety assert
367 if (i + self.scroll_offset) >= task_list_len:
368 alert("REDRAW:%2d,%4d,%4d" % (i,self.scroll_offset,task_list_len),self.screen)
369 break
370
371 task_obj = self.task_list[i + self.scroll_offset]
372 task = task_obj[TASK_NAME][:self.inside_width-1]
373 task_primary = task_obj[TASK_PRIMARY]
374
375 if task_primary:
376 line = str_ctl % task[:self.inside_width-1]
377 self.screen.addstr(self.base_y+1+i, self.base_x+2, line, curses.A_BOLD)
378 else:
379 line = str_ctl % task[:self.inside_width-1]
380 self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
381 else:
382 line = "%s" % (' ' * (self.inside_width-1) )
383 self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
384 self.screen.refresh()
385
386 # Show the currently selected task over the bottom of the frame
387 def show_selected(self,selected_task):
388 if not selected_task:
389 selected_task = self.get_selected()
390 tag_line = "%s%s%s" % ('[',CHAR_HBAR * (self.width-2),']')
391 self.screen.addstr(self.base_y + self.height, self.base_x, tag_line)
392 self.screen.addstr(self.base_y + self.height,
393 (self.base_x + (self.width//2))-((len(selected_task)+2)//2),
394 '['+selected_task+']')
395 self.screen.refresh()
396
397 # Load box with new table of content
398 def update_content(self,task_list):
399 self.task_list = task_list
400 if self.cursor_enable:
401 cursor_update(turn_on=False)
402 self.cursor_index = 0
403 self.cursor_offset = 0
404 self.scroll_offset = 0
405 self.redraw()
406 if self.cursor_enable:
407 cursor_update(turn_on=True)
408
409 # Manage the box's highlighted task and blinking cursor character
410 def cursor_on(self,is_on):
411 self.cursor_enable = is_on
412 self.cursor_update(is_on)
413
414 # Highlight the currently pointed-to package, normal style otherwise
415 def cursor_update(self,turn_on=True):
416 str_ctl = "%%-%ss" % (self.inside_width-1)
417 try:
418 if len(self.task_list):
419 task_obj = self.task_list[self.cursor_index]
420 task = task_obj[TASK_NAME][:self.inside_width-1]
421 task_primary = task_obj[TASK_PRIMARY]
422 task_font = curses.A_BOLD if task_primary else 0
423 else:
424 task = ''
425 task_font = 0
426 except Exception as e:
427 alert("CURSOR_UPDATE:%s" % (e),self.screen)
428 return
429 if turn_on:
430 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1,">", curses.color_pair(CURSES_HIGHLIGHT) | curses.A_BLINK)
431 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, curses.color_pair(CURSES_HIGHLIGHT) | task_font)
432 else:
433 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1," ")
434 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, task_font)
435
436 # Down arrow
437 def line_down(self):
438 if len(self.task_list) <= (self.cursor_index+1):
439 return
440 self.cursor_update(turn_on=False)
441 self.cursor_index += 1
442 self.cursor_offset += 1
443 if self.cursor_offset > (self.inside_height):
444 self.cursor_offset -= 1
445 self.scroll_offset += 1
446 self.redraw()
447 self.cursor_update(turn_on=True)
448 debug_frame(self)
449
450 # Up arrow
451 def line_up(self):
452 if 0 > (self.cursor_index-1):
453 return
454 self.cursor_update(turn_on=False)
455 self.cursor_index -= 1
456 self.cursor_offset -= 1
457 if self.cursor_offset < 0:
458 self.cursor_offset += 1
459 self.scroll_offset -= 1
460 self.redraw()
461 self.cursor_update(turn_on=True)
462 debug_frame(self)
463
464 # Page down
465 def page_down(self):
466 max_task = len(self.task_list)-1
467 if max_task < self.inside_height:
468 return
469 self.cursor_update(turn_on=False)
470 self.cursor_index += 10
471 self.cursor_index = min(self.cursor_index,max_task)
472 self.cursor_offset = min(self.inside_height,self.cursor_index)
473 self.scroll_offset = self.cursor_index - self.cursor_offset
474 self.redraw()
475 self.cursor_update(turn_on=True)
476 debug_frame(self)
477
478 # Page up
479 def page_up(self):
480 max_task = len(self.task_list)-1
481 if max_task < self.inside_height:
482 return
483 self.cursor_update(turn_on=False)
484 self.cursor_index -= 10
485 self.cursor_index = max(self.cursor_index,0)
486 self.cursor_offset = max(0, self.inside_height - (max_task - self.cursor_index))
487 self.scroll_offset = self.cursor_index - self.cursor_offset
488 self.redraw()
489 self.cursor_update(turn_on=True)
490 debug_frame(self)
491
492 # Return the currently selected task name for this box
493 def get_selected(self):
494 if self.task_list:
495 return(self.task_list[self.cursor_index][TASK_NAME])
496 else:
497 return('')
498
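NBox tracks the selection with three counters: cursor_index (absolute position in task_list), cursor_offset (row within the visible frame) and scroll_offset (first visible row). Every movement method preserves the invariant cursor_index == scroll_offset + cursor_offset. A minimal standalone model of line_down() under that invariant (frame size illustrative):

    inside_height = 18                       # rows visible inside a 20-row frame
    cursor_index = cursor_offset = scroll_offset = 0

    def line_down(n_tasks):
        global cursor_index, cursor_offset, scroll_offset
        if cursor_index + 1 >= n_tasks:
            return
        cursor_index += 1
        cursor_offset += 1
        if cursor_offset > inside_height:    # cursor fell off the frame: scroll
            cursor_offset -= 1
            scroll_offset += 1
        assert cursor_index == scroll_offset + cursor_offset

    for _ in range(25):
        line_down(100)
    print(cursor_index, cursor_offset, scroll_offset)   # 25 18 7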
499#################################################
500### The helper sub-windows
501###
502
503# Show persistent help at the top of the screen
504class HelpBarView(NBox):
505 def __init__(self, screen, label, primary, base_x, base_y, width, height):
506 super(HelpBarView, self).__init__(screen, label, primary, base_x, base_y, width, height)
507
508 def show_help(self,show):
509 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.inside_width))
510 if show:
511 help = "Help='?' Filter='/' NextBox=<Tab> Select=<Enter> Print='p','P' Quit='q'"
512 bar_size = self.inside_width - 5 - len(help)
513 self.screen.addstr(self.base_y,self.base_x+((self.inside_width-len(help))//2), help)
514 self.screen.refresh()
515
516# Pop up a detailed Help box
517class HelpBoxView(NBox):
518 def __init__(self, screen, label, primary, base_x, base_y, width, height, dep):
519 super(HelpBoxView, self).__init__(screen, label, primary, base_x, base_y, width, height)
520 self.x_pos = 0
521 self.y_pos = 0
522 self.dep = dep
523
524 # Instantiate the pop-up help box
525 def show_help(self,show):
526 self.x_pos = self.base_x + 4
527 self.y_pos = self.base_y + 2
528
529 def add_line(line):
530 if line:
531 self.screen.addstr(self.y_pos,self.x_pos,line)
532 self.y_pos += 1
533
534 # Gather some statistics
535 dep_count = 0
536 rdep_count = 0
537 for task_obj in self.dep.depends_model:
538 if TYPE_DEP == task_obj[DEPENDS_TYPE]:
539 dep_count += 1
540 elif TYPE_RDEP == task_obj[DEPENDS_TYPE]:
541 rdep_count += 1
542
543 self.draw_frame()
544 line_art_fixup(self.dep)
545 add_line("Quit : 'q' ")
546 add_line("Filter task names : '/'")
547 add_line("Tab to next box : <Tab>")
548 add_line("Select a task : <Enter>")
549 add_line("Print task's deps : 'p'")
550 add_line("Print recipe's deps : 'P'")
551 add_line(" -> '%s'" % print_file_name)
552 add_line("Sort toggle : 's'")
553 add_line(" %s Recipe inner-depends order" % ('->' if (SORT_DEPS == sort_model) else '- '))
554 add_line(" %s Alpha-numeric order" % ('->' if (SORT_ALPHA == sort_model) else '- '))
555 if SORT_BITBAKE_ENABLE:
556 add_line(" %s Bitbake order" % ('->' if (SORT_BITBAKE == sort_model) else '- '))
557 add_line("Alternate backspace : <CTRL-H>")
558 add_line("")
559 add_line("Primary recipes = %s" % ','.join(self.primary))
560 add_line("Task count = %4d" % len(self.dep.pkg_model))
561 add_line("Deps count = %4d" % dep_count)
562 add_line("RDeps count = %4d" % rdep_count)
563 add_line("")
564 self.screen.addstr(self.y_pos,self.x_pos+7,"<Press any key>", curses.color_pair(CURSES_HIGHLIGHT))
565 self.screen.refresh()
566 c = self.screen.getch()
567
568# Show a progress bar
569class ProgressView(NBox):
570 def __init__(self, screen, label, primary, base_x, base_y, width, height):
571 super(ProgressView, self).__init__(screen, label, primary, base_x, base_y, width, height)
572
573 def progress(self,title,current,max):
574 if title:
575 self.label = title
576 else:
577 title = self.label
578 if max <=0: max = 10
579 bar_size = self.width - 7 - len(title)
580 bar_done = int( (float(current)/float(max)) * float(bar_size) )
581 self.screen.addstr(self.base_y,self.base_x, " %s:[%s%s]" % (title,'*' * bar_done,' ' * (bar_size-bar_done)))
582 self.screen.refresh()
583 return(current+1)
584
585 def clear(self):
586 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
587 self.screen.refresh()
588
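ProgressView scales the completed fraction to whatever bar width remains after the title and frame decorations. A worked example of the arithmetic (widths illustrative):

    width, title = 76, "DepSort"
    bar_size = width - 7 - len(title)              # 62 columns for the bar itself
    for current in range(1, 5):
        bar_done = int((current / 4) * bar_size)   # completed fraction, scaled
        print("[%s%s]" % ('*' * bar_done, ' ' * (bar_size - bar_done)))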
589# Implement a task filter bar
590class FilterView(NBox):
591 SEARCH_NOP = 0
592 SEARCH_GO = 1
593 SEARCH_CANCEL = 2
594
595 def __init__(self, screen, label, primary, base_x, base_y, width, height):
596 super(FilterView, self).__init__(screen, label, primary, base_x, base_y, width, height)
597 self.do_show = False
598 self.filter_str = ""
599
600 def clear(self,enable_show=True):
601 self.filter_str = ""
602
603 def show(self,enable_show=True):
604 self.do_show = enable_show
605 if self.do_show:
606 self.screen.addstr(self.base_y,self.base_x, "[ Filter: %-25s ] '/'=cancel, format='abc' " % self.filter_str[0:25])
607 else:
608 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
609 self.screen.refresh()
610
611 def show_prompt(self):
612 self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), " ")
613 self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), "")
614
615 # Keys specific to the filter box (start/stop filter keys are in the main loop)
616 def input(self,c,ch):
617 ret = self.SEARCH_GO
618 if c in (curses.KEY_BACKSPACE,CHAR_BS_H):
619 # Backspace
620 if self.filter_str:
621 self.filter_str = self.filter_str[0:-1]
622 self.show()
623 elif ((ch >= 'a') and (ch <= 'z')) or ((ch >= 'A') and (ch <= 'Z')) or ((ch >= '0') and (ch <= '9')) or (ch in (' ','_','.','-')):
624 # The isalnum() acts strangely with keypad(True), so use explicit bounds
625 self.filter_str += ch
626 self.show()
627 else:
628 ret = self.SEARCH_NOP
629 return(ret)
630
631
632#################################################
633### The primary dependency windows
634###
635
636# The main list of package tasks
637class PackageView(NBox):
638 def __init__(self, screen, label, primary, base_x, base_y, width, height):
639 super(PackageView, self).__init__(screen, label, primary, base_x, base_y, width, height)
640
641 # Find and vertically center a selected task (from filter or from dependent box)
642 # The 'task_filter_str' can be a full or a partial (filter) task name
643 def find(self,task_filter_str):
644 found = False
645 max = self.height-2
646 if not task_filter_str:
647 return(found)
648 for i,task_obj in enumerate(self.task_list):
649 task = task_obj[TASK_NAME]
650 if task.startswith(task_filter_str):
651 self.cursor_on(False)
652 self.cursor_index = i
653
654 # Position selected at vertical center
655 vcenter = self.inside_height // 2
656 if self.cursor_index <= vcenter:
657 self.scroll_offset = 0
658 self.cursor_offset = self.cursor_index
659 elif self.cursor_index >= (len(self.task_list) - vcenter - 1):
660 self.cursor_offset = self.inside_height-1
661 self.scroll_offset = self.cursor_index - self.cursor_offset
662 else:
663 self.cursor_offset = vcenter
664 self.scroll_offset = self.cursor_index - self.cursor_offset
665
666 self.redraw()
667 self.cursor_on(True)
668 found = True
669 break
670 return(found)
671
672# The view of dependent packages
673class PackageDepView(NBox):
674 def __init__(self, screen, label, primary, base_x, base_y, width, height):
675 super(PackageDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
676
677# The view of reverse-dependent packages
678class PackageReverseDepView(NBox):
679 def __init__(self, screen, label, primary, base_x, base_y, width, height):
680 super(PackageReverseDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
681
682
683#################################################
684### DepExplorer : The parent frame and object
685###
686
687class DepExplorer(NBox):
688 def __init__(self,screen):
689 title = "Task Dependency Explorer"
690 super(DepExplorer, self).__init__(screen, 'Task Dependency Explorer','',0,0,80,23)
691
692 self.screen = screen
693 self.pkg_model = []
694 self.depends_model = []
695 self.dep_sort_map = {}
696 self.bb_sort_map = {}
697 self.filter_str = ''
698 self.filter_prev = 'deadbeef'
699
700 if self.screen:
701 self.help_bar_view = HelpBarView(screen, "Help",'',1,1,79,1)
702 self.help_box_view = HelpBoxView(screen, "Help",'',0,2,40,20,self)
703 self.progress_view = ProgressView(screen, "Progress",'',2,1,76,1)
704 self.filter_view = FilterView(screen, "Filter",'',2,1,76,1)
705 self.package_view = PackageView(screen, "Package",'alpha', 0,2,40,20)
706 self.dep_view = PackageDepView(screen, "Dependencies",'beta',40,2,40,10)
707 self.reverse_view = PackageReverseDepView(screen, "Dependent Tasks",'gamma',40,13,40,9)
708 self.draw_frames()
709
710 # Draw this main window's frame and all sub-windows
711 def draw_frames(self):
712 self.draw_frame()
713 self.package_view.draw_frame()
714 self.dep_view.draw_frame()
715 self.reverse_view.draw_frame()
716 if is_filter:
717 self.filter_view.show(True)
718 self.filter_view.show_prompt()
719 else:
720 self.help_bar_view.show_help(True)
721 self.package_view.redraw()
722 self.dep_view.redraw()
723 self.reverse_view.redraw()
724 self.show_selected(self.package_view.get_selected())
725 line_art_fixup(self)
726
727 # Parse the bitbake dependency event object
728 def parse(self, depgraph):
729 for task in depgraph["tdepends"]:
730 self.pkg_model.insert(0, task)
731 for depend in depgraph["tdepends"][task]:
732 self.depends_model.insert (0, (TYPE_DEP, task, depend))
733 self.depends_model.insert (0, (TYPE_RDEP, depend, task))
734 if self.screen:
735 self.dep_sort_prep()
736
737 # Prepare the dependency sort order keys
738 # This method creates sort keys per recipe task in
739 # the order of each recipe's internal dependencies
740 # Method:
741 # Filter the tasks in dep order into dep_sort_map = {}
742 # (a) Find a task that has no dependencies
743 # Ignore non-recipe specific tasks
744 # (b) Add it to the sort mapping dict with
745 # key of "<task_group>_<order>"
746 # (c) Remove it as a dependency from the other tasks
747 # (d) Repeat until all tasks are mapped
748 # Use placeholders to ensure each sub-dict is instantiated
749 def dep_sort_prep(self):
750 self.progress_view.progress('DepSort',0,4)
751 # Init the task base entries
752 self.progress_view.progress('DepSort',1,4)
753 dep_table = {}
754 bb_index = 0
755 for task in self.pkg_model:
756 # First define the incoming bitbake sort order
757 self.bb_sort_map[task] = "%04d" % (bb_index)
758 bb_index += 1
759 task_group = task[0:task.find('.')]
760 if task_group not in dep_table:
761 dep_table[task_group] = {}
762 dep_table[task_group]['-'] = {} # Placeholder
763 if task not in dep_table[task_group]:
764 dep_table[task_group][task] = {}
765 dep_table[task_group][task]['-'] = {} # Placeholder
766 # Add the task dependency entries
767 self.progress_view.progress('DepSort',2,4)
768 for task_obj in self.depends_model:
769 if task_obj[DEPENDS_TYPE] != TYPE_DEP:
770 continue
771 task = task_obj[DEPENDS_TASK]
772 task_dep = task_obj[DEPENDS_DEPS]
773 task_group = task[0:task.find('.')]
774 # Only track depends within same group
775 if task_dep.startswith(task_group+'.'):
776 dep_table[task_group][task][task_dep] = 1
777 self.progress_view.progress('DepSort',3,4)
778 for task_group in dep_table:
779 dep_index = 0
780 # Whittle down the tasks of each group
781 this_pass = 1
782 do_loop = True
783 while (len(dep_table[task_group]) > 1) and do_loop:
784 this_pass += 1
785 is_change = False
786 delete_list = []
787 for task in dep_table[task_group]:
788 if '-' == task:
789 continue
790 if 1 == len(dep_table[task_group][task]):
791 is_change = True
792 # No more deps, so collect this task...
793 self.dep_sort_map[task] = "%s_%04d" % (task_group,dep_index)
794 dep_index += 1
795 # ... remove it from other lists as resolved ...
796 for dep_task in dep_table[task_group]:
797 if task in dep_table[task_group][dep_task]:
798 del dep_table[task_group][dep_task][task]
799 # ... and remove it from the task group
800 delete_list.append(task)
801 for task in delete_list:
802 del dep_table[task_group][task]
803 if not is_change:
804 alert("ERROR:DEP_SIEVE_NO_CHANGE:%s" % task_group,self.screen)
805 do_loop = False
806 continue
807 self.progress_view.progress('',4,4)
808 self.progress_view.clear()
809 self.help_bar_view.show_help(True)
810 if len(self.dep_sort_map) != len(self.pkg_model):
811 alert("ErrorDepSort:%d/%d" % (len(self.dep_sort_map),len(self.pkg_model)),self.screen)
812
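The sieve is effectively a per-recipe topological sort in the spirit of Kahn's algorithm: collect tasks whose in-group dependencies are all resolved, assign them ascending keys, strike them from the remaining entries, and repeat; if a pass removes nothing, a cycle is reported and the loop bails out. A compact standalone model (task names illustrative; the real code prefixes keys with the recipe group):

    def dep_sort(tasks, deps):
        order = {}
        remaining = {t: set(deps.get(t, ())) for t in tasks}
        index = 0
        while remaining:
            ready = sorted(t for t, d in remaining.items() if not d)
            if not ready:                    # cycle: mirrors the alert() bail-out
                break
            for t in ready:
                order[t] = "%04d" % index    # ascending dep-order sort key
                index += 1
                del remaining[t]
                for d in remaining.values():
                    d.discard(t)
        return order

    print(dep_sort(["a.do_compile", "a.do_fetch", "a.do_unpack"],
                   {"a.do_unpack": {"a.do_fetch"},
                    "a.do_compile": {"a.do_unpack"}}))
    # {'a.do_fetch': '0000', 'a.do_unpack': '0001', 'a.do_compile': '0002'}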
813 # Look up a dep sort order key
814 def get_dep_sort(self,key):
815 if key in self.dep_sort_map:
816 return(self.dep_sort_map[key])
817 else:
818 return(key)
819
820 # Look up a bitbake sort order key
821 def get_bb_sort(self,key):
822 if key in self.bb_sort_map:
823 return(self.bb_sort_map[key])
824 else:
825 return(key)
826
827 # Find the selected package in the main frame, update the dependency frames content accordingly
828 def select(self, package_name, only_update_dependents=False):
829 if not package_name:
830 package_name = self.package_view.get_selected()
831 # alert("SELECT:%s:" % package_name,self.screen)
832
833 if self.filter_str != self.filter_prev:
834 self.package_view.cursor_on(False)
835 # Fill the main package task list using the new filter
836 self.package_view.task_list = []
837 for package in self.pkg_model:
838 if self.filter_str:
839 if self.filter_str in package:
840 self.package_view.task_list_append(package,self)
841 else:
842 self.package_view.task_list_append(package,self)
843 self.package_view.sort()
844 self.filter_prev = self.filter_str
845
846 # Old position is lost, assert new position of previous task (if still filtered in)
847 self.package_view.cursor_index = 0
848 self.package_view.cursor_offset = 0
849 self.package_view.scroll_offset = 0
850 self.package_view.redraw()
851 self.package_view.cursor_on(True)
852
853 # Make sure the selected package is in view, with implicit redraw()
854 if (not only_update_dependents):
855 self.package_view.find(package_name)
856 # In case the selected name changed (i.e. the filter removed the previous one)
857 package_name = self.package_view.get_selected()
858
859 # Filter the package's dependent list to the dependent view
860 self.dep_view.reset()
861 for package_def in self.depends_model:
862 if (package_def[DEPENDS_TYPE] == TYPE_DEP) and (package_def[DEPENDS_TASK] == package_name):
863 self.dep_view.task_list_append(package_def[DEPENDS_DEPS],self)
864 self.dep_view.sort()
865 self.dep_view.redraw()
866 # Filter the package's dependent list to the reverse dependent view
867 self.reverse_view.reset()
868 for package_def in self.depends_model:
869 if (package_def[DEPENDS_TYPE] == TYPE_RDEP) and (package_def[DEPENDS_TASK] == package_name):
870 self.reverse_view.task_list_append(package_def[DEPENDS_DEPS],self)
871 self.reverse_view.sort()
872 self.reverse_view.redraw()
873 self.show_selected(package_name)
874 self.screen.refresh()
875
876 # The print-to-file method
877 def print_deps(self,whole_group=False):
878 global is_printed
879 # Print the selected deptree(s) to a file
880 if not is_printed:
881 try:
882 # Move any existing file to a backup before the first write
883 if os.path.isfile(print_file_name):
884 os.system('mv -f %s %s' % (print_file_name,print_file_backup_name))
885 except Exception as e:
886 alert(e,self.screen)
887 alert('',self.screen)
888 print_list = []
889 selected_task = self.package_view.get_selected()
890 if not selected_task:
891 return
892 if not whole_group:
893 print_list.append(selected_task)
894 else:
895 # Use the presorted task_group order from 'package_view'
896 task_group = selected_task[0:selected_task.find('.')+1]
897 for task_obj in self.package_view.task_list:
898 task = task_obj[TASK_NAME]
899 if task.startswith(task_group):
900 print_list.append(task)
901 with open(print_file_name, "a") as fd:
902 print_max = len(print_list)
903 print_count = 1
904 self.progress_view.progress('Write "%s"' % print_file_name,0,print_max)
905 for task in print_list:
906 print_count = self.progress_view.progress('',print_count,print_max)
907 self.select(task)
908 self.screen.refresh()
909 # Utilize the current print output model
910 if print_model == PRINT_MODEL_1:
911 print("=== Dependendency Snapshot ===",file=fd)
912 print(" = Package =",file=fd)
913 print(' '+task,file=fd)
914 # Fill in the matching dependencies
915 print(" = Dependencies =",file=fd)
916 for task_obj in self.dep_view.task_list:
917 print(' '+ task_obj[TASK_NAME],file=fd)
918 print(" = Dependent Tasks =",file=fd)
919 for task_obj in self.reverse_view.task_list:
920 print(' '+ task_obj[TASK_NAME],file=fd)
921 if print_model == PRINT_MODEL_2:
922 print("=== Dependendency Snapshot ===",file=fd)
923 dep_count = len(self.dep_view.task_list) - 1
924 for i,task_obj in enumerate(self.dep_view.task_list):
925 print('%s%s' % ("Dep =" if (i==dep_count) else " ",task_obj[TASK_NAME]),file=fd)
926 if not self.dep_view.task_list:
927 print('Dep =',file=fd)
928 print("Package=%s" % task,file=fd)
929 for i,task_obj in enumerate(self.reverse_view.task_list):
930 print('%s%s' % ("RDep =" if (i==0) else " ",task_obj[TASK_NAME]),file=fd)
931 if not self.reverse_view.task_list:
932 print('RDep =',file=fd)
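# With two dependencies and one dependent task, model 2 prints one flat
# block per task, e.g. (hypothetical task names):
#   === Dependency Snapshot ===
#        acl.do_fetch
#   Dep =acl.do_unpack
#   Package=acl.do_compile
#   RDep =acl.do_install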
933 curses.napms(2000)
934 self.progress_view.clear()
935 self.help_bar_view.show_help(True)
936 print('',file=fd)
937 # Restore display to original selected task
938 self.select(selected_task)
939 is_printed = True
940
941#################################################
942### Load bitbake data
943###
944
945def bitbake_load(server, eventHandler, params, dep, curses_off, screen):
946 global bar_len_old
947 bar_len_old = 0
948
949 # Support running with no screen (headless unit-test mode)
950 def progress(msg,count,max):
951 global bar_len_old
952 if screen:
953 dep.progress_view.progress(msg,count,max)
954 else:
955 if msg:
956 if bar_len_old:
957 bar_len_old = 0
958 print("\n")
959 print(f"{msg}: ({count} of {max})")
960 else:
961 bar_len = int((count*40)/max)
962 if bar_len_old != bar_len:
963 print(f"{'*' * (bar_len-bar_len_old)}",end='',flush=True)
964 bar_len_old = bar_len
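# The text bar is 40 columns wide: bar_len = (count*40)//max, and only the
# newly earned stars are printed on each call (e.g. count=50 of max=200
# yields 10 stars so far), so the bar grows in place on one line.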
965 def clear():
966 if screen:
967 dep.progress_view.clear()
968 def clear_curses(screen):
969 if screen:
970 curses_off(screen)
971
972 #
973 # Trigger bitbake "generateDepTreeEvent"
974 #
975
976 cmdline = ''
977 try:
978 params.updateToServer(server, os.environ.copy())
979 params.updateFromServer(server)
980 cmdline = params.parseActions()
981 if not cmdline:
982 clear_curses(screen)
983 print("ERROR: nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
984 return 1,cmdline
985 if 'msg' in cmdline and cmdline['msg']:
986 clear_curses(screen)
987 print('ERROR: ' + cmdline['msg'])
988 return 1,cmdline
989 cmdline = cmdline['action']
990 if not cmdline or cmdline[0] != "generateDotGraph":
991 clear_curses(screen)
992 print("ERROR: This UI requires the -g option")
993 return 1,cmdline
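# At this point cmdline looks like ['generateDotGraph', [targets...], task]
# (see the worked example near the view presets below), so hand the target
# list and the task name to the server as a dependency-tree request.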
994 ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
995 if error:
996 clear_curses(screen)
997 print("ERROR: running command '%s': %s" % (cmdline, error))
998 return 1,cmdline
999 elif not ret:
1000 clear_curses(screen)
1001 print("ERROR: running command '%s': returned %s" % (cmdline, ret))
1002 return 1,cmdline
1003 except client.Fault as x:
1004 clear_curses(screen)
1005 print("ERROR: XMLRPC Fault getting commandline:\n %s" % x)
1006 return 1,cmdline
1007 except Exception as e:
1008 clear_curses(screen)
1009 print("ERROR: in startup:\n %s" % traceback.format_exc())
1010 return 1,cmdline
1011
1012 #
1013 # Receive data from bitbake
1014 #
1015
1016 progress_total = 0
1017 load_bitbake = True
1018 quit = False
     shutdown = 0  # count of Ctrl-C presses; read by the KeyboardInterrupt handler below
1019 try:
1020 while load_bitbake:
1021 try:
1022 event = eventHandler.waitEvent(0.25)
1023 if quit:
1024 _, error = server.runCommand(["stateForceShutdown"])
1025 clear_curses(screen)
1026 if error:
1027 print('Unable to cleanly stop: %s' % error)
1028 break
1029
1030 if event is None:
1031 continue
1032
1033 if isinstance(event, bb.event.CacheLoadStarted):
1034 progress_total = event.total
1035 progress('Loading Cache',0,progress_total)
1036 continue
1037
1038 if isinstance(event, bb.event.CacheLoadProgress):
1039 x = event.current
1040 progress('',x,progress_total)
1041 continue
1042
1043 if isinstance(event, bb.event.CacheLoadCompleted):
1044 clear()
1045 progress('Bitbake... ',1,2)
1046 continue
1047
1048 if isinstance(event, bb.event.ParseStarted):
1049 progress_total = event.total
1050 progress('Processing recipes',0,progress_total)
1051 if progress_total == 0:
1052 continue
1053
1054 if isinstance(event, bb.event.ParseProgress):
1055 x = event.current
1056 progress('',x,progress_total)
1057 continue
1058
1059 if isinstance(event, bb.event.ParseCompleted):
1060 progress('Generating dependency tree',0,3)
1061 continue
1062
1063 if isinstance(event, bb.event.DepTreeGenerated):
1064 progress('Generating dependency tree',1,3)
1065 dep.parse(event._depgraph)
1066 progress('Generating dependency tree',2,3)
1067
1068 if isinstance(event, bb.command.CommandCompleted):
1069 load_bitbake = False
1070 progress('Generating dependency tree',3,3)
1071 clear()
1072 if screen:
1073 dep.help_bar_view.show_help(True)
1074 continue
1075
1076 if isinstance(event, bb.event.NoProvider):
1077 clear_curses(screen)
1078 print('ERROR: %s' % event)
1079
1080 _, error = server.runCommand(["stateShutdown"])
1081 if error:
1082 print('ERROR: Unable to cleanly shutdown: %s' % error)
1083 return 1,cmdline
1084
1085 if isinstance(event, bb.command.CommandFailed):
1086 clear_curses(screen)
1087 print('ERROR: ' + str(event))
1088 return event.exitcode,cmdline
1089
1090 if isinstance(event, bb.command.CommandExit):
1091 clear_curses(screen)
1092 return event.exitcode,cmdline
1093
1094 if isinstance(event, bb.cooker.CookerExit):
1095 break
1096
1097 continue
1098 except EnvironmentError as ioerror:
1099 # ignore interrupted io
1100 if ioerror.args[0] == 4:
1101 pass
1102 except KeyboardInterrupt:
1103 if shutdown == 2:
1104 clear_curses(screen)
1105 print("\nThird Keyboard Interrupt, exit.\n")
1106 break
1107 if shutdown == 1:
1108 clear_curses(screen)
1109 print("\nSecond Keyboard Interrupt, stopping...\n")
1110 _, error = server.runCommand(["stateForceShutdown"])
1111 if error:
1112 print('Unable to cleanly stop: %s' % error)
1113 if shutdown == 0:
1114 clear_curses(screen)
1115 print("\nKeyboard Interrupt, closing down...\n")
1116 _, error = server.runCommand(["stateShutdown"])
1117 if error:
1118 print('Unable to cleanly shutdown: %s' % error)
1119 shutdown = shutdown + 1
1120 pass
1121 except Exception as e:
1122 # Safe exit on error
1123 clear_curses(screen)
1124 print("Exception : %s" % e)
1125 print("Exception in startup:\n %s" % traceback.format_exc())
1126
1127 return 0,cmdline
1128
1129#################################################
1130### main
1131###
1132
1133SCREEN_COL_MIN = 83
1134SCREEN_ROW_MIN = 26
1135
1136def main(server, eventHandler, params):
1137 global verbose
1138 global sort_model
1139 global print_model
1140 global is_printed
1141 global is_filter
1142 global screen_too_small
1143
1144 shutdown = 0
1145 screen_too_small = False
1146 quit = False
1147
1148 # Unit test with no terminal?
1149 if unit_test_noterm:
1150 # Load bitbake, test that there is valid dependency data, then exit
1151 screen = None
1152 print("* UNIT TEST:START")
1153 dep = DepExplorer(screen)
1154 print("* UNIT TEST:BITBAKE FETCH")
1155 ret,cmdline = bitbake_load(server, eventHandler, params, dep, None, screen)
1156 if ret:
1157 print("* UNIT TEST: BITBAKE FAILED")
1158 return ret
1159 # Test the acquired dependency data
1160 quilt_native_deps = 0
1161 quilt_native_rdeps = 0
1162 quilt_deps = 0
1163 quilt_rdeps = 0
1164 for i,task_obj in enumerate(dep.depends_model):
1165 if TYPE_DEP == task_obj[0]:
1166 task = task_obj[1]
1167 if task.startswith('quilt-native'):
1168 quilt_native_deps += 1
1169 elif task.startswith('quilt'):
1170 quilt_deps += 1
1171 elif TYPE_RDEP == task_obj[0]:
1172 task = task_obj[1]
1173 if task.startswith('quilt-native'):
1174 quilt_native_rdeps += 1
1175 elif task.startswith('quilt'):
1176 quilt_rdeps += 1
1177 # Print results
1178 failed = False
1179 if 0 < len(dep.depends_model):
1180 print(f"Pass:Bitbake dependency count = {len(dep.depends_model)}")
1181 else:
1182 failed = True
1183 print(f"FAIL:Bitbake dependency count = 0")
1184 if quilt_native_deps:
1185 print(f"Pass:Quilt-native depends count = {quilt_native_deps}")
1186 else:
1187 failed = True
1188 print(f"FAIL:Quilt-native depends count = 0")
1189 if quilt_native_rdeps:
1190 print(f"Pass:Quilt-native rdepends count = {quilt_native_rdeps}")
1191 else:
1192 failed = True
1193 print(f"FAIL:Quilt-native rdepends count = 0")
1194 if quilt_deps:
1195 print(f"Pass:Quilt depends count = {quilt_deps}")
1196 else:
1197 failed = True
1198 print(f"FAIL:Quilt depends count = 0")
1199 if quilt_rdeps:
1200 print(f"Pass:Quilt rdepends count = {quilt_rdeps}")
1201 else:
1202 failed = True
1203 print(f"FAIL:Quilt rdepends count = 0")
1204 print("* UNIT TEST:STOP")
1205 return failed
1206
1207 # Helper method to dynamically test whether the parent window is too small
1208 def check_screen_size(dep, active_package):
1209 global screen_too_small
1210 rows, cols = screen.getmaxyx()
1211 if (rows >= SCREEN_ROW_MIN) and (cols >= SCREEN_COL_MIN):
1212 if screen_too_small:
1213 # Now big enough, remove error message and redraw screen
1214 dep.draw_frames()
1215 active_package.cursor_on(True)
1216 screen_too_small = False
1217 return True
1218 # Test on App init
1219 if not dep:
1220 # Do not start this app if screen not big enough
1221 curses.endwin()
1222 print("")
1223 print("ERROR(Taskexp_cli): Mininal screen size is %dx%d" % (SCREEN_COL_MIN,SCREEN_ROW_MIN))
1224 print("Current screen is Cols=%s,Rows=%d" % (cols,rows))
1225 return False
1226 # First time window too small
1227 if not screen_too_small:
1228 active_package.cursor_on(False)
1229 dep.screen.addstr(0,2,'[BIGGER WINDOW PLEASE]', curses.color_pair(CURSES_WARNING) | curses.A_BLINK)
1230 screen_too_small = True
1231 return False
1232
1233 # Helper method to turn off curses mode
1234 def curses_off(screen):
1235 if not screen: return
1236 # Safe error exit
1237 screen.keypad(False)
1238 curses.echo()
1239 curses.curs_set(1)
1240 curses.endwin()
1241
1242 if unit_test_results:
1243 print('\nUnit Test Results:')
1244 for line in unit_test_results:
1245 print(" %s" % line)
1246
1247 #
1248 # Initialize the ncurses environment
1249 #
1250
1251 screen = curses.initscr()
1252 try:
1253 if not check_screen_size(None, None):
1254 exit(1)
1255 try:
1256 curses.start_color()
1257 curses.use_default_colors()
1258 curses.init_pair(0xFF, curses.COLOR_BLACK, curses.COLOR_WHITE)
1259 curses.init_pair(CURSES_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK)
1260 curses.init_pair(CURSES_HIGHLIGHT, curses.COLOR_WHITE, curses.COLOR_BLUE)
1261 curses.init_pair(CURSES_WARNING, curses.COLOR_WHITE, curses.COLOR_RED)
1262 except:
1263 curses.endwin()
1264 print("")
1265 print("ERROR(Taskexp_cli): Requires 256 colors. Please use this or the equivalent:")
1266 print(" $ export TERM='xterm-256color'")
1267 exit(1)
1268
1269 screen.keypad(True)
1270 curses.noecho()
1271 curses.curs_set(0)
1272 screen.refresh()
1273 except Exception as e:
1274 # Safe error exit
1275 curses_off(screen)
1276 print("Exception : %s" % e)
1277 print("Exception in startup:\n %s" % traceback.format_exc())
1278 exit(1)
1279
1280 try:
1281 #
1282 # Instantiate the presentation layers
1283 #
1284
1285 dep = DepExplorer(screen)
1286
1287 #
1288 # Prepare bitbake
1289 #
1290
1291 # Fetch the bitbake dependency data
1292 ret,cmdline = bitbake_load(server, eventHandler, params, dep, curses_off, screen)
1293 if ret: return ret
1294
1295 #
1296 # Preset the views
1297 #
1298
1299 # Cmdline example = ['generateDotGraph', ['acl', 'zlib'], 'build']
1300 primary_packages = cmdline[1]
1301 dep.package_view.set_primary(primary_packages)
1302 dep.dep_view.set_primary(primary_packages)
1303 dep.reverse_view.set_primary(primary_packages)
1304 dep.help_box_view.set_primary(primary_packages)
1305 dep.help_bar_view.show_help(True)
1306 active_package = dep.package_view
1307 active_package.cursor_on(True)
1308 dep.select(primary_packages[0]+'.')
1309 if unit_test:
1310 alert('UNIT_TEST',screen)
1311
1312 # Helper method to start/stop the filter feature
1313 def filter_mode(new_filter_status):
1314 global is_filter
1315 if is_filter == new_filter_status:
1316 # Ignore no changes
1317 return
1318 if not new_filter_status:
1319 # Turn off
1320 curses.curs_set(0)
1321 #active_package.cursor_on(False)
1322 active_package = dep.package_view
1323 active_package.cursor_on(True)
1324 is_filter = False
1325 dep.help_bar_view.show_help(True)
1326 dep.filter_str = ''
1327 dep.select('')
1328 else:
1329 # Turn on
1330 curses.curs_set(1)
1331 dep.help_bar_view.show_help(False)
1332 dep.filter_view.clear()
1333 dep.filter_view.show(True)
1334 dep.filter_view.show_prompt()
1335 is_filter = True
1336
1337 #
1338 # Main user loop
1339 #
1340
1341 while not quit:
1342 if is_filter:
1343 dep.filter_view.show_prompt()
1344 if unit_test:
1345 c = unit_test_action(active_package)
1346 else:
1347 c = screen.getch()
1348 ch = chr(c)
1349
1350 # Do not draw if window now too small
1351 if not check_screen_size(dep,active_package):
1352 continue
1353
1354 if verbose:
1355 if c == CHAR_RETURN:
1356 screen.addstr(0, 4, "|%3d,CR |" % (c))
1357 else:
1358 screen.addstr(0, 4, "|%3d,%3s|" % (c,chr(c)))
1359
1360 # pre-map alternate filter close keys
1361 if is_filter and (c == CHAR_ESCAPE):
1362 # Alternate exit from filter
1363 ch = '/'
1364 c = ord(ch)
1365
1366 # Filter and non-filter mode command keys
1367 # https://docs.python.org/3/library/curses.html
1368 if c in (curses.KEY_UP,CHAR_UP):
1369 active_package.line_up()
1370 if active_package == dep.package_view:
1371 dep.select('',only_update_dependents=True)
1372 elif c in (curses.KEY_DOWN,CHAR_DOWN):
1373 active_package.line_down()
1374 if active_package == dep.package_view:
1375 dep.select('',only_update_dependents=True)
1376 elif curses.KEY_PPAGE == c:
1377 active_package.page_up()
1378 if active_package == dep.package_view:
1379 dep.select('',only_update_dependents=True)
1380 elif curses.KEY_NPAGE == c:
1381 active_package.page_down()
1382 if active_package == dep.package_view:
1383 dep.select('',only_update_dependents=True)
1384 elif CHAR_TAB == c:
1385 # Tab between boxes
1386 active_package.cursor_on(False)
1387 if active_package == dep.package_view:
1388 active_package = dep.dep_view
1389 elif active_package == dep.dep_view:
1390 active_package = dep.reverse_view
1391 else:
1392 active_package = dep.package_view
1393 active_package.cursor_on(True)
1394 elif curses.KEY_BTAB == c:
1395 # Shift-Tab reverse between boxes
1396 active_package.cursor_on(False)
1397 if active_package == dep.package_view:
1398 active_package = dep.reverse_view
1399 elif active_package == dep.reverse_view:
1400 active_package = dep.dep_view
1401 else:
1402 active_package = dep.package_view
1403 active_package.cursor_on(True)
1404 elif (CHAR_RETURN == c):
1405 # CR to select
1406 selected = active_package.get_selected()
1407 if selected:
1408 active_package.cursor_on(False)
1409 active_package = dep.package_view
1410 filter_mode(False)
1411 dep.select(selected)
1412 else:
1413 filter_mode(False)
1414 dep.select(primary_packages[0]+'.')
1415
1416 elif '/' == ch: # Enter/exit dep.filter_view
1417 if is_filter:
1418 filter_mode(False)
1419 else:
1420 filter_mode(True)
1421 elif is_filter:
1422 # If in filter mode, re-direct all these other keys to the filter box
1423 result = dep.filter_view.input(c,ch)
1424 dep.filter_str = dep.filter_view.filter_str
1425 dep.select('')
1426
1427 # Non-filter mode command keys
1428 elif 'p' == ch:
1429 dep.print_deps(whole_group=False)
1430 elif 'P' == ch:
1431 dep.print_deps(whole_group=True)
1432 elif 'w' == ch:
1433 # Toggle the print model
1434 if print_model == PRINT_MODEL_1:
1435 print_model = PRINT_MODEL_2
1436 else:
1437 print_model = PRINT_MODEL_1
1438 elif 's' == ch:
1439 # Toggle the sort model
1440 if sort_model == SORT_DEPS:
1441 sort_model = SORT_ALPHA
1442 elif sort_model == SORT_ALPHA:
1443 if SORT_BITBAKE_ENABLE:
1444 sort_model = TASK_SORT_BITBAKE
1445 else:
1446 sort_model = SORT_DEPS
1447 else:
1448 sort_model = SORT_DEPS
1449 active_package.cursor_on(False)
1450 current_task = active_package.get_selected()
1451 dep.package_view.sort()
1452 dep.dep_view.sort()
1453 dep.reverse_view.sort()
1454 active_package = dep.package_view
1455 active_package.cursor_on(True)
1456 dep.select(current_task)
1457 # Announce the new sort model
1458 alert("SORT=%s" % ("ALPHA" if (sort_model == SORT_ALPHA) else "DEPS"),screen)
1459 alert('',screen)
1460
1461 elif 'q' == ch:
1462 quit = True
1463 elif ch in ('h','?'):
1464 dep.help_box_view.show_help(True)
1465 dep.select(active_package.get_selected())
1466
1467 #
1468 # Debugging commands
1469 #
1470
1471 elif 'V' == ch:
1472 verbose = not verbose
1473 alert('Verbose=%s' % str(verbose),screen)
1474 alert('',screen)
1475 elif 'R' == ch:
1476 screen.refresh()
1477 elif 'B' == ch:
1478 # Progress bar unit test
1479 dep.progress_view.progress('Test',0,40)
1480 curses.napms(1000)
1481 dep.progress_view.progress('',10,40)
1482 curses.napms(1000)
1483 dep.progress_view.progress('',20,40)
1484 curses.napms(1000)
1485 dep.progress_view.progress('',30,40)
1486 curses.napms(1000)
1487 dep.progress_view.progress('',40,40)
1488 curses.napms(1000)
1489 dep.progress_view.clear()
1490 dep.help_bar_view.show_help(True)
1491 elif 'Q' == ch:
1492 # Simulated error
1493 curses_off(screen)
1494 print('ERROR: simulated error exit')
1495 return 1
1496
1497 # Safe exit
1498 curses_off(screen)
1499 except Exception as e:
1500 # Safe exit on error
1501 curses_off(screen)
1502 print("Exception : %s" % e)
1503 print("Exception in startup:\n %s" % traceback.format_exc())
1504
1505 # Reminder to pick up your printed results
1506 if is_printed:
1507 print("")
1508 print("You have output ready!")
1509 print(" * Your printed dependency file is: %s" % print_file_name)
1510 print(" * Your previous results saved in: %s" % print_file_backup_name)
1511 print("")
diff --git a/bitbake/lib/bb/ui/teamcity.py b/bitbake/lib/bb/ui/teamcity.py
index fca46c2874..7eeaab8d63 100644
--- a/bitbake/lib/bb/ui/teamcity.py
+++ b/bitbake/lib/bb/ui/teamcity.py
@@ -30,7 +30,6 @@ import bb.build
 import bb.command
 import bb.cooker
 import bb.event
-import bb.exceptions
 import bb.runqueue
 from bb.ui import uihelper
 
@@ -102,10 +101,6 @@ class TeamcityLogFormatter(logging.Formatter):
         details = ""
         if hasattr(record, 'bb_exc_formatted'):
             details = ''.join(record.bb_exc_formatted)
-        elif hasattr(record, 'bb_exc_info'):
-            etype, value, tb = record.bb_exc_info
-            formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
-            details = ''.join(formatted)
 
         if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]:
             # ERROR gets a separate errorDetails field
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
index ec5bd4f105..6bd21f1844 100644
--- a/bitbake/lib/bb/ui/toasterui.py
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -385,7 +385,7 @@ def main(server, eventHandler, params):
                     main.shutdown = 1
 
                 logger.info("ToasterUI build done, brbe: %s", brbe)
-                continue
+                break
 
             if isinstance(event, (bb.command.CommandCompleted,
                                   bb.command.CommandFailed,
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index 8607d0523b..c2f830d530 100644
--- a/bitbake/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -44,7 +44,7 @@ class BBUIEventQueue:
         for count_tries in range(5):
             ret = self.BBServer.registerEventHandler(self.host, self.port)
 
-            if isinstance(ret, collections.Iterable):
+            if isinstance(ret, collections.abc.Iterable):
                 self.EventHandle, error = ret
             else:
                 self.EventHandle = ret
@@ -65,35 +65,27 @@ class BBUIEventQueue:
         self.server = server
 
         self.t = threading.Thread()
-        self.t.setDaemon(True)
+        self.t.daemon = True
         self.t.run = self.startCallbackHandler
         self.t.start()
 
     def getEvent(self):
-
-        self.eventQueueLock.acquire()
-
-        if len(self.eventQueue) == 0:
-            self.eventQueueLock.release()
-            return None
-
-        item = self.eventQueue.pop(0)
-
-        if len(self.eventQueue) == 0:
-            self.eventQueueNotify.clear()
-
-        self.eventQueueLock.release()
-        return item
+        with bb.utils.lock_timeout(self.eventQueueLock):
+            if not self.eventQueue:
+                return None
+            item = self.eventQueue.pop(0)
+            if not self.eventQueue:
+                self.eventQueueNotify.clear()
+            return item
 
     def waitEvent(self, delay):
         self.eventQueueNotify.wait(delay)
         return self.getEvent()
 
     def queue_event(self, event):
-        self.eventQueueLock.acquire()
-        self.eventQueue.append(event)
-        self.eventQueueNotify.set()
-        self.eventQueueLock.release()
+        with bb.utils.lock_timeout(self.eventQueueLock):
+            self.eventQueue.append(event)
+            self.eventQueueNotify.set()
 
     def send_event(self, event):
         self.queue_event(pickle.loads(event))
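The rewritten getEvent()/queue_event() take the queue lock through
bb.utils.lock_timeout() instead of bare acquire()/release() pairs, so an early
return or exception can no longer leak a held lock. A minimal sketch of the
pattern, assuming a bounded-acquire context manager (the real helper lives in
bb/utils.py; its timeout value and error handling are not shown in this diff):

    import threading
    from contextlib import contextmanager

    @contextmanager
    def lock_timeout(lock, timeout=300):
        # Bounded acquire instead of a bare lock.acquire()
        if not lock.acquire(timeout=timeout):
            raise RuntimeError("lock acquisition timed out")
        try:
            yield
        finally:
            lock.release()

    queue_lock = threading.Lock()
    with lock_timeout(queue_lock):
        pass  # critical section: pop/append queue items here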
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
index 48d808ae28..e6983bd559 100644
--- a/bitbake/lib/bb/ui/uihelper.py
+++ b/bitbake/lib/bb/ui/uihelper.py
@@ -31,7 +31,7 @@ class BBUIHelper:
 
         if isinstance(event, bb.build.TaskStarted):
             tid = event._fn + ":" + event._task
-            if event._mc != "default":
+            if event._mc != "":
                 self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
             else:
                 self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
@@ -49,9 +49,11 @@ class BBUIHelper:
             tid = event._fn + ":" + event._task
             removetid(event.pid, tid)
             self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
-        elif isinstance(event, bb.runqueue.runQueueTaskStarted):
-            self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
+        elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
+            self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed
             self.tasknumber_total = event.stats.total
+            self.setscene_current = event.stats.setscene_active + event.stats.setscene_covered + event.stats.setscene_notcovered
+            self.setscene_total = event.stats.setscene_total
             self.needUpdate = True
         elif isinstance(event, bb.build.TaskProgress):
             if event.pid > 0 and event.pid in self.pidmap:
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index b282d09abf..1cc74ed546 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -11,24 +11,29 @@ import re, fcntl, os, string, stat, shutil, time
 import sys
 import errno
 import logging
-import bb
-import bb.msg
+import locale
 import multiprocessing
-import fcntl
 import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
 import itertools
 import subprocess
 import glob
 import fnmatch
 import traceback
-import errno
 import signal
 import collections
 import copy
+import ctypes
+import random
+import socket
+import struct
+import tempfile
 from subprocess import getstatusoutput
 from contextlib import contextmanager
 from ctypes import cdll
+import bb
+import bb.msg
 
 logger = logging.getLogger("BitBake.Util")
 python_extensions = importlib.machinery.all_suffixes()
@@ -43,7 +48,7 @@ def clean_context():
 
 def get_context():
     return _context
-    
+
 
 def set_context(ctx):
     _context = ctx
@@ -77,7 +82,16 @@ def explode_version(s):
     return r
 
 def split_version(s):
-    """Split a version string into its constituent parts (PE, PV, PR)"""
+    """Split a version string into its constituent parts (PE, PV, PR).
+
+    Arguments:
+
+    - ``s``: version string. The format of the input string should be::
+
+          ${PE}:${PV}-${PR}
+
+    Returns a tuple ``(pe, pv, pr)``.
+    """
     s = s.strip(" <>=")
     e = 0
     if s.count(':'):
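Assuming the long-standing implementation (integer epoch, empty revision when
absent), the documented format plays out as:

    split_version("2:1.2.3-r4")   # -> (2, '1.2.3', 'r4')
    split_version("1.2.3")        # -> (0, '1.2.3', '')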
@@ -129,16 +143,30 @@ def vercmp(ta, tb):
     return r
 
 def vercmp_string(a, b):
-    """ Split version strings and compare them """
+    """ Split version strings using ``bb.utils.split_version()`` and compare
+    them with ``bb.utils.vercmp().``
+
+    Arguments:
+
+    - ``a``: left version string operand.
+    - ``b``: right version string operand.
+
+    Returns what ``bb.utils.vercmp()`` returns."""
     ta = split_version(a)
     tb = split_version(b)
     return vercmp(ta, tb)
 
 def vercmp_string_op(a, b, op):
     """
-    Compare two versions and check if the specified comparison operator matches the result of the comparison.
-    This function is fairly liberal about what operators it will accept since there are a variety of styles
-    depending on the context.
+    Takes the return value ``bb.utils.vercmp()`` and returns the operation
+    defined by ``op`` between the return value and 0.
+
+    Arguments:
+
+    - ``a``: left version string operand.
+    - ``b``: right version string operand.
+    - ``op``: operator string. Can be one of ``=``, ``==``, ``<=``, ``>=``,
+      ``>``, ``>>``, ``<``, ``<<`` or ``!=``.
     """
     res = vercmp_string(a, b)
     if op in ('=', '=='):
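The operator table reads naturally in use, for example:

    vercmp_string_op("1.0", "1.1", "<")    # -> True
    vercmp_string_op("1.0", "1.0", ">=")   # -> True
    vercmp_string_op("1.1", "1.0", "!=")   # -> True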
@@ -158,9 +186,16 @@ def vercmp_string_op(a, b, op):
 
 def explode_deps(s):
     """
-    Take an RDEPENDS style string of format:
-    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
-    and return a list of dependencies.
+    Takes an RDEPENDS style string of format::
+
+        DEPEND1 (optional version) DEPEND2 (optional version) ...
+
+    Arguments:
+
+    - ``s``: input RDEPENDS style string
+
+    Returns a list of dependencies.
+
     Version information is ignored.
     """
     r = []
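For instance:

    explode_deps("foo (>= 1.2) bar virtual/baz")   # -> ['foo', 'bar', 'virtual/baz']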
@@ -182,9 +217,17 @@ def explode_deps(s):
 
 def explode_dep_versions2(s, *, sort=True):
     """
-    Take an RDEPENDS style string of format:
-    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
-    and return a dictionary of dependencies and versions.
+    Takes an RDEPENDS style string of format::
+
+        DEPEND1 (optional version) DEPEND2 (optional version) ...
+
+    Arguments:
+
+    - ``s``: input RDEPENDS style string
+    - ``*``: *Unused*.
+    - ``sort``: whether to sort the output or not.
+
+    Returns a dictionary of dependencies and versions.
     """
     r = collections.OrderedDict()
     l = s.replace(",", "").split()
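With the documented input shape, each dependency maps to its list of version
constraints (empty when none were given); the result is an OrderedDict:

    d = explode_dep_versions2("foo (>= 1.2) bar")
    # d == {'foo': ['>= 1.2'], 'bar': []}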
@@ -205,8 +248,8 @@ def explode_dep_versions2(s, *, sort=True):
             inversion = True
         # This list is based on behavior and supported comparisons from deb, opkg and rpm.
         #
-        # Even though =<, <<, ==, !=, =>, and >> may not be supported, 
-        # we list each possibly valid item. 
+        # Even though =<, <<, ==, !=, =>, and >> may not be supported,
+        # we list each possibly valid item.
         # The build system is responsible for validation of what it supports.
         if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
             lastcmp = i[0:2]
@@ -249,10 +292,17 @@ def explode_dep_versions2(s, *, sort=True):
 
 def explode_dep_versions(s):
     """
-    Take an RDEPENDS style string of format:
-    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
-    skip null value and items appeared in dependancy string multiple times
-    and return a dictionary of dependencies and versions.
+    Take an RDEPENDS style string of format::
+
+        DEPEND1 (optional version) DEPEND2 (optional version) ...
+
+    Skips null values and items appeared in dependency string multiple times.
+
+    Arguments:
+
+    - ``s``: input RDEPENDS style string
+
+    Returns a dictionary of dependencies and versions.
     """
     r = explode_dep_versions2(s)
     for d in r:
@@ -266,7 +316,17 @@ def explode_dep_versions(s):
 
 def join_deps(deps, commasep=True):
     """
-    Take the result from explode_dep_versions and generate a dependency string
+    Take a result from ``bb.utils.explode_dep_versions()`` and generate a
+    dependency string.
+
+    Arguments:
+
+    - ``deps``: dictionary of dependencies and versions.
+    - ``commasep``: makes the return value separated by commas if ``True``,
+      separated by spaces otherwise.
+
+    Returns a comma-separated (space-separated if ``comma-sep`` is ``False``)
+    string of dependencies and versions.
     """
     result = []
     for dep in deps:
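Round-tripping the previous example:

    join_deps({'foo': ['>= 1.2'], 'bar': []})   # -> 'foo (>= 1.2), bar'
    # with commasep=False the entries are space-separated instead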
@@ -340,7 +400,7 @@ def _print_exception(t, value, tb, realfile, text, context):
     exception = traceback.format_exception_only(t, value)
     error.append('Error executing a python function in %s:\n' % realfile)
 
-    # Strip 'us' from the stack (better_exec call) unless that was where the 
+    # Strip 'us' from the stack (better_exec call) unless that was where the
     # error came from
     if tb.tb_next is not None:
         tb = tb.tb_next
@@ -379,7 +439,7 @@ def _print_exception(t, value, tb, realfile, text, context):
 
     error.append("Exception: %s" % ''.join(exception))
 
-    # If the exception is from spwaning a task, let's be helpful and display
+    # If the exception is from spawning a task, let's be helpful and display
     # the output (which hopefully includes stderr).
     if isinstance(value, subprocess.CalledProcessError) and value.output:
         error.append("Subprocess output:")
@@ -400,7 +460,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
         code = better_compile(code, realfile, realfile)
     try:
         exec(code, get_context(), context)
-    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
+    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
         # Error already shown so passthrough, no need for traceback
         raise
     except Exception as e:
@@ -427,33 +487,56 @@ def better_eval(source, locals, extraglobals = None):
     return eval(source, ctx, locals)
 
 @contextmanager
-def fileslocked(files):
-    """Context manager for locking and unlocking file locks."""
+def fileslocked(files, *args, **kwargs):
+    """Context manager for locking and unlocking file locks. Uses
+    ``bb.utils.lockfile()`` and ``bb.utils.unlockfile()`` to lock and unlock
+    files.
+
+    No return value."""
     locks = []
     if files:
         for lockfile in files:
-            locks.append(bb.utils.lockfile(lockfile))
+            l = bb.utils.lockfile(lockfile, *args, **kwargs)
+            if l is not None:
+                locks.append(l)
 
     try:
         yield
     finally:
+        locks.reverse()
         for lock in locks:
             bb.utils.unlockfile(lock)
 
 def lockfile(name, shared=False, retry=True, block=False):
     """
-    Use the specified file as a lock file, return when the lock has
-    been acquired. Returns a variable to pass to unlockfile().
-    Parameters:
-        retry: True to re-try locking if it fails, False otherwise
-        block: True to block until the lock succeeds, False otherwise
+    Use the specified file (with filename ``name``) as a lock file, return when
+    the lock has been acquired. Returns a variable to pass to unlockfile().
+
+    Arguments:
+
+    - ``shared``: sets the lock as a shared lock instead of an
+      exclusive lock.
+    - ``retry``: ``True`` to re-try locking if it fails, ``False``
+      otherwise.
+    - ``block``: ``True`` to block until the lock succeeds,
+      ``False`` otherwise.
+
     The retry and block parameters are kind of equivalent unless you
     consider the possibility of sending a signal to the process to break
     out - at which point you want block=True rather than retry=True.
+
+    Returns the locked file descriptor in case of success, ``None`` otherwise.
     """
+    basename = os.path.basename(name)
+    if len(basename) > 255:
+        root, ext = os.path.splitext(basename)
+        basename = root[:255 - len(ext)] + ext
+
     dirname = os.path.dirname(name)
     mkdirhier(dirname)
 
+    name = os.path.join(dirname, basename)
+
     if not os.access(dirname, os.W_OK):
         logger.error("Unable to acquire lock '%s', directory is not writable",
                      name)
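Because fileslocked() now forwards its extra arguments to lockfile(), callers
can request shared or non-blocking locks through the context manager, e.g.
(hypothetical lock paths):

    with bb.utils.fileslocked(["/tmp/a.lock", "/tmp/b.lock"], shared=True):
        pass  # both shared locks held; released in reverse order on exit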
@@ -487,7 +570,7 @@ def lockfile(name, shared=False, retry=True, block=False):
                     return lf
             lf.close()
         except OSError as e:
-            if e.errno == errno.EACCES:
+            if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
                 logger.error("Unable to acquire lock '%s', %s",
                              e.strerror, name)
                 sys.exit(1)
@@ -501,7 +584,13 @@ def lockfile(name, shared=False, retry=True, block=False):
 
 def unlockfile(lf):
     """
-    Unlock a file locked using lockfile()
+    Unlock a file locked using ``bb.utils.lockfile()``.
+
+    Arguments:
+
+    - ``lf``: the locked file descriptor.
+
+    No return value.
     """
     try:
         # If we had a shared lock, we need to promote to exclusive before
@@ -529,43 +618,97 @@ def _hasher(method, filename):
 
 def md5_file(filename):
     """
-    Return the hex string representation of the MD5 checksum of filename.
+    Arguments:
+
+    - ``filename``: path to the input file.
+
+    Returns the hexadecimal string representation of the MD5 checksum of filename.
     """
     import hashlib
-    return _hasher(hashlib.md5(), filename)
+    try:
+        sig = hashlib.new('MD5', usedforsecurity=False)
+    except TypeError:
+        # Some configurations don't appear to support two arguments
+        sig = hashlib.new('MD5')
+    return _hasher(sig, filename)
 
 def sha256_file(filename):
     """
-    Return the hex string representation of the 256-bit SHA checksum of
+    Returns the hexadecimal representation of the 256-bit SHA checksum of
     filename.
+
+    Arguments:
+
+    - ``filename``: path to the file.
     """
     import hashlib
     return _hasher(hashlib.sha256(), filename)
 
 def sha1_file(filename):
     """
-    Return the hex string representation of the SHA1 checksum of the filename
+    Returns the hexadecimal representation of the SHA1 checksum of the filename
+
+    Arguments:
+
+    - ``filename``: path to the file.
     """
     import hashlib
     return _hasher(hashlib.sha1(), filename)
 
 def sha384_file(filename):
     """
-    Return the hex string representation of the SHA384 checksum of the filename
+    Returns the hexadecimal representation of the SHA384 checksum of the filename
+
+    Arguments:
+
+    - ``filename``: path to the file.
     """
     import hashlib
     return _hasher(hashlib.sha384(), filename)
 
 def sha512_file(filename):
     """
-    Return the hex string representation of the SHA512 checksum of the filename
+    Returns the hexadecimal representation of the SHA512 checksum of the filename
+
+    Arguments:
+
+    - ``filename``: path to the file.
     """
     import hashlib
     return _hasher(hashlib.sha512(), filename)
 
+def goh1_file(filename):
+    """
+    Returns the hexadecimal string representation of the Go mod h1 checksum of the
+    filename. The Go mod h1 checksum uses the Go dirhash package. The package
+    defines hashes over directory trees and is used by go mod for mod files and
+    zip archives.
+
+    Arguments:
+
+    - ``filename``: path to the file.
+    """
+    import hashlib
+    import zipfile
+
+    lines = []
+    if zipfile.is_zipfile(filename):
+        with zipfile.ZipFile(filename) as archive:
+            for fn in sorted(archive.namelist()):
+                method = hashlib.sha256()
+                method.update(archive.read(fn))
+                hash = method.hexdigest()
+                lines.append("%s  %s\n" % (hash, fn))
+    else:
+        hash = _hasher(hashlib.sha256(), filename)
+        lines.append("%s  go.mod\n" % hash)
+    method = hashlib.sha256()
+    method.update("".join(lines).encode('utf-8'))
+    return method.hexdigest()
+
 def preserved_envvars_exported():
-    """Variables which are taken from the environment and placed in and exported
-    from the metadata"""
+    """Returns the list of variables which are taken from the environment and
+    placed in and exported from the metadata."""
     return [
         'BB_TASKHASH',
         'HOME',
@@ -579,19 +722,42 @@ def preserved_envvars_exported():
     ]
 
 def preserved_envvars():
-    """Variables which are taken from the environment and placed in the metadata"""
+    """Returns the list of variables which are taken from the environment and
+    placed in the metadata."""
     v = [
         'BBPATH',
         'BB_PRESERVE_ENV',
-        'BB_ENV_WHITELIST',
-        'BB_ENV_EXTRAWHITE',
+        'BB_ENV_PASSTHROUGH_ADDITIONS',
     ]
     return v + preserved_envvars_exported()
 
+def check_system_locale():
+    """Make sure the required system locale are available and configured.
+
+    No return value."""
+    default_locale = locale.getlocale(locale.LC_CTYPE)
+
+    try:
+        locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
+    except:
+        sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
+    else:
+        locale.setlocale(locale.LC_CTYPE, default_locale)
+
+    if sys.getfilesystemencoding() != "utf-8":
+        sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
+                 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+
 def filter_environment(good_vars):
     """
     Create a pristine environment for bitbake. This will remove variables that
     are not known and may influence the build in a negative way.
+
+    Arguments:
+
+    - ``good_vars``: list of variable to exclude from the filtering.
+
+    No return value.
     """
 
     removed_vars = {}
@@ -615,27 +781,29 @@ def filter_environment(good_vars):
 
 def approved_variables():
     """
-    Determine and return the list of whitelisted variables which are approved
+    Determine and return the list of variables which are approved
     to remain in the environment.
     """
     if 'BB_PRESERVE_ENV' in os.environ:
         return os.environ.keys()
     approved = []
-    if 'BB_ENV_WHITELIST' in os.environ:
-        approved = os.environ['BB_ENV_WHITELIST'].split()
-        approved.extend(['BB_ENV_WHITELIST'])
+    if 'BB_ENV_PASSTHROUGH' in os.environ:
+        approved = os.environ['BB_ENV_PASSTHROUGH'].split()
+        approved.extend(['BB_ENV_PASSTHROUGH'])
     else:
         approved = preserved_envvars()
-        if 'BB_ENV_EXTRAWHITE' in os.environ:
-            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
-            if 'BB_ENV_EXTRAWHITE' not in approved:
-                approved.extend(['BB_ENV_EXTRAWHITE'])
+        if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
+            approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
+            if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
+                approved.extend(['BB_ENV_PASSTHROUGH_ADDITIONS'])
     return approved
 
 def clean_environment():
     """
     Clean up any spurious environment variables. This will remove any
     variables the user hasn't chosen to preserve.
+
+    No return value.
     """
     if 'BB_PRESERVE_ENV' not in os.environ:
         good_vars = approved_variables()
@@ -646,6 +814,8 @@ def clean_environment():
 def empty_environment():
     """
     Remove all variables from the environment.
+
+    No return value.
     """
     for s in list(os.environ.keys()):
         os.unsetenv(s)
@@ -654,6 +824,12 @@ def empty_environment():
 def build_environment(d):
     """
     Build an environment from all exported variables.
+
+    Arguments:
+
+    - ``d``: the data store.
+
+    No return value.
     """
     import bb.data
     for var in bb.data.keys(d):
@@ -678,13 +854,23 @@ def _check_unsafe_delete_path(path):
     return False
 
 def remove(path, recurse=False, ionice=False):
-    """Equivalent to rm -f or rm -rf"""
+    """Equivalent to rm -f or rm -rf.
+
+    Arguments:
+
+    - ``path``: path to file/directory to remove.
+    - ``recurse``: deletes recursively if ``True``.
+    - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
+      ionice``.
+
+    No return value.
+    """
     if not path:
         return
     if recurse:
         for name in glob.glob(path):
-            if _check_unsafe_delete_path(path):
-                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
+            if _check_unsafe_delete_path(name):
+                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
         # shutil.rmtree(name) would be ideal but its too slow
         cmd = []
         if ionice:
@@ -699,7 +885,17 @@ def remove(path, recurse=False, ionice=False):
             raise
 
 def prunedir(topdir, ionice=False):
-    """ Delete everything reachable from the directory named in 'topdir'. """
+    """
+    Delete everything reachable from the directory named in ``topdir``.
+
+    Arguments:
+
+    - ``topdir``: directory path.
+    - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
+      ionice``.
+
+    No return value.
+    """
     # CAUTION: This is dangerous!
     if _check_unsafe_delete_path(topdir):
         raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
@@ -710,9 +906,16 @@ def prunedir(topdir, ionice=False):
 # but thats possibly insane and suffixes is probably going to be small
 #
 def prune_suffix(var, suffixes, d):
     """
-    See if var ends with any of the suffixes listed and
-    remove it if found
+    Check if ``var`` ends with any of the suffixes listed in ``suffixes`` and
+    remove it if found.
+
+    Arguments:
+
+    - ``var``: string to check for suffixes.
+    - ``suffixes``: list of strings representing suffixes to check for.
+
+    Returns the string ``var`` without the suffix.
     """
     for suffix in suffixes:
         if suffix and var.endswith(suffix):
@@ -721,9 +924,16 @@ def prune_suffix(var, suffixes, d):
 
 def mkdirhier(directory):
     """Create a directory like 'mkdir -p', but does not complain if
-    directory already exists like os.makedirs
-    """
+    directory already exists like ``os.makedirs()``.
 
+    Arguments:
+
+    - ``directory``: path to the directory.
+
+    No return value.
+    """
+    if '${' in str(directory):
+        bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory))
     try:
         os.makedirs(directory)
     except OSError as e:
@@ -731,10 +941,24 @@ def mkdirhier(directory):
         raise e
 
 def movefile(src, dest, newmtime = None, sstat = None):
-    """Moves a file from src to dest, preserving all permissions and
+    """Moves a file from ``src`` to ``dest``, preserving all permissions and
     attributes; mtime will be preserved even when moving across
-    filesystems. Returns true on success and false on failure. Move is
+    filesystems. Returns ``True`` on success and ``False`` on failure. Move is
     atomic.
+
+    Arguments:
+
+    - ``src`` -- Source file.
+    - ``dest`` -- Destination file.
+    - ``newmtime`` -- new mtime to be passed as float seconds since the epoch.
+    - ``sstat`` -- os.stat_result to use for the destination file.
+
+    Returns an ``os.stat_result`` of the destination file if the
+    source file is a symbolic link or the ``sstat`` argument represents a
+    symbolic link - in which case the destination file will also be created as
+    a symbolic link.
+
+    Otherwise, returns ``newmtime`` on success and ``False`` on failure.
     """
 
     #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
@@ -742,7 +966,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
         if not sstat:
             sstat = os.lstat(src)
     except Exception as e:
-        print("movefile: Stating source file failed...", e)
+        logger.warning("movefile: Stating source file failed...", e)
         return None
 
     destexists = 1
@@ -770,7 +994,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
             os.unlink(src)
             return os.lstat(dest)
         except Exception as e:
-            print("movefile: failed to properly create symlink:", dest, "->", target, e)
+            logger.warning("movefile: failed to properly create symlink:", dest, "->", target, e)
             return None
 
     renamefailed = 1
@@ -782,12 +1006,12 @@ def movefile(src, dest, newmtime = None, sstat = None):
 
     if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
         try:
-            os.rename(src, destpath)
+            bb.utils.rename(src, destpath)
             renamefailed = 0
         except Exception as e:
             if e.errno != errno.EXDEV:
                 # Some random error.
-                print("movefile: Failed to move", src, "to", dest, e)
+                logger.warning("movefile: Failed to move", src, "to", dest, e)
                 return None
             # Invalid cross-device-link 'bind' mounted or actually Cross-Device
 
@@ -796,16 +1020,16 @@ def movefile(src, dest, newmtime = None, sstat = None):
         if stat.S_ISREG(sstat[stat.ST_MODE]):
             try: # For safety copy then move it over.
                 shutil.copyfile(src, destpath + "#new")
-                os.rename(destpath + "#new", destpath)
+                bb.utils.rename(destpath + "#new", destpath)
                 didcopy = 1
             except Exception as e:
-                print('movefile: copy', src, '->', dest, 'failed.', e)
+                logger.warning('movefile: copy', src, '->', dest, 'failed.', e)
                 return None
         else:
             #we don't yet handle special, so we need to fall back to /bin/mv
             a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
             if a[0] != 0:
-                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
+                logger.warning("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                 return None # failure
         try:
             if didcopy:
@@ -813,7 +1037,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
                 os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                 os.unlink(src)
         except Exception as e:
-            print("movefile: Failed to chown/chmod/unlink", dest, e)
+            logger.warning("movefile: Failed to chown/chmod/unlink", dest, e)
             return None
 
     if newmtime:
@@ -825,9 +1049,24 @@ def movefile(src, dest, newmtime = None, sstat = None):
 
 def copyfile(src, dest, newmtime = None, sstat = None):
     """
-    Copies a file from src to dest, preserving all permissions and
+    Copies a file from ``src`` to ``dest``, preserving all permissions and
     attributes; mtime will be preserved even when moving across
-    filesystems. Returns true on success and false on failure.
+    filesystems.
+
+    Arguments:
+
+    - ``src``: Source file.
+    - ``dest``: Destination file.
+    - ``newmtime``: new mtime to be passed as float seconds since the epoch.
+    - ``sstat``: os.stat_result to use for the destination file.
+
+    Returns an ``os.stat_result`` of the destination file if the
+    source file is a symbolic link or the ``sstat`` argument represents a
+    symbolic link - in which case the destination file will also be created as
+    a symbolic link.
+
+    Otherwise, returns ``newmtime`` on success and ``False`` on failure.
+
     """
     #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
     try:
@@ -874,7 +1113,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
874 1113
875 # For safety copy then move it over. 1114 # For safety copy then move it over.
876 shutil.copyfile(src, dest + "#new") 1115 shutil.copyfile(src, dest + "#new")
877 os.rename(dest + "#new", dest) 1116 bb.utils.rename(dest + "#new", dest)
878 except Exception as e: 1117 except Exception as e:
879 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) 1118 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
880 return False 1119 return False
@@ -905,10 +1144,16 @@ def copyfile(src, dest, newmtime = None, sstat = None):
905 1144
906def break_hardlinks(src, sstat = None): 1145def break_hardlinks(src, sstat = None):
907 """ 1146 """
908 Ensures src is the only hardlink to this file. Other hardlinks, 1147 Ensures ``src`` is the only hardlink to this file. Other hardlinks,
909 if any, are not affected (other than in their st_nlink value, of 1148 if any, are not affected (other than in their st_nlink value, of
910 course). Returns true on success and false on failure. 1149 course).
1150
1151 Arguments:
1152
1153 - ``src``: source file path.
1154 - ``sstat``: optional ``os.stat_result`` of ``src``, used to check its hardlink count.
911 1155
1156 Returns ``True`` on success and ``False`` on failure.
912 """ 1157 """
913 try: 1158 try:
914 if not sstat: 1159 if not sstat:
@@ -922,11 +1167,24 @@ def break_hardlinks(src, sstat = None):
922 1167
923def which(path, item, direction = 0, history = False, executable=False): 1168def which(path, item, direction = 0, history = False, executable=False):
924 """ 1169 """
925 Locate `item` in the list of paths `path` (colon separated string like $PATH). 1170 Locate ``item`` in the list of paths ``path`` (colon separated string like
926 If `direction` is non-zero then the list is reversed. 1171 ``$PATH``).
927 If `history` is True then the list of candidates also returned as result,history. 1172
928 If `executable` is True then the candidate has to be an executable file, 1173 Arguments:
929 otherwise the candidate simply has to exist. 1174
1175 - ``path``: colon-separated string of paths to search.
1176 - ``item``: string to search for.
1177 - ``direction``: if non-zero then the list is reversed.
1178 - ``history``: if ``True`` then the list of candidates is also returned
1179 as ``result,history``, where ``history`` is the list of previous
1180 paths checked.
1181 - ``executable``: if ``True`` then the candidate has to be an executable
1182 file, otherwise if ``False`` the candidate simply
1183 has to exist.
1184
1185 Returns the item if found in the list of paths, otherwise an empty string.
1186 If ``history`` is ``True``, returns the list of previous paths checked in a
1187 tuple with the found (or not found) item, as ``(item, history)``.
930 """ 1188 """
931 1189
932 if executable: 1190 if executable:
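
A minimal usage sketch for ``bb.utils.which()``; looking up ``python3`` on ``$PATH`` is purely illustrative::

    import os
    import bb.utils

    # Returns "" when nothing matches; executable=True additionally
    # requires the candidate to be an executable file
    found = bb.utils.which(os.environ.get("PATH", ""), "python3", executable=True)

    # With history=True, the paths already checked are returned as well
    found, checked = bb.utils.which(os.environ.get("PATH", ""), "python3", history=True)
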
@@ -953,10 +1211,29 @@ def which(path, item, direction = 0, history = False, executable=False):
953 return "", hist 1211 return "", hist
954 return "" 1212 return ""
955 1213
1214def to_filemode(input):
1215 """
1216 Take the contents of a bitbake variable defining a file mode and return
1217 the proper Python representation of the number.
1218
1219 Arguments:
1220
1221 - ``input``: a string or number to convert, e.g. a bitbake variable
1222 string, assumed to be an octal representation
1223
1224 Returns the Python file mode as a number.
1225 """
1226 # The mode might come in as a number or a text string.
1227 if type(input) is int:
1228 return input
1229 return int(input, 8)
1230
956@contextmanager 1231@contextmanager
957def umask(new_mask): 1232def umask(new_mask):
958 """ 1233 """
959 Context manager to set the umask to a specific mask, and restore it afterwards. 1234 Context manager to set the umask to a specific mask, and restore it afterwards.
1235
1236 No return value.
960 """ 1237 """
961 current_mask = os.umask(new_mask) 1238 current_mask = os.umask(new_mask)
962 try: 1239 try:
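
A short sketch of ``to_filemode()`` together with the ``umask()`` context manager; the mode values are illustrative::

    import bb.utils

    assert bb.utils.to_filemode("0755") == 0o755  # strings are parsed as octal
    assert bb.utils.to_filemode(0o644) == 0o644   # numbers pass through unchanged

    with bb.utils.umask(0o077):
        pass  # files created here are not group/world accessible; the
              # previous umask is restored on exit
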
@@ -965,13 +1242,26 @@ def umask(new_mask):
965 os.umask(current_mask) 1242 os.umask(current_mask)
966 1243
967def to_boolean(string, default=None): 1244def to_boolean(string, default=None):
968 """ 1245 """
969 Check input string and return boolean value True/False/None 1246 Check input string and return boolean value True/False/None
970 depending upon the checks 1247 depending upon the checks.
1248
1249 Arguments:
1250
1251 - ``string``: input string.
1252 - ``default``: default return value if the input ``string`` is ``None``,
1253 ``0``, ``False`` or an empty string.
1254
1255 Returns ``True`` if the string is one of "y", "yes", "1" or "true"
1256 (matched case-insensitively), ``False`` if it is one of "n", "no", "0"
1257 or "false". Returns ``default`` if the input ``string`` is ``None``, ``0``, ``False`` or an empty string.
971 """ 1258 """
972 if not string: 1259 if not string:
973 return default 1260 return default
974 1261
1262 if isinstance(string, int):
1263 return string != 0
1264
975 normalized = string.lower() 1265 normalized = string.lower()
976 if normalized in ("y", "yes", "1", "true"): 1266 if normalized in ("y", "yes", "1", "true"):
977 return True 1267 return True
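
A few illustrative checks of the behaviour documented above::

    import bb.utils

    assert bb.utils.to_boolean("Yes") is True               # matching is case-insensitive
    assert bb.utils.to_boolean("0") is False
    assert bb.utils.to_boolean(None, default=True) is True  # empty input returns the default
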
@@ -985,18 +1275,17 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
985 1275
986 Arguments: 1276 Arguments:
987 1277
988 variable -- the variable name. This will be fetched and expanded (using 1278 - ``variable``: the variable name. This will be fetched and expanded (using
989 d.getVar(variable)) and then split into a set(). 1279 d.getVar(variable)) and then split into a set().
990 1280 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
991 checkvalues -- if this is a string it is split on whitespace into a set(), 1281 otherwise coerced directly into a set().
992 otherwise coerced directly into a set(). 1282 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1283 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1284 not a subset of variable.
1285 - ``d``: the data store.
993 1286
994 truevalue -- the value to return if checkvalues is a subset of variable. 1287 Returns ``truevalue`` if the variable contains the values specified,
995 1288 ``falsevalue`` otherwise.
996 falsevalue -- the value to return if variable is empty or if checkvalues is
997 not a subset of variable.
998
999 d -- the data store.
1000 """ 1289 """
1001 1290
1002 val = d.getVar(variable) 1291 val = d.getVar(variable)
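
Typical usage, sketched with a bare datastore standing in for the recipe datastore (the variable and flag values are illustrative)::

    import bb.data
    import bb.utils

    d = bb.data.init()
    d.setVar('DISTRO_FEATURES', 'systemd usrmerge')

    # truevalue is returned only if every word of checkvalues is present
    flag = bb.utils.contains('DISTRO_FEATURES', 'systemd',
                             '--with-systemd', '--without-systemd', d)
    assert flag == '--with-systemd'
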
@@ -1016,18 +1305,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1016 1305
1017 Arguments: 1306 Arguments:
1018 1307
1019 variable -- the variable name. This will be fetched and expanded (using 1308 - ``variable``: the variable name. This will be fetched and expanded (using
1020 d.getVar(variable)) and then split into a set(). 1309 d.getVar(variable)) and then split into a set().
1021 1310 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1022 checkvalues -- if this is a string it is split on whitespace into a set(), 1311 otherwise coerced directly into a set().
1023 1312 - ``truevalue``: the value to return if any of ``checkvalues`` is present in variable.
1024 1313 - ``falsevalue``: the value to return if variable is empty or if none of
1025 truevalue -- the value to return if checkvalues is a subset of variable. 1314 ``checkvalues`` is present in variable.
1315 - ``d``: the data store.
1026 1316
1027 falsevalue -- the value to return if variable is empty or if checkvalues is 1317 Returns ``truevalue`` if the variable contains any of the values specified,
1028 not a subset of variable. 1318 ``falsevalue`` otherwise.
1029
1030 d -- the data store.
1031 """ 1319 """
1032 val = d.getVar(variable) 1320 val = d.getVar(variable)
1033 if not val: 1321 if not val:
@@ -1042,17 +1330,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1042 return falsevalue 1330 return falsevalue
1043 1331
1044def filter(variable, checkvalues, d): 1332def filter(variable, checkvalues, d):
1045 """Return all words in the variable that are present in the checkvalues. 1333 """Return all words in the variable that are present in the ``checkvalues``.
1046 1334
1047 Arguments: 1335 Arguments:
1048 1336
1049 variable -- the variable name. This will be fetched and expanded (using 1337 - ``variable``: the variable name. This will be fetched and expanded (using
1050 d.getVar(variable)) and then split into a set(). 1338 d.getVar(variable)) and then split into a set().
1051 1339 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1052 checkvalues -- if this is a string it is split on whitespace into a set(), 1340 otherwise coerced directly into a set().
1053 otherwise coerced directly into a set(). 1341 - ``d``: the data store.
1054 1342
1055 d -- the data store. 1343 Returns the matching words as a single space-separated string.
1056 """ 1344 """
1057 1345
1058 val = d.getVar(variable) 1346 val = d.getVar(variable)
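
An illustrative call, again with a bare datastore standing in for the real one::

    import bb.data
    import bb.utils

    d = bb.data.init()
    d.setVar('DISTRO_FEATURES', 'systemd wayland opengl')

    # Only the words present in both sets survive, as one space-separated string
    assert bb.utils.filter('DISTRO_FEATURES', 'systemd x11 wayland', d) == 'systemd wayland'
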
@@ -1068,8 +1356,27 @@ def filter(variable, checkvalues, d):
1068 1356
1069def get_referenced_vars(start_expr, d): 1357def get_referenced_vars(start_expr, d):
1070 """ 1358 """
1071 :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level 1359 Get the names of the variables referenced in a given expression.
1072 are ordered arbitrarily) 1360
1361 Arguments:
1362
1363 - ``start_expr``: the expression in which to look for variable references.
1364
1365 For example::
1366
1367 ${VAR_A} string ${VAR_B}
1368
1369 Or::
1370
1371 ${@d.getVar('VAR')}
1372
1373 If a variable makes references to other variables, the latter are also
1374 returned recursively.
1375
1376 - ``d``: the data store.
1377
1378 Returns the names of vars referenced in ``start_expr`` (recursively), in
1379 quasi-BFS order (variables within the same level are ordered arbitrarily).
1073 """ 1380 """
1074 1381
1075 seen = set() 1382 seen = set()
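
A small sketch using a bare datastore; the variable names are illustrative::

    import bb.data
    import bb.utils

    d = bb.data.init()
    d.setVar("VAR_B", "value")
    d.setVar("VAR_A", "${VAR_B} extra")

    # VAR_B is reported too, because VAR_A references it
    print(bb.utils.get_referenced_vars("prefix ${VAR_A}", d))
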
@@ -1103,7 +1410,10 @@ def get_referenced_vars(start_expr, d):
1103 1410
1104 1411
1105def cpu_count(): 1412def cpu_count():
1106 return multiprocessing.cpu_count() 1413 try:
1414 return len(os.sched_getaffinity(0))
1415 except OSError:
1416 return multiprocessing.cpu_count()
1107 1417
1108def nonblockingfd(fd): 1418def nonblockingfd(fd):
1109 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) 1419 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
@@ -1146,7 +1456,9 @@ def multiprocessingpool(*args, **kwargs):
1146 return multiprocessing.Pool(*args, **kwargs) 1456 return multiprocessing.Pool(*args, **kwargs)
1147 1457
1148def exec_flat_python_func(func, *args, **kwargs): 1458def exec_flat_python_func(func, *args, **kwargs):
1149 """Execute a flat python function (defined with def funcname(args):...)""" 1459 """Execute a flat python function (defined with ``def funcname(args): ...``)
1460
1461 Returns the return value of the function."""
1150 # Prepare a small piece of python code which calls the requested function 1462 # Prepare a small piece of python code which calls the requested function
1151 # To do this we need to prepare two things - a set of variables we can use to pass 1463 # To do this we need to prepare two things - a set of variables we can use to pass
1152 # the values of arguments into the calling function, and the list of arguments for 1464 # the values of arguments into the calling function, and the list of arguments for
@@ -1172,48 +1484,57 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1172 """Edit lines from a recipe or config file and modify one or more 1484 """Edit lines from a recipe or config file and modify one or more
1173 specified variable values set in the file using a specified callback 1485 specified variable values set in the file using a specified callback
1174 function. Lines are expected to have trailing newlines. 1486 function. Lines are expected to have trailing newlines.
1175 Parameters: 1487
1176 meta_lines: lines from the file; can be a list or an iterable 1488 Arguments:
1177 (e.g. file pointer) 1489
1178 variables: a list of variable names to look for. Functions 1490 - ``meta_lines``: lines from the file; can be a list or an iterable
1179 may also be specified, but must be specified with '()' at 1491 (e.g. file pointer)
1180 the end of the name. Note that the function doesn't have 1492 - ``variables``: a list of variable names to look for. Functions
1181 any intrinsic understanding of _append, _prepend, _remove, 1493 may also be specified, but must be specified with ``()`` at
1182 or overrides, so these are considered as part of the name. 1494 the end of the name. Note that the function doesn't have
1183 These values go into a regular expression, so regular 1495 any intrinsic understanding of ``:append``, ``:prepend``, ``:remove``,
1184 expression syntax is allowed. 1496 or overrides, so these are considered as part of the name.
1185 varfunc: callback function called for every variable matching 1497 These values go into a regular expression, so regular
1186 one of the entries in the variables parameter. The function 1498 expression syntax is allowed.
1187 should take four arguments: 1499 - ``varfunc``: callback function called for every variable matching
1188 varname: name of variable matched 1500 one of the entries in the variables parameter.
1189 origvalue: current value in file 1501
1190 op: the operator (e.g. '+=') 1502 The function should take four arguments:
1191 newlines: list of lines up to this point. You can use 1503
1192 this to prepend lines before this variable setting 1504 - ``varname``: name of variable matched
1193 if you wish. 1505 - ``origvalue``: current value in file
1194 and should return a four-element tuple: 1506 - ``op``: the operator (e.g. ``+=``)
1195 newvalue: new value to substitute in, or None to drop 1507 - ``newlines``: list of lines up to this point. You can use
1196 the variable setting entirely. (If the removal 1508 this to prepend lines before this variable setting
1197 results in two consecutive blank lines, one of the 1509 if you wish.
1198 blank lines will also be dropped). 1510
1199 newop: the operator to use - if you specify None here, 1511 And should return a four-element tuple:
1200 the original operation will be used. 1512
1201 indent: number of spaces to indent multi-line entries, 1513 - ``newvalue``: new value to substitute in, or ``None`` to drop
1202 or -1 to indent up to the level of the assignment 1514 the variable setting entirely. (If the removal
1203 and opening quote, or a string to use as the indent. 1515 results in two consecutive blank lines, one of the
1204 minbreak: True to allow the first element of a 1516 blank lines will also be dropped).
1205 multi-line value to continue on the same line as 1517 - ``newop``: the operator to use - if you specify ``None`` here,
1206 the assignment, False to indent before the first 1518 the original operation will be used.
1207 element. 1519 - ``indent``: number of spaces to indent multi-line entries,
1208 To clarify, if you wish not to change the value, then you 1520 or ``-1`` to indent up to the level of the assignment
1209 would return like this: return origvalue, None, 0, True 1521 and opening quote, or a string to use as the indent.
1210 match_overrides: True to match items with _overrides on the end, 1522 - ``minbreak``: ``True`` to allow the first element of a
1211 False otherwise 1523 multi-line value to continue on the same line as
1524 the assignment, ``False`` to indent before the first
1525 element.
1526
1527 To clarify, if you wish not to change the value, then you
1528 would return like this::
1529
1530 return origvalue, None, 0, True
1531 - ``match_overrides``: ``True`` to match items with ``_overrides`` on the end,
1532 ``False`` otherwise.
1533
1212 Returns a tuple: 1534 Returns a tuple:
1213 updated: 1535
1214 True if changes were made, False otherwise. 1536 - ``updated``: ``True`` if changes were made, ``False`` otherwise.
1215 newlines: 1537 - ``newlines``: Lines after processing.
1216 Lines after processing
1217 """ 1538 """
1218 1539
1219 var_res = {} 1540 var_res = {}
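
A sketch of the callback contract described above; the variable names and values are illustrative::

    import bb.utils

    lines = ['VERSION = "1.0"\n', 'PR = "r0"\n']

    def bump_version(varname, origvalue, op, newlines):
        if varname == 'VERSION':
            return '1.1', None, 0, True   # new value, keep the original operator
        return origvalue, None, 0, True   # leave everything else untouched

    updated, newlines = bb.utils.edit_metadata(lines, ['VERSION', 'PR'], bump_version)
    # updated is True and newlines now contains 'VERSION = "1.1"\n'
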
@@ -1357,12 +1678,13 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1357 1678
1358 1679
1359def edit_metadata_file(meta_file, variables, varfunc): 1680def edit_metadata_file(meta_file, variables, varfunc):
1360 """Edit a recipe or config file and modify one or more specified 1681 """Edit a recipe or configuration file and modify one or more specified
1361 variable values set in the file using a specified callback function. 1682 variable values set in the file using a specified callback function.
1362 The file is only written to if the value(s) actually change. 1683 The file is only written to if the value(s) actually change.
1363 This is basically the file version of edit_metadata(), see that 1684 This is basically the file version of ``bb.utils.edit_metadata()``, see that
1364 function's description for parameter/usage information. 1685 function's description for parameter/usage information.
1365 Returns True if the file was written to, False otherwise. 1686
1687 Returns ``True`` if the file was written to, ``False`` otherwise.
1366 """ 1688 """
1367 with open(meta_file, 'r') as f: 1689 with open(meta_file, 'r') as f:
1368 (updated, newlines) = edit_metadata(f, variables, varfunc) 1690 (updated, newlines) = edit_metadata(f, variables, varfunc)
@@ -1373,23 +1695,25 @@ def edit_metadata_file(meta_file, variables, varfunc):
1373 1695
1374 1696
1375def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): 1697def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
1376 """Edit bblayers.conf, adding and/or removing layers 1698 """Edit ``bblayers.conf``, adding and/or removing layers.
1377 Parameters: 1699
1378 bblayers_conf: path to bblayers.conf file to edit 1700 Arguments:
1379 add: layer path (or list of layer paths) to add; None or empty 1701
1380 list to add nothing 1702 - ``bblayers_conf``: path to ``bblayers.conf`` file to edit
1381 remove: layer path (or list of layer paths) to remove; None or 1703 - ``add``: layer path (or list of layer paths) to add; ``None`` or empty
1382 empty list to remove nothing 1704 list to add nothing
1383 edit_cb: optional callback function that will be called after 1705 - ``remove``: layer path (or list of layer paths) to remove; ``None`` or
1384 processing adds/removes once per existing entry. 1706 empty list to remove nothing
1707 - ``edit_cb``: optional callback function that will be called
1708 after processing adds/removes once per existing entry.
1709
1385 Returns a tuple: 1710 Returns a tuple:
1386 notadded: list of layers specified to be added but weren't
1387 (because they were already in the list)
1388 notremoved: list of layers that were specified to be removed
1389 but weren't (because they weren't in the list)
1390 """
1391 1711
1392 import fnmatch 1712 - ``notadded``: list of layers specified to be added but weren't
1713 (because they were already in the list)
1714 - ``notremoved``: list of layers that were specified to be removed
1715 but weren't (because they weren't in the list)
1716 """
1393 1717
1394 def remove_trailing_sep(pth): 1718 def remove_trailing_sep(pth):
1395 if pth and pth[-1] == os.sep: 1719 if pth and pth[-1] == os.sep:
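
An illustrative call; the paths are hypothetical and the file is edited in place::

    import bb.utils

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        'conf/bblayers.conf', add='/srv/layers/meta-custom', remove=None)
    # notadded lists layers that were already present in BBLAYERS
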
@@ -1508,7 +1832,22 @@ def get_collection_res(d):
1508 1832
1509 1833
1510def get_file_layer(filename, d, collection_res={}): 1834def get_file_layer(filename, d, collection_res={}):
1511 """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" 1835 """Determine the collection (or layer name, as defined by a layer's
1836 ``layer.conf`` file) containing the specified file.
1837
1838 Arguments:
1839
1840 - ``filename``: the filename to look for.
1841 - ``d``: the data store.
1842 - ``collection_res``: dictionary with the layer names as keys and the file
1843 patterns to match as values, as defined by the BBFILE_COLLECTIONS and
1844 BBFILE_PATTERN variables respectively. The return value of
1845 ``bb.utils.get_collection_res()`` is the default if this argument is
1846 not specified.
1847
1848 Returns the name of the layer containing the file. If multiple layers
1849 contain the file, the last matching layer name from ``collection_res`` is returned.
1850 """
1512 if not collection_res: 1851 if not collection_res:
1513 collection_res = get_collection_res(d) 1852 collection_res = get_collection_res(d)
1514 1853
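
An illustrative lookup; the checkout path is hypothetical and tinfoil is used only to obtain a configured datastore::

    import bb.tinfoil
    import bb.utils

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        layer = bb.utils.get_file_layer(
            '/srv/poky/meta/recipes-core/busybox/busybox_1.36.1.bb',
            tinfoil.config_data)
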
@@ -1546,7 +1885,13 @@ class PrCtlError(Exception):
1546 1885
1547def signal_on_parent_exit(signame): 1886def signal_on_parent_exit(signame):
1548 """ 1887 """
1549 Trigger signame to be sent when the parent process dies 1888 Trigger ``signame`` to be sent when the parent process dies.
1889
1890 Arguments:
1891
1892 - ``signame``: name of the signal. See ``man signal``.
1893
1894 No return value.
1550 """ 1895 """
1551 signum = getattr(signal, signame) 1896 signum = getattr(signal, signame)
1552 # http://linux.die.net/man/2/prctl 1897 # http://linux.die.net/man/2/prctl
@@ -1581,7 +1926,7 @@ def ioprio_set(who, cls, value):
1581 bb.warn("Unable to set IO Prio for arch %s" % _unamearch) 1926 bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1582 1927
1583def set_process_name(name): 1928def set_process_name(name):
1584 from ctypes import cdll, byref, create_string_buffer 1929 from ctypes import byref, create_string_buffer
1585 # This is nice to have for debugging, not essential 1930 # This is nice to have for debugging, not essential
1586 try: 1931 try:
1587 libc = cdll.LoadLibrary('libc.so.6') 1932 libc = cdll.LoadLibrary('libc.so.6')
@@ -1590,33 +1935,96 @@ def set_process_name(name):
1590 except: 1935 except:
1591 pass 1936 pass
1592 1937
1593def export_proxies(d): 1938def enable_loopback_networking():
1594 """ export common proxies variables from datastore to environment """ 1939 # From bits/ioctls.h
1595 import os 1940 SIOCGIFFLAGS = 0x8913
1941 SIOCSIFFLAGS = 0x8914
1942 SIOCSIFADDR = 0x8916
1943 SIOCSIFNETMASK = 0x891C
1596 1944
1597 variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY', 1945 # if.h
1598 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY', 1946 IFF_UP = 0x1
1599 'GIT_PROXY_COMMAND'] 1947 IFF_RUNNING = 0x40
1600 exported = False
1601 1948
1602 for v in variables: 1949 # bits/socket.h
1603 if v in os.environ.keys(): 1950 AF_INET = 2
1604 exported = True 1951
1605 else: 1952 # char ifr_name[IFNAMSIZ=16]
1606 v_proxy = d.getVar(v) 1953 ifr_name = struct.pack("@16s", b"lo")
1607 if v_proxy is not None: 1954 def netdev_req(fd, req, data = b""):
1608 os.environ[v] = v_proxy 1955 # Pad and add interface name
1609 exported = True 1956 data = ifr_name + data + (b'\x00' * (16 - len(data)))
1957 # Return all data after interface name
1958 return fcntl.ioctl(fd, req, data)[16:]
1959
1960 with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
1961 fd = sock.fileno()
1962
1963 # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
1964 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
1965 netdev_req(fd, SIOCSIFADDR, req)
1610 1966
1611 return exported 1967 # short ifr_flags
1968 flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
1969 flags |= IFF_UP | IFF_RUNNING
1970 netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))
1612 1971
1972 # struct sockaddr_in ifr_netmask
1973 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
1974 netdev_req(fd, SIOCSIFNETMASK, req)
1975
1976def disable_network(uid=None, gid=None):
1977 """
1978 Disable networking in the current process if the kernel supports it, else
1979 just return after logging to debug. To do this we need to create a new user
1980 namespace, then map back to the original uid/gid.
1981
1982 Arguments:
1983
1984 - ``uid``: original user id.
1985 - ``gid``: original user group id.
1986
1987 No return value.
1988 """
1989 libc = ctypes.CDLL('libc.so.6')
1990
1991 # From sched.h
1992 # New user namespace
1993 CLONE_NEWUSER = 0x10000000
1994 # New network namespace
1995 CLONE_NEWNET = 0x40000000
1996
1997 if uid is None:
1998 uid = os.getuid()
1999 if gid is None:
2000 gid = os.getgid()
2001
2002 ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
2003 if ret != 0:
2004 logger.debug("System doesn't support disabling network without admin privs")
2005 return
2006 with open("/proc/self/uid_map", "w") as f:
2007 f.write("%s %s 1" % (uid, uid))
2008 with open("/proc/self/setgroups", "w") as f:
2009 f.write("deny")
2010 with open("/proc/self/gid_map", "w") as f:
2011 f.write("%s %s 1" % (gid, gid))
2012
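
A sketch of the fork-then-unshare pattern these two helpers support: the child gives up external networking but keeps a working loopback interface::

    import os
    import bb.utils

    pid = os.fork()
    if pid == 0:
        bb.utils.disable_network()             # logs and continues if unsupported
        bb.utils.enable_loopback_networking()  # bring up 127.0.0.1 in the new netns
        # ... run the network-isolated work here ...
        os._exit(0)
    os.waitpid(pid, 0)
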
2013def export_proxies(d):
2014 """ Export common proxy variables from the datastore to the environment. """
2015 from bb.fetch2 import get_fetcher_environment
2016 newenv = get_fetcher_environment(d)
2017 for v in newenv:
2018 os.environ[v] = newenv[v]
1613 2019
1614def load_plugins(logger, plugins, pluginpath): 2020def load_plugins(logger, plugins, pluginpath):
1615 def load_plugin(name): 2021 def load_plugin(name):
1616 logger.debug('Loading plugin %s' % name) 2022 logger.debug('Loading plugin %s' % name)
1617 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) 2023 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
1618 if spec: 2024 if spec:
1619 return spec.loader.load_module() 2025 mod = importlib.util.module_from_spec(spec)
2026 spec.loader.exec_module(mod)
2027 return mod
1620 2028
1621 logger.debug('Loading plugins from %s...' % pluginpath) 2029 logger.debug('Loading plugins from %s...' % pluginpath)
1622 2030
@@ -1646,9 +2054,14 @@ class LogCatcher(logging.Handler):
1646 2054
1647def is_semver(version): 2055def is_semver(version):
1648 """ 2056 """
1649 Is the version string following the semver semantic? 2057 Arguments:
2058
2059 - ``version``: the version string.
2060
2061 Returns ``True`` if the version string follows semantic versioning, ``False``
2062 otherwise.
1650 2063
1651 https://semver.org/spec/v2.0.0.html 2064 See https://semver.org/spec/v2.0.0.html.
1652 """ 2065 """
1653 regex = re.compile( 2066 regex = re.compile(
1654 r""" 2067 r"""
@@ -1669,3 +2082,150 @@ def is_semver(version):
1669 return False 2082 return False
1670 2083
1671 return True 2084 return True
2085
2086# Wrapper around os.rename which can handle cross device problems
2087# e.g. from container filesystems
2088def rename(src, dst):
2089 try:
2090 os.rename(src, dst)
2091 except OSError as err:
2092 if err.errno == errno.EXDEV:
2093 # Invalid cross-device link error
2094 shutil.move(src, dst)
2095 else:
2096 raise err
2097
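
Usage is identical to ``os.rename()``; the paths here are illustrative::

    import bb.utils

    # Falls back to shutil.move() if source and destination live on
    # different filesystems (EXDEV), e.g. with container overlay mounts
    bb.utils.rename("/tmp/scratch/output.bin", "/srv/build/output.bin")
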
2098@contextmanager
2099def environment(**envvars):
2100 """
2101 Context manager to selectively update the environment with the specified mapping.
2102
2103 No return value.
2104 """
2105 backup = dict(os.environ)
2106 try:
2107 os.environ.update(envvars)
2108 yield
2109 finally:
2110 for var in envvars:
2111 if var in backup:
2112 os.environ[var] = backup[var]
2113 elif var in os.environ:
2114 del os.environ[var]
2115
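
A short sketch; the proxy value is illustrative::

    import os
    import bb.utils

    with bb.utils.environment(http_proxy="http://proxy.example.com:8080"):
        pass  # os.environ carries the override only inside this block
    # the previous value (or absence) of http_proxy is restored here
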
2116def is_local_uid(uid=''):
2117 """
2118 Check whether uid is a local one or not.
2119 Can't use pwd module since it gets all UIDs, not local ones only.
2120
2121 Arguments:
2122
2123 - ``uid``: user id. If not specified the user id is determined from
2124 ``os.getuid()``.
2125
2126 Returns ``True`` if the user id is local, ``False`` otherwise.
2127 """
2128 if not uid:
2129 uid = os.getuid()
2130 with open('/etc/passwd', 'r') as f:
2131 for line in f:
2132 line_split = line.split(':')
2133 if len(line_split) < 3:
2134 continue
2135 if str(uid) == line_split[2]:
2136 return True
2137 return False
2138
2139def mkstemp(suffix=None, prefix=None, dir=None, text=False):
2140 """
2141 Generates a unique temporary file, independent of time.
2142
2143 mkstemp() in glibc (at least) generates unique file names based on the
2144 current system time. When combined with highly parallel builds, and
2145 operating over NFS (e.g. shared sstate/downloads) this can result in
2146 conflicts and race conditions.
2147
2148 This function adds additional entropy to the file name so that a collision
2149 is independent of time and thus extremely unlikely.
2150
2151 Arguments:
2152
2153 - ``suffix``: filename suffix.
2154 - ``prefix``: filename prefix.
2155 - ``dir``: directory where the file will be created.
2156 - ``text``: if ``True``, the file is opened in text mode.
2157
2158 Returns a tuple containing:
2159
2160 - the file descriptor for the created file
2161 - the name of the file.
2162 """
2163 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
2164 if prefix:
2165 prefix = prefix + entropy
2166 else:
2167 prefix = tempfile.gettempprefix() + entropy
2168 return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
2169
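
Drop-in usage mirroring ``tempfile.mkstemp()``; as there, the caller owns the returned file descriptor::

    import os
    import bb.utils

    fd, path = bb.utils.mkstemp(suffix=".lock")
    try:
        os.write(fd, b"data")
    finally:
        os.close(fd)
        os.unlink(path)
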
2170def path_is_descendant(descendant, ancestor):
2171 """
2172 Returns ``True`` if the path ``descendant`` is a descendant of ``ancestor``
2173 (including being equivalent to ``ancestor`` itself). Otherwise returns
2174 ``False``.
2175
2176 Correctly accounts for symlinks, bind mounts, etc. by using
2177 ``os.path.samestat()`` to compare paths.
2178
2179 May raise any exception that ``os.stat()`` raises.
2180
2181 Arguments:
2182
2183 - ``descendant``: path to check for being a descendant of ``ancestor``.
2184 - ``ancestor``: path to the ancestor ``descendant`` will be checked
2185 against.
2186 """
2187
2188 ancestor_stat = os.stat(ancestor)
2189
2190 # Recurse up each directory component of the descendant to see if it is
2191 # equivalent to the ancestor
2192 check_dir = os.path.abspath(descendant).rstrip("/")
2193 while check_dir:
2194 check_stat = os.stat(check_dir)
2195 if os.path.samestat(check_stat, ancestor_stat):
2196 return True
2197 check_dir = os.path.dirname(check_dir).rstrip("/")
2198
2199 return False
2200
2201# If we don't have a timeout of some kind and a process/thread exits badly (for example
2202# OOM killed) while holding a lock, we'd just hang in the lock futex forever. It is better
2203# to exit at some point than to hang. 5 minutes with no progress means we're probably deadlocked.
2204# This function can still deadlock Python since it can't signal the other threads to exit
2205# (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads
2206# to exit.
2207@contextmanager
2208def lock_timeout(lock):
2209 try:
2210 s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
2211 held = lock.acquire(timeout=5*60)
2212 if not held:
2213 bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack())
2214 os._exit(1)
2215 yield held
2216 finally:
2217 lock.release()
2218 signal.pthread_sigmask(signal.SIG_SETMASK, s)
2219
2220# A version of lock_timeout without the check that the lock was locked and a shorter timeout
2221@contextmanager
2222def lock_timeout_nocheck(lock):
2223 l = False
2224 try:
2225 s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
2226 l = lock.acquire(timeout=10)
2227 yield l
2228 finally:
2229 if l:
2230 lock.release()
2231 signal.pthread_sigmask(signal.SIG_SETMASK, s)
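
A minimal sketch of how a caller would use the guarded acquire::

    import threading
    import bb.utils

    lock = threading.Lock()
    with bb.utils.lock_timeout(lock):
        # critical section; the whole process exits if the lock cannot
        # be taken within five minutes
        pass
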
diff --git a/bitbake/lib/bb/xattr.py b/bitbake/lib/bb/xattr.py
new file mode 100755
index 0000000000..7b634944a4
--- /dev/null
+++ b/bitbake/lib/bb/xattr.py
@@ -0,0 +1,126 @@
1#! /usr/bin/env python3
2#
3# Copyright 2023 by Garmin Ltd. or its subsidiaries
4#
5# SPDX-License-Identifier: MIT
6
7import sys
8import ctypes
9import os
10import errno
11
12libc = ctypes.CDLL("libc.so.6", use_errno=True)
13fsencoding = sys.getfilesystemencoding()
14
15
16libc.listxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
17libc.llistxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
18
19
20def listxattr(path, follow=True):
21 func = libc.listxattr if follow else libc.llistxattr
22
23 os_path = os.fsencode(path)
24
25 while True:
26 length = func(os_path, None, 0)
27
28 if length < 0:
29 err = ctypes.get_errno()
30 raise OSError(err, os.strerror(err), str(path))
31
32 if length == 0:
33 return []
34
35 arr = ctypes.create_string_buffer(length)
36
37 read_length = func(os_path, arr, length)
38 if read_length != length:
39 # Race!
40 continue
41
42 return [a.decode(fsencoding) for a in arr.raw.split(b"\x00") if a]
43
44
45libc.getxattr.argtypes = [
46 ctypes.c_char_p,
47 ctypes.c_char_p,
48 ctypes.c_char_p,
49 ctypes.c_size_t,
50]
51libc.lgetxattr.argtypes = [
52 ctypes.c_char_p,
53 ctypes.c_char_p,
54 ctypes.c_char_p,
55 ctypes.c_size_t,
56]
57
58
59def getxattr(path, name, follow=True):
60 func = libc.getxattr if follow else libc.lgetxattr
61
62 os_path = os.fsencode(path)
63 os_name = os.fsencode(name)
64
65 while True:
66 length = func(os_path, os_name, None, 0)
67
68 if length < 0:
69 err = ctypes.get_errno()
70 if err == errno.ENODATA:
71 return None
72 raise OSError(err, os.strerror(err), str(path))
73
74 if length == 0:
75 return b""
76
77 arr = ctypes.create_string_buffer(length)
78
79 read_length = func(os_path, os_name, arr, length)
80 if read_length != length:
81 # Race!
82 continue
83
84 return arr.raw
85
86
87def get_all_xattr(path, follow=True):
88 attrs = {}
89
90 names = listxattr(path, follow)
91
92 for name in names:
93 value = getxattr(path, name, follow)
94 if value is None:
95 # This can happen if a value is erased after listxattr is called,
96 # so ignore it
97 continue
98 attrs[name] = value
99
100 return attrs
101
102
103def main():
104 import argparse
105 from pathlib import Path
106
107 parser = argparse.ArgumentParser()
108 parser.add_argument("path", help="File Path", type=Path)
109
110 args = parser.parse_args()
111
112 attrs = get_all_xattr(args.path)
113
114 for name, value in attrs.items():
115 try:
116 value = value.decode(fsencoding)
117 except UnicodeDecodeError:
118 pass
119
120 print(f"{name} = {value}")
121
122 return 0
123
124
125if __name__ == "__main__":
126 sys.exit(main())
diff --git a/bitbake/lib/bblayers/__init__.py b/bitbake/lib/bblayers/__init__.py
index 4e7c09da04..78efd29750 100644
--- a/bitbake/lib/bblayers/__init__.py
+++ b/bitbake/lib/bblayers/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/bitbake/lib/bblayers/action.py b/bitbake/lib/bblayers/action.py
index f05f5d330f..a14f19948e 100644
--- a/bitbake/lib/bblayers/action.py
+++ b/bitbake/lib/bblayers/action.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -9,6 +11,7 @@ import shutil
9import sys 11import sys
10import tempfile 12import tempfile
11 13
14from bb.cookerdata import findTopdir
12import bb.utils 15import bb.utils
13 16
14from bblayers.common import LayerPlugin 17from bblayers.common import LayerPlugin
@@ -35,7 +38,7 @@ class ActionPlugin(LayerPlugin):
35 sys.stderr.write("Specified layer directory %s doesn't contain a conf/layer.conf file\n" % layerdir) 38 sys.stderr.write("Specified layer directory %s doesn't contain a conf/layer.conf file\n" % layerdir)
36 return 1 39 return 1
37 40
38 bblayers_conf = os.path.join('conf', 'bblayers.conf') 41 bblayers_conf = os.path.join(findTopdir(), 'conf', 'bblayers.conf')
39 if not os.path.exists(bblayers_conf): 42 if not os.path.exists(bblayers_conf):
40 sys.stderr.write("Unable to find bblayers.conf\n") 43 sys.stderr.write("Unable to find bblayers.conf\n")
41 return 1 44 return 1
@@ -48,12 +51,14 @@ class ActionPlugin(LayerPlugin):
48 try: 51 try:
49 notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None) 52 notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None)
50 if not (args.force or notadded): 53 if not (args.force or notadded):
54 self.tinfoil.modified_files()
51 try: 55 try:
52 self.tinfoil.run_command('parseConfiguration') 56 self.tinfoil.run_command('parseConfiguration')
53 except (bb.tinfoil.TinfoilUIException, bb.BBHandledException): 57 except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
54 # Restore the back up copy of bblayers.conf 58 # Restore the back up copy of bblayers.conf
55 shutil.copy2(backup, bblayers_conf) 59 shutil.copy2(backup, bblayers_conf)
56 bb.fatal("Parse failure with the specified layer added, aborting.") 60 self.tinfoil.modified_files()
61 bb.fatal("Parse failure with the specified layer added, exiting.")
57 else: 62 else:
58 for item in notadded: 63 for item in notadded:
59 sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item) 64 sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
@@ -63,7 +68,7 @@ class ActionPlugin(LayerPlugin):
63 68
64 def do_remove_layer(self, args): 69 def do_remove_layer(self, args):
65 """Remove one or more layers from bblayers.conf.""" 70 """Remove one or more layers from bblayers.conf."""
66 bblayers_conf = os.path.join('conf', 'bblayers.conf') 71 bblayers_conf = os.path.join(findTopdir(), 'conf', 'bblayers.conf')
67 if not os.path.exists(bblayers_conf): 72 if not os.path.exists(bblayers_conf):
68 sys.stderr.write("Unable to find bblayers.conf\n") 73 sys.stderr.write("Unable to find bblayers.conf\n")
69 return 1 74 return 1
@@ -78,6 +83,9 @@ class ActionPlugin(LayerPlugin):
78 layerdir = os.path.abspath(item) 83 layerdir = os.path.abspath(item)
79 layerdirs.append(layerdir) 84 layerdirs.append(layerdir)
80 (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs) 85 (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs)
86 if args.force > 1:
87 return 0
88 self.tinfoil.modified_files()
81 if notremoved: 89 if notremoved:
82 for item in notremoved: 90 for item in notremoved:
83 sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item) 91 sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
@@ -237,6 +245,9 @@ build results (as the layer priority order has effectively changed).
237 if not entry_found: 245 if not entry_found:
238 logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full) 246 logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)
239 247
248 self.tinfoil.modified_files()
249
250
240 def get_file_layer(self, filename): 251 def get_file_layer(self, filename):
241 layerdir = self.get_file_layerdir(filename) 252 layerdir = self.get_file_layerdir(filename)
242 if layerdir: 253 if layerdir:
diff --git a/bitbake/lib/bblayers/common.py b/bitbake/lib/bblayers/common.py
index 6c76ef3505..f7b9cee371 100644
--- a/bitbake/lib/bblayers/common.py
+++ b/bitbake/lib/bblayers/common.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/bitbake/lib/bblayers/layerindex.py b/bitbake/lib/bblayers/layerindex.py
index b2f27b21ee..ba91fac669 100644
--- a/bitbake/lib/bblayers/layerindex.py
+++ b/bitbake/lib/bblayers/layerindex.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -47,6 +49,31 @@ class LayerIndexPlugin(ActionPlugin):
47 else: 49 else:
48 logger.plain("Repository %s needs to be fetched" % url) 50 logger.plain("Repository %s needs to be fetched" % url)
49 return subdir, layername, layerdir 51 return subdir, layername, layerdir
52 elif os.path.exists(repodir) and branch:
53 """
54 If the repo is already cloned, ensure it is on the correct branch,
55 switching branches if necessary and possible.
56 """
57 base_cmd = ['git', '--git-dir=%s/.git' % repodir, '--work-tree=%s' % repodir]
58 cmd = base_cmd + ['branch']
59 completed_proc = subprocess.run(cmd, text=True, capture_output=True)
60 if completed_proc.returncode:
61 logger.error("Unable to validate repo %s (%s)" % (repodir, completed_proc.stderr))
62 return None, None, None
63 else:
64 if branch != completed_proc.stdout[2:-1]:
65 cmd = base_cmd + ['status', '--short']
66 completed_proc = subprocess.run(cmd, text=True, capture_output=True)
67 if completed_proc.stdout.count('\n') != 0:
68 logger.warning("There are uncommitted changes in repo %s" % repodir)
69 cmd = base_cmd + ['checkout', branch]
70 completed_proc = subprocess.run(cmd, text=True, capture_output=True)
71 if completed_proc.returncode:
72 # Could be due to original shallow clone on a different branch for example
73 logger.error("Unable to automatically switch %s to desired branch '%s' (%s)"
74 % (repodir, branch, completed_proc.stderr))
75 return None, None, None
76 return subdir, layername, layerdir
50 elif os.path.exists(layerdir): 77 elif os.path.exists(layerdir):
51 return subdir, layername, layerdir 78 return subdir, layername, layerdir
52 else: 79 else:
@@ -159,12 +186,17 @@ class LayerIndexPlugin(ActionPlugin):
159 logger.plain(' recommended by: %s' % ' '.join(recommendedby)) 186 logger.plain(' recommended by: %s' % ' '.join(recommendedby))
160 187
161 if dependencies: 188 if dependencies:
162 fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR') 189 if args.fetchdir:
163 if not fetchdir: 190 fetchdir = args.fetchdir
164 logger.error("Cannot get BBLAYERS_FETCH_DIR") 191 else:
165 return 1 192 fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR')
193 if not fetchdir:
194 logger.error("Cannot get BBLAYERS_FETCH_DIR")
195 return 1
196
166 if not os.path.exists(fetchdir): 197 if not os.path.exists(fetchdir):
167 os.makedirs(fetchdir) 198 os.makedirs(fetchdir)
199
168 addlayers = [] 200 addlayers = []
169 201
170 for deplayerbranch in dependencies: 202 for deplayerbranch in dependencies:
@@ -206,6 +238,8 @@ class LayerIndexPlugin(ActionPlugin):
206""" 238"""
207 args.show_only = True 239 args.show_only = True
208 args.ignore = [] 240 args.ignore = []
241 args.fetchdir = ""
242 args.shallow = True
209 self.do_layerindex_fetch(args) 243 self.do_layerindex_fetch(args)
210 244
211 def register_commands(self, sp): 245 def register_commands(self, sp):
@@ -214,6 +248,7 @@ class LayerIndexPlugin(ActionPlugin):
214 parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch') 248 parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch')
215 parser_layerindex_fetch.add_argument('-s', '--shallow', help='do only shallow clones (--depth=1)', action='store_true') 249 parser_layerindex_fetch.add_argument('-s', '--shallow', help='do only shallow clones (--depth=1)', action='store_true')
216 parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER') 250 parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER')
251 parser_layerindex_fetch.add_argument('-f', '--fetchdir', help='directory to fetch the layer(s) into (will be created if it does not exist)')
217 parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch') 252 parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch')
218 253
219 parser_layerindex_show_depends = self.add_command(sp, 'layerindex-show-depends', self.do_layerindex_show_depends, parserecipes=False) 254 parser_layerindex_show_depends = self.add_command(sp, 'layerindex-show-depends', self.do_layerindex_show_depends, parserecipes=False)
diff --git a/bitbake/lib/bblayers/query.py b/bitbake/lib/bblayers/query.py
index f5e3c84747..eb7cb465b4 100644
--- a/bitbake/lib/bblayers/query.py
+++ b/bitbake/lib/bblayers/query.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -27,12 +29,12 @@ class QueryPlugin(LayerPlugin):
27 29
28 def do_show_layers(self, args): 30 def do_show_layers(self, args):
29 """show current configured layers.""" 31 """show current configured layers."""
30 logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority")) 32 logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(70), "priority"))
31 logger.plain('=' * 74) 33 logger.plain('=' * 104)
32 for layer, _, regex, pri in self.tinfoil.cooker.bbfile_config_priorities: 34 for layer, _, regex, pri in self.tinfoil.cooker.bbfile_config_priorities:
33 layerdir = self.bbfile_collections.get(layer, None) 35 layerdir = self.bbfile_collections.get(layer, None)
34 layername = self.get_layer_name(layerdir) 36 layername = layer
35 logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), pri)) 37 logger.plain("%s %s %s" % (layername.ljust(20), layerdir.ljust(70), pri))
36 38
37 def version_str(self, pe, pv, pr = None): 39 def version_str(self, pe, pv, pr = None):
38 verstr = "%s" % pv 40 verstr = "%s" % pv
@@ -55,11 +57,12 @@ are overlayed will also be listed, with a " (skipped)" suffix.
55 # Check for overlayed .bbclass files 57 # Check for overlayed .bbclass files
56 classes = collections.defaultdict(list) 58 classes = collections.defaultdict(list)
57 for layerdir in self.bblayers: 59 for layerdir in self.bblayers:
58 classdir = os.path.join(layerdir, 'classes') 60 for c in ["classes-global", "classes-recipe", "classes"]:
59 if os.path.exists(classdir): 61 classdir = os.path.join(layerdir, c)
60 for classfile in os.listdir(classdir): 62 if os.path.exists(classdir):
61 if os.path.splitext(classfile)[1] == '.bbclass': 63 for classfile in os.listdir(classdir):
62 classes[classfile].append(classdir) 64 if os.path.splitext(classfile)[1] == '.bbclass':
65 classes[classfile].append(classdir)
63 66
64 # Locating classes and other files is a bit more complicated than recipes - 67 # Locating classes and other files is a bit more complicated than recipes -
65 # layer priority is not a factor; instead BitBake uses the first matching 68 # layer priority is not a factor; instead BitBake uses the first matching
@@ -122,22 +125,27 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
122 if inherits: 125 if inherits:
123 bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) 126 bbpath = str(self.tinfoil.config_data.getVar('BBPATH'))
124 for classname in inherits: 127 for classname in inherits:
125 classfile = 'classes/%s.bbclass' % classname 128 found = False
126 if not bb.utils.which(bbpath, classfile, history=False): 129 for c in ["classes-global", "classes-recipe", "classes"]:
127 logger.error('No class named %s found in BBPATH', classfile) 130 cfile = c + '/%s.bbclass' % classname
131 if bb.utils.which(bbpath, cfile, history=False):
132 found = True
133 break
134 if not found:
135 logger.error('No class named %s found in BBPATH', classname)
128 sys.exit(1) 136 sys.exit(1)
129 137
130 pkg_pn = self.tinfoil.cooker.recipecaches[mc].pkg_pn 138 pkg_pn = self.tinfoil.cooker.recipecaches[mc].pkg_pn
131 (latest_versions, preferred_versions) = self.tinfoil.find_providers(mc) 139 (latest_versions, preferred_versions, required_versions) = self.tinfoil.find_providers(mc)
132 allproviders = self.tinfoil.get_all_providers(mc) 140 allproviders = self.tinfoil.get_all_providers(mc)
133 141
134 # Ensure we list skipped recipes 142 # Ensure we list skipped recipes
135 # We are largely guessing about PN, PV and the preferred version here, 143 # We are largely guessing about PN, PV and the preferred version here,
136 # but we have no choice since skipped recipes are not fully parsed 144 # but we have no choice since skipped recipes are not fully parsed
137 skiplist = list(self.tinfoil.cooker.skiplist.keys()) 145 skiplist = list(self.tinfoil.cooker.skiplist_by_mc[mc].keys())
138 mcspec = 'mc:%s:' % mc 146
139 if mc: 147 if mc:
140 skiplist = [s[len(mcspec):] for s in skiplist if s.startswith(mcspec)] 148 skiplist = [s.removeprefix(f'mc:{mc}:') for s in skiplist]
141 149
142 for fn in skiplist: 150 for fn in skiplist:
143 recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_') 151 recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_')
@@ -154,7 +162,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
154 def print_item(f, pn, ver, layer, ispref): 162 def print_item(f, pn, ver, layer, ispref):
155 if not selected_layer or layer == selected_layer: 163 if not selected_layer or layer == selected_layer:
156 if not bare and f in skiplist: 164 if not bare and f in skiplist:
157 skipped = ' (skipped)' 165 skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist_by_mc[mc][f].skipreason
158 else: 166 else:
159 skipped = '' 167 skipped = ''
160 if show_filenames: 168 if show_filenames:
@@ -172,7 +180,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
172 logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) 180 logger.plain(" %s %s%s", layer.ljust(20), ver, skipped)
173 181
174 global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split() 182 global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split()
175 cls_re = re.compile('classes/') 183 cls_re = re.compile('classes.*/')
176 184
177 preffiles = [] 185 preffiles = []
178 show_unique_pn = [] 186 show_unique_pn = []
@@ -274,7 +282,10 @@ Lists recipes with the bbappends that apply to them as subitems.
274 else: 282 else:
275 logger.plain('=== Appended recipes ===') 283 logger.plain('=== Appended recipes ===')
276 284
277 pnlist = list(self.tinfoil.cooker_data.pkg_pn.keys()) 285
286 cooker_data = self.tinfoil.cooker.recipecaches[args.mc]
287
288 pnlist = list(cooker_data.pkg_pn.keys())
278 pnlist.sort() 289 pnlist.sort()
279 appends = False 290 appends = False
280 for pn in pnlist: 291 for pn in pnlist:
@@ -287,26 +298,28 @@ Lists recipes with the bbappends that apply to them as subitems.
287 if not found: 298 if not found:
288 continue 299 continue
289 300
290 if self.show_appends_for_pn(pn): 301 if self.show_appends_for_pn(pn, cooker_data, args.mc):
291 appends = True 302 appends = True
292 303
293 if not args.pnspec and self.show_appends_for_skipped(): 304 if not args.pnspec and self.show_appends_for_skipped(args.mc):
294 appends = True 305 appends = True
295 306
296 if not appends: 307 if not appends:
297 logger.plain('No append files found') 308 logger.plain('No append files found')
298 309
299 def show_appends_for_pn(self, pn): 310 def show_appends_for_pn(self, pn, cooker_data, mc):
300 filenames = self.tinfoil.cooker_data.pkg_pn[pn] 311 filenames = cooker_data.pkg_pn[pn]
312 if mc:
313 pn = "mc:%s:%s" % (mc, pn)
301 314
302 best = self.tinfoil.find_best_provider(pn) 315 best = self.tinfoil.find_best_provider(pn)
303 best_filename = os.path.basename(best[3]) 316 best_filename = os.path.basename(best[3])
304 317
305 return self.show_appends_output(filenames, best_filename) 318 return self.show_appends_output(filenames, best_filename)
306 319
307 def show_appends_for_skipped(self): 320 def show_appends_for_skipped(self, mc):
308 filenames = [os.path.basename(f) 321 filenames = [os.path.basename(f)
309 for f in self.tinfoil.cooker.skiplist.keys()] 322 for f in self.tinfoil.cooker.skiplist_by_mc[mc].keys()]
310 return self.show_appends_output(filenames, None, " (skipped)") 323 return self.show_appends_output(filenames, None, " (skipped)")
311 324
312 def show_appends_output(self, filenames, best_filename, name_suffix = ''): 325 def show_appends_output(self, filenames, best_filename, name_suffix = ''):
@@ -405,7 +418,7 @@ NOTE: .bbappend files can impact the dependencies.
405 self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers) 418 self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
406 419
407 # The inherit class 420 # The inherit class
408 cls_re = re.compile('classes/') 421 cls_re = re.compile('classes.*/')
409 if f in self.tinfoil.cooker_data.inherits: 422 if f in self.tinfoil.cooker_data.inherits:
410 inherits = self.tinfoil.cooker_data.inherits[f] 423 inherits = self.tinfoil.cooker_data.inherits[f]
411 for cls in inherits: 424 for cls in inherits:
@@ -441,10 +454,10 @@ NOTE: .bbappend files can impact the dependencies.
441 line = fnfile.readline() 454 line = fnfile.readline()
442 455
443 # The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass 456 # The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass
444 conf_re = re.compile(".*/conf/machine/[^\/]*\.conf$") 457 conf_re = re.compile(r".*/conf/machine/[^\/]*\.conf$")
445 inc_re = re.compile(".*\.inc$") 458 inc_re = re.compile(r".*\.inc$")
446 # The "inherit xxx" in .bbclass 459 # The "inherit xxx" in .bbclass
447 bbclass_re = re.compile(".*\.bbclass$") 460 bbclass_re = re.compile(r".*\.bbclass$")
448 for layerdir in self.bblayers: 461 for layerdir in self.bblayers:
449 layername = self.get_layer_name(layerdir) 462 layername = self.get_layer_name(layerdir)
450 for dirpath, dirnames, filenames in os.walk(layerdir): 463 for dirpath, dirnames, filenames in os.walk(layerdir):
@@ -522,6 +535,7 @@ NOTE: .bbappend files can impact the dependencies.
522 535
523 parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends) 536 parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends)
524 parser_show_appends.add_argument('pnspec', nargs='*', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)') 537 parser_show_appends.add_argument('pnspec', nargs='*', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')
538 parser_show_appends.add_argument('--mc', help='use specified multiconfig', default='')
525 539
526 parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends) 540 parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends)
527 parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true') 541 parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
diff --git a/bitbake/lib/bs4/AUTHORS b/bitbake/lib/bs4/AUTHORS
new file mode 100644
index 0000000000..1f14fe07de
--- /dev/null
+++ b/bitbake/lib/bs4/AUTHORS
@@ -0,0 +1,49 @@
1Behold, mortal, the origins of Beautiful Soup...
2================================================
3
4Leonard Richardson is the primary maintainer.
5
6Aaron DeVore and Isaac Muse have made significant contributions to the
7code base.
8
9Mark Pilgrim provided the encoding detection code that forms the base
10of UnicodeDammit.
11
12Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
13Soup 4 working under Python 3.
14
15Simon Willison wrote soupselect, which was used to make Beautiful Soup
16support CSS selectors. Isaac Muse wrote SoupSieve, which made it
17possible to _remove_ the CSS selector code from Beautiful Soup.
18
19Sam Ruby helped with a lot of edge cases.
20
21Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
22work in solving the nestable tags conundrum.
23
24An incomplete list of people have contributed patches to Beautiful
25Soup:
26
27 Istvan Albert, Andrew Lin, Anthony Baxter, Oliver Beattie, Andrew
28Boyko, Tony Chang, Francisco Canas, "Delong", Zephyr Fang, Fuzzy,
29Roman Gaufman, Yoni Gilad, Richie Hindle, Toshihiro Kamiya, Peteris
30Krumins, Kent Johnson, Marek Kapolka, Andreas Kostyrka, Roel Kramer,
31Ben Last, Robert Leftwich, Stefaan Lippens, "liquider", Staffan
32Malmgren, Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon",
33Ed Oskiewicz, Martijn Peters, Greg Phillips, Giles Radford, Stefano
34Revera, Arthur Rudolph, Marko Samastur, James Salter, Jouni Seppänen,
35Alexander Schmolck, Tim Shirley, Geoffrey Sneddon, Ville Skyttä,
36"Vikas", Jens Svalgaard, Andy Theyers, Eric Weiser, Glyn Webster, John
37Wiseman, Paul Wright, Danny Yoo
38
39An incomplete list of people who made suggestions or found bugs or
40found ways to break Beautiful Soup:
41
42 Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
43 Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
44 Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
45 warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
46 Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
47 Summers, Dennis Sutch, Chris Smith, Aaron Swartz, Stuart
48 Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
49 Sousa Rocha, Yichun Wei, Per Vognsen
diff --git a/bitbake/lib/bs4/AUTHORS.txt b/bitbake/lib/bs4/AUTHORS.txt
deleted file mode 100644
index 2ac8fcc8cc..0000000000
--- a/bitbake/lib/bs4/AUTHORS.txt
+++ /dev/null
@@ -1,43 +0,0 @@
1Behold, mortal, the origins of Beautiful Soup...
2================================================
3
4Leonard Richardson is the primary programmer.
5
6Aaron DeVore is awesome.
7
8Mark Pilgrim provided the encoding detection code that forms the base
9of UnicodeDammit.
10
11Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
12Soup 4 working under Python 3.
13
14Simon Willison wrote soupselect, which was used to make Beautiful Soup
15support CSS selectors.
16
17Sam Ruby helped with a lot of edge cases.
18
19Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
20work in solving the nestable tags conundrum.
21
22An incomplete list of people have contributed patches to Beautiful
23Soup:
24
25 Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang,
26 Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris
27 Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren,
28 Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed
29 Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko
30 Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn
31 Webster, Paul Wright, Danny Yoo
32
33An incomplete list of people who made suggestions or found bugs or
34found ways to break Beautiful Soup:
35
36 Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
37 Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
38 Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
39 warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
40 Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
41 Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart
42 Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
43 Sousa Rocha, Yichun Wei, Per Vognsen
diff --git a/bitbake/lib/bs4/NEWS.txt b/bitbake/lib/bs4/CHANGELOG
index 88a60a2458..2701446a6d 100644
--- a/bitbake/lib/bs4/NEWS.txt
+++ b/bitbake/lib/bs4/CHANGELOG
@@ -1,3 +1,776 @@
1= 4.12.3 (20240117)
2
3* The Beautiful Soup documentation now has a Spanish translation, thanks
4 to Carlos Romero. Delong Wang's Chinese translation has been updated
5 to cover Beautiful Soup 4.12.0.
6
7* Fixed a regression such that if you set .hidden on a tag, the tag
8 becomes invisible but its contents are still visible. User manipulation
9 of .hidden is not a documented or supported feature, so don't do this,
10 but it wasn't too difficult to keep the old behavior working.
11
12* Fixed a case found by Mengyuhan where html.parser giving up on
13 markup would result in an AssertionError instead of a
14 ParserRejectedMarkup exception.
15
16* Added the correct stacklevel to instances of the XMLParsedAsHTMLWarning.
17 [bug=2034451]
18
19* Corrected the syntax of the license definition in pyproject.toml. Patch
20 by Louis Maddox. [bug=2032848]
21
22* Corrected a typo in a test that was causing test failures when run against
23 libxml2 2.12.1. [bug=2045481]
24
25= 4.12.2 (20230407)
26
27* Fixed an unhandled exception in BeautifulSoup.decode_contents
28 and methods that call it. [bug=2015545]
29
30= 4.12.1 (20230405)
31
32NOTE: the following things are likely to be dropped in the next
33feature release of Beautiful Soup:
34
35 Official support for Python 3.6.
36 Inclusion of unit tests and test data in the wheel file.
37 Two scripts: demonstrate_parser_differences.py and test-all-versions.
38
39Changes:
40
41* This version of Beautiful Soup replaces setup.py and setup.cfg
42 with pyproject.toml. Beautiful Soup now uses tox as its test backend
43 and hatch to do builds.
44
45* The main functional improvement in this version is a nonrecursive technique
46 for regenerating a tree. This technique is used to avoid situations where,
47 in previous versions, doing something to a very deeply nested tree
48 would overflow the Python interpreter stack:
49
50 1. Outputting a tree as a string, e.g. with
51 BeautifulSoup.encode() [bug=1471755]
52
53 2. Making copies of trees (copy.copy() and
54 copy.deepcopy() from the Python standard library). [bug=1709837]
55
56 3. Pickling a BeautifulSoup object. (Note that pickling a Tag
57 object can still cause an overflow.)
58
59* Making a copy of a BeautifulSoup object no longer parses the
60 document again, which should improve performance significantly.
61
62* When a BeautifulSoup object is unpickled, Beautiful Soup now
63 tries to associate an appropriate TreeBuilder object with it.
64
65* Tag.prettify() will now consistently end prettified markup with
66 a newline.
67
68* Added unit tests for fuzz test cases created by third
69 parties. Some of these tests are skipped since they point
70 to problems outside of Beautiful Soup, but this change
71 puts them all in one convenient place.
72
73* PageElement now implements the known_xml attribute. (This was technically
74 a bug, but it shouldn't be an issue in normal use.) [bug=2007895]
75
76* The demonstrate_parser_differences.py script was still written in
77 Python 2. I've converted it to Python 3, but since no one has
78 mentioned this over the years, it's a sign that no one uses this
79 script and it's not serving its purpose.
80
81= 4.12.0 (20230320)
82
83* Introduced the .css property, which centralizes all access to
84 the Soup Sieve API. This allows Beautiful Soup to give direct
85 access to as much of Soup Sieve that makes sense, without cluttering
86 the BeautifulSoup and Tag classes with a lot of new methods.
87
88 This does mean one addition to the BeautifulSoup and Tag classes
89 (the .css property itself), so this might be a breaking change if you
90 happen to use Beautiful Soup to parse XML that includes a tag called
91 <css>. In particular, code like this will stop working in 4.12.0:
92
93 soup.css['id']
94
95 Code like this will work just as before:
96
97 soup.find('css')['id']
98
99 The Soup Sieve methods supported through the .css property are
100 select(), select_one(), iselect(), closest(), match(), filter(),
101 escape(), and compile(). The BeautifulSoup and Tag classes still
102 support the select() and select_one() methods; they have not been
103 deprecated, but they have been demoted to convenience methods.
104
105 [bug=2003677]
106
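  For illustration, a minimal sketch of the .css property in use
  (markup invented for the example; soupsieve must be installed):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<p class="intro">Hello</p>', 'html.parser')
   # soup.css exposes the Soup Sieve API on the parsed document.
   print(soup.css.select_one('p.intro').get_text())  # Hello
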
107* When the html.parser parser decides it can't parse a document, Beautiful
108 Soup now consistently propagates this fact by raising a
109 ParserRejectedMarkup error. [bug=2007343]
110
111* Removed some error checking code from diagnose(), which is redundant with
112 similar (but more Pythonic) code in the BeautifulSoup constructor.
113 [bug=2007344]
114
115* Added intersphinx references to the documentation so that other
116 projects have a target to point to when they reference Beautiful
117 Soup classes. [bug=1453370]
118
119= 4.11.2 (20230131)
120
121* Fixed test failures caused by nondeterministic behavior of
122 UnicodeDammit's character detection, depending on the platform setup.
123 [bug=1973072]
124
125* Fixed another crash when overriding multi_valued_attributes and using the
126 html5lib parser. [bug=1948488]
127
128* The HTMLFormatter and XMLFormatter constructors no longer return a
129 value. [bug=1992693]
130
131* Tag.interesting_string_types is now propagated when a tag is
132 copied. [bug=1990400]
133
134* Warnings now do their best to provide an appropriate stacklevel,
135 improving the usefulness of the message. [bug=1978744]
136
137* Passing a Tag's .contents into PageElement.extend() now works the
138 same way as passing the Tag itself.
139
140* Soup Sieve tests will be skipped if the library is not installed.
141
142= 4.11.1 (20220408)
143
144This release was done to ensure that the unit tests are packaged along
145with the released source. There are no functionality changes in this
146release, but there are a few other packaging changes:
147
148* The Japanese and Korean translations of the documentation are included.
149* The changelog is now packaged as CHANGELOG, and the license file is
150 packaged as LICENSE. NEWS.txt and COPYING.txt are still present,
151 but may be removed in the future.
152* TODO.txt is no longer packaged, since a TODO is not relevant for released
153 code.
154
155= 4.11.0 (20220407)
156
157* Ported unit tests to use pytest.
158
159* Added special string classes, RubyParenthesisString and RubyTextString,
160 to make it possible to treat ruby text specially in get_text() calls.
161 [bug=1941980]
162
163* It's now possible to customize the way output is indented by
164 providing a value for the 'indent' argument to the Formatter
165 constructor. The 'indent' argument works very similarly to the
166 argument of the same name in the Python standard library's
167 json.dump() function. [bug=1955497]
168
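  As a rough sketch of the 'indent' argument (HTMLFormatter lives in
  the bs4.formatter module; the markup is invented):

   from bs4 import BeautifulSoup
   from bs4.formatter import HTMLFormatter

   soup = BeautifulSoup('<div><p>x</p></div>', 'html.parser')
   # 'indent' behaves like the argument of the same name in json.dump().
   print(soup.prettify(formatter=HTMLFormatter(indent=4)))
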
169* If the charset-normalizer Python module
170 (https://pypi.org/project/charset-normalizer/) is installed, Beautiful
171 Soup will use it to detect the character sets of incoming documents.
172 This is also the module used by newer versions of the Requests library.
173 For the sake of backwards compatibility, chardet and cchardet both take
174 precedence if installed. [bug=1955346]
175
176* Added a workaround for an lxml bug
177 (https://bugs.launchpad.net/lxml/+bug/1948551) that causes
178 problems when parsing a Unicode string beginning with BYTE ORDER MARK.
179 [bug=1947768]
180
181* Issue a warning when an HTML parser is used to parse a document that
182 looks like XML but not XHTML. [bug=1939121]
183
184* Do a better job of keeping track of namespaces as an XML document is
185 parsed, so that CSS selectors that use namespaces will do the right
186 thing more often. [bug=1946243]
187
188* Some time ago, the misleadingly named "text" argument to find-type
189 methods was renamed to the more accurate "string." But this supposed
190 "renaming" didn't make it into important places like the method
191 signatures or the docstrings. That's corrected in this
192 version. "text" still works, but will give a DeprecationWarning.
193 [bug=1947038]
194
195* Fixed a crash when pickling a BeautifulSoup object that has no
196 tree builder. [bug=1934003]
197
198* Fixed a crash when overriding multi_valued_attributes and using the
199 html5lib parser. [bug=1948488]
200
201* Standardized the wording of the MarkupResemblesLocatorWarning
202 warnings to omit untrusted input and make the warnings less
203 judgmental about what you ought to be doing. [bug=1955450]
204
205* Removed support for the iconv_codec library, which doesn't seem
206 to exist anymore and was never put up on PyPI. (The closest
207 replacement on PyPI, iconv_codecs, is GPL-licensed, so we can't use
208 it--it's also quite old.)
209
210= 4.10.0 (20210907)
211
212* This is the first release of Beautiful Soup to only support Python
213 3. I dropped Python 2 support to maintain support for newer versions
214 (58 and up) of setuptools. See:
215 https://github.com/pypa/setuptools/issues/2769 [bug=1942919]
216
217* The behavior of methods like .get_text() and .strings now differs
218 depending on the type of tag. The change is visible with HTML tags
219 like <script>, <style>, and <template>. Starting in 4.9.0, methods
220 like get_text() returned no results on such tags, because the
221 contents of those tags are not considered 'text' within the document
222 as a whole.
223
224 But a user who calls script.get_text() is working from a different
225 definition of 'text' than a user who calls div.get_text()--otherwise
226 there would be no need to call script.get_text() at all. In 4.10.0,
227 the contents of (e.g.) a <script> tag are considered 'text' during a
228 get_text() call on the tag itself, but not considered 'text' during
229 a get_text() call on the tag's parent.
230
231 Because of this change, calling get_text() on each child of a tag
232 may now return a different result than calling get_text() on the tag
233 itself. That's because different tags now have different
234 understandings of what counts as 'text'. [bug=1906226] [bug=1868861]
235
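  A short sketch of the behavior described above (markup invented):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<div><script>x = 1;</script>hi</div>',
                        'html.parser')
   # <script> contents count as 'text' only when asked for directly.
   print(soup.find('script').get_text())  # x = 1;
   print(soup.find('div').get_text())     # hi
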
236* NavigableString and its subclasses now implement the get_text()
237 method, as well as the properties .strings and
238 .stripped_strings. These methods will either return the string
239 itself, or nothing, so the only reason to use this is when iterating
240 over a list of mixed Tag and NavigableString objects. [bug=1904309]
241
242* The 'html5' formatter now treats attributes whose values are the
243 empty string as HTML boolean attributes. Previously (and in other
244 formatters), an attribute value must be set as None to be treated as
245 a boolean attribute. In a future release, I plan to also give this
246 behavior to the 'html' formatter. Patch by Isaac Muse. [bug=1915424]
247
248* The 'replace_with()' method now takes a variable number of arguments,
249 and can be used to replace a single element with a sequence of elements.
250 Patch by Bill Chandos. [rev=605]
251
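  For example (invented markup):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<p><b>old</b></p>', 'html.parser')
   em = soup.new_tag('em')
   em.string = 'new'
   # One element is replaced by a sequence of elements.
   soup.b.replace_with(em, ' and more')
   print(soup)  # <p><em>new</em> and more</p>
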
252* Corrected output when the namespace prefix associated with a
253 namespaced attribute is the empty string, as opposed to
254 None. [bug=1915583]
255
256* Performance improvement when processing tags that speeds up overall
257 tree construction by 2%. Patch by Morotti. [bug=1899358]
258
259* Corrected the use of special string container classes in cases when a
260 single tag may contain strings with different containers; such as
261 the <template> tag, which may contain both TemplateString objects
262 and Comment objects. [bug=1913406]
263
264* The html.parser tree builder can now handle named entities
265 found in the HTML5 spec in much the same way that the html5lib
266 tree builder does. Note that the lxml HTML tree builder doesn't handle
267 named entities this way. [bug=1924908]
268
269* Added a second way to specify encodings to UnicodeDammit and
270 EncodingDetector, based on the order of precedence defined in the
271 HTML5 spec, starting at:
272 https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding
273
274 Encodings in 'known_definite_encodings' are tried first, then
275 byte-order-mark sniffing is run, then encodings in 'user_encodings'
276 are tried. The old argument, 'override_encodings', is now a
277 deprecated alias for 'known_definite_encodings'.
278
279 This changes the default behavior of the html.parser and lxml tree
280 builders, in a way that may slightly improve encoding
281 detection but will probably have no effect. [bug=1889014]
282
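  A minimal sketch of the new arguments (the byte string is invented
  for the example):

   from bs4 import UnicodeDammit

   data = 'caf\xe9'.encode('latin-1')
   # known_definite_encodings are tried before byte-order-mark sniffing;
   # user_encodings would be tried afterwards.
   dammit = UnicodeDammit(data, known_definite_encodings=['latin-1'])
   print(dammit.unicode_markup)     # café
   print(dammit.original_encoding)  # latin-1
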
283* Improve the warning issued when a directory name (as opposed to
284 the name of a regular file) is passed as markup into the BeautifulSoup
285 constructor. [bug=1913628]
286
287= 4.9.3 (20201003)
288
289This is the final release of Beautiful Soup to support Python
2902. Beautiful Soup's official support for Python 2 ended on 01 January,
2912021. In the Launchpad Git repository, the final revision to support
292Python 2 was revision 70f546b1e689a70e2f103795efce6d261a3dadf7; it is
293tagged as "python2".
294
295* Implemented a significant performance optimization to the process of
296 searching the parse tree. Patch by Morotti. [bug=1898212]
297
298= 4.9.2 (20200926)
299
300* Fixed a bug that caused too many tags to be popped from the tag
301 stack during tree building, when encountering a closing tag that had
302 no matching opening tag. [bug=1880420]
303
304* Fixed a bug that inconsistently moved elements over when passing
305 a Tag, rather than a list, into Tag.extend(). [bug=1885710]
306
307* Specify the soupsieve dependency in a way that complies with
308 PEP 508. Patch by Mike Nerone. [bug=1893696]
309
310* Change the signatures for BeautifulSoup.insert_before and insert_after
311 (which are not implemented) to match PageElement.insert_before and
312 insert_after, quieting warnings in some IDEs. [bug=1897120]
313
314= 4.9.1 (20200517)
315
316* Added a keyword argument 'on_duplicate_attribute' to the
317 BeautifulSoupHTMLParser constructor (used by the html.parser tree
318 builder) which lets you customize the handling of markup that
319 contains the same attribute more than once, as in:
320 <a href="url1" href="url2"> [bug=1878209]
321
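  Since keyword arguments to the BeautifulSoup constructor are passed
  through to the tree builder, a rough sketch of this option looks like:

   from bs4 import BeautifulSoup

   markup = '<a href="url1" href="url2">link</a>'
   # 'replace' keeps the last value seen for a duplicated attribute.
   soup = BeautifulSoup(markup, 'html.parser',
                        on_duplicate_attribute='replace')
   print(soup.a['href'])  # url2
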
322* Added a distinct subclass, GuessedAtParserWarning, for the warning
323 issued when BeautifulSoup is instantiated without a parser being
324 specified. [bug=1873787]
325
326* Added a distinct subclass, MarkupResemblesLocatorWarning, for the
327 warning issued when BeautifulSoup is instantiated with 'markup' that
328 actually seems to be a URL or the path to a file on
329 disk. [bug=1873787]
330
331* The new NavigableString subclasses (Stylesheet, Script, and
332 TemplateString) can now be imported directly from the bs4 package.
333
334* If you encode a document with a Python-specific encoding like
335 'unicode_escape', that encoding is no longer mentioned in the final
336 XML or HTML document. Instead, encoding information is omitted or
337 left blank. [bug=1874955]
338
339* Fixed test failures when run against soupsieve 2.0. Patch by Tomáš
340 Chvátal. [bug=1872279]
341
342= 4.9.0 (20200405)
343
344* Added PageElement.decomposed, a new property which lets you
345 check whether you've already called decompose() on a Tag or
346 NavigableString.
347
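  For example (invented markup):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<p><b>x</b></p>', 'html.parser')
   b = soup.b
   b.decompose()
   print(b.decomposed)  # True
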
348* Embedded CSS and Javascript is now stored in distinct Stylesheet and
349 Script tags, which are ignored by methods like get_text() since most
350 people don't consider this sort of content to be 'text'. This
351 feature is not supported by the html5lib treebuilder. [bug=1868861]
352
353* Added a Russian translation by 'authoress' to the repository.
354
355* Fixed an unhandled exception when formatting a Tag that had been
356 decomposed.[bug=1857767]
357
358* Fixed a bug that happened when passing a Unicode filename containing
359 non-ASCII characters as markup into Beautiful Soup, on a system that
360 allows Unicode filenames. [bug=1866717]
361
362* Added a performance optimization to PageElement.extract(). Patch by
363 Arthur Darcet.
364
365= 4.8.2 (20191224)
366
367* Added Python docstrings to all public methods of the most commonly
368 used classes.
369
370* Added a Chinese translation by Deron Wang and a Brazilian Portuguese
371 translation by Cezar Peixeiro to the repository.
372
373* Fixed two deprecation warnings. Patches by Colin
374 Watson and Nicholas Neumann. [bug=1847592] [bug=1855301]
375
376* The html.parser tree builder now correctly handles DOCTYPEs that are
377 not uppercase. [bug=1848401]
378
379* PageElement.select() now returns a ResultSet rather than a regular
380 list, making it consistent with methods like find_all().
381
382= 4.8.1 (20191006)
383
384* When the html.parser or html5lib parsers are in use, Beautiful Soup
385 will, by default, record the position in the original document where
386 each tag was encountered. This includes line number (Tag.sourceline)
387 and position within a line (Tag.sourcepos). Based on code by Chris
388 Mayo. [bug=1742921]
389
390* When instantiating a BeautifulSoup object, it's now possible to
391 provide a dictionary ('element_classes') of the classes you'd like to be
392 instantiated instead of Tag, NavigableString, etc.
393
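  A minimal sketch (the Tag subclass here is invented for the example):

   from bs4 import BeautifulSoup, Tag

   class MyTag(Tag):
       pass

   soup = BeautifulSoup('<b>x</b>', 'html.parser',
                        element_classes={Tag: MyTag})
   print(type(soup.b))  # <class '__main__.MyTag'>
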
394* Fixed the definition of the default XML namespace when using
395 lxml 4.4. Patch by Isaac Muse. [bug=1840141]
396
397* Fixed a crash when pretty-printing tags that were not created
398 during initial parsing. [bug=1838903]
399
400* Copying a Tag preserves information that was originally obtained from
401 the TreeBuilder used to build the original Tag. [bug=1838903]
402
403* Raise an explanatory exception when the underlying parser
404 completely rejects the incoming markup. [bug=1838877]
405
406* Avoid a crash when trying to detect the declared encoding of a
407 Unicode document. [bug=1838877]
408
409* Avoid a crash when unpickling certain parse trees generated
410 using html5lib on Python 3. [bug=1843545]
411
412= 4.8.0 (20190720, "One Small Soup")
413
414This release focuses on making it easier to customize Beautiful Soup's
415input mechanism (the TreeBuilder) and output mechanism (the Formatter).
416
417* You can customize the TreeBuilder object by passing keyword
418 arguments into the BeautifulSoup constructor. Those keyword
419 arguments will be passed along into the TreeBuilder constructor.
420
421 The main reason to do this right now is to change which
422 attributes are treated as multi-valued attributes (the way 'class'
423 is treated by default). You can do this with the
424 'multi_valued_attributes' argument. [bug=1832978]
425
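  For instance, a rough sketch that turns multi-valued attribute
  handling off entirely (invented markup):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<div class="a b">x</div>', 'html.parser',
                        multi_valued_attributes=None)
   # 'class' stays a plain string instead of becoming ['a', 'b'].
   print(soup.div['class'])  # a b
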
426* The role of Formatter objects has been greatly expanded. The Formatter
427 class now controls the following:
428
429 - The function to call to perform entity substitution. (This was
430 previously Formatter's only job.)
431 - Which tags should be treated as containing CDATA and have their
432 contents exempt from entity substitution.
433 - The order in which a tag's attributes are output. [bug=1812422]
434 - Whether or not to put a '/' inside a void element, e.g. '<br/>' vs '<br>'
435
436 All preexisting code should work as before.
437
438* Added a new method to the API, Tag.smooth(), which consolidates
439 multiple adjacent NavigableString elements. [bug=1697296]
440
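  For example (invented markup):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<p>hello</p>', 'html.parser')
   soup.p.append(' world')
   print(soup.p.contents)  # ['hello', ' world']
   soup.p.smooth()
   print(soup.p.contents)  # ['hello world']
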
441* &apos; (which is valid in XML, XHTML, and HTML 5, but not HTML 4) is always
442 recognized as a named entity and converted to a single quote. [bug=1818721]
443
444= 4.7.1 (20190106)
445
446* Fixed a significant performance problem introduced in 4.7.0. [bug=1810617]
447
448* Fixed an incorrectly raised exception when inserting a tag before or
449 after an identical tag. [bug=1810692]
450
451* Beautiful Soup will no longer try to keep track of namespaces that
452 are not defined with a prefix; this can confuse soupsieve. [bug=1810680]
453
454* Tried even harder to avoid the deprecation warning originally fixed in
455 4.6.1. [bug=1778909]
456
457= 4.7.0 (20181231)
458
459* Beautiful Soup's CSS Selector implementation has been replaced by a
460 dependency on Isaac Muse's SoupSieve project (the soupsieve package
461 on PyPI). The good news is that SoupSieve has a much more robust and
462 complete implementation of CSS selectors, resolving a large number
463 of longstanding issues. The bad news is that from this point onward,
464 SoupSieve must be installed if you want to use the select() method.
465
466 You don't have to change anything if you installed Beautiful Soup
467 through pip (SoupSieve will be automatically installed when you
468 upgrade Beautiful Soup) or if you don't use CSS selectors from
469 within Beautiful Soup.
470
471 SoupSieve documentation: https://facelessuser.github.io/soupsieve/
472
473* Added the PageElement.extend() method, which works like list.append().
474 [bug=1514970]
475
476* PageElement.insert_before() and insert_after() now take a variable
477 number of arguments. [bug=1514970]
478
479* Fix a number of problems with the tree builder that caused
480 trees that were superficially okay, but which fell apart when bits
481 were extracted. Patch by Isaac Muse. [bug=1782928,1809910]
482
483* Fixed a problem with the tree builder in which elements that
484 contained no content (such as empty comments and all-whitespace
485 elements) were not being treated as part of the tree. Patch by Isaac
486 Muse. [bug=1798699]
487
488* Fixed a problem with multi-valued attributes where the value
489 contained whitespace. Thanks to Jens Svalgaard for the
490 fix. [bug=1787453]
491
492* Clarified ambiguous license statements in the source code. Beautiful
493 Soup is released under the MIT license, and has been since 4.4.0.
494
495* This file has been renamed from NEWS.txt to CHANGELOG.
496
497= 4.6.3 (20180812)
498
499* Exactly the same as 4.6.2. Re-released to make the README file
500 render properly on PyPI.
501
502= 4.6.2 (20180812)
503
504* Fix an exception when a custom formatter was asked to format a void
505 element. [bug=1784408]
506
507= 4.6.1 (20180728)
508
509* Stop data loss when encountering an empty numeric entity, and
510 possibly in other cases. Thanks to tos.kamiya for the fix. [bug=1698503]
511
512* Preserve XML namespaces introduced inside an XML document, not just
513 the ones introduced at the top level. [bug=1718787]
514
515* Added a new formatter, "html5", which represents void elements
516 as "<element>" rather than "<element/>". [bug=1716272]
517
518* Fixed a problem where the html.parser tree builder interpreted
519 a string like "&foo " as the character entity "&foo;" [bug=1728706]
520
521* Correctly handle invalid HTML numeric character entities like &#147;
522 which reference code points that are not Unicode code points. Note
523 that this is only fixed when Beautiful Soup is used with the
524 html.parser parser -- html5lib already worked and I couldn't fix it
525 with lxml. [bug=1782933]
526
527* Improved the warning given when no parser is specified. [bug=1780571]
528
529* When markup contains duplicate elements, a select() call that
530 includes multiple match clauses will match all relevant
531 elements. [bug=1770596]
532
533* Fixed code that was causing deprecation warnings in recent Python 3
534 versions. Includes a patch from Ville Skyttä. [bug=1778909] [bug=1689496]
535
536* Fixed a Windows crash in diagnose() when checking whether a long
537 markup string is a filename. [bug=1737121]
538
539* Stopped HTMLParser from raising an exception in very rare cases of
540 bad markup. [bug=1708831]
541
542* Fixed a bug where find_all() was not working when asked to find a
543 tag with a namespaced name in an XML document that was parsed as
544 HTML. [bug=1723783]
545
546* You can get finer control over formatting by subclassing
547 bs4.element.Formatter and passing a Formatter instance into (e.g.)
548 encode(). [bug=1716272]
549
550* You can pass a dictionary of `attrs` into
551 BeautifulSoup.new_tag. This makes it possible to create a tag with
552 an attribute like 'name' that would otherwise be masked by another
553 argument of new_tag. [bug=1779276]
554
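  A minimal sketch of the `attrs` dictionary in use:

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('', 'html.parser')
   # 'name' would collide with new_tag's own parameter, so it goes
   # through attrs instead.
   tag = soup.new_tag('input', attrs={'name': 'q'})
   print(tag)  # <input name="q"/>
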
555* Clarified the deprecation warning when accessing tag.fooTag, to cover
556 the possibility that you might really have been looking for a tag
557 called 'fooTag'.
558
559= 4.6.0 (20170507) =
560
561* Added the `Tag.get_attribute_list` method, which acts like `Tag.get` for
562 getting the value of an attribute, but which always returns a list,
563 whether or not the attribute is a multi-value attribute. [bug=1678589]
564
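  For example (invented markup):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<a id="x" class="y z">t</a>', 'html.parser')
   print(soup.a.get('id'))                 # x
   print(soup.a.get_attribute_list('id'))  # ['x']
   print(soup.a.get('class'))              # ['y', 'z']
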
565* It's now possible to use a tag's namespace prefix when searching,
566 e.g. soup.find('namespace:tag') [bug=1655332]
567
568* Improved the handling of empty-element tags like <br> when using the
569 html.parser parser. [bug=1676935]
570
571* HTML parsers treat all HTML4 and HTML5 empty element tags (aka void
572 element tags) correctly. [bug=1656909]
573
574* Namespace prefix is preserved when an XML tag is copied. Thanks
575 to Vikas for a patch and test. [bug=1685172]
576
577= 4.5.3 (20170102) =
578
579* Fixed foster parenting when html5lib is the tree builder. Thanks to
580 Geoffrey Sneddon for a patch and test.
581
582* Fixed yet another problem that caused the html5lib tree builder to
583 create a disconnected parse tree. [bug=1629825]
584
585= 4.5.2 (20170102) =
586
587* Apart from the version number, this release is identical to
588 4.5.3. Due to user error, it could not be completely uploaded to
589 PyPI. Use 4.5.3 instead.
590
591= 4.5.1 (20160802) =
592
593* Fixed a crash when passing Unicode markup that contained a
594 processing instruction into the lxml HTML parser on Python
595 3. [bug=1608048]
596
597= 4.5.0 (20160719) =
598
599* Beautiful Soup is no longer compatible with Python 2.6. This
600 actually happened a few releases ago, but it's now official.
601
602* Beautiful Soup will now work with versions of html5lib greater than
603 0.99999999. [bug=1603299]
604
605* If a search against each individual value of a multi-valued
606 attribute fails, the search will be run one final time against the
607 complete attribute value considered as a single string. That is, if
608 a tag has class="foo bar" and neither "foo" nor "bar" matches, but
609 "foo bar" does, the tag is now considered a match.
610
611 This happened in previous versions, but only when the value being
612 searched for was a string. Now it also works when that value is
613 a regular expression, a list of strings, etc. [bug=1476868]
614
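  A short sketch of the behavior described above (invented markup):

   import re
   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<p class="foo bar">x</p>', 'html.parser')
   # Neither 'foo' nor 'bar' matches alone, but the complete
   # attribute value 'foo bar' does.
   print(soup.find_all('p', class_=re.compile('^foo bar$')))
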
615* Fixed a bug that deranged the tree when a whitespace element was
616 reparented into a tag that contained an identical whitespace
617 element. [bug=1505351]
618
619* Added support for CSS selector values that contain quoted spaces,
620 such as tag[style="display: foo"]. [bug=1540588]
621
622* Corrected handling of XML processing instructions. [bug=1504393]
623
624* Corrected an encoding error that happened when a BeautifulSoup
625 object was copied. [bug=1554439]
626
627* The contents of <textarea> tags will no longer be modified when the
628 tree is prettified. [bug=1555829]
629
630* When a BeautifulSoup object is pickled but its tree builder cannot
631 be pickled, its .builder attribute is set to None instead of being
632 destroyed. This avoids a performance problem once the object is
633 unpickled. [bug=1523629]
634
635* Specify the file and line number when warning about a
636 BeautifulSoup object being instantiated without a parser being
637 specified. [bug=1574647]
638
639* The `limit` argument to `select()` now works correctly, though it's
640 not implemented very efficiently. [bug=1520530]
641
642* Fixed a Python 3 BytesWarning when a URL was passed in as though it
643 were markup. Thanks to James Salter for a patch and
644 test. [bug=1533762]
645
646* We don't run the check for a filename passed in as markup if the
647 'filename' contains a less-than character; the less-than character
648 indicates it's most likely a very small document. [bug=1577864]
649
650= 4.4.1 (20150928) =
651
652* Fixed a bug that deranged the tree when part of it was
653 removed. Thanks to Eric Weiser for the patch and John Wiseman for a
654 test. [bug=1481520]
655
656* Fixed a parse bug with the html5lib tree-builder. Thanks to Roel
657 Kramer for the patch. [bug=1483781]
658
659* Improved the implementation of CSS selector grouping. Thanks to
660 Orangain for the patch. [bug=1484543]
661
662* Fixed the test_detect_utf8 test so that it works when chardet is
663 installed. [bug=1471359]
664
665* Corrected the output of Declaration objects. [bug=1477847]
666
667
668= 4.4.0 (20150703) =
669
670Especially important changes:
671
672* Added a warning when you instantiate a BeautifulSoup object without
673 explicitly naming a parser. [bug=1398866]
674
675* __repr__ now returns an ASCII bytestring in Python 2, and a Unicode
676 string in Python 3, instead of a UTF8-encoded bytestring in both
677 versions. In Python 3, __str__ now returns a Unicode string instead
678 of a bytestring. [bug=1420131]
679
680* The `text` argument to the find_* methods is now called `string`,
681 which is more accurate. `text` still works, but `string` is the
682 argument described in the documentation. `text` may eventually
683 change its meaning, but not for a very long time. [bug=1366856]
684
685* Changed the way soup objects work under copy.copy(). Copying a
686 NavigableString or a Tag will give you a new NavigableString that's
687 equal to the old one but not connected to the parse tree. Patch by
688 Martijn Peters. [bug=1307490]
689
690* Started using a standard MIT license. [bug=1294662]
691
692* Added a Chinese translation of the documentation by Delong .w.
693
694New features:
695
696* Introduced the select_one() method, which uses a CSS selector but
697 only returns the first match, instead of a list of
698 matches. [bug=1349367]
699
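  For example (invented markup; in current releases select() and
  select_one() also require soupsieve):

   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<p>a</p><p>b</p>', 'html.parser')
   print(soup.select_one('p'))   # <p>a</p>
   print(len(soup.select('p')))  # 2
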
700* You can now create a Tag object without specifying a
701 TreeBuilder. Patch by Martijn Pieters. [bug=1307471]
702
703* You can now create a NavigableString or a subclass just by invoking
704 the constructor. [bug=1294315]
705
706* Added an `exclude_encodings` argument to UnicodeDammit and to the
707 Beautiful Soup constructor, which lets you prohibit the detection of
708 an encoding that you know is wrong. [bug=1469408]
709
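  A rough sketch (the byte string is invented for the example):

   from bs4 import BeautifulSoup

   data = 'caf\xe9'.encode('latin-1')
   # Rule out an encoding known to be wrong; detection tries others.
   soup = BeautifulSoup(data, 'html.parser',
                        exclude_encodings=['utf-8'])
   print(soup.original_encoding)
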
710* The select() method now supports selector grouping. Patch by
711 Francisco Canas [bug=1191917]
712
713Bug fixes:
714
715* Fixed yet another problem that caused the html5lib tree builder to
716 create a disconnected parse tree. [bug=1237763]
717
718* Force object_was_parsed() to keep the tree intact even when an element
719 from later in the document is moved into place. [bug=1430633]
720
721* Fixed yet another bug that caused a disconnected tree when html5lib
722 copied an element from one part of the tree to another. [bug=1270611]
723
724* Fixed a bug where Element.extract() could create an infinite loop in
725 the remaining tree.
726
727* The select() method can now find tags whose names contain
728 dashes. Patch by Francisco Canas. [bug=1276211]
729
730* The select() method can now find tags with attributes whose names
731 contain dashes. Patch by Marek Kapolka. [bug=1304007]
732
733* Improved the lxml tree builder's handling of processing
734 instructions. [bug=1294645]
735
736* Restored the helpful syntax error that happens when you try to
737 import the Python 2 edition of Beautiful Soup under Python
738 3. [bug=1213387]
739
740* In Python 3.4 and above, set the new convert_charrefs argument to
741 the html.parser constructor to avoid a warning and future
742 failures. Patch by Stefano Revera. [bug=1375721]
743
744* The warning when you pass in a filename or URL as markup will now be
745 displayed correctly even if the filename or URL is a Unicode
746 string. [bug=1268888]
747
748* If the initial <html> tag contains a CDATA list attribute such as
749 'class', the html5lib tree builder will now turn its value into a
750 list, as it would with any other tag. [bug=1296481]
751
752* Fixed an import error in Python 3.5 caused by the removal of the
753 HTMLParseError class. [bug=1420063]
754
755* Improved docstring for encode_contents() and
756 decode_contents(). [bug=1441543]
757
758* Fixed a crash in Unicode, Dammit's encoding detector when the name
759 of the encoding itself contained invalid bytes. [bug=1360913]
760
761* Improved the exception raised when you call .unwrap() or
762 .replace_with() on an element that's not attached to a tree.
763
764* Raise a NotImplementedError whenever an unsupported CSS pseudoclass
765 is used in select(). Previously some cases did not result in a
766 NotImplementedError.
767
768* It's now possible to pickle a BeautifulSoup object no matter which
769 tree builder was used to create it. However, the only tree builder
770 that survives the pickling process is the HTMLParserTreeBuilder
771 ('html.parser'). If you unpickle a BeautifulSoup object created with
772 some other tree builder, soup.builder will be None. [bug=1231545]
773
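  For example, a minimal round trip with the one tree builder that
  survives pickling:

   import pickle
   from bs4 import BeautifulSoup

   soup = BeautifulSoup('<p>x</p>', 'html.parser')
   restored = pickle.loads(pickle.dumps(soup))
   print(restored.p.get_text())  # x
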
1= 4.3.2 (20131002) = 774= 4.3.2 (20131002) =
2 775
3* Fixed a bug in which short Unicode input was improperly encoded to 776* Fixed a bug in which short Unicode input was improperly encoded to
@@ -331,7 +1104,7 @@
331* Renamed Tag.nsprefix to Tag.prefix, for consistency with 1104* Renamed Tag.nsprefix to Tag.prefix, for consistency with
332 NamespacedAttribute. 1105 NamespacedAttribute.
333 1106
334* Fixed a test failure that occured on Python 3.x when chardet was 1107* Fixed a test failure that occurred on Python 3.x when chardet was
335 installed. 1108 installed.
336 1109
337* Made prettify() return Unicode by default, so it will look nice on 1110* Made prettify() return Unicode by default, so it will look nice on
@@ -365,7 +1138,7 @@
365 1138
366* Restored compatibility with Python 2.6. 1139* Restored compatibility with Python 2.6.
367 1140
368* The install process no longer installs docs or auxillary text files. 1141* The install process no longer installs docs or auxiliary text files.
369 1142
370* It's now possible to deepcopy a BeautifulSoup object created with 1143* It's now possible to deepcopy a BeautifulSoup object created with
371 Python's built-in HTML parser. 1144 Python's built-in HTML parser.
@@ -604,7 +1377,7 @@ Added an import that makes BS work in Python 2.3.
604Fixed a UnicodeDecodeError when unpickling documents that contain 1377Fixed a UnicodeDecodeError when unpickling documents that contain
605non-ASCII characters. 1378non-ASCII characters.
606 1379
607Fixed a TypeError that occured in some circumstances when a tag 1380Fixed a TypeError that occurred in some circumstances when a tag
608contained no text. 1381contained no text.
609 1382
610Jump through hoops to avoid the use of chardet, which can be extremely 1383Jump through hoops to avoid the use of chardet, which can be extremely
diff --git a/bitbake/lib/bs4/COPYING.txt b/bitbake/lib/bs4/LICENSE
index d668d13f04..08e3a9cf8c 100644
--- a/bitbake/lib/bs4/COPYING.txt
+++ b/bitbake/lib/bs4/LICENSE
@@ -1,6 +1,6 @@
1Beautiful Soup is made available under the MIT license: 1Beautiful Soup is made available under the MIT license:
2 2
3 Copyright (c) 2004-2012 Leonard Richardson 3 Copyright (c) Leonard Richardson
4 4
5 Permission is hereby granted, free of charge, to any person obtaining 5 Permission is hereby granted, free of charge, to any person obtaining
6 a copy of this software and associated documentation files (the 6 a copy of this software and associated documentation files (the
@@ -20,7 +20,12 @@ Beautiful Soup is made available under the MIT license:
20 BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN 20 BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
21 ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 21 ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
22 CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 SOFTWARE, DAMMIT. 23 SOFTWARE.
24 24
25Beautiful Soup incorporates code from the html5lib library, which is 25Beautiful Soup incorporates code from the html5lib library, which is
26also made available under the MIT license. 26also made available under the MIT license. Copyright (c) James Graham
27and other contributors
28
29Beautiful Soup has an optional dependency on the soupsieve library,
30which is also made available under the MIT license. Copyright (c)
31Isaac Muse
diff --git a/bitbake/lib/bs4/__init__.py b/bitbake/lib/bs4/__init__.py
index e35725b86e..725203d94a 100644
--- a/bitbake/lib/bs4/__init__.py
+++ b/bitbake/lib/bs4/__init__.py
@@ -1,65 +1,99 @@
1"""Beautiful Soup 1"""Beautiful Soup Elixir and Tonic - "The Screen-Scraper's Friend".
2Elixir and Tonic 2
3"The Screen-Scraper's Friend"
4http://www.crummy.com/software/BeautifulSoup/ 3http://www.crummy.com/software/BeautifulSoup/
5 4
6Beautiful Soup uses a pluggable XML or HTML parser to parse a 5Beautiful Soup uses a pluggable XML or HTML parser to parse a
7(possibly invalid) document into a tree representation. Beautiful Soup 6(possibly invalid) document into a tree representation. Beautiful Soup
8provides provides methods and Pythonic idioms that make it easy to 7provides methods and Pythonic idioms that make it easy to navigate,
9navigate, search, and modify the parse tree. 8search, and modify the parse tree.
10 9
11Beautiful Soup works with Python 2.6 and up. It works better if lxml 10Beautiful Soup works with Python 3.6 and up. It works better if lxml
12and/or html5lib is installed. 11and/or html5lib is installed.
13 12
14For more than you ever wanted to know about Beautiful Soup, see the 13For more than you ever wanted to know about Beautiful Soup, see the
15documentation: 14documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/
16http://www.crummy.com/software/BeautifulSoup/bs4/doc/
17""" 15"""
18 16
19__author__ = "Leonard Richardson (leonardr@segfault.org)" 17__author__ = "Leonard Richardson (leonardr@segfault.org)"
20__version__ = "4.4.1" 18__version__ = "4.12.3"
21__copyright__ = "Copyright (c) 2004-2015 Leonard Richardson" 19__copyright__ = "Copyright (c) 2004-2024 Leonard Richardson"
20# Use of this source code is governed by the MIT license.
22__license__ = "MIT" 21__license__ = "MIT"
23 22
24__all__ = ['BeautifulSoup'] 23__all__ = ['BeautifulSoup']
25 24
25from collections import Counter
26import os 26import os
27import re 27import re
28import sys
29import traceback
28import warnings 30import warnings
29 31
30from .builder import builder_registry, ParserRejectedMarkup 32# The very first thing we do is give a useful error if someone is
33# running this code under Python 2.
34if sys.version_info.major < 3:
35 raise ImportError('You are trying to use a Python 3-specific version of Beautiful Soup under Python 2. This will not work. The final version of Beautiful Soup to support Python 2 was 4.9.3.')
36
37from .builder import (
38 builder_registry,
39 ParserRejectedMarkup,
40 XMLParsedAsHTMLWarning,
41 HTMLParserTreeBuilder
42)
31from .dammit import UnicodeDammit 43from .dammit import UnicodeDammit
32from .element import ( 44from .element import (
33 CData, 45 CData,
34 Comment, 46 Comment,
47 CSS,
35 DEFAULT_OUTPUT_ENCODING, 48 DEFAULT_OUTPUT_ENCODING,
36 Declaration, 49 Declaration,
37 Doctype, 50 Doctype,
38 NavigableString, 51 NavigableString,
39 PageElement, 52 PageElement,
40 ProcessingInstruction, 53 ProcessingInstruction,
54 PYTHON_SPECIFIC_ENCODINGS,
41 ResultSet, 55 ResultSet,
56 Script,
57 Stylesheet,
42 SoupStrainer, 58 SoupStrainer,
43 Tag, 59 Tag,
60 TemplateString,
44 ) 61 )
45 62
46# The very first thing we do is give a useful error if someone is 63# Define some custom warnings.
47# running this code under Python 3 without converting it. 64class GuessedAtParserWarning(UserWarning):
48'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'!='You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).' 65 """The warning issued when BeautifulSoup has to guess what parser to
66 use -- probably because no parser was specified in the constructor.
67 """
49 68
50class BeautifulSoup(Tag): 69class MarkupResemblesLocatorWarning(UserWarning):
70 """The warning issued when BeautifulSoup is given 'markup' that
71 actually looks like a resource locator -- a URL or a path to a file
72 on disk.
51 """ 73 """
52 This class defines the basic interface called by the tree builders.
53 74
54 These methods will be called by the parser: 75
55 reset() 76class BeautifulSoup(Tag):
56 feed(markup) 77 """A data structure representing a parsed HTML or XML document.
78
79 Most of the methods you'll call on a BeautifulSoup object are inherited from
80 PageElement or Tag.
81
82 Internally, this class defines the basic interface called by the
83 tree builders when converting an HTML/XML document into a data
84 structure. The interface abstracts away the differences between
85 parsers. To write a new tree builder, you'll need to understand
86 these methods as a whole.
87
88 These methods will be called by the BeautifulSoup constructor:
89 * reset()
90 * feed(markup)
57 91
58 The tree builder may call these methods from its feed() implementation: 92 The tree builder may call these methods from its feed() implementation:
59 handle_starttag(name, attrs) # See note about return value 93 * handle_starttag(name, attrs) # See note about return value
60 handle_endtag(name) 94 * handle_endtag(name)
61 handle_data(data) # Appends to the current data node 95 * handle_data(data) # Appends to the current data node
62 endData(containerClass=NavigableString) # Ends the current data node 96 * endData(containerClass) # Ends the current data node
63 97
64 No matter how complicated the underlying parser is, you should be 98 No matter how complicated the underlying parser is, you should be
65 able to build a tree using 'start tag' events, 'end tag' events, 99 able to build a tree using 'start tag' events, 'end tag' events,
@@ -69,24 +103,77 @@ class BeautifulSoup(Tag):
69 like HTML's <br> tag), call handle_starttag and then 103 like HTML's <br> tag), call handle_starttag and then
70 handle_endtag. 104 handle_endtag.
71 """ 105 """
106
107 # Since BeautifulSoup subclasses Tag, it's possible to treat it as
108 # a Tag with a .name. This name makes it clear the BeautifulSoup
109 # object isn't a real markup tag.
72 ROOT_TAG_NAME = '[document]' 110 ROOT_TAG_NAME = '[document]'
73 111
74 # If the end-user gives no indication which tree builder they 112 # If the end-user gives no indication which tree builder they
75 # want, look for one with these features. 113 # want, look for one with these features.
76 DEFAULT_BUILDER_FEATURES = ['html', 'fast'] 114 DEFAULT_BUILDER_FEATURES = ['html', 'fast']
77 115
116 # A string containing all ASCII whitespace characters, used in
117 # endData() to detect data chunks that seem 'empty'.
78 ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' 118 ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
79 119
80 NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nTo get rid of this warning, change this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n" 120 NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, pass the additional argument 'features=\"%(parser)s\"' to the BeautifulSoup constructor.\n"
81 121
82 def __init__(self, markup="", features=None, builder=None, 122 def __init__(self, markup="", features=None, builder=None,
83 parse_only=None, from_encoding=None, exclude_encodings=None, 123 parse_only=None, from_encoding=None, exclude_encodings=None,
84 **kwargs): 124 element_classes=None, **kwargs):
85 """The Soup object is initialized as the 'root tag', and the 125 """Constructor.
86 provided markup (which can be a string or a file-like object) 126
87 is fed into the underlying parser.""" 127 :param markup: A string or a file-like object representing
88 128 markup to be parsed.
129
130 :param features: Desirable features of the parser to be
131 used. This may be the name of a specific parser ("lxml",
132 "lxml-xml", "html.parser", or "html5lib") or it may be the
133 type of markup to be used ("html", "html5", "xml"). It's
134 recommended that you name a specific parser, so that
135 Beautiful Soup gives you the same results across platforms
136 and virtual environments.
137
138 :param builder: A TreeBuilder subclass to instantiate (or
139 instance to use) instead of looking one up based on
140 `features`. You only need to use this if you've implemented a
141 custom TreeBuilder.
142
143 :param parse_only: A SoupStrainer. Only parts of the document
144 matching the SoupStrainer will be considered. This is useful
145 when parsing part of a document that would otherwise be too
146 large to fit into memory.
147
148 :param from_encoding: A string indicating the encoding of the
149 document to be parsed. Pass this in if Beautiful Soup is
150 guessing wrongly about the document's encoding.
151
152 :param exclude_encodings: A list of strings indicating
153 encodings known to be wrong. Pass this in if you don't know
154 the document's encoding but you know Beautiful Soup's guess is
155 wrong.
156
157 :param element_classes: A dictionary mapping BeautifulSoup
158 classes like Tag and NavigableString, to other classes you'd
159 like to be instantiated instead as the parse tree is
160 built. This is useful for subclassing Tag or NavigableString
161 to modify default behavior.
162
163 :param kwargs: For backwards compatibility purposes, the
164 constructor accepts certain keyword arguments used in
165 Beautiful Soup 3. None of these arguments do anything in
166 Beautiful Soup 4; they will result in a warning and then be
167 ignored.
168
169 Apart from this, any keyword arguments passed into the
170 BeautifulSoup constructor are propagated to the TreeBuilder
171 constructor. This makes it possible to configure a
172 TreeBuilder by passing in arguments, not just by saying which
173 one to use.
174 """
89 if 'convertEntities' in kwargs: 175 if 'convertEntities' in kwargs:
176 del kwargs['convertEntities']
90 warnings.warn( 177 warnings.warn(
91 "BS4 does not respect the convertEntities argument to the " 178 "BS4 does not respect the convertEntities argument to the "
92 "BeautifulSoup constructor. Entities are always converted " 179 "BeautifulSoup constructor. Entities are always converted "
@@ -125,10 +212,10 @@ class BeautifulSoup(Tag):
125 if old_name in kwargs: 212 if old_name in kwargs:
126 warnings.warn( 213 warnings.warn(
127 'The "%s" argument to the BeautifulSoup constructor ' 214 'The "%s" argument to the BeautifulSoup constructor '
128 'has been renamed to "%s."' % (old_name, new_name)) 215 'has been renamed to "%s."' % (old_name, new_name),
129 value = kwargs[old_name] 216 DeprecationWarning, stacklevel=3
130 del kwargs[old_name] 217 )
131 return value 218 return kwargs.pop(old_name)
132 return None 219 return None
133 220
134 parse_only = parse_only or deprecated_argument( 221 parse_only = parse_only or deprecated_argument(
@@ -137,13 +224,23 @@ class BeautifulSoup(Tag):
137 from_encoding = from_encoding or deprecated_argument( 224 from_encoding = from_encoding or deprecated_argument(
138 "fromEncoding", "from_encoding") 225 "fromEncoding", "from_encoding")
139 226
140 if len(kwargs) > 0: 227 if from_encoding and isinstance(markup, str):
141 arg = list(kwargs.keys()).pop() 228 warnings.warn("You provided Unicode markup but also provided a value for from_encoding. Your from_encoding will be ignored.")
142 raise TypeError( 229 from_encoding = None
143 "__init__() got an unexpected keyword argument '%s'" % arg) 230
144 231 self.element_classes = element_classes or dict()
145 if builder is None: 232
146 original_features = features 233 # We need this information to track whether or not the builder
234 # was specified well enough that we can omit the 'you need to
235 # specify a parser' warning.
236 original_builder = builder
237 original_features = features
238
239 if isinstance(builder, type):
240 # A builder class was passed in; it needs to be instantiated.
241 builder_class = builder
242 builder = None
243 elif builder is None:
147 if isinstance(features, str): 244 if isinstance(features, str):
148 features = [features] 245 features = [features]
149 if features is None or len(features) == 0: 246 if features is None or len(features) == 0:
@@ -154,85 +251,227 @@ class BeautifulSoup(Tag):
154 "Couldn't find a tree builder with the features you " 251 "Couldn't find a tree builder with the features you "
155 "requested: %s. Do you need to install a parser library?" 252 "requested: %s. Do you need to install a parser library?"
156 % ",".join(features)) 253 % ",".join(features))
157 builder = builder_class() 254
158 if not (original_features == builder.NAME or 255 # At this point either we have a TreeBuilder instance in
159 original_features in builder.ALTERNATE_NAMES): 256 # builder, or we have a builder_class that we can instantiate
257 # with the remaining **kwargs.
258 if builder is None:
259 builder = builder_class(**kwargs)
260 if not original_builder and not (
261 original_features == builder.NAME or
262 original_features in builder.ALTERNATE_NAMES
263 ) and markup:
264 # The user did not tell us which TreeBuilder to use,
265 # and we had to guess. Issue a warning.
160 if builder.is_xml: 266 if builder.is_xml:
161 markup_type = "XML" 267 markup_type = "XML"
162 else: 268 else:
163 markup_type = "HTML" 269 markup_type = "HTML"
164 warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict(
165 parser=builder.NAME,
166 markup_type=markup_type))
167 270
271 # This code adapted from warnings.py so that we get the same line
272 # of code as our warnings.warn() call gets, even if the answer is wrong
273 # (as it may be in a multithreading situation).
274 caller = None
275 try:
276 caller = sys._getframe(1)
277 except ValueError:
278 pass
279 if caller:
280 globals = caller.f_globals
281 line_number = caller.f_lineno
282 else:
283 globals = sys.__dict__
284 line_number = 1
285 filename = globals.get('__file__')
286 if filename:
287 fnl = filename.lower()
288 if fnl.endswith((".pyc", ".pyo")):
289 filename = filename[:-1]
290 if filename:
291 # If there is no filename at all, the user is most likely in a REPL,
292 # and the warning is not necessary.
293 values = dict(
294 filename=filename,
295 line_number=line_number,
296 parser=builder.NAME,
297 markup_type=markup_type
298 )
299 warnings.warn(
300 self.NO_PARSER_SPECIFIED_WARNING % values,
301 GuessedAtParserWarning, stacklevel=2
302 )
303 else:
304 if kwargs:
305 warnings.warn("Keyword arguments to the BeautifulSoup constructor will be ignored. These would normally be passed into the TreeBuilder constructor, but a TreeBuilder instance was passed in as `builder`.")
306
168 self.builder = builder 307 self.builder = builder
169 self.is_xml = builder.is_xml 308 self.is_xml = builder.is_xml
170 self.builder.soup = self 309 self.known_xml = self.is_xml
171 310 self._namespaces = dict()
172 self.parse_only = parse_only 311 self.parse_only = parse_only
173 312
174 if hasattr(markup, 'read'): # It's a file-type object. 313 if hasattr(markup, 'read'): # It's a file-type object.
175 markup = markup.read() 314 markup = markup.read()
176 elif len(markup) <= 256: 315 elif len(markup) <= 256 and (
177 # Print out warnings for a couple beginner problems 316 (isinstance(markup, bytes) and not b'<' in markup)
317 or (isinstance(markup, str) and not '<' in markup)
318 ):
319 # Issue warnings for a couple beginner problems
178 # involving passing non-markup to Beautiful Soup. 320 # involving passing non-markup to Beautiful Soup.
179 # Beautiful Soup will still parse the input as markup, 321 # Beautiful Soup will still parse the input as markup,
180 # just in case that's what the user really wants. 322 # since that is sometimes the intended behavior.
181 if (isinstance(markup, str) 323 if not self._markup_is_url(markup):
182 and not os.path.supports_unicode_filenames): 324 self._markup_resembles_filename(markup)
183 possible_filename = markup.encode("utf8")
184 else:
185 possible_filename = markup
186 is_file = False
187 try:
188 is_file = os.path.exists(possible_filename)
189 except Exception as e:
190 # This is almost certainly a problem involving
191 # characters not valid in filenames on this
192 # system. Just let it go.
193 pass
194 if is_file:
195 if isinstance(markup, str):
196 markup = markup.encode("utf8")
197 warnings.warn(
198 '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
199 if markup[:5] == "http:" or markup[:6] == "https:":
200 # TODO: This is ugly but I couldn't get it to work in
201 # Python 3 otherwise.
202 if ((isinstance(markup, bytes) and not b' ' in markup)
203 or (isinstance(markup, str) and not ' ' in markup)):
204 if isinstance(markup, str):
205 markup = markup.encode("utf8")
206 warnings.warn(
207 '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
208 325
326 rejections = []
327 success = False
209 for (self.markup, self.original_encoding, self.declared_html_encoding, 328 for (self.markup, self.original_encoding, self.declared_html_encoding,
210 self.contains_replacement_characters) in ( 329 self.contains_replacement_characters) in (
211 self.builder.prepare_markup( 330 self.builder.prepare_markup(
212 markup, from_encoding, exclude_encodings=exclude_encodings)): 331 markup, from_encoding, exclude_encodings=exclude_encodings)):
213 self.reset() 332 self.reset()
333 self.builder.initialize_soup(self)
214 try: 334 try:
215 self._feed() 335 self._feed()
336 success = True
216 break 337 break
217 except ParserRejectedMarkup: 338 except ParserRejectedMarkup as e:
339 rejections.append(e)
218 pass 340 pass
219 341
342 if not success:
343 other_exceptions = [str(e) for e in rejections]
344 raise ParserRejectedMarkup(
345 "The markup you provided was rejected by the parser. Trying a different parser or a different encoding may help.\n\nOriginal exception(s) from parser:\n " + "\n ".join(other_exceptions)
346 )
347
220 # Clear out the markup and remove the builder's circular 348 # Clear out the markup and remove the builder's circular
221 # reference to this object. 349 # reference to this object.
222 self.markup = None 350 self.markup = None
223 self.builder.soup = None 351 self.builder.soup = None
224 352
225 def __copy__(self): 353 def _clone(self):
226 return type(self)(self.encode(), builder=self.builder) 354 """Create a new BeautifulSoup object with the same TreeBuilder,
355 but not associated with any markup.
356
357 This is the first step of the deepcopy process.
358 """
359 clone = type(self)("", None, self.builder)
227 360
361 # Keep track of the encoding of the original document,
362 # since we won't be parsing it again.
363 clone.original_encoding = self.original_encoding
364 return clone
365
228 def __getstate__(self): 366 def __getstate__(self):
229 # Frequently a tree builder can't be pickled. 367 # Frequently a tree builder can't be pickled.
230 d = dict(self.__dict__) 368 d = dict(self.__dict__)
231 if 'builder' in d and not self.builder.picklable: 369 if 'builder' in d and d['builder'] is not None and not self.builder.picklable:
232 del d['builder'] 370 d['builder'] = type(self.builder)
371 # Store the contents as a Unicode string.
372 d['contents'] = []
373 d['markup'] = self.decode()
374
375 # If _most_recent_element is present, it's a Tag object left
376 # over from initial parse. It might not be picklable and we
377 # don't need it.
378 if '_most_recent_element' in d:
379 del d['_most_recent_element']
233 return d 380 return d
234 381
382 def __setstate__(self, state):
383 # If necessary, restore the TreeBuilder by looking it up.
384 self.__dict__ = state
385 if isinstance(self.builder, type):
386 self.builder = self.builder()
387 elif not self.builder:
388 # We don't know which builder was used to build this
389 # parse tree, so use a default we know is always available.
390 self.builder = HTMLParserTreeBuilder()
391 self.builder.soup = self
392 self.reset()
393 self._feed()
394 return state
395
396
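A hedged sketch of the round trip that __getstate__ and __setstate__ enable (assuming the always-available html.parser builder; the markup is illustrative):

    import pickle
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>hello</p>", "html.parser")
    # __getstate__ stores the document as a Unicode string and, if the
    # builder can't be pickled, records only its class; __setstate__
    # re-instantiates the builder and re-parses the stored markup.
    restored = pickle.loads(pickle.dumps(soup))
    assert restored.p.string == "hello"
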
397 @classmethod
398 def _decode_markup(cls, markup):
399 """Ensure `markup` is bytes so it's safe to send into warnings.warn.
400
401 TODO: warnings.warn had this problem back in 2010 but it might not
402 anymore.
403 """
404 if isinstance(markup, bytes):
405 decoded = markup.decode('utf-8', 'replace')
406 else:
407 decoded = markup
408 return decoded
409
410 @classmethod
411 def _markup_is_url(cls, markup):
412 """Error-handling method to raise a warning if incoming markup looks
413 like a URL.
414
415 :param markup: A string.
416 :return: Whether or not the markup resembles a URL
417 closely enough to justify a warning.
418 """
419 if isinstance(markup, bytes):
420 space = b' '
421 cant_start_with = (b"http:", b"https:")
422 elif isinstance(markup, str):
423 space = ' '
424 cant_start_with = ("http:", "https:")
425 else:
426 return False
427
428 if any(markup.startswith(prefix) for prefix in cant_start_with):
429 if not space in markup:
430 warnings.warn(
431 'The input looks more like a URL than markup. You may want to use'
432 ' an HTTP client like requests to get the document behind'
433 ' the URL, and feed that document to Beautiful Soup.',
434 MarkupResemblesLocatorWarning,
435 stacklevel=3
436 )
437 return True
438 return False
439
440 @classmethod
441 def _markup_resembles_filename(cls, markup):
442 """Error-handling method to raise a warning if incoming markup
443 resembles a filename.
444
445 :param markup: A bytestring or string.
446 :return: Whether or not the markup resembles a filename
447 closely enough to justify a warning.
448 """
449 path_characters = '/\\'
450 extensions = ['.html', '.htm', '.xml', '.xhtml', '.txt']
451 if isinstance(markup, bytes):
452 path_characters = path_characters.encode("utf8")
453 extensions = [x.encode('utf8') for x in extensions]
454 filelike = False
455 if any(x in markup for x in path_characters):
456 filelike = True
457 else:
458 lower = markup.lower()
459 if any(lower.endswith(ext) for ext in extensions):
460 filelike = True
461 if filelike:
462 warnings.warn(
463 'The input looks more like a filename than markup. You may'
464 ' want to open this file and pass the filehandle into'
465 ' Beautiful Soup.',
466 MarkupResemblesLocatorWarning, stacklevel=3
467 )
468 return True
469 return False
470
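The two checks above fire on short, markup-free strings. A minimal sketch (the URL and path are made-up values, not from this patch):

    import warnings
    from bs4 import BeautifulSoup

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        BeautifulSoup("https://example.com/page.html", "html.parser")  # http(s) prefix, no spaces
        BeautifulSoup("docs/index.html", "html.parser")                # path separator + known extension
    # Each input is short and contains no '<', so both parses should emit
    # MarkupResemblesLocatorWarning instead of failing.
    print([w.category.__name__ for w in caught])
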
235 def _feed(self): 471 def _feed(self):
472 """Internal method that parses previously set markup, creating a large
473 number of Tag and NavigableString objects.
474 """
236 # Convert the document to Unicode. 475 # Convert the document to Unicode.
237 self.builder.reset() 476 self.builder.reset()
238 477
@@ -243,48 +482,111 @@ class BeautifulSoup(Tag):
243 self.popTag() 482 self.popTag()
244 483
245 def reset(self): 484 def reset(self):
485 """Reset this object to a state as though it had never parsed any
486 markup.
487 """
246 Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) 488 Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
247 self.hidden = 1 489 self.hidden = 1
248 self.builder.reset() 490 self.builder.reset()
249 self.current_data = [] 491 self.current_data = []
250 self.currentTag = None 492 self.currentTag = None
251 self.tagStack = [] 493 self.tagStack = []
494 self.open_tag_counter = Counter()
252 self.preserve_whitespace_tag_stack = [] 495 self.preserve_whitespace_tag_stack = []
496 self.string_container_stack = []
497 self._most_recent_element = None
253 self.pushTag(self) 498 self.pushTag(self)
254 499
255 def new_tag(self, name, namespace=None, nsprefix=None, **attrs): 500 def new_tag(self, name, namespace=None, nsprefix=None, attrs={},
256 """Create a new tag associated with this soup.""" 501 sourceline=None, sourcepos=None, **kwattrs):
257 return Tag(None, self.builder, name, namespace, nsprefix, attrs) 502 """Create a new Tag associated with this BeautifulSoup object.
503
504 :param name: The name of the new Tag.
505 :param namespace: The URI of the new Tag's XML namespace, if any.
506 :param nsprefix: The prefix for the new Tag's XML namespace, if any.
507 :param attrs: A dictionary of this Tag's attribute values; can
508 be used instead of `kwattrs` for attributes like 'class'
509 that are reserved words in Python.
510 :param sourceline: The line number where this tag was
511 (purportedly) found in its source document.
512 :param sourcepos: The character position within `sourceline` where this
513 tag was (purportedly) found.
514 :param kwattrs: Keyword arguments for the new Tag's attribute values.
258 515
259 def new_string(self, s, subclass=NavigableString): 516 """
260 """Create a new NavigableString associated with this soup.""" 517 kwattrs.update(attrs)
261 return subclass(s) 518 return self.element_classes.get(Tag, Tag)(
519 None, self.builder, name, namespace, nsprefix, kwattrs,
520 sourceline=sourceline, sourcepos=sourcepos
521 )
522
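For example (a sketch; the tag name and attribute values are invented):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("", "html.parser")
    # 'class' clashes with the Python keyword, so it travels via `attrs`;
    # ordinary attributes can stay keyword arguments.
    link = soup.new_tag("a", attrs={"class": "external"}, href="http://example.com")
    print(link)   # <a class="external" href="http://example.com"></a>
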
523 def string_container(self, base_class=None):
524 container = base_class or NavigableString
525
526 # There may be a general override of NavigableString.
527 container = self.element_classes.get(
528 container, container
529 )
530
531 # On top of that, we may be inside a tag that needs a special
532 # container class.
533 if self.string_container_stack and container is NavigableString:
534 container = self.builder.string_containers.get(
535 self.string_container_stack[-1].name, container
536 )
537 return container
538
539 def new_string(self, s, subclass=None):
540 """Create a new NavigableString associated with this BeautifulSoup
541 object.
542 """
543 container = self.string_container(subclass)
544 return container(s)
262 545
263 def insert_before(self, successor): 546 def insert_before(self, *args):
547 """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement
548 it because there is nothing before or after it in the parse tree.
549 """
264 raise NotImplementedError("BeautifulSoup objects don't support insert_before().") 550 raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
265 551
266 def insert_after(self, successor): 552 def insert_after(self, *args):
553 """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement
554 it because there is nothing before or after it in the parse tree.
555 """
267 raise NotImplementedError("BeautifulSoup objects don't support insert_after().") 556 raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
268 557
269 def popTag(self): 558 def popTag(self):
559 """Internal method called by _popToTag when a tag is closed."""
270 tag = self.tagStack.pop() 560 tag = self.tagStack.pop()
561 if tag.name in self.open_tag_counter:
562 self.open_tag_counter[tag.name] -= 1
271 if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: 563 if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
272 self.preserve_whitespace_tag_stack.pop() 564 self.preserve_whitespace_tag_stack.pop()
273 #print "Pop", tag.name 565 if self.string_container_stack and tag == self.string_container_stack[-1]:
566 self.string_container_stack.pop()
567 #print("Pop", tag.name)
274 if self.tagStack: 568 if self.tagStack:
275 self.currentTag = self.tagStack[-1] 569 self.currentTag = self.tagStack[-1]
276 return self.currentTag 570 return self.currentTag
277 571
278 def pushTag(self, tag): 572 def pushTag(self, tag):
279 #print "Push", tag.name 573 """Internal method called by handle_starttag when a tag is opened."""
280 if self.currentTag: 574 #print("Push", tag.name)
575 if self.currentTag is not None:
281 self.currentTag.contents.append(tag) 576 self.currentTag.contents.append(tag)
282 self.tagStack.append(tag) 577 self.tagStack.append(tag)
283 self.currentTag = self.tagStack[-1] 578 self.currentTag = self.tagStack[-1]
579 if tag.name != self.ROOT_TAG_NAME:
580 self.open_tag_counter[tag.name] += 1
284 if tag.name in self.builder.preserve_whitespace_tags: 581 if tag.name in self.builder.preserve_whitespace_tags:
285 self.preserve_whitespace_tag_stack.append(tag) 582 self.preserve_whitespace_tag_stack.append(tag)
583 if tag.name in self.builder.string_containers:
584 self.string_container_stack.append(tag)
286 585
287 def endData(self, containerClass=NavigableString): 586 def endData(self, containerClass=None):
587 """Method called by the TreeBuilder when the end of a data segment
588 occurs.
589 """
288 if self.current_data: 590 if self.current_data:
289 current_data = ''.join(self.current_data) 591 current_data = ''.join(self.current_data)
290 # If whitespace is not preserved, and this string contains 592 # If whitespace is not preserved, and this string contains
@@ -311,61 +613,93 @@ class BeautifulSoup(Tag):
311 not self.parse_only.search(current_data)): 613 not self.parse_only.search(current_data)):
312 return 614 return
313 615
616 containerClass = self.string_container(containerClass)
314 o = containerClass(current_data) 617 o = containerClass(current_data)
315 self.object_was_parsed(o) 618 self.object_was_parsed(o)
316 619
317 def object_was_parsed(self, o, parent=None, most_recent_element=None): 620 def object_was_parsed(self, o, parent=None, most_recent_element=None):
318 """Add an object to the parse tree.""" 621 """Method called by the TreeBuilder to integrate an object into the parse tree."""
319 parent = parent or self.currentTag 622 if parent is None:
320 previous_element = most_recent_element or self._most_recent_element 623 parent = self.currentTag
624 if most_recent_element is not None:
625 previous_element = most_recent_element
626 else:
627 previous_element = self._most_recent_element
321 628
322 next_element = previous_sibling = next_sibling = None 629 next_element = previous_sibling = next_sibling = None
323 if isinstance(o, Tag): 630 if isinstance(o, Tag):
324 next_element = o.next_element 631 next_element = o.next_element
325 next_sibling = o.next_sibling 632 next_sibling = o.next_sibling
326 previous_sibling = o.previous_sibling 633 previous_sibling = o.previous_sibling
327 if not previous_element: 634 if previous_element is None:
328 previous_element = o.previous_element 635 previous_element = o.previous_element
329 636
637 fix = parent.next_element is not None
638
330 o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) 639 o.setup(parent, previous_element, next_element, previous_sibling, next_sibling)
331 640
332 self._most_recent_element = o 641 self._most_recent_element = o
333 parent.contents.append(o) 642 parent.contents.append(o)
334 643
335 if parent.next_sibling: 644 # Check if we are inserting into an already parsed node.
336 # This node is being inserted into an element that has 645 if fix:
337 # already been parsed. Deal with any dangling references. 646 self._linkage_fixer(parent)
338 index = parent.contents.index(o) 647
339 if index == 0: 648 def _linkage_fixer(self, el):
340 previous_element = parent 649 """Make sure linkage of this fragment is sound."""
341 previous_sibling = None 650
342 else: 651 first = el.contents[0]
343 previous_element = previous_sibling = parent.contents[index-1] 652 child = el.contents[-1]
344 if index == len(parent.contents)-1: 653 descendant = child
345 next_element = parent.next_sibling 654
346 next_sibling = None 655 if child is first and el.parent is not None:
347 else: 656 # Parent should be linked to first child
348 next_element = next_sibling = parent.contents[index+1] 657 el.next_element = child
349 658 # We are no longer linked to whatever this element is
350 o.previous_element = previous_element 659 prev_el = child.previous_element
351 if previous_element: 660 if prev_el is not None and prev_el is not el:
352 previous_element.next_element = o 661 prev_el.next_element = None
353 o.next_element = next_element 662 # First child should be linked to the parent, and no previous siblings.
354 if next_element: 663 child.previous_element = el
355 next_element.previous_element = o 664 child.previous_sibling = None
356 o.next_sibling = next_sibling 665
357 if next_sibling: 666 # We have no sibling as we've been appended as the last.
358 next_sibling.previous_sibling = o 667 child.next_sibling = None
359 o.previous_sibling = previous_sibling 668
360 if previous_sibling: 669 # This index is a tag, dig deeper for a "last descendant"
361 previous_sibling.next_sibling = o 670 if isinstance(child, Tag) and child.contents:
671 descendant = child._last_descendant(False)
672
673 # As the final step, link last descendant. It should be linked
674 # to the parent's next sibling (if found), else walk up the chain
675 # and find a parent with a sibling. It should have no next sibling.
676 descendant.next_element = None
677 descendant.next_sibling = None
678 target = el
679 while True:
680 if target is None:
681 break
682 elif target.next_sibling is not None:
683 descendant.next_element = target.next_sibling
684 target.next_sibling.previous_element = child
685 break
686 target = target.parent
362 687
363 def _popToTag(self, name, nsprefix=None, inclusivePop=True): 688 def _popToTag(self, name, nsprefix=None, inclusivePop=True):
364 """Pops the tag stack up to and including the most recent 689 """Pops the tag stack up to and including the most recent
365 instance of the given tag. If inclusivePop is false, pops the tag 690 instance of the given tag.
366 stack up to but *not* including the most recent instance of 691
367 the given tag.""" 692 If there are no open tags with the given name, nothing will be
368 #print "Popping to %s" % name 693 popped.
694
695 :param name: Pop up to the most recent tag with this name.
696 :param nsprefix: The namespace prefix that goes with `name`.
697 :param inclusivePop: If this is false, pops the tag stack up
698 to but *not* including the most recent instance of the
699 given tag.
700
701 """
702 #print("Popping to %s" % name)
369 if name == self.ROOT_TAG_NAME: 703 if name == self.ROOT_TAG_NAME:
370 # The BeautifulSoup object itself can never be popped. 704 # The BeautifulSoup object itself can never be popped.
371 return 705 return
@@ -374,6 +708,8 @@ class BeautifulSoup(Tag):
374 708
375 stack_size = len(self.tagStack) 709 stack_size = len(self.tagStack)
376 for i in range(stack_size - 1, 0, -1): 710 for i in range(stack_size - 1, 0, -1):
711 if not self.open_tag_counter.get(name):
712 break
377 t = self.tagStack[i] 713 t = self.tagStack[i]
378 if (name == t.name and nsprefix == t.prefix): 714 if (name == t.name and nsprefix == t.prefix):
379 if inclusivePop: 715 if inclusivePop:
@@ -383,16 +719,26 @@ class BeautifulSoup(Tag):
383 719
384 return most_recently_popped 720 return most_recently_popped
385 721
386 def handle_starttag(self, name, namespace, nsprefix, attrs): 722 def handle_starttag(self, name, namespace, nsprefix, attrs, sourceline=None,
387 """Push a start tag on to the stack. 723 sourcepos=None, namespaces=None):
388 724 """Called by the tree builder when a new tag is encountered.
389 If this method returns None, the tag was rejected by the 725
390 SoupStrainer. You should proceed as if the tag had not occured 726 :param name: Name of the tag.
727 :param nsprefix: Namespace prefix for the tag.
728 :param attrs: A dictionary of attribute values.
729 :param sourceline: The line number where this tag was found in its
730 source document.
731 :param sourcepos: The character position within `sourceline` where this
732 tag was found.
733 :param namespaces: A dictionary of all namespace prefix mappings
734 currently in scope in the document.
735
736 If this method returns None, the tag was rejected by an active
737 SoupStrainer. You should proceed as if the tag had not occurred
391 in the document. For instance, if this was a self-closing tag, 738 in the document. For instance, if this was a self-closing tag,
392 don't call handle_endtag. 739 don't call handle_endtag.
393 """ 740 """
394 741 # print("Start tag %s: %s" % (name, attrs))
395 # print "Start tag %s: %s" % (name, attrs)
396 self.endData() 742 self.endData()
397 743
398 if (self.parse_only and len(self.tagStack) <= 1 744 if (self.parse_only and len(self.tagStack) <= 1
@@ -400,34 +746,54 @@ class BeautifulSoup(Tag):
400 or not self.parse_only.search_tag(name, attrs))): 746 or not self.parse_only.search_tag(name, attrs))):
401 return None 747 return None
402 748
403 tag = Tag(self, self.builder, name, namespace, nsprefix, attrs, 749 tag = self.element_classes.get(Tag, Tag)(
404 self.currentTag, self._most_recent_element) 750 self, self.builder, name, namespace, nsprefix, attrs,
751 self.currentTag, self._most_recent_element,
752 sourceline=sourceline, sourcepos=sourcepos,
753 namespaces=namespaces
754 )
405 if tag is None: 755 if tag is None:
406 return tag 756 return tag
407 if self._most_recent_element: 757 if self._most_recent_element is not None:
408 self._most_recent_element.next_element = tag 758 self._most_recent_element.next_element = tag
409 self._most_recent_element = tag 759 self._most_recent_element = tag
410 self.pushTag(tag) 760 self.pushTag(tag)
411 return tag 761 return tag
412 762
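Since handle_starttag can now record positions, a parser that tracks them (html.parser does) exposes these on each Tag. A small sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<html>\n<p>text</p>\n</html>", "html.parser")
    # sourceline is 1-based; sourcepos counts characters within that line.
    print(soup.p.sourceline, soup.p.sourcepos)   # 2 0
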
413 def handle_endtag(self, name, nsprefix=None): 763 def handle_endtag(self, name, nsprefix=None):
414 #print "End tag: " + name 764 """Called by the tree builder when an ending tag is encountered.
765
766 :param name: Name of the tag.
767 :param nsprefix: Namespace prefix for the tag.
768 """
769 #print("End tag: " + name)
415 self.endData() 770 self.endData()
416 self._popToTag(name, nsprefix) 771 self._popToTag(name, nsprefix)
417 772
418 def handle_data(self, data): 773 def handle_data(self, data):
774 """Called by the tree builder when a chunk of textual data is encountered."""
419 self.current_data.append(data) 775 self.current_data.append(data)
420 776
421 def decode(self, pretty_print=False, 777 def decode(self, pretty_print=False,
422 eventual_encoding=DEFAULT_OUTPUT_ENCODING, 778 eventual_encoding=DEFAULT_OUTPUT_ENCODING,
423 formatter="minimal"): 779 formatter="minimal", iterator=None):
424 """Returns a string or Unicode representation of this document. 780 """Returns a string or Unicode representation of the parse tree
425 To get Unicode, pass None for encoding.""" 781 as an HTML or XML document.
426 782
783 :param pretty_print: If this is True, indentation will be used to
784 make the document more readable.
785 :param eventual_encoding: The encoding of the final document.
786 If this is None, the document will be a Unicode string.
787 """
427 if self.is_xml: 788 if self.is_xml:
428 # Print the XML declaration 789 # Print the XML declaration
429 encoding_part = '' 790 encoding_part = ''
430 if eventual_encoding is not None: 791 if eventual_encoding in PYTHON_SPECIFIC_ENCODINGS:
792 # This is a special Python encoding; it can't actually
793 # go into an XML document because it means nothing
794 # outside of Python.
795 eventual_encoding = None
796 if eventual_encoding is not None:
431 encoding_part = ' encoding="%s"' % eventual_encoding 797 encoding_part = ' encoding="%s"' % eventual_encoding
432 prefix = '<?xml version="1.0"%s?>\n' % encoding_part 798 prefix = '<?xml version="1.0"%s?>\n' % encoding_part
433 else: 799 else:
@@ -437,9 +803,9 @@ class BeautifulSoup(Tag):
437 else: 803 else:
438 indent_level = 0 804 indent_level = 0
439 return prefix + super(BeautifulSoup, self).decode( 805 return prefix + super(BeautifulSoup, self).decode(
440 indent_level, eventual_encoding, formatter) 806 indent_level, eventual_encoding, formatter, iterator)
441 807
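A quick illustration of the declaration handling above (assumes the lxml-backed "xml" feature is available):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<a>x</a>", "xml")
    # A real encoding lands in the XML declaration; a Python-specific
    # encoding (or None) is dropped from it.
    print(soup.decode())   # <?xml version="1.0" encoding="utf-8"?>\n<a>x</a>
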
442# Alias to make it easier to type import: 'from bs4 import _soup' 808# Aliases to make it easier to get started quickly, e.g. 'from bs4 import _soup'
443_s = BeautifulSoup 809_s = BeautifulSoup
444_soup = BeautifulSoup 810_soup = BeautifulSoup
445 811
@@ -450,19 +816,24 @@ class BeautifulStoneSoup(BeautifulSoup):
450 kwargs['features'] = 'xml' 816 kwargs['features'] = 'xml'
451 warnings.warn( 817 warnings.warn(
452 'The BeautifulStoneSoup class is deprecated. Instead of using ' 818 'The BeautifulStoneSoup class is deprecated. Instead of using '
453 'it, pass features="xml" into the BeautifulSoup constructor.') 819 'it, pass features="xml" into the BeautifulSoup constructor.',
820 DeprecationWarning, stacklevel=2
821 )
454 super(BeautifulStoneSoup, self).__init__(*args, **kwargs) 822 super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
455 823
456 824
457class StopParsing(Exception): 825class StopParsing(Exception):
826 """Exception raised by a TreeBuilder if it's unable to continue parsing."""
458 pass 827 pass
459 828
460class FeatureNotFound(ValueError): 829class FeatureNotFound(ValueError):
830 """Exception raised by the BeautifulSoup constructor if no parser with the
831 requested features is found.
832 """
461 pass 833 pass
462 834
463 835
464#By default, act as an HTML pretty-printer. 836#If this file is run as a script, act as an HTML pretty-printer.
465if __name__ == '__main__': 837if __name__ == '__main__':
466 import sys
467 soup = BeautifulSoup(sys.stdin) 838 soup = BeautifulSoup(sys.stdin)
468 print(soup.prettify()) 839 print(soup.prettify())
diff --git a/bitbake/lib/bs4/builder/__init__.py b/bitbake/lib/bs4/builder/__init__.py
index 6ccd4d23d6..ffb31fc25e 100644
--- a/bitbake/lib/bs4/builder/__init__.py
+++ b/bitbake/lib/bs4/builder/__init__.py
@@ -1,11 +1,21 @@
1# Use of this source code is governed by the MIT license.
2__license__ = "MIT"
3
1from collections import defaultdict 4from collections import defaultdict
2import itertools 5import itertools
6import re
7import warnings
3import sys 8import sys
4from bs4.element import ( 9from bs4.element import (
5 CharsetMetaAttributeValue, 10 CharsetMetaAttributeValue,
6 ContentMetaAttributeValue, 11 ContentMetaAttributeValue,
7 whitespace_re 12 RubyParenthesisString,
8 ) 13 RubyTextString,
14 Stylesheet,
15 Script,
16 TemplateString,
17 nonwhitespace_re
18)
9 19
10__all__ = [ 20__all__ = [
11 'HTMLTreeBuilder', 21 'HTMLTreeBuilder',
@@ -22,20 +32,41 @@ XML = 'xml'
22HTML = 'html' 32HTML = 'html'
23HTML_5 = 'html5' 33HTML_5 = 'html5'
24 34
35class XMLParsedAsHTMLWarning(UserWarning):
36 """The warning issued when an HTML parser is used to parse
37 XML that is not XHTML.
38 """
39 MESSAGE = """It looks like you're parsing an XML document using an HTML parser. If this really is an HTML document (maybe it's XHTML?), you can ignore or filter this warning. If it's XML, you should know that using an XML parser will be more reliable. To parse this document as XML, make sure you have the lxml package installed, and pass the keyword argument `features="xml"` into the BeautifulSoup constructor."""
40
25 41
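A sketch of when this warning fires (the XML snippet is an assumption, not from this patch):

    import warnings
    from bs4 import BeautifulSoup

    xml = '<?xml version="1.0"?><recipe><name>x</name></recipe>'
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        BeautifulSoup(xml, "html.parser")
    # An XML declaration followed by a non-<html> root tag is the reliable
    # signal; XMLParsedAsHTMLWarning should be among the caught warnings.
    print(any(w.category.__name__ == "XMLParsedAsHTMLWarning" for w in caught))
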
26class TreeBuilderRegistry(object): 42class TreeBuilderRegistry(object):
27 43 """A way of looking up TreeBuilder subclasses by their name or by desired
44 features.
45 """
46
28 def __init__(self): 47 def __init__(self):
29 self.builders_for_feature = defaultdict(list) 48 self.builders_for_feature = defaultdict(list)
30 self.builders = [] 49 self.builders = []
31 50
32 def register(self, treebuilder_class): 51 def register(self, treebuilder_class):
33 """Register a treebuilder based on its advertised features.""" 52 """Register a treebuilder based on its advertised features.
53
54 :param treebuilder_class: A subclass of TreeBuilder. Its .features
55 attribute should list its features.
56 """
34 for feature in treebuilder_class.features: 57 for feature in treebuilder_class.features:
35 self.builders_for_feature[feature].insert(0, treebuilder_class) 58 self.builders_for_feature[feature].insert(0, treebuilder_class)
36 self.builders.insert(0, treebuilder_class) 59 self.builders.insert(0, treebuilder_class)
37 60
38 def lookup(self, *features): 61 def lookup(self, *features):
62 """Look up a TreeBuilder subclass with the desired features.
63
64 :param features: A list of features to look for. If none are
65 provided, the most recently registered TreeBuilder subclass
66 will be used.
67 :return: A TreeBuilder subclass, or None if there's no
68 registered subclass with all the requested features.
69 """
39 if len(self.builders) == 0: 70 if len(self.builders) == 0:
40 # There are no builders at all. 71 # There are no builders at all.
41 return None 72 return None
@@ -78,7 +109,7 @@ class TreeBuilderRegistry(object):
78builder_registry = TreeBuilderRegistry() 109builder_registry = TreeBuilderRegistry()
79 110
80class TreeBuilder(object): 111class TreeBuilder(object):
81 """Turn a document into a Beautiful Soup object tree.""" 112 """Turn a textual document into a Beautiful Soup object tree."""
82 113
83 NAME = "[Unknown tree builder]" 114 NAME = "[Unknown tree builder]"
84 ALTERNATE_NAMES = [] 115 ALTERNATE_NAMES = []
@@ -86,19 +117,89 @@ class TreeBuilder(object):
86 117
87 is_xml = False 118 is_xml = False
88 picklable = False 119 picklable = False
89 preserve_whitespace_tags = set()
90 empty_element_tags = None # A tag will be considered an empty-element 120 empty_element_tags = None # A tag will be considered an empty-element
91 # tag when and only when it has no contents. 121 # tag when and only when it has no contents.
92 122
93 # A value for these tag/attribute combinations is a space- or 123 # A value for these tag/attribute combinations is a space- or
94 # comma-separated list of CDATA, rather than a single CDATA. 124 # comma-separated list of CDATA, rather than a single CDATA.
95 cdata_list_attributes = {} 125 DEFAULT_CDATA_LIST_ATTRIBUTES = defaultdict(list)
96 126
97 127 # Whitespace should be preserved inside these tags.
98 def __init__(self): 128 DEFAULT_PRESERVE_WHITESPACE_TAGS = set()
129
130 # The textual contents of tags with these names should be
131 # instantiated with some class other than NavigableString.
132 DEFAULT_STRING_CONTAINERS = {}
133
134 USE_DEFAULT = object()
135
136 # Most parsers don't keep track of line numbers.
137 TRACKS_LINE_NUMBERS = False
138
139 def __init__(self, multi_valued_attributes=USE_DEFAULT,
140 preserve_whitespace_tags=USE_DEFAULT,
141 store_line_numbers=USE_DEFAULT,
142 string_containers=USE_DEFAULT,
143 ):
144 """Constructor.
145
146 :param multi_valued_attributes: If this is set to None, the
147 TreeBuilder will not turn any values for attributes like
148 'class' into lists. Setting this to a dictionary will
149 customize this behavior; look at DEFAULT_CDATA_LIST_ATTRIBUTES
150 for an example.
151
152 Internally, these are called "CDATA list attributes", but that
153 probably doesn't make sense to an end-user, so the argument name
154 is `multi_valued_attributes`.
155
156 :param preserve_whitespace_tags: A list of tags to treat
157 the way <pre> tags are treated in HTML. Tags in this list
158 are immune from pretty-printing; their contents will always be
159 output as-is.
160
161 :param string_containers: A dictionary mapping tag names to
162 the classes that should be instantiated to contain the textual
163 contents of those tags. The default is to use NavigableString
164 for every tag, no matter what the name. You can override the
165 default by changing DEFAULT_STRING_CONTAINERS.
166
167 :param store_line_numbers: If the parser keeps track of the
168 line numbers and positions of the original markup, that
169 information will, by default, be stored in each corresponding
170 `Tag` object. You can turn this off by passing
171 store_line_numbers=False. If the parser you're using doesn't
172 keep track of this information, then setting store_line_numbers=True
173 will do nothing.
174 """
99 self.soup = None 175 self.soup = None
100 176 if multi_valued_attributes is self.USE_DEFAULT:
177 multi_valued_attributes = self.DEFAULT_CDATA_LIST_ATTRIBUTES
178 self.cdata_list_attributes = multi_valued_attributes
179 if preserve_whitespace_tags is self.USE_DEFAULT:
180 preserve_whitespace_tags = self.DEFAULT_PRESERVE_WHITESPACE_TAGS
181 self.preserve_whitespace_tags = preserve_whitespace_tags
182 if store_line_numbers is self.USE_DEFAULT:
183 store_line_numbers = self.TRACKS_LINE_NUMBERS
184 self.store_line_numbers = store_line_numbers
185 if string_containers is self.USE_DEFAULT:
186 string_containers = self.DEFAULT_STRING_CONTAINERS
187 self.string_containers = string_containers
188
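These keyword arguments are normally forwarded from the BeautifulSoup constructor, so behavior can be tuned without building a TreeBuilder by hand. A sketch:

    from bs4 import BeautifulSoup

    # Opt out of multi-valued attributes: 'class' stays a single string.
    soup = BeautifulSoup('<p class="a b">x</p>', "html.parser",
                         multi_valued_attributes=None)
    print(soup.p["class"])   # a b
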
189 def initialize_soup(self, soup):
190 """The BeautifulSoup object has been initialized and is now
191 being associated with the TreeBuilder.
192
193 :param soup: A BeautifulSoup object.
194 """
195 self.soup = soup
196
101 def reset(self): 197 def reset(self):
198 """Do any work necessary to reset the underlying parser
199 for a new document.
200
201 By default, this does nothing.
202 """
102 pass 203 pass
103 204
104 def can_be_empty_element(self, tag_name): 205 def can_be_empty_element(self, tag_name):
@@ -110,24 +211,58 @@ class TreeBuilder(object):
110 For instance: an HTMLBuilder does not consider a <p> tag to be 211 For instance: an HTMLBuilder does not consider a <p> tag to be
111 an empty-element tag (it's not in 212 an empty-element tag (it's not in
112 HTMLBuilder.empty_element_tags). This means an empty <p> tag 213 HTMLBuilder.empty_element_tags). This means an empty <p> tag
113 will be presented as "<p></p>", not "<p />". 214 will be presented as "<p></p>", not "<p/>" or "<p>".
114 215
115 The default implementation has no opinion about which tags are 216 The default implementation has no opinion about which tags are
116 empty-element tags, so a tag will be presented as an 217 empty-element tags, so a tag will be presented as an
117 empty-element tag if and only if it has no contents. 218 empty-element tag if and only if it has no children.
118 "<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will 219 "<foo></foo>" will become "<foo/>", and "<foo>bar</foo>" will
119 be left alone. 220 be left alone.
221
222 :param tag_name: The name of a markup tag.
120 """ 223 """
121 if self.empty_element_tags is None: 224 if self.empty_element_tags is None:
122 return True 225 return True
123 return tag_name in self.empty_element_tags 226 return tag_name in self.empty_element_tags
124 227
125 def feed(self, markup): 228 def feed(self, markup):
229 """Run some incoming markup through some parsing process,
230 populating the `BeautifulSoup` object in self.soup.
231
232 This method is not implemented in TreeBuilder; it must be
233 implemented in subclasses.
234
235 :return: None.
236 """
126 raise NotImplementedError() 237 raise NotImplementedError()
127 238
128 def prepare_markup(self, markup, user_specified_encoding=None, 239 def prepare_markup(self, markup, user_specified_encoding=None,
129 document_declared_encoding=None): 240 document_declared_encoding=None, exclude_encodings=None):
130 return markup, None, None, False 241 """Run any preliminary steps necessary to make incoming markup
242 acceptable to the parser.
243
244 :param markup: Some markup -- probably a bytestring.
245 :param user_specified_encoding: The user asked to try this encoding.
246 :param document_declared_encoding: The markup itself claims to be
247 in this encoding. NOTE: This argument is not used by the
248 calling code and can probably be removed.
249 :param exclude_encodings: The user asked _not_ to try any of
250 these encodings.
251
252 :yield: A series of 4-tuples:
253 (markup, encoding, declared encoding,
254 has undergone character replacement)
255
256 Each 4-tuple represents a strategy for converting the
257 document to Unicode and parsing it. Each strategy will be tried
258 in turn.
259
260 By default, the only strategy is to parse the markup
261 as-is. See `LXMLTreeBuilderForXML` and
262 `HTMLParserTreeBuilder` for implementations that take into
263 account the quirks of particular parsers.
264 """
265 yield markup, None, None, False
131 266
132 def test_fragment_to_document(self, fragment): 267 def test_fragment_to_document(self, fragment):
133 """Wrap an HTML fragment to make it look like a document. 268 """Wrap an HTML fragment to make it look like a document.
@@ -139,16 +274,36 @@ class TreeBuilder(object):
139 results against other HTML fragments. 274 results against other HTML fragments.
140 275
141 This method should not be used outside of tests. 276 This method should not be used outside of tests.
277
278 :param fragment: A string -- fragment of HTML.
279 :return: A string -- a full HTML document.
142 """ 280 """
143 return fragment 281 return fragment
144 282
145 def set_up_substitutions(self, tag): 283 def set_up_substitutions(self, tag):
284 """Set up any substitutions that will need to be performed on
285 a `Tag` when it's output as a string.
286
287 By default, this does nothing. See `HTMLTreeBuilder` for a
288 case where this is used.
289
290 :param tag: A `Tag`
291 :return: Whether or not a substitution was performed.
292 """
146 return False 293 return False
147 294
148 def _replace_cdata_list_attribute_values(self, tag_name, attrs): 295 def _replace_cdata_list_attribute_values(self, tag_name, attrs):
149 """Replaces class="foo bar" with class=["foo", "bar"] 296 """When an attribute value is associated with a tag that can
297 have multiple values for that attribute, convert the string
298 value to a list of strings.
150 299
151 Modifies its input in place. 300 Basically, replaces class="foo bar" with class=["foo", "bar"]
301
302 NOTE: This method modifies its input in place.
303
304 :param tag_name: The name of a tag.
305 :param attrs: A dictionary containing the tag's attributes.
306 Any appropriate attribute values will be modified in place.
152 """ 307 """
153 if not attrs: 308 if not attrs:
154 return attrs 309 return attrs
@@ -163,7 +318,7 @@ class TreeBuilder(object):
163 # values. Split it into a list. 318 # values. Split it into a list.
164 value = attrs[attr] 319 value = attrs[attr]
165 if isinstance(value, str): 320 if isinstance(value, str):
166 values = whitespace_re.split(value) 321 values = nonwhitespace_re.findall(value)
167 else: 322 else:
168 # html5lib sometimes calls setAttributes twice 323 # html5lib sometimes calls setAttributes twice
169 # for the same tag when rearranging the parse 324 # for the same tag when rearranging the parse
@@ -174,9 +329,13 @@ class TreeBuilder(object):
174 values = value 329 values = value
175 attrs[attr] = values 330 attrs[attr] = values
176 return attrs 331 return attrs
177 332
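The effect of this splitting, in a short sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p class="foo   bar">x</p>', "html.parser")
    # nonwhitespace_re.findall() keeps the tokens, so runs of whitespace
    # of any length collapse into list boundaries.
    print(soup.p["class"])   # ['foo', 'bar']
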
178class SAXTreeBuilder(TreeBuilder): 333class SAXTreeBuilder(TreeBuilder):
179 """A Beautiful Soup treebuilder that listens for SAX events.""" 334 """A Beautiful Soup treebuilder that listens for SAX events.
335
336 This is not currently used for anything, but it demonstrates
337 how a simple TreeBuilder would work.
338 """
180 339
181 def feed(self, markup): 340 def feed(self, markup):
182 raise NotImplementedError() 341 raise NotImplementedError()
@@ -186,11 +345,11 @@ class SAXTreeBuilder(TreeBuilder):
186 345
187 def startElement(self, name, attrs): 346 def startElement(self, name, attrs):
188 attrs = dict((key[1], value) for key, value in list(attrs.items())) 347 attrs = dict((key[1], value) for key, value in list(attrs.items()))
189 #print "Start %s, %r" % (name, attrs) 348 #print("Start %s, %r" % (name, attrs))
190 self.soup.handle_starttag(name, attrs) 349 self.soup.handle_starttag(name, attrs)
191 350
192 def endElement(self, name): 351 def endElement(self, name):
193 #print "End %s" % name 352 #print("End %s" % name)
194 self.soup.handle_endtag(name) 353 self.soup.handle_endtag(name)
195 354
196 def startElementNS(self, nsTuple, nodeName, attrs): 355 def startElementNS(self, nsTuple, nodeName, attrs):
@@ -227,10 +386,44 @@ class HTMLTreeBuilder(TreeBuilder):
227 Such as which tags are empty-element tags. 386 Such as which tags are empty-element tags.
228 """ 387 """
229 388
230 preserve_whitespace_tags = set(['pre', 'textarea']) 389 empty_element_tags = set([
231 empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta', 390 # These are from HTML5.
232 'spacer', 'link', 'frame', 'base']) 391 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr',
233 392
393 # These are from earlier versions of HTML and are removed in HTML5.
394 'basefont', 'bgsound', 'command', 'frame', 'image', 'isindex', 'nextid', 'spacer'
395 ])
396
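The practical effect of the expanded set, sketched:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<br><p></p>", "html.parser")
    # <br> is in empty_element_tags, <p> is not.
    print(soup.br, soup.p)   # <br/> <p></p>
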
397 # The HTML standard defines these as block-level elements. Beautiful
398 # Soup does not treat these elements differently from other elements,
399 # but it may do so eventually, and this information is available if
400 # you need to use it.
401 block_elements = set(["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header", "hr", "li", "main", "nav", "noscript", "ol", "output", "p", "pre", "section", "table", "tfoot", "ul", "video"])
402
403 # These HTML tags need special treatment so they can be
404 # represented by a string class other than NavigableString.
405 #
406 # For some of these tags, it's because the HTML standard defines
407 # an unusual content model for them. I made this list by going
408 # through the HTML spec
409 # (https://html.spec.whatwg.org/#metadata-content) and looking for
410 # "metadata content" elements that can contain strings.
411 #
412 # The Ruby tags (<rt> and <rp>) are here despite being normal
413 # "phrasing content" tags, because the content they contain is
414 # qualitatively different from other text in the document, and it
415 # can be useful to be able to distinguish it.
416 #
417 # TODO: Arguably <noscript> could go here but it seems
418 # qualitatively different from the other tags.
419 DEFAULT_STRING_CONTAINERS = {
420 'rt' : RubyTextString,
421 'rp' : RubyParenthesisString,
422 'style': Stylesheet,
423 'script': Script,
424 'template': TemplateString,
425 }
426
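A sketch of the resulting string classes:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<style>p {color: red}</style><p>hi</p>", "html.parser")
    # Text inside <style> gets the Stylesheet class; ordinary text stays
    # a plain NavigableString.
    print(type(soup.style.string).__name__)   # Stylesheet
    print(type(soup.p.string).__name__)       # NavigableString
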
234 # The HTML standard defines these attributes as containing a 427 # The HTML standard defines these attributes as containing a
235 # space-separated list of values, not a single value. That is, 428 # space-separated list of values, not a single value. That is,
236 # class="foo bar" means that the 'class' attribute has two values, 429 # class="foo bar" means that the 'class' attribute has two values,
@@ -238,7 +431,7 @@ class HTMLTreeBuilder(TreeBuilder):
238 # encounter one of these attributes, we will parse its value into 431 # encounter one of these attributes, we will parse its value into
239 # a list of values if possible. Upon output, the list will be 432 # a list of values if possible. Upon output, the list will be
240 # converted back into a string. 433 # converted back into a string.
241 cdata_list_attributes = { 434 DEFAULT_CDATA_LIST_ATTRIBUTES = {
242 "*" : ['class', 'accesskey', 'dropzone'], 435 "*" : ['class', 'accesskey', 'dropzone'],
243 "a" : ['rel', 'rev'], 436 "a" : ['rel', 'rev'],
244 "link" : ['rel', 'rev'], 437 "link" : ['rel', 'rev'],
@@ -255,7 +448,19 @@ class HTMLTreeBuilder(TreeBuilder):
255 "output" : ["for"], 448 "output" : ["for"],
256 } 449 }
257 450
451 DEFAULT_PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea'])
452
258 def set_up_substitutions(self, tag): 453 def set_up_substitutions(self, tag):
454 """Replace the declared encoding in a <meta> tag with a placeholder,
455 to be substituted when the tag is output to a string.
456
457 An HTML document may come in to Beautiful Soup as one
458 encoding, but exit in a different encoding, and the <meta> tag
459 needs to be changed to reflect this.
460
461 :param tag: A `Tag`
462 :return: Whether or not a substitution was performed.
463 """
259 # We are only interested in <meta> tags 464 # We are only interested in <meta> tags
260 if tag.name != 'meta': 465 if tag.name != 'meta':
261 return False 466 return False
@@ -288,10 +493,107 @@ class HTMLTreeBuilder(TreeBuilder):
288 493
289 return (meta_encoding is not None) 494 return (meta_encoding is not None)
290 495
496class DetectsXMLParsedAsHTML(object):
497 """A mixin class for any class (a TreeBuilder, or some class used by a
498 TreeBuilder) that's in a position to detect whether an XML
499 document is being incorrectly parsed as HTML, and issue an
500 appropriate warning.
501
502 This requires being able to observe an incoming processing
503 instruction that might be an XML declaration, and also able to
504 observe tags as they're opened. If you can't do that for a given
505 TreeBuilder, there's a less reliable implementation based on
506 examining the raw markup.
507 """
508
509 # Regular expression for seeing if markup has an <html> tag.
510 LOOKS_LIKE_HTML = re.compile("<[^ +]html", re.I)
511 LOOKS_LIKE_HTML_B = re.compile(b"<[^ +]html", re.I)
512
513 XML_PREFIX = '<?xml'
514 XML_PREFIX_B = b'<?xml'
515
516 @classmethod
517 def warn_if_markup_looks_like_xml(cls, markup, stacklevel=3):
518 """Perform a check on some markup to see if it looks like XML
519 that's not XHTML. If so, issue a warning.
520
521 This is much less reliable than doing the check while parsing,
522 but some of the tree builders can't do that.
523
524 :param stacklevel: The stacklevel of the code calling this
525 function.
526
527 :return: True if the markup looks like non-XHTML XML, False
528 otherwise.
529
530 """
531 if isinstance(markup, bytes):
532 prefix = cls.XML_PREFIX_B
533 looks_like_html = cls.LOOKS_LIKE_HTML_B
534 else:
535 prefix = cls.XML_PREFIX
536 looks_like_html = cls.LOOKS_LIKE_HTML
537
538 if (markup is not None
539 and markup.startswith(prefix)
540 and not looks_like_html.search(markup[:500])
541 ):
542 cls._warn(stacklevel=stacklevel+2)
543 return True
544 return False
545
546 @classmethod
547 def _warn(cls, stacklevel=5):
548 """Issue a warning about XML being parsed as HTML."""
549 warnings.warn(
550 XMLParsedAsHTMLWarning.MESSAGE, XMLParsedAsHTMLWarning,
551 stacklevel=stacklevel
552 )
553
554 def _initialize_xml_detector(self):
555 """Call this method before parsing a document."""
556 self._first_processing_instruction = None
557 self._root_tag = None
558
559 def _document_might_be_xml(self, processing_instruction):
560 """Call this method when encountering an XML declaration, or a
561 "processing instruction" that might be an XML declaration.
562 """
563 if (self._first_processing_instruction is not None
564 or self._root_tag is not None):
565 # The document has already started. Don't bother checking
566 # anymore.
567 return
568
569 self._first_processing_instruction = processing_instruction
570
571 # We won't know until we encounter the first tag whether or
572 # not this is actually a problem.
573
574 def _root_tag_encountered(self, name):
575 """Call this when you encounter the document's root tag.
576
577 This is where we actually check whether an XML document is
578 being incorrectly parsed as HTML, and issue the warning.
579 """
580 if self._root_tag is not None:
581 # This method was incorrectly called multiple times. Do
582 # nothing.
583 return
584
585 self._root_tag = name
586 if (name != 'html' and self._first_processing_instruction is not None
587 and self._first_processing_instruction.lower().startswith('xml ')):
588 # We encountered an XML declaration and then a tag other
589 # than 'html'. This is a reliable indicator that a
590 # non-XHTML document is being parsed as XML.
591 self._warn()
592
593
291def register_treebuilders_from(module): 594def register_treebuilders_from(module):
292 """Copy TreeBuilders from the given module into this module.""" 595 """Copy TreeBuilders from the given module into this module."""
293 # I'm fairly sure this is not the best way to do this. 596 this_module = sys.modules[__name__]
294 this_module = sys.modules['bs4.builder']
295 for name in module.__all__: 597 for name in module.__all__:
296 obj = getattr(module, name) 598 obj = getattr(module, name)
297 599
@@ -302,12 +604,22 @@ def register_treebuilders_from(module):
302 this_module.builder_registry.register(obj) 604 this_module.builder_registry.register(obj)
303 605
304class ParserRejectedMarkup(Exception): 606class ParserRejectedMarkup(Exception):
305 pass 607 """An Exception to be raised when the underlying parser simply
306 608 refuses to parse the given markup.
609 """
610 def __init__(self, message_or_exception):
611 """Explain why the parser rejected the given markup, either
612 with a textual explanation or another exception.
613 """
614 if isinstance(message_or_exception, Exception):
615 e = message_or_exception
616 message_or_exception = "%s: %s" % (e.__class__.__name__, str(e))
617 super(ParserRejectedMarkup, self).__init__(message_or_exception)
618
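A tiny sketch of the new constructor (the wrapped error is invented):

    from bs4.builder import ParserRejectedMarkup

    try:
        raise ValueError("bad byte at offset 0")
    except ValueError as e:
        wrapped = ParserRejectedMarkup(e)
    # The exception's text embeds the original class name and message.
    print(wrapped)   # ValueError: bad byte at offset 0
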
307# Builders are registered in reverse order of priority, so that custom 619# Builders are registered in reverse order of priority, so that custom
308# builder registrations will take precedence. In general, we want lxml 620# builder registrations will take precedence. In general, we want lxml
309# to take precedence over html5lib, because it's faster. And we only 621# to take precedence over html5lib, because it's faster. And we only
310# want to use HTMLParser as a last result. 622# want to use HTMLParser as a last resort.
311from . import _htmlparser 623from . import _htmlparser
312register_treebuilders_from(_htmlparser) 624register_treebuilders_from(_htmlparser)
313try: 625try:
diff --git a/bitbake/lib/bs4/builder/_html5lib.py b/bitbake/lib/bs4/builder/_html5lib.py
index 9e9216ef9c..7c46a85118 100644
--- a/bitbake/lib/bs4/builder/_html5lib.py
+++ b/bitbake/lib/bs4/builder/_html5lib.py
@@ -1,9 +1,14 @@
1# Use of this source code is governed by the MIT license.
2__license__ = "MIT"
3
1__all__ = [ 4__all__ = [
2 'HTML5TreeBuilder', 5 'HTML5TreeBuilder',
3 ] 6 ]
4 7
5import warnings 8import warnings
9import re
6from bs4.builder import ( 10from bs4.builder import (
11 DetectsXMLParsedAsHTML,
7 PERMISSIVE, 12 PERMISSIVE,
8 HTML, 13 HTML,
9 HTML_5, 14 HTML_5,
@@ -11,17 +16,13 @@ from bs4.builder import (
11 ) 16 )
12from bs4.element import ( 17from bs4.element import (
13 NamespacedAttribute, 18 NamespacedAttribute,
14 whitespace_re, 19 nonwhitespace_re,
15) 20)
16import html5lib 21import html5lib
17try: 22from html5lib.constants import (
18 # html5lib >= 0.99999999/1.0b9 23 namespaces,
19 from html5lib.treebuilders import base as treebuildersbase 24 prefixes,
20except ImportError: 25 )
21 # html5lib <= 0.9999999/1.0b8
22 from html5lib.treebuilders import _base as treebuildersbase
23from html5lib.constants import namespaces
24
25from bs4.element import ( 26from bs4.element import (
26 Comment, 27 Comment,
27 Doctype, 28 Doctype,
@@ -29,13 +30,37 @@ from bs4.element import (
29 Tag, 30 Tag,
30 ) 31 )
31 32
33try:
34 # Pre-0.99999999
35 from html5lib.treebuilders import _base as treebuilder_base
36 new_html5lib = False
37except ImportError as e:
38 # 0.99999999 and up
39 from html5lib.treebuilders import base as treebuilder_base
40 new_html5lib = True
41
32class HTML5TreeBuilder(HTMLTreeBuilder): 42class HTML5TreeBuilder(HTMLTreeBuilder):
33 """Use html5lib to build a tree.""" 43 """Use html5lib to build a tree.
44
45 Note that this TreeBuilder does not support some features common
46 to HTML TreeBuilders. Some of these features could theoretically
47 be implemented, but at the very least it's quite difficult,
48 because html5lib moves the parse tree around as it's being built.
49
50 * This TreeBuilder doesn't use different subclasses of NavigableString
51 based on the name of the tag in which the string was found.
52
53 * You can't use a SoupStrainer to parse only part of a document.
54 """
34 55
35 NAME = "html5lib" 56 NAME = "html5lib"
36 57
37 features = [NAME, PERMISSIVE, HTML_5, HTML] 58 features = [NAME, PERMISSIVE, HTML_5, HTML]
38 59
60 # html5lib can tell us which line number and position in the
61 # original file is the source of an element.
62 TRACKS_LINE_NUMBERS = True
63
39 def prepare_markup(self, markup, user_specified_encoding, 64 def prepare_markup(self, markup, user_specified_encoding,
40 document_declared_encoding=None, exclude_encodings=None): 65 document_declared_encoding=None, exclude_encodings=None):
41 # Store the user-specified encoding for use later on. 66 # Store the user-specified encoding for use later on.
@@ -45,27 +70,56 @@ class HTML5TreeBuilder(HTMLTreeBuilder):
45 # ATM because the html5lib TreeBuilder doesn't use 70 # ATM because the html5lib TreeBuilder doesn't use
46 # UnicodeDammit. 71 # UnicodeDammit.
47 if exclude_encodings: 72 if exclude_encodings:
48 warnings.warn("You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.") 73 warnings.warn(
74 "You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.",
75 stacklevel=3
76 )
77
78 # html5lib only parses HTML, so if it's given XML that's worth
79 # noting.
80 DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(
81 markup, stacklevel=3
82 )
83
49 yield (markup, None, None, False) 84 yield (markup, None, None, False)
50 85
51 # These methods are defined by Beautiful Soup. 86 # These methods are defined by Beautiful Soup.
52 def feed(self, markup): 87 def feed(self, markup):
53 if self.soup.parse_only is not None: 88 if self.soup.parse_only is not None:
54 warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.") 89 warnings.warn(
90 "You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.",
91 stacklevel=4
92 )
55 parser = html5lib.HTMLParser(tree=self.create_treebuilder) 93 parser = html5lib.HTMLParser(tree=self.create_treebuilder)
56 doc = parser.parse(markup, encoding=self.user_specified_encoding) 94 self.underlying_builder.parser = parser
57 95 extra_kwargs = dict()
96 if not isinstance(markup, str):
97 if new_html5lib:
98 extra_kwargs['override_encoding'] = self.user_specified_encoding
99 else:
100 extra_kwargs['encoding'] = self.user_specified_encoding
101 doc = parser.parse(markup, **extra_kwargs)
102
58 # Set the character encoding detected by the tokenizer. 103 # Set the character encoding detected by the tokenizer.
59 if isinstance(markup, str): 104 if isinstance(markup, str):
60 # We need to special-case this because html5lib sets 105 # We need to special-case this because html5lib sets
61 # charEncoding to UTF-8 if it gets Unicode input. 106 # charEncoding to UTF-8 if it gets Unicode input.
62 doc.original_encoding = None 107 doc.original_encoding = None
63 else: 108 else:
64 doc.original_encoding = parser.tokenizer.stream.charEncoding[0] 109 original_encoding = parser.tokenizer.stream.charEncoding[0]
65 110 if not isinstance(original_encoding, str):
111 # In 0.99999999 and up, the encoding is an html5lib
112 # Encoding object. We want to use a string for compatibility
113 # with other tree builders.
114 original_encoding = original_encoding.name
115 doc.original_encoding = original_encoding
116 self.underlying_builder.parser = None
117
66 def create_treebuilder(self, namespaceHTMLElements): 118 def create_treebuilder(self, namespaceHTMLElements):
67 self.underlying_builder = TreeBuilderForHtml5lib( 119 self.underlying_builder = TreeBuilderForHtml5lib(
68 self.soup, namespaceHTMLElements) 120 namespaceHTMLElements, self.soup,
121 store_line_numbers=self.store_line_numbers
122 )
69 return self.underlying_builder 123 return self.underlying_builder
70 124
71 def test_fragment_to_document(self, fragment): 125 def test_fragment_to_document(self, fragment):
@@ -73,12 +127,30 @@ class HTML5TreeBuilder(HTMLTreeBuilder):
73 return '<html><head></head><body>%s</body></html>' % fragment 127 return '<html><head></head><body>%s</body></html>' % fragment
74 128
75 129
76class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder): 130class TreeBuilderForHtml5lib(treebuilder_base.TreeBuilder):
77 131
78 def __init__(self, soup, namespaceHTMLElements): 132 def __init__(self, namespaceHTMLElements, soup=None,
79 self.soup = soup 133 store_line_numbers=True, **kwargs):
134 if soup:
135 self.soup = soup
136 else:
137 from bs4 import BeautifulSoup
138 # TODO: Why is the parser 'html.parser' here? To avoid an
139 # infinite loop?
140 self.soup = BeautifulSoup(
141 "", "html.parser", store_line_numbers=store_line_numbers,
142 **kwargs
143 )
144 # TODO: What are **kwargs exactly? Should they be passed in
145 # here in addition to/instead of being passed to the BeautifulSoup
146 # constructor?
80 super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements) 147 super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)
81 148
149 # This will be set later to an html5lib.html5parser.HTMLParser
150 # object, which we can use to track the current line number.
151 self.parser = None
152 self.store_line_numbers = store_line_numbers
153
82 def documentClass(self): 154 def documentClass(self):
83 self.soup.reset() 155 self.soup.reset()
84 return Element(self.soup, self.soup, None) 156 return Element(self.soup, self.soup, None)
@@ -92,14 +164,26 @@ class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder):
92 self.soup.object_was_parsed(doctype) 164 self.soup.object_was_parsed(doctype)
93 165
94 def elementClass(self, name, namespace): 166 def elementClass(self, name, namespace):
95 tag = self.soup.new_tag(name, namespace) 167 kwargs = {}
168 if self.parser and self.store_line_numbers:
169 # This represents the point immediately after the end of the
170 # tag. We don't know when the tag started, but we do know
171 # where it ended -- the character just before this one.
172 sourceline, sourcepos = self.parser.tokenizer.stream.position()
173 kwargs['sourceline'] = sourceline
174 kwargs['sourcepos'] = sourcepos-1
175 tag = self.soup.new_tag(name, namespace, **kwargs)
176
96 return Element(tag, self.soup, namespace) 177 return Element(tag, self.soup, namespace)
97 178
98 def commentClass(self, data): 179 def commentClass(self, data):
99 return TextNode(Comment(data), self.soup) 180 return TextNode(Comment(data), self.soup)
100 181
101 def fragmentClass(self): 182 def fragmentClass(self):
102 self.soup = BeautifulSoup("") 183 from bs4 import BeautifulSoup
184 # TODO: Why is the parser 'html.parser' here? To avoid an
185 # infinite loop?
186 self.soup = BeautifulSoup("", "html.parser")
103 self.soup.name = "[document_fragment]" 187 self.soup.name = "[document_fragment]"
104 return Element(self.soup, self.soup, None) 188 return Element(self.soup, self.soup, None)
105 189
@@ -111,7 +195,57 @@ class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder):
111 return self.soup 195 return self.soup
112 196
113 def getFragment(self): 197 def getFragment(self):
114 return treebuildersbase.TreeBuilder.getFragment(self).element 198 return treebuilder_base.TreeBuilder.getFragment(self).element
199
200 def testSerializer(self, element):
201 from bs4 import BeautifulSoup
202 rv = []
203 doctype_re = re.compile(r'^(.*?)(?: PUBLIC "(.*?)"(?: "(.*?)")?| SYSTEM "(.*?)")?$')
204
205 def serializeElement(element, indent=0):
206 if isinstance(element, BeautifulSoup):
207 pass
208 if isinstance(element, Doctype):
209 m = doctype_re.match(element)
210 if m:
211 name = m.group(1)
212 if m.lastindex > 1:
213 publicId = m.group(2) or ""
214 systemId = m.group(3) or m.group(4) or ""
215 rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
216 (' ' * indent, name, publicId, systemId))
217 else:
218 rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, name))
219 else:
220 rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
221 elif isinstance(element, Comment):
222 rv.append("|%s<!-- %s -->" % (' ' * indent, element))
223 elif isinstance(element, NavigableString):
224 rv.append("|%s\"%s\"" % (' ' * indent, element))
225 else:
226 if element.namespace:
227 name = "%s %s" % (prefixes[element.namespace],
228 element.name)
229 else:
230 name = element.name
231 rv.append("|%s<%s>" % (' ' * indent, name))
232 if element.attrs:
233 attributes = []
234 for name, value in list(element.attrs.items()):
235 if isinstance(name, NamespacedAttribute):
236 name = "%s %s" % (prefixes[name.namespace], name.name)
237 if isinstance(value, list):
238 value = " ".join(value)
239 attributes.append((name, value))
240
241 for name, value in sorted(attributes):
242 rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
243 indent += 2
244 for child in element.children:
245 serializeElement(child, indent)
246 serializeElement(element, 0)
247
248 return "\n".join(rv)
115 249
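
testSerializer() above emits the indented tree format used by html5lib's test suite: one '|'-prefixed line per node, with attributes sorted and indented under their tag. A rough sketch of its output for a fragment like <p class="a b">hello</p>:

    |<p>
    |  class="a b"
    |  "hello"
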
116class AttrList(object): 250class AttrList(object):
117 def __init__(self, element): 251 def __init__(self, element):
@@ -122,14 +256,14 @@ class AttrList(object):
122 def __setitem__(self, name, value): 256 def __setitem__(self, name, value):
123 # If this attribute is a multi-valued attribute for this element, 257 # If this attribute is a multi-valued attribute for this element,
124 # turn its value into a list. 258 # turn its value into a list.
125 list_attr = HTML5TreeBuilder.cdata_list_attributes 259 list_attr = self.element.cdata_list_attributes or {}
126 if (name in list_attr['*'] 260 if (name in list_attr.get('*', [])
127 or (self.element.name in list_attr 261 or (self.element.name in list_attr
128 and name in list_attr[self.element.name])): 262 and name in list_attr.get(self.element.name, []))):
129 # A node that is being cloned may have already undergone 263 # A node that is being cloned may have already undergone
130 # this procedure. 264 # this procedure.
131 if not isinstance(value, list): 265 if not isinstance(value, list):
132 value = whitespace_re.split(value) 266 value = nonwhitespace_re.findall(value)
133 self.element[name] = value 267 self.element[name] = value
134 def items(self): 268 def items(self):
135 return list(self.attrs.items()) 269 return list(self.attrs.items())
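
The multi-valued attribute handling in __setitem__ above mirrors standard Beautiful Soup behavior: attributes named in cdata_list_attributes (class being the classic example) are split on runs of whitespace into lists, while other attributes stay strings. A sketch, assuming a stock bs4 install:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p class="menu  main" id="top">x</p>', "html.parser")
    soup.p["class"]   # ['menu', 'main'] -- whitespace runs collapse
    soup.p["id"]      # 'top' -- not a multi-valued attribute
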
@@ -143,9 +277,9 @@ class AttrList(object):
143 return name in list(self.attrs.keys()) 277 return name in list(self.attrs.keys())
144 278
145 279
146class Element(treebuildersbase.Node): 280class Element(treebuilder_base.Node):
147 def __init__(self, element, soup, namespace): 281 def __init__(self, element, soup, namespace):
148 treebuildersbase.Node.__init__(self, element.name) 282 treebuilder_base.Node.__init__(self, element.name)
149 self.element = element 283 self.element = element
150 self.soup = soup 284 self.soup = soup
151 self.namespace = namespace 285 self.namespace = namespace
@@ -164,13 +298,15 @@ class Element(treebuildersbase.Node):
164 child = node 298 child = node
165 elif node.element.__class__ == NavigableString: 299 elif node.element.__class__ == NavigableString:
166 string_child = child = node.element 300 string_child = child = node.element
301 node.parent = self
167 else: 302 else:
168 child = node.element 303 child = node.element
304 node.parent = self
169 305
170 if not isinstance(child, str) and child.parent is not None: 306 if not isinstance(child, str) and child.parent is not None:
171 node.element.extract() 307 node.element.extract()
172 308
173 if (string_child and self.element.contents 309 if (string_child is not None and self.element.contents
174 and self.element.contents[-1].__class__ == NavigableString): 310 and self.element.contents[-1].__class__ == NavigableString):
175 # We are appending a string onto another string. 311 # We are appending a string onto another string.
176 # TODO This has O(n^2) performance, for input like 312 # TODO This has O(n^2) performance, for input like
@@ -203,12 +339,12 @@ class Element(treebuildersbase.Node):
203 most_recent_element=most_recent_element) 339 most_recent_element=most_recent_element)
204 340
205 def getAttributes(self): 341 def getAttributes(self):
342 if isinstance(self.element, Comment):
343 return {}
206 return AttrList(self.element) 344 return AttrList(self.element)
207 345
208 def setAttributes(self, attributes): 346 def setAttributes(self, attributes):
209
210 if attributes is not None and len(attributes) > 0: 347 if attributes is not None and len(attributes) > 0:
211
212 converted_attributes = [] 348 converted_attributes = []
213 for name, value in list(attributes.items()): 349 for name, value in list(attributes.items()):
214 if isinstance(name, tuple): 350 if isinstance(name, tuple):
@@ -230,11 +366,11 @@ class Element(treebuildersbase.Node):
230 attributes = property(getAttributes, setAttributes) 366 attributes = property(getAttributes, setAttributes)
231 367
232 def insertText(self, data, insertBefore=None): 368 def insertText(self, data, insertBefore=None):
369 text = TextNode(self.soup.new_string(data), self.soup)
233 if insertBefore: 370 if insertBefore:
234 text = TextNode(self.soup.new_string(data), self.soup) 371 self.insertBefore(text, insertBefore)
235 self.insertBefore(data, insertBefore)
236 else: 372 else:
237 self.appendChild(data) 373 self.appendChild(text)
238 374
239 def insertBefore(self, node, refNode): 375 def insertBefore(self, node, refNode):
240 index = self.element.index(refNode.element) 376 index = self.element.index(refNode.element)
@@ -253,9 +389,10 @@ class Element(treebuildersbase.Node):
253 389
254 def reparentChildren(self, new_parent): 390 def reparentChildren(self, new_parent):
255 """Move all of this tag's children into another tag.""" 391 """Move all of this tag's children into another tag."""
256 # print "MOVE", self.element.contents 392 # print("MOVE", self.element.contents)
257 # print "FROM", self.element 393 # print("FROM", self.element)
258 # print "TO", new_parent.element 394 # print("TO", new_parent.element)
395
259 element = self.element 396 element = self.element
260 new_parent_element = new_parent.element 397 new_parent_element = new_parent.element
261 # Determine what this tag's next_element will be once all the children 398 # Determine what this tag's next_element will be once all the children
@@ -274,29 +411,35 @@ class Element(treebuildersbase.Node):
274 new_parents_last_descendant_next_element = new_parent_element.next_element 411 new_parents_last_descendant_next_element = new_parent_element.next_element
275 412
276 to_append = element.contents 413 to_append = element.contents
277 append_after = new_parent_element.contents
278 if len(to_append) > 0: 414 if len(to_append) > 0:
279 # Set the first child's previous_element and previous_sibling 415 # Set the first child's previous_element and previous_sibling
280 # to elements within the new parent 416 # to elements within the new parent
281 first_child = to_append[0] 417 first_child = to_append[0]
282 if new_parents_last_descendant: 418 if new_parents_last_descendant is not None:
283 first_child.previous_element = new_parents_last_descendant 419 first_child.previous_element = new_parents_last_descendant
284 else: 420 else:
285 first_child.previous_element = new_parent_element 421 first_child.previous_element = new_parent_element
286 first_child.previous_sibling = new_parents_last_child 422 first_child.previous_sibling = new_parents_last_child
287 if new_parents_last_descendant: 423 if new_parents_last_descendant is not None:
288 new_parents_last_descendant.next_element = first_child 424 new_parents_last_descendant.next_element = first_child
289 else: 425 else:
290 new_parent_element.next_element = first_child 426 new_parent_element.next_element = first_child
291 if new_parents_last_child: 427 if new_parents_last_child is not None:
292 new_parents_last_child.next_sibling = first_child 428 new_parents_last_child.next_sibling = first_child
293 429
294 # Fix the last child's next_element and next_sibling 430 # Find the very last element being moved. It is now the
295 last_child = to_append[-1] 431 # parent's last descendant. It has no .next_sibling and
296 last_child.next_element = new_parents_last_descendant_next_element 432 # its .next_element is whatever the previous last
297 if new_parents_last_descendant_next_element: 433 # descendant had.
298 new_parents_last_descendant_next_element.previous_element = last_child 434 last_childs_last_descendant = to_append[-1]._last_descendant(False, True)
299 last_child.next_sibling = None 435
436 last_childs_last_descendant.next_element = new_parents_last_descendant_next_element
437 if new_parents_last_descendant_next_element is not None:
438 # TODO: This code has no test coverage and I'm not sure
439 # how to get html5lib to go through this path, but it's
440 # just the other side of the previous line.
441 new_parents_last_descendant_next_element.previous_element = last_childs_last_descendant
442 last_childs_last_descendant.next_sibling = None
300 443
301 for child in to_append: 444 for child in to_append:
302 child.parent = new_parent_element 445 child.parent = new_parent_element
@@ -306,9 +449,9 @@ class Element(treebuildersbase.Node):
306 element.contents = [] 449 element.contents = []
307 element.next_element = final_next_element 450 element.next_element = final_next_element
308 451
309 # print "DONE WITH MOVE" 452 # print("DONE WITH MOVE")
310 # print "FROM", self.element 453 # print("FROM", self.element)
311 # print "TO", new_parent_element 454 # print("TO", new_parent_element)
312 455
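
The pointer surgery in reparentChildren() keeps bs4's document-order linked list intact: every node carries next_element/previous_element (and sibling) pointers that must survive the move. A small illustration of the invariant the code preserves (markup is arbitrary):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<a><b>1</b></a><c>2</c>", "html.parser")
    node = soup.a
    while node is not None:
        print(repr(node))        # visits <a>, <b>, '1', <c>, '2' in document order
        node = node.next_element
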
313 def cloneNode(self): 456 def cloneNode(self):
314 tag = self.soup.new_tag(self.element.name, self.namespace) 457 tag = self.soup.new_tag(self.element.name, self.namespace)
@@ -321,7 +464,7 @@ class Element(treebuildersbase.Node):
321 return self.element.contents 464 return self.element.contents
322 465
323 def getNameTuple(self): 466 def getNameTuple(self):
324 if self.namespace is None: 467 if self.namespace == None:
325 return namespaces["html"], self.name 468 return namespaces["html"], self.name
326 else: 469 else:
327 return self.namespace, self.name 470 return self.namespace, self.name
@@ -330,7 +473,7 @@ class Element(treebuildersbase.Node):
330 473
331class TextNode(Element): 474class TextNode(Element):
332 def __init__(self, element, soup): 475 def __init__(self, element, soup):
333 treebuildersbase.Node.__init__(self, None) 476 treebuilder_base.Node.__init__(self, None)
334 self.element = element 477 self.element = element
335 self.soup = soup 478 self.soup = soup
336 479
diff --git a/bitbake/lib/bs4/builder/_htmlparser.py b/bitbake/lib/bs4/builder/_htmlparser.py
index bb0a63f2f3..3cc187f892 100644
--- a/bitbake/lib/bs4/builder/_htmlparser.py
+++ b/bitbake/lib/bs4/builder/_htmlparser.py
@@ -1,35 +1,18 @@
1# encoding: utf-8
1"""Use the HTMLParser library to parse HTML files that aren't too bad.""" 2"""Use the HTMLParser library to parse HTML files that aren't too bad."""
2 3
4# Use of this source code is governed by the MIT license.
5__license__ = "MIT"
6
3__all__ = [ 7__all__ = [
4 'HTMLParserTreeBuilder', 8 'HTMLParserTreeBuilder',
5 ] 9 ]
6 10
7from html.parser import HTMLParser 11from html.parser import HTMLParser
8 12
9try:
10 from html.parser import HTMLParseError
11except ImportError as e:
12 # HTMLParseError is removed in Python 3.5. Since it can never be
13 # thrown in 3.5, we can just define our own class as a placeholder.
14 class HTMLParseError(Exception):
15 pass
16
17import sys 13import sys
18import warnings 14import warnings
19 15
20# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
21# argument, which we'd like to set to False. Unfortunately,
22# http://bugs.python.org/issue13273 makes strict=True a better bet
23# before Python 3.2.3.
24#
25# At the end of this file, we monkeypatch HTMLParser so that
26# strict=True works well on Python 3.2.2.
27major, minor, release = sys.version_info[:3]
28CONSTRUCTOR_TAKES_STRICT = major == 3 and minor == 2 and release >= 3
29CONSTRUCTOR_STRICT_IS_DEPRECATED = major == 3 and minor == 3
30CONSTRUCTOR_TAKES_CONVERT_CHARREFS = major == 3 and minor >= 4
31
32
33from bs4.element import ( 16from bs4.element import (
34 CData, 17 CData,
35 Comment, 18 Comment,
@@ -40,6 +23,8 @@ from bs4.element import (
40from bs4.dammit import EntitySubstitution, UnicodeDammit 23from bs4.dammit import EntitySubstitution, UnicodeDammit
41 24
42from bs4.builder import ( 25from bs4.builder import (
26 DetectsXMLParsedAsHTML,
27 ParserRejectedMarkup,
43 HTML, 28 HTML,
44 HTMLTreeBuilder, 29 HTMLTreeBuilder,
45 STRICT, 30 STRICT,
@@ -48,8 +33,84 @@ from bs4.builder import (
48 33
49HTMLPARSER = 'html.parser' 34HTMLPARSER = 'html.parser'
50 35
51class BeautifulSoupHTMLParser(HTMLParser): 36class BeautifulSoupHTMLParser(HTMLParser, DetectsXMLParsedAsHTML):
52 def handle_starttag(self, name, attrs): 37 """A subclass of the Python standard library's HTMLParser class, which
38 listens for HTMLParser events and translates them into calls
39 to Beautiful Soup's tree construction API.
40 """
41
42 # Strategies for handling duplicate attributes
43 IGNORE = 'ignore'
44 REPLACE = 'replace'
45
46 def __init__(self, *args, **kwargs):
47 """Constructor.
48
49 :param on_duplicate_attribute: A strategy for what to do if a
50 tag includes the same attribute more than once. Accepted
51 values are: REPLACE (replace earlier values with later
52 ones, the default), IGNORE (keep the earliest value
53 encountered), or a callable. A callable must take three
54 arguments: the dictionary of attributes already processed,
55 the name of the duplicate attribute, and the most recent value
56 encountered.
57 """
58 self.on_duplicate_attribute = kwargs.pop(
59 'on_duplicate_attribute', self.REPLACE
60 )
61 HTMLParser.__init__(self, *args, **kwargs)
62
63 # Keep a list of empty-element tags that were encountered
64 # without an explicit closing tag. If we encounter a closing tag
65 # of this type, we'll associate it with one of those entries.
66 #
67 # This isn't a stack because we don't care about the
68 # order. It's a list of closing tags we've already handled and
69 # will ignore, assuming they ever show up.
70 self.already_closed_empty_element = []
71
72 self._initialize_xml_detector()
73
74 def error(self, message):
75 # NOTE: This method is required so long as Python 3.9 is
76 # supported. The corresponding code is removed from HTMLParser
77 # in 3.5, but not removed from ParserBase until 3.10.
78 # https://github.com/python/cpython/issues/76025
79 #
80 # The original implementation turned the error into a warning,
81 # but in every case I discovered, this made HTMLParser
82 # immediately crash with an error message that was less
83 # helpful than the warning. The new implementation makes it
84 # more clear that html.parser just can't parse this
85 # markup. The 3.10 implementation does the same, though it
86 # raises AssertionError rather than calling a method. (We
87 # catch this error and wrap it in a ParserRejectedMarkup.)
88 raise ParserRejectedMarkup(message)
89
90 def handle_startendtag(self, name, attrs):
91 """Handle an incoming empty-element tag.
92
93 This is only called when the markup looks like <tag/>.
94
95 :param name: Name of the tag.
96 :param attrs: Dictionary of the tag's attributes.
97 """
98 # is_startend() tells handle_starttag not to close the tag
99 # just because its name matches a known empty-element tag. We
100 # know that this is an empty-element tag and we want to call
101 # handle_endtag ourselves.
102 tag = self.handle_starttag(name, attrs, handle_empty_element=False)
103 self.handle_endtag(name)
104
105 def handle_starttag(self, name, attrs, handle_empty_element=True):
106 """Handle an opening tag, e.g. '<tag>'
107
108 :param name: Name of the tag.
109 :param attrs: Dictionary of the tag's attributes.
110 :param handle_empty_element: True if this tag is known to be
111 an empty-element tag (i.e. there is not expected to be any
112 closing tag).
113 """
53 # XXX namespace 114 # XXX namespace
54 attr_dict = {} 115 attr_dict = {}
55 for key, value in attrs: 116 for key, value in attrs:
@@ -57,20 +118,78 @@ class BeautifulSoupHTMLParser(HTMLParser):
57 # for consistency with the other tree builders. 118 # for consistency with the other tree builders.
58 if value is None: 119 if value is None:
59 value = '' 120 value = ''
60 attr_dict[key] = value 121 if key in attr_dict:
122 # A single attribute shows up multiple times in this
123 # tag. How to handle it depends on the
124 # on_duplicate_attribute setting.
125 on_dupe = self.on_duplicate_attribute
126 if on_dupe == self.IGNORE:
127 pass
128 elif on_dupe in (None, self.REPLACE):
129 attr_dict[key] = value
130 else:
131 on_dupe(attr_dict, key, value)
132 else:
133 attr_dict[key] = value
61 attrvalue = '""' 134 attrvalue = '""'
62 self.soup.handle_starttag(name, None, None, attr_dict) 135 #print("START", name)
63 136 sourceline, sourcepos = self.getpos()
64 def handle_endtag(self, name): 137 tag = self.soup.handle_starttag(
65 self.soup.handle_endtag(name) 138 name, None, None, attr_dict, sourceline=sourceline,
66 139 sourcepos=sourcepos
140 )
141 if tag and tag.is_empty_element and handle_empty_element:
142 # Unlike other parsers, html.parser doesn't send separate end tag
143 # events for empty-element tags. (It's handled in
144 # handle_startendtag, but only if the original markup looked like
145 # <tag/>.)
146 #
147 # So we need to call handle_endtag() ourselves. Since we
148 # know the start event is identical to the end event, we
149 # don't want handle_endtag() to cross off any previous end
150 # events for tags of this name.
151 self.handle_endtag(name, check_already_closed=False)
152
153 # But we might encounter an explicit closing tag for this tag
154 # later on. If so, we want to ignore it.
155 self.already_closed_empty_element.append(name)
156
157 if self._root_tag is None:
158 self._root_tag_encountered(name)
159
160 def handle_endtag(self, name, check_already_closed=True):
161 """Handle a closing tag, e.g. '</tag>'
162
163 :param name: A tag name.
164 :param check_already_closed: True if this tag is expected to
165 be the closing portion of an empty-element tag,
166 e.g. '<tag></tag>'.
167 """
168 #print("END", name)
169 if check_already_closed and name in self.already_closed_empty_element:
170 # This is a redundant end tag for an empty-element tag.
171 # We've already called handle_endtag() for it, so just
172 # check it off the list.
173 #print("ALREADY CLOSED", name)
174 self.already_closed_empty_element.remove(name)
175 else:
176 self.soup.handle_endtag(name)
177
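
The already_closed_empty_element bookkeeping above is what makes the three spellings of an empty-element tag come out identically; a quick illustration of the observable behavior:

    from bs4 import BeautifulSoup

    for markup in ("<br>", "<br/>", "<br></br>"):
        print(BeautifulSoup(markup, "html.parser"))   # <br/> each time
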
67 def handle_data(self, data): 178 def handle_data(self, data):
179 """Handle some textual data that shows up between tags."""
68 self.soup.handle_data(data) 180 self.soup.handle_data(data)
69 181
70 def handle_charref(self, name): 182 def handle_charref(self, name):
71 # XXX workaround for a bug in HTMLParser. Remove this once 183 """Handle a numeric character reference by converting it to the
72 # it's fixed in all supported versions. 184 corresponding Unicode character and treating it as textual
73 # http://bugs.python.org/issue13633 185 data.
186
187 :param name: Character number, possibly in hexadecimal.
188 """
189 # TODO: This was originally a workaround for a bug in
190 # HTMLParser. (http://bugs.python.org/issue13633) The bug has
191 # been fixed, but removing this code still makes some
192 # Beautiful Soup tests fail. This needs investigation.
74 if name.startswith('x'): 193 if name.startswith('x'):
75 real_name = int(name.lstrip('x'), 16) 194 real_name = int(name.lstrip('x'), 16)
76 elif name.startswith('X'): 195 elif name.startswith('X'):
@@ -78,37 +197,71 @@ class BeautifulSoupHTMLParser(HTMLParser):
78 else: 197 else:
79 real_name = int(name) 198 real_name = int(name)
80 199
81 try: 200 data = None
82 data = chr(real_name) 201 if real_name < 256:
83 except (ValueError, OverflowError) as e: 202 # HTML numeric entities are supposed to reference Unicode
84 data = "\N{REPLACEMENT CHARACTER}" 203 # code points, but sometimes they reference code points in
85 204 # some other encoding (ahem, Windows-1252). E.g. &#147;
205 # instead of &#201; for LEFT DOUBLE QUOTATION MARK. This
206 # code tries to detect this situation and compensate.
207 for encoding in (self.soup.original_encoding, 'windows-1252'):
208 if not encoding:
209 continue
210 try:
211 data = bytearray([real_name]).decode(encoding)
212 except UnicodeDecodeError as e:
213 pass
214 if not data:
215 try:
216 data = chr(real_name)
217 except (ValueError, OverflowError) as e:
218 pass
219 data = data or "\N{REPLACEMENT CHARACTER}"
86 self.handle_data(data) 220 self.handle_data(data)
87 221
88 def handle_entityref(self, name): 222 def handle_entityref(self, name):
223 """Handle a named entity reference by converting it to the
224 corresponding Unicode character(s) and treating it as textual
225 data.
226
227 :param name: Name of the entity reference.
228 """
89 character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name) 229 character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
90 if character is not None: 230 if character is not None:
91 data = character 231 data = character
92 else: 232 else:
93 data = "&%s;" % name 233 # If this were XML, it would be ambiguous whether "&foo"
234 # was a character entity reference with a missing
235 # semicolon or the literal string "&foo". Since this is
236 # HTML, we have a complete list of all character entity references,
237 # and this one wasn't found, so assume it's the literal string "&foo".
238 data = "&%s" % name
94 self.handle_data(data) 239 self.handle_data(data)
95 240
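
The Windows-1252 fallback in handle_charref() above rescues "smart quote" references that name bytes in that encoding rather than Unicode code points; a sketch of the effect (html.parser leaves character references to this handler because convert_charrefs is forced off):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>&#147;hi&#148;</p>", "html.parser")
    soup.p.string   # '\u201chi\u201d' -- curly quotes, not Latin-1 control characters
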
96 def handle_comment(self, data): 241 def handle_comment(self, data):
242 """Handle an HTML comment.
243
244 :param data: The text of the comment.
245 """
97 self.soup.endData() 246 self.soup.endData()
98 self.soup.handle_data(data) 247 self.soup.handle_data(data)
99 self.soup.endData(Comment) 248 self.soup.endData(Comment)
100 249
101 def handle_decl(self, data): 250 def handle_decl(self, data):
251 """Handle a DOCTYPE declaration.
252
253 :param data: The text of the declaration.
254 """
102 self.soup.endData() 255 self.soup.endData()
103 if data.startswith("DOCTYPE "): 256 data = data[len("DOCTYPE "):]
104 data = data[len("DOCTYPE "):]
105 elif data == 'DOCTYPE':
106 # i.e. "<!DOCTYPE>"
107 data = ''
108 self.soup.handle_data(data) 257 self.soup.handle_data(data)
109 self.soup.endData(Doctype) 258 self.soup.endData(Doctype)
110 259
111 def unknown_decl(self, data): 260 def unknown_decl(self, data):
261 """Handle a declaration of unknown type -- probably a CDATA block.
262
263 :param data: The text of the declaration.
264 """
112 if data.upper().startswith('CDATA['): 265 if data.upper().startswith('CDATA['):
113 cls = CData 266 cls = CData
114 data = data[len('CDATA['):] 267 data = data[len('CDATA['):]
@@ -119,144 +272,116 @@ class BeautifulSoupHTMLParser(HTMLParser):
119 self.soup.endData(cls) 272 self.soup.endData(cls)
120 273
121 def handle_pi(self, data): 274 def handle_pi(self, data):
275 """Handle a processing instruction.
276
277 :param data: The text of the instruction.
278 """
122 self.soup.endData() 279 self.soup.endData()
123 self.soup.handle_data(data) 280 self.soup.handle_data(data)
281 self._document_might_be_xml(data)
124 self.soup.endData(ProcessingInstruction) 282 self.soup.endData(ProcessingInstruction)
125 283
126 284
127class HTMLParserTreeBuilder(HTMLTreeBuilder): 285class HTMLParserTreeBuilder(HTMLTreeBuilder):
128 286 """A Beautiful soup `TreeBuilder` that uses the `HTMLParser` parser,
287 found in the Python standard library.
288 """
129 is_xml = False 289 is_xml = False
130 picklable = True 290 picklable = True
131 NAME = HTMLPARSER 291 NAME = HTMLPARSER
132 features = [NAME, HTML, STRICT] 292 features = [NAME, HTML, STRICT]
133 293
134 def __init__(self, *args, **kwargs): 294 # The html.parser knows which line number and position in the
135 if CONSTRUCTOR_TAKES_STRICT and not CONSTRUCTOR_STRICT_IS_DEPRECATED: 295 # original file is the source of an element.
136 kwargs['strict'] = False 296 TRACKS_LINE_NUMBERS = True
137 if CONSTRUCTOR_TAKES_CONVERT_CHARREFS:
138 kwargs['convert_charrefs'] = False
139 self.parser_args = (args, kwargs)
140 297
298 def __init__(self, parser_args=None, parser_kwargs=None, **kwargs):
299 """Constructor.
300
301 :param parser_args: Positional arguments to pass into
302 the BeautifulSoupHTMLParser constructor, once it's
303 invoked.
304 :param parser_kwargs: Keyword arguments to pass into
305 the BeautifulSoupHTMLParser constructor, once it's
306 invoked.
307 :param kwargs: Keyword arguments for the superclass constructor.
308 """
309 # Some keyword arguments will be pulled out of kwargs and placed
310 # into parser_kwargs.
311 extra_parser_kwargs = dict()
312 for arg in ('on_duplicate_attribute',):
313 if arg in kwargs:
314 value = kwargs.pop(arg)
315 extra_parser_kwargs[arg] = value
316 super(HTMLParserTreeBuilder, self).__init__(**kwargs)
317 parser_args = parser_args or []
318 parser_kwargs = parser_kwargs or {}
319 parser_kwargs.update(extra_parser_kwargs)
320 parser_kwargs['convert_charrefs'] = False
321 self.parser_args = (parser_args, parser_kwargs)
322
141 def prepare_markup(self, markup, user_specified_encoding=None, 323 def prepare_markup(self, markup, user_specified_encoding=None,
142 document_declared_encoding=None, exclude_encodings=None): 324 document_declared_encoding=None, exclude_encodings=None):
143 """ 325
144 :return: A 4-tuple (markup, original encoding, encoding 326 """Run any preliminary steps necessary to make incoming markup
145 declared within markup, whether any characters had to be 327 acceptable to the parser.
146 replaced with REPLACEMENT CHARACTER). 328
329 :param markup: Some markup -- probably a bytestring.
330 :param user_specified_encoding: The user asked to try this encoding.
331 :param document_declared_encoding: The markup itself claims to be
332 in this encoding.
333 :param exclude_encodings: The user asked _not_ to try any of
334 these encodings.
335
336 :yield: A series of 4-tuples:
337 (markup, encoding, declared encoding,
338 has undergone character replacement)
339
340 Each 4-tuple represents a strategy for converting the
341 document to Unicode and parsing it. Each strategy will be tried
342 in turn.
147 """ 343 """
148 if isinstance(markup, str): 344 if isinstance(markup, str):
345 # Parse Unicode as-is.
149 yield (markup, None, None, False) 346 yield (markup, None, None, False)
150 return 347 return
151 348
349 # Ask UnicodeDammit to sniff the most likely encoding.
350
351 # This was provided by the end-user; treat it as a known
352 # definite encoding per the algorithm laid out in the HTML5
353 # spec. (See the EncodingDetector class for details.)
354 known_definite_encodings = [user_specified_encoding]
355
356 # This was found in the document; treat it as a slightly lower-priority
357 # user encoding.
358 user_encodings = [document_declared_encoding]
359
152 try_encodings = [user_specified_encoding, document_declared_encoding] 360 try_encodings = [user_specified_encoding, document_declared_encoding]
153 dammit = UnicodeDammit(markup, try_encodings, is_html=True, 361 dammit = UnicodeDammit(
154 exclude_encodings=exclude_encodings) 362 markup,
363 known_definite_encodings=known_definite_encodings,
364 user_encodings=user_encodings,
365 is_html=True,
366 exclude_encodings=exclude_encodings
367 )
155 yield (dammit.markup, dammit.original_encoding, 368 yield (dammit.markup, dammit.original_encoding,
156 dammit.declared_html_encoding, 369 dammit.declared_html_encoding,
157 dammit.contains_replacement_characters) 370 dammit.contains_replacement_characters)
158 371
159 def feed(self, markup): 372 def feed(self, markup):
373 """Run some incoming markup through some parsing process,
374 populating the `BeautifulSoup` object in self.soup.
375 """
160 args, kwargs = self.parser_args 376 args, kwargs = self.parser_args
161 parser = BeautifulSoupHTMLParser(*args, **kwargs) 377 parser = BeautifulSoupHTMLParser(*args, **kwargs)
162 parser.soup = self.soup 378 parser.soup = self.soup
163 try: 379 try:
164 parser.feed(markup) 380 parser.feed(markup)
165 except HTMLParseError as e: 381 parser.close()
166 warnings.warn(RuntimeWarning( 382 except AssertionError as e:
167 "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help.")) 383 # html.parser raises AssertionError in rare cases to
168 raise e 384 # indicate a fatal problem with the markup, especially
169 385 # when there's an error in the doctype declaration.
170# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some 386 raise ParserRejectedMarkup(e)
171# 3.2.3 code. This ensures they don't treat markup like <p></p> as a 387 parser.already_closed_empty_element = []
172# string.
173#
174# XXX This code can be removed once most Python 3 users are on 3.2.3.
175if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
176 import re
177 attrfind_tolerant = re.compile(
178 r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
179 r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
180 HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant
181
182 locatestarttagend = re.compile(r"""
183 <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
184 (?:\s+ # whitespace before attribute name
185 (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
186 (?:\s*=\s* # value indicator
187 (?:'[^']*' # LITA-enclosed value
188 |\"[^\"]*\" # LIT-enclosed value
189 |[^'\">\s]+ # bare value
190 )
191 )?
192 )
193 )*
194 \s* # trailing whitespace
195""", re.VERBOSE)
196 BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend
197
198 from html.parser import tagfind, attrfind
199
200 def parse_starttag(self, i):
201 self.__starttag_text = None
202 endpos = self.check_for_whole_start_tag(i)
203 if endpos < 0:
204 return endpos
205 rawdata = self.rawdata
206 self.__starttag_text = rawdata[i:endpos]
207
208 # Now parse the data between i+1 and j into a tag and attrs
209 attrs = []
210 match = tagfind.match(rawdata, i+1)
211 assert match, 'unexpected call to parse_starttag()'
212 k = match.end()
213 self.lasttag = tag = rawdata[i+1:k].lower()
214 while k < endpos:
215 if self.strict:
216 m = attrfind.match(rawdata, k)
217 else:
218 m = attrfind_tolerant.match(rawdata, k)
219 if not m:
220 break
221 attrname, rest, attrvalue = m.group(1, 2, 3)
222 if not rest:
223 attrvalue = None
224 elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
225 attrvalue[:1] == '"' == attrvalue[-1:]:
226 attrvalue = attrvalue[1:-1]
227 if attrvalue:
228 attrvalue = self.unescape(attrvalue)
229 attrs.append((attrname.lower(), attrvalue))
230 k = m.end()
231
232 end = rawdata[k:endpos].strip()
233 if end not in (">", "/>"):
234 lineno, offset = self.getpos()
235 if "\n" in self.__starttag_text:
236 lineno = lineno + self.__starttag_text.count("\n")
237 offset = len(self.__starttag_text) \
238 - self.__starttag_text.rfind("\n")
239 else:
240 offset = offset + len(self.__starttag_text)
241 if self.strict:
242 self.error("junk characters in start tag: %r"
243 % (rawdata[k:endpos][:20],))
244 self.handle_data(rawdata[i:endpos])
245 return endpos
246 if end.endswith('/>'):
247 # XHTML-style empty tag: <span attr="value" />
248 self.handle_startendtag(tag, attrs)
249 else:
250 self.handle_starttag(tag, attrs)
251 if tag in self.CDATA_CONTENT_ELEMENTS:
252 self.set_cdata_mode(tag)
253 return endpos
254
255 def set_cdata_mode(self, elem):
256 self.cdata_elem = elem.lower()
257 self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
258
259 BeautifulSoupHTMLParser.parse_starttag = parse_starttag
260 BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode
261
262 CONSTRUCTOR_TAKES_STRICT = True
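
prepare_markup() above now separates a "known definite" encoding (supplied by the caller) from a lower-priority "user" encoding (declared inside the document), matching UnicodeDammit's newer API; a minimal sketch of that API with an illustrative bytestring:

    from bs4.dammit import UnicodeDammit

    dammit = UnicodeDammit(b"Sacr\xe9 bleu!", known_definite_encodings=["latin-1"])
    dammit.unicode_markup      # 'Sacré bleu!'
    dammit.original_encoding   # 'latin-1', tried before any statistical sniffing
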
diff --git a/bitbake/lib/bs4/builder/_lxml.py b/bitbake/lib/bs4/builder/_lxml.py
index 9c6c14ee65..4f7cf74681 100644
--- a/bitbake/lib/bs4/builder/_lxml.py
+++ b/bitbake/lib/bs4/builder/_lxml.py
@@ -1,19 +1,28 @@
1# Use of this source code is governed by the MIT license.
2__license__ = "MIT"
3
1__all__ = [ 4__all__ = [
2 'LXMLTreeBuilderForXML', 5 'LXMLTreeBuilderForXML',
3 'LXMLTreeBuilder', 6 'LXMLTreeBuilder',
4 ] 7 ]
5 8
9try:
10 from collections.abc import Callable # Python 3.6
11except ImportError as e:
12 from collections import Callable
13
6from io import BytesIO 14from io import BytesIO
7from io import StringIO 15from io import StringIO
8import collections
9from lxml import etree 16from lxml import etree
10from bs4.element import ( 17from bs4.element import (
11 Comment, 18 Comment,
12 Doctype, 19 Doctype,
13 NamespacedAttribute, 20 NamespacedAttribute,
14 ProcessingInstruction, 21 ProcessingInstruction,
22 XMLProcessingInstruction,
15) 23)
16from bs4.builder import ( 24from bs4.builder import (
25 DetectsXMLParsedAsHTML,
17 FAST, 26 FAST,
18 HTML, 27 HTML,
19 HTMLTreeBuilder, 28 HTMLTreeBuilder,
@@ -25,10 +34,15 @@ from bs4.dammit import EncodingDetector
25 34
26LXML = 'lxml' 35LXML = 'lxml'
27 36
37def _invert(d):
38 "Invert a dictionary."
39 return dict((v,k) for k, v in list(d.items()))
40
28class LXMLTreeBuilderForXML(TreeBuilder): 41class LXMLTreeBuilderForXML(TreeBuilder):
29 DEFAULT_PARSER_CLASS = etree.XMLParser 42 DEFAULT_PARSER_CLASS = etree.XMLParser
30 43
31 is_xml = True 44 is_xml = True
45 processing_instruction_class = XMLProcessingInstruction
32 46
33 NAME = "lxml-xml" 47 NAME = "lxml-xml"
34 ALTERNATE_NAMES = ["xml"] 48 ALTERNATE_NAMES = ["xml"]
@@ -40,26 +54,79 @@ class LXMLTreeBuilderForXML(TreeBuilder):
40 54
41 # This namespace mapping is specified in the XML Namespace 55 # This namespace mapping is specified in the XML Namespace
42 # standard. 56 # standard.
43 DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"} 57 DEFAULT_NSMAPS = dict(xml='http://www.w3.org/XML/1998/namespace')
58
59 DEFAULT_NSMAPS_INVERTED = _invert(DEFAULT_NSMAPS)
60
61 # NOTE: If we parsed Element objects and looked at .sourceline,
62 # we'd be able to see the line numbers from the original document.
63 # But instead we build an XMLParser or HTMLParser object to serve
64 # as the target of parse messages, and those messages don't include
65 # line numbers.
66 # See: https://bugs.launchpad.net/lxml/+bug/1846906
67
68 def initialize_soup(self, soup):
69 """Let the BeautifulSoup object know about the standard namespace
70 mapping.
71
72 :param soup: A `BeautifulSoup`.
73 """
74 super(LXMLTreeBuilderForXML, self).initialize_soup(soup)
75 self._register_namespaces(self.DEFAULT_NSMAPS)
76
77 def _register_namespaces(self, mapping):
78 """Let the BeautifulSoup object know about namespaces encountered
79 while parsing the document.
80
81 This might be useful later on when creating CSS selectors.
82
83 This will track (almost) all namespaces, even ones that were
84 only in scope for part of the document. If two namespaces have
85 the same prefix, only the first one encountered will be
86 tracked. Un-prefixed namespaces are not tracked.
44 87
88 :param mapping: A dictionary mapping namespace prefixes to URIs.
89 """
90 for key, value in list(mapping.items()):
91 # This is 'if key' and not 'if key is not None' because we
92 # don't track un-prefixed namespaces. Soupselect will
93 # treat an un-prefixed namespace as the default, which
94 # causes confusion in some cases.
95 if key and key not in self.soup._namespaces:
96 # Let the BeautifulSoup object know about a new namespace.
97 # If there are multiple namespaces defined with the same
98 # prefix, the first one in the document takes precedence.
99 self.soup._namespaces[key] = value
100
45 def default_parser(self, encoding): 101 def default_parser(self, encoding):
46 # This can either return a parser object or a class, which 102 """Find the default parser for the given encoding.
47 # will be instantiated with default arguments. 103
104 :param encoding: A string.
105 :return: Either a parser object or a class, which
106 will be instantiated with default arguments.
107 """
48 if self._default_parser is not None: 108 if self._default_parser is not None:
49 return self._default_parser 109 return self._default_parser
50 return etree.XMLParser( 110 return etree.XMLParser(
51 target=self, strip_cdata=False, recover=True, encoding=encoding) 111 target=self, strip_cdata=False, recover=True, encoding=encoding)
52 112
53 def parser_for(self, encoding): 113 def parser_for(self, encoding):
114 """Instantiate an appropriate parser for the given encoding.
115
116 :param encoding: A string.
117 :return: A parser object such as an `etree.XMLParser`.
118 """
54 # Use the default parser. 119 # Use the default parser.
55 parser = self.default_parser(encoding) 120 parser = self.default_parser(encoding)
56 121
57 if isinstance(parser, collections.Callable): 122 if isinstance(parser, Callable):
58 # Instantiate the parser with default arguments 123 # Instantiate the parser with default arguments
59 parser = parser(target=self, strip_cdata=False, encoding=encoding) 124 parser = parser(
125 target=self, strip_cdata=False, recover=True, encoding=encoding
126 )
60 return parser 127 return parser
61 128
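
default_parser()/parser_for() above hand the builder itself to lxml as a parser "target". A self-contained sketch of that event interface (EchoTarget is illustrative; the method names are lxml's documented target API, which the tree builder implements in order to forward events to the BeautifulSoup object):

    from lxml import etree

    class EchoTarget:
        def start(self, name, attrs, nsmap=None):
            # Recent lxml also passes the tag's new namespace mapping
            # when the handler accepts a third argument.
            print("start", name, dict(attrs))
        def end(self, name):
            print("end", name)
        def data(self, content):
            print("data", content)
        def close(self):
            return "done"

    parser = etree.XMLParser(target=EchoTarget(), strip_cdata=False, recover=True)
    etree.fromstring(b"<root><child>text</child></root>", parser)   # returns "done"
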
62 def __init__(self, parser=None, empty_element_tags=None): 129 def __init__(self, parser=None, empty_element_tags=None, **kwargs):
63 # TODO: Issue a warning if parser is present but not a 130 # TODO: Issue a warning if parser is present but not a
64 # callable, since that means there's no way to create new 131 # callable, since that means there's no way to create new
65 # parsers for different encodings. 132 # parsers for different encodings.
@@ -67,8 +134,10 @@ class LXMLTreeBuilderForXML(TreeBuilder):
67 if empty_element_tags is not None: 134 if empty_element_tags is not None:
68 self.empty_element_tags = set(empty_element_tags) 135 self.empty_element_tags = set(empty_element_tags)
69 self.soup = None 136 self.soup = None
70 self.nsmaps = [self.DEFAULT_NSMAPS] 137 self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED]
71 138 self.active_namespace_prefixes = [dict(self.DEFAULT_NSMAPS)]
139 super(LXMLTreeBuilderForXML, self).__init__(**kwargs)
140
72 def _getNsTag(self, tag): 141 def _getNsTag(self, tag):
73 # Split the namespace URL out of a fully-qualified lxml tag 142 # Split the namespace URL out of a fully-qualified lxml tag
74 # name. Copied from lxml's src/lxml/sax.py. 143 # name. Copied from lxml's src/lxml/sax.py.
@@ -80,16 +149,51 @@ class LXMLTreeBuilderForXML(TreeBuilder):
80 def prepare_markup(self, markup, user_specified_encoding=None, 149 def prepare_markup(self, markup, user_specified_encoding=None,
81 exclude_encodings=None, 150 exclude_encodings=None,
82 document_declared_encoding=None): 151 document_declared_encoding=None):
83 """ 152 """Run any preliminary steps necessary to make incoming markup
84 :yield: A series of 4-tuples. 153 acceptable to the parser.
154
155 lxml really wants to get a bytestring and convert it to
156 Unicode itself. So instead of using UnicodeDammit to convert
157 the bytestring to Unicode using different encodings, this
158 implementation uses EncodingDetector to iterate over the
159 encodings, and tell lxml to try to parse the document as each
160 one in turn.
161
162 :param markup: Some markup -- hopefully a bytestring.
163 :param user_specified_encoding: The user asked to try this encoding.
164 :param document_declared_encoding: The markup itself claims to be
165 in this encoding.
166 :param exclude_encodings: The user asked _not_ to try any of
167 these encodings.
168
169 :yield: A series of 4-tuples:
85 (markup, encoding, declared encoding, 170 (markup, encoding, declared encoding,
86 has undergone character replacement) 171 has undergone character replacement)
87 172
88 Each 4-tuple represents a strategy for parsing the document. 173 Each 4-tuple represents a strategy for converting the
174 document to Unicode and parsing it. Each strategy will be tried
175 in turn.
89 """ 176 """
177 is_html = not self.is_xml
178 if is_html:
179 self.processing_instruction_class = ProcessingInstruction
180 # We're in HTML mode, so if we're given XML, that's worth
181 # noting.
182 DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(
183 markup, stacklevel=3
184 )
185 else:
186 self.processing_instruction_class = XMLProcessingInstruction
187
90 if isinstance(markup, str): 188 if isinstance(markup, str):
91 # We were given Unicode. Maybe lxml can parse Unicode on 189 # We were given Unicode. Maybe lxml can parse Unicode on
92 # this system? 190 # this system?
191
192 # TODO: This is a workaround for
193 # https://bugs.launchpad.net/lxml/+bug/1948551.
194 # We can remove it once the upstream issue is fixed.
195 if len(markup) > 0 and markup[0] == u'\N{BYTE ORDER MARK}':
196 markup = markup[1:]
93 yield markup, None, document_declared_encoding, False 197 yield markup, None, document_declared_encoding, False
94 198
95 if isinstance(markup, str): 199 if isinstance(markup, str):
@@ -98,14 +202,19 @@ class LXMLTreeBuilderForXML(TreeBuilder):
98 yield (markup.encode("utf8"), "utf8", 202 yield (markup.encode("utf8"), "utf8",
99 document_declared_encoding, False) 203 document_declared_encoding, False)
100 204
101 # Instead of using UnicodeDammit to convert the bytestring to 205 # This was provided by the end-user; treat it as a known
102 # Unicode using different encodings, use EncodingDetector to 206 # definite encoding per the algorithm laid out in the HTML5
103 # iterate over the encodings, and tell lxml to try to parse 207 # spec. (See the EncodingDetector class for details.)
104 # the document as each one in turn. 208 known_definite_encodings = [user_specified_encoding]
105 is_html = not self.is_xml 209
106 try_encodings = [user_specified_encoding, document_declared_encoding] 210 # This was found in the document; treat it as a slightly lower-priority
211 # user encoding.
212 user_encodings = [document_declared_encoding]
107 detector = EncodingDetector( 213 detector = EncodingDetector(
108 markup, try_encodings, is_html, exclude_encodings) 214 markup, known_definite_encodings=known_definite_encodings,
215 user_encodings=user_encodings, is_html=is_html,
216 exclude_encodings=exclude_encodings
217 )
109 for encoding in detector.encodings: 218 for encoding in detector.encodings:
110 yield (detector.markup, encoding, document_declared_encoding, False) 219 yield (detector.markup, encoding, document_declared_encoding, False)
111 220
@@ -128,25 +237,45 @@ class LXMLTreeBuilderForXML(TreeBuilder):
128 self.parser.feed(data) 237 self.parser.feed(data)
129 self.parser.close() 238 self.parser.close()
130 except (UnicodeDecodeError, LookupError, etree.ParserError) as e: 239 except (UnicodeDecodeError, LookupError, etree.ParserError) as e:
131 raise ParserRejectedMarkup(str(e)) 240 raise ParserRejectedMarkup(e)
132 241
133 def close(self): 242 def close(self):
134 self.nsmaps = [self.DEFAULT_NSMAPS] 243 self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED]
135 244
136 def start(self, name, attrs, nsmap={}): 245 def start(self, name, attrs, nsmap={}):
137 # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy. 246 # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
138 attrs = dict(attrs) 247 attrs = dict(attrs)
139 nsprefix = None 248 nsprefix = None
140 # Invert each namespace map as it comes in. 249 # Invert each namespace map as it comes in.
141 if len(self.nsmaps) > 1: 250 if len(nsmap) == 0 and len(self.nsmaps) > 1:
142 # There are no new namespaces for this tag, but 251 # There are no new namespaces for this tag, but
143 # non-default namespaces are in play, so we need a 252 # non-default namespaces are in play, so we need a
144 # separate tag stack to know when they end. 253 # separate tag stack to know when they end.
145 self.nsmaps.append(None) 254 self.nsmaps.append(None)
146 elif len(nsmap) > 0: 255 elif len(nsmap) > 0:
147 # A new namespace mapping has come into play. 256 # A new namespace mapping has come into play.
148 inverted_nsmap = dict((value, key) for key, value in list(nsmap.items())) 257
149 self.nsmaps.append(inverted_nsmap) 258 # First, Let the BeautifulSoup object know about it.
259 self._register_namespaces(nsmap)
260
261 # Then, add it to our running list of inverted namespace
262 # mappings.
263 self.nsmaps.append(_invert(nsmap))
264
265 # The currently active namespace prefixes have
266 # changed. Calculate the new mapping so it can be stored
267 # with all Tag objects created while these prefixes are in
268 # scope.
269 current_mapping = dict(self.active_namespace_prefixes[-1])
270 current_mapping.update(nsmap)
271
272 # We should not track un-prefixed namespaces as we can only hold one
273 # and it will be recognized as the default namespace by soupsieve,
274 # which may be confusing in some situations.
275 if '' in current_mapping:
276 del current_mapping['']
277 self.active_namespace_prefixes.append(current_mapping)
278
150 # Also treat the namespace mapping as a set of attributes on the 279 # Also treat the namespace mapping as a set of attributes on the
151 # tag, so we can recreate it later. 280 # tag, so we can recreate it later.
152 attrs = attrs.copy() 281 attrs = attrs.copy()
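
The nsmaps/active_namespace_prefixes stacks above are how prefix information ends up attached to individual tags; a sketch of the visible result (requires lxml; the namespace URI is illustrative):

    from bs4 import BeautifulSoup

    xml = '<root xmlns:doc="http://example.com/doc"><doc:item/></root>'
    soup = BeautifulSoup(xml, "xml")
    item = soup.find("item")
    item.prefix      # 'doc'
    item.namespace   # 'http://example.com/doc'
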
@@ -171,8 +300,11 @@ class LXMLTreeBuilderForXML(TreeBuilder):
171 300
172 namespace, name = self._getNsTag(name) 301 namespace, name = self._getNsTag(name)
173 nsprefix = self._prefix_for_namespace(namespace) 302 nsprefix = self._prefix_for_namespace(namespace)
174 self.soup.handle_starttag(name, namespace, nsprefix, attrs) 303 self.soup.handle_starttag(
175 304 name, namespace, nsprefix, attrs,
305 namespaces=self.active_namespace_prefixes[-1]
306 )
307
176 def _prefix_for_namespace(self, namespace): 308 def _prefix_for_namespace(self, namespace):
177 """Find the currently active prefix for the given namespace.""" 309 """Find the currently active prefix for the given namespace."""
178 if namespace is None: 310 if namespace is None:
@@ -196,13 +328,20 @@ class LXMLTreeBuilderForXML(TreeBuilder):
196 if len(self.nsmaps) > 1: 328 if len(self.nsmaps) > 1:
197 # This tag, or one of its parents, introduced a namespace 329 # This tag, or one of its parents, introduced a namespace
198 # mapping, so pop it off the stack. 330 # mapping, so pop it off the stack.
199 self.nsmaps.pop() 331 out_of_scope_nsmap = self.nsmaps.pop()
200 332
333 if out_of_scope_nsmap is not None:
334 # This tag introduced a namespace mapping which is no
335 # longer in scope. Recalculate the currently active
336 # namespace prefixes.
337 self.active_namespace_prefixes.pop()
338
201 def pi(self, target, data): 339 def pi(self, target, data):
202 self.soup.endData() 340 self.soup.endData()
203 self.soup.handle_data(target + ' ' + data) 341 data = target + ' ' + data
204 self.soup.endData(ProcessingInstruction) 342 self.soup.handle_data(data)
205 343 self.soup.endData(self.processing_instruction_class)
344
206 def data(self, content): 345 def data(self, content):
207 self.soup.handle_data(content) 346 self.soup.handle_data(content)
208 347
@@ -229,6 +368,7 @@ class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):
229 368
230 features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE] 369 features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE]
231 is_xml = False 370 is_xml = False
371 processing_instruction_class = ProcessingInstruction
232 372
233 def default_parser(self, encoding): 373 def default_parser(self, encoding):
234 return etree.HTMLParser 374 return etree.HTMLParser
@@ -240,7 +380,7 @@ class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):
240 self.parser.feed(markup) 380 self.parser.feed(markup)
241 self.parser.close() 381 self.parser.close()
242 except (UnicodeDecodeError, LookupError, etree.ParserError) as e: 382 except (UnicodeDecodeError, LookupError, etree.ParserError) as e:
243 raise ParserRejectedMarkup(str(e)) 383 raise ParserRejectedMarkup(e)
244 384
245 385
246 def test_fragment_to_document(self, fragment): 386 def test_fragment_to_document(self, fragment):
diff --git a/bitbake/lib/bs4/css.py b/bitbake/lib/bs4/css.py
new file mode 100644
index 0000000000..cd1fd2df88
--- /dev/null
+++ b/bitbake/lib/bs4/css.py
@@ -0,0 +1,274 @@
1"""Integration code for CSS selectors using Soup Sieve (pypi: soupsieve)."""
2
3# We don't use soupsieve
4soupsieve = None
5
6
7class CSS(object):
8 """A proxy object against the soupsieve library, to simplify its
9 CSS selector API.
10
11 Acquire this object through the .css attribute on the
12 BeautifulSoup object, or on the Tag you want to use as the
13 starting point for a CSS selector.
14
15 The main advantage of doing this is that the tag to be selected
16 against doesn't need to be explicitly specified in the function
17 calls, since it's already scoped to a tag.
18 """
19
20 def __init__(self, tag, api=soupsieve):
21 """Constructor.
22
23 You don't need to instantiate this class yourself; instead,
24 access the .css attribute on the BeautifulSoup object, or on
25 the Tag you want to use as the starting point for your CSS
26 selector.
27
28 :param tag: All CSS selectors will use this as their starting
29 point.
30
31 :param api: A plug-in replacement for the soupsieve module,
32 designed mainly for use in tests.
33 """
34 if api is None:
35 raise NotImplementedError(
36 "Cannot execute CSS selectors because the soupsieve package is not installed."
37 )
38 self.api = api
39 self.tag = tag
40
41 def escape(self, ident):
42 """Escape a CSS identifier.
43
44 This is a simple wrapper around soupsieve.escape(). See the
45 documentation for that function for more information.
46 """
47 if soupsieve is None:
48 raise NotImplementedError(
49 "Cannot escape CSS identifiers because the soupsieve package is not installed."
50 )
51 return self.api.escape(ident)
52
53 def _ns(self, ns, select):
54 """Normalize a dictionary of namespaces."""
55 if not isinstance(select, self.api.SoupSieve) and ns is None:
56 # If the selector is a precompiled pattern, it already has
57 # a namespace context compiled in, which cannot be
58 # replaced.
59 ns = self.tag._namespaces
60 return ns
61
62 def _rs(self, results):
63 """Normalize a list of results to a Resultset.
64
65 A ResultSet is more consistent with the rest of Beautiful
66 Soup's API, and ResultSet.__getattr__ has a helpful error
67 message if you try to treat a list of results as a single
68 result (a common mistake).
69 """
70 # Import here to avoid circular import
71 from bs4.element import ResultSet
72 return ResultSet(None, results)
73
74 def compile(self, select, namespaces=None, flags=0, **kwargs):
75 """Pre-compile a selector and return the compiled object.
76
77 :param selector: A CSS selector.
78
79 :param namespaces: A dictionary mapping namespace prefixes
80 used in the CSS selector to namespace URIs. By default,
81 Beautiful Soup will use the prefixes it encountered while
82 parsing the document.
83
84 :param flags: Flags to be passed into Soup Sieve's
85 soupsieve.compile() method.
86
87 :param kwargs: Keyword arguments to be passed into SoupSieve's
88 soupsieve.compile() method.
89
90 :return: A precompiled selector object.
91 :rtype: soupsieve.SoupSieve
92 """
93 return self.api.compile(
94 select, self._ns(namespaces, select), flags, **kwargs
95 )
96
97 def select_one(self, select, namespaces=None, flags=0, **kwargs):
98 """Perform a CSS selection operation on the current Tag and return the
99 first result.
100
101 This uses the Soup Sieve library. For more information, see
102 that library's documentation for the soupsieve.select_one()
103 method.
104
105 :param selector: A CSS selector.
106
107 :param namespaces: A dictionary mapping namespace prefixes
108 used in the CSS selector to namespace URIs. By default,
109 Beautiful Soup will use the prefixes it encountered while
110 parsing the document.
111
112 :param flags: Flags to be passed into Soup Sieve's
113 soupsieve.select_one() method.
114
115 :param kwargs: Keyword arguments to be passed into SoupSieve's
116 soupsieve.select_one() method.
117
118 :return: A Tag, or None if the selector has no match.
119 :rtype: bs4.element.Tag
120
121 """
122 return self.api.select_one(
123 select, self.tag, self._ns(namespaces, select), flags, **kwargs
124 )
125
126 def select(self, select, namespaces=None, limit=0, flags=0, **kwargs):
127 """Perform a CSS selection operation on the current Tag.
128
129 This uses the Soup Sieve library. For more information, see
130 that library's documentation for the soupsieve.select()
131 method.
132
133 :param selector: A string containing a CSS selector.
134
135 :param namespaces: A dictionary mapping namespace prefixes
136 used in the CSS selector to namespace URIs. By default,
137 Beautiful Soup will pass in the prefixes it encountered while
138 parsing the document.
139
140 :param limit: After finding this number of results, stop looking.
141
142 :param flags: Flags to be passed into Soup Sieve's
143 soupsieve.select() method.
144
145 :param kwargs: Keyword arguments to be passed into SoupSieve's
146 soupsieve.select() method.
147
148 :return: A ResultSet of Tag objects.
149 :rtype: bs4.element.ResultSet
150
151 """
152 if limit is None:
153 limit = 0
154
155 return self._rs(
156 self.api.select(
157 select, self.tag, self._ns(namespaces, select), limit, flags,
158 **kwargs
159 )
160 )
161
162 def iselect(self, select, namespaces=None, limit=0, flags=0, **kwargs):
163 """Perform a CSS selection operation on the current Tag.
164
165 This uses the Soup Sieve library. For more information, see
166 that library's documentation for the soupsieve.iselect()
167 method. It is the same as select(), but it returns a generator
168 instead of a list.
169
170 :param selector: A string containing a CSS selector.
171
172 :param namespaces: A dictionary mapping namespace prefixes
173 used in the CSS selector to namespace URIs. By default,
174 Beautiful Soup will pass in the prefixes it encountered while
175 parsing the document.
176
177 :param limit: After finding this number of results, stop looking.
178
179 :param flags: Flags to be passed into Soup Sieve's
180 soupsieve.iselect() method.
181
182 :param kwargs: Keyword arguments to be passed into SoupSieve's
183 soupsieve.iselect() method.
184
185 :return: A generator
186 :rtype: types.GeneratorType
187 """
188 return self.api.iselect(
189 select, self.tag, self._ns(namespaces, select), limit, flags, **kwargs
190 )
191
192 def closest(self, select, namespaces=None, flags=0, **kwargs):
193 """Find the Tag closest to this one that matches the given selector.
194
195 This uses the Soup Sieve library. For more information, see
196 that library's documentation for the soupsieve.closest()
197 method.
198
199 :param selector: A string containing a CSS selector.
200
201 :param namespaces: A dictionary mapping namespace prefixes
202 used in the CSS selector to namespace URIs. By default,
203 Beautiful Soup will pass in the prefixes it encountered while
204 parsing the document.
205
206 :param flags: Flags to be passed into Soup Sieve's
207 soupsieve.closest() method.
208
209 :param kwargs: Keyword arguments to be passed into SoupSieve's
210 soupsieve.closest() method.
211
212 :return: A Tag, or None if there is no match.
213 :rtype: bs4.Tag
214
215 """
216 return self.api.closest(
217 select, self.tag, self._ns(namespaces, select), flags, **kwargs
218 )
219
220 def match(self, select, namespaces=None, flags=0, **kwargs):
221 """Check whether this Tag matches the given CSS selector.
222
223 This uses the Soup Sieve library. For more information, see
224 that library's documentation for the soupsieve.match()
225 method.
226
227 :param select: A CSS selector.
228
229 :param namespaces: A dictionary mapping namespace prefixes
230 used in the CSS selector to namespace URIs. By default,
231 Beautiful Soup will pass in the prefixes it encountered while
232 parsing the document.
233
234 :param flags: Flags to be passed into Soup Sieve's
235 soupsieve.match() method.
236
237 :param kwargs: Keyword arguments to be passed into SoupSieve's
238 soupsieve.match() method.
239
240 :return: True if this Tag matches the selector; False otherwise.
241 :rtype: bool
242 """
243 return self.api.match(
244 select, self.tag, self._ns(namespaces, select), flags, **kwargs
245 )
246
247 def filter(self, select, namespaces=None, flags=0, **kwargs):
248 """Filter this Tag's direct children based on the given CSS selector.
249
250 This uses the Soup Sieve library. It works the same way as
251 passing this Tag into that library's soupsieve.filter()
252 method. For more information, see the documentation for
253 soupsieve.filter().
254
255 :param namespaces: A dictionary mapping namespace prefixes
256 used in the CSS selector to namespace URIs. By default,
257 Beautiful Soup will pass in the prefixes it encountered while
258 parsing the document.
259
260 :param flags: Flags to be passed into Soup Sieve's
261 soupsieve.filter() method.
262
263 :param kwargs: Keyword arguments to be passed into SoupSieve's
264 soupsieve.filter() method.
265
266 :return: A ResultSet of Tag objects.
267 :rtype: bs4.element.ResultSet
268
269 """
270 return self._rs(
271 self.api.filter(
272 select, self.tag, self._ns(namespaces, select), flags, **kwargs
273 )
274 )
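A minimal usage sketch of this CSS wrapper, assuming the vendored module behaves like upstream bs4 4.12 (where every Tag exposes it as the .css attribute) and that the soupsieve package is installed:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup(
        '<div><p class="a">one</p><p class="b">two</p></div>', "html.parser"
    )

    print(soup.css.select("p.a"))       # [<p class="a">one</p>]
    print(next(soup.css.iselect("p")))  # <p class="a">one</p>, lazily
    print(soup.p.css.closest("div"))    # the enclosing <div>
    print(soup.p.css.match("p.a"))      # True
    print(soup.div.css.filter("p"))     # both <p> children of the <div>

select() materializes a ResultSet, while iselect() yields matches one at a time, which matters on large documents.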
diff --git a/bitbake/lib/bs4/dammit.py b/bitbake/lib/bs4/dammit.py
index 7ad9e0dd1e..692433c57a 100644
--- a/bitbake/lib/bs4/dammit.py
+++ b/bitbake/lib/bs4/dammit.py
@@ -6,61 +6,185 @@ necessary. It is heavily based on code from Mark Pilgrim's Universal
6Feed Parser. It works best on XML and HTML, but it does not rewrite the 6Feed Parser. It works best on XML and HTML, but it does not rewrite the
7XML or HTML to reflect a new encoding; that's the tree builder's job. 7XML or HTML to reflect a new encoding; that's the tree builder's job.
8""" 8"""
9# Use of this source code is governed by the MIT license.
9__license__ = "MIT" 10__license__ = "MIT"
10 11
11import codecs
12from html.entities import codepoint2name 12from html.entities import codepoint2name
13from collections import defaultdict
14import codecs
13import re 15import re
14import logging 16import logging
15 17import string
16# Import a library to autodetect character encodings. 18
17chardet_type = None 19# Import a library to autodetect character encodings. We'll support
20# any of a number of libraries that all support the same API:
21#
22# * cchardet
23# * chardet
24# * charset-normalizer
25chardet_module = None
18try: 26try:
19 # First try the fast C implementation.
20 # PyPI package: cchardet 27 # PyPI package: cchardet
21 import cchardet 28 import cchardet as chardet_module
22 def chardet_dammit(s):
23 return cchardet.detect(s)['encoding']
24except ImportError: 29except ImportError:
25 try: 30 try:
26 # Fall back to the pure Python implementation
27 # Debian package: python-chardet 31 # Debian package: python-chardet
28 # PyPI package: chardet 32 # PyPI package: chardet
29 import chardet 33 import chardet as chardet_module
30 def chardet_dammit(s):
31 return chardet.detect(s)['encoding']
32 #import chardet.constants
33 #chardet.constants._debug = 1
34 except ImportError: 34 except ImportError:
35 # No chardet available. 35 try:
36 def chardet_dammit(s): 36 # PyPI package: charset-normalizer
37 import charset_normalizer as chardet_module
38 except ImportError:
39 # No chardet available.
40 chardet_module = None
41
42if chardet_module:
43 def chardet_dammit(s):
44 if isinstance(s, str):
37 return None 45 return None
46 return chardet_module.detect(s)['encoding']
47else:
48 def chardet_dammit(s):
49 return None
38 50
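In practice the fallback chain above means chardet_dammit() transparently uses whichever detector library was found at import time; a short sketch of the resulting behavior (the exact guess depends on the installed library):

    from bs4.dammit import chardet_dammit

    # Bytes are sniffed by whichever of cchardet/chardet/charset-normalizer
    # was importable; with none installed the result is None.
    print(chardet_dammit("déjà vu".encode("utf-8")))  # e.g. 'utf-8', or None

    # Already-decoded text never needs detection.
    print(chardet_dammit("déjà vu"))                  # None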
39xml_encoding_re = re.compile( 51# Build bytestring and Unicode versions of regular expressions for finding
40 r'^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I) 52# a declared encoding inside an XML or HTML document.
41html_meta_re = re.compile( 53xml_encoding = '^\\s*<\\?.*encoding=[\'"](.*?)[\'"].*\\?>'
42 r'<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I) 54html_meta = '<\\s*meta[^>]+charset\\s*=\\s*["\']?([^>]*?)[ /;\'">]'
55encoding_res = dict()
56encoding_res[bytes] = {
57 'html' : re.compile(html_meta.encode("ascii"), re.I),
58 'xml' : re.compile(xml_encoding.encode("ascii"), re.I),
59}
60encoding_res[str] = {
61 'html' : re.compile(html_meta, re.I),
62 'xml' : re.compile(xml_encoding, re.I)
63}
64
65from html.entities import html5
43 66
44class EntitySubstitution(object): 67class EntitySubstitution(object):
45 68 """The ability to substitute XML or HTML entities for certain characters."""
46 """Substitute XML or HTML entities for the corresponding characters."""
47 69
48 def _populate_class_variables(): 70 def _populate_class_variables():
49 lookup = {} 71 """Initialize variables used by this class to manage the plethora of
50 reverse_lookup = {} 72 HTML5 named entities.
51 characters_for_re = [] 73
74 This function returns a 3-tuple containing two dictionaries
75 and a regular expression:
76
77 unicode_to_name: A mapping of Unicode strings like "⦨" to
78 entity names like "angmsdaa". When a single Unicode string has
79 multiple entity names, we try to choose the most commonly-used
80 name.
81
82 name_to_unicode: A mapping of entity names like "angmsdaa" to
83 Unicode strings like "⦨".
84
85 named_entity_re: A regular expression matching (almost) any
86 Unicode string that corresponds to an HTML5 named entity.
87 """
88 unicode_to_name = {}
89 name_to_unicode = {}
90
91 short_entities = set()
92 long_entities_by_first_character = defaultdict(set)
93
94 for name_with_semicolon, character in sorted(html5.items()):
95 # "It is intentional, for legacy compatibility, that many
96 # code points have multiple character reference names. For
97 # example, some appear both with and without the trailing
98 # semicolon, or with different capitalizations."
99 # - https://html.spec.whatwg.org/multipage/named-characters.html#named-character-references
100 #
101 # The parsers are in charge of handling (or not) character
102 # references with no trailing semicolon, so we remove the
103 # semicolon whenever it appears.
104 if name_with_semicolon.endswith(';'):
105 name = name_with_semicolon[:-1]
106 else:
107 name = name_with_semicolon
108
109 # When parsing HTML, we want to recognize any known named
110 # entity and convert it to a sequence of Unicode
111 # characters.
112 if name not in name_to_unicode:
113 name_to_unicode[name] = character
114
115 # When _generating_ HTML, we want to recognize special
116 # character sequences that _could_ be converted to named
117 # entities.
118 unicode_to_name[character] = name
119
120 # We also need to build a regular expression that lets us
121 # _find_ those characters in output strings so we can
122 # replace them.
123 #
124 # This is tricky, for two reasons.
125
126 if (len(character) == 1 and ord(character) < 128
127 and character not in '<>&'):
128 # First, it would be annoying to turn single ASCII
129 # characters like | into named entities like
130 # &verbar;. The exceptions are <>&, which we _must_
131 # turn into named entities to produce valid HTML.
132 continue
133
134 if len(character) > 1 and all(ord(x) < 128 for x in character):
135 # We also do not want to turn _combinations_ of ASCII
136 # characters like 'fj' into named entities like '&fjlig;',
137 # though that's more debatable.
138 continue
139
140 # Second, some named entities have a Unicode value that's
141 # a subset of the Unicode value for some _other_ named
142 # entity. As an example, '\u2267' is &GreaterFullEqual;,
143 # but '\u2267\u0338' is &NotGreaterFullEqual;. Our regular
144 # expression needs to match the first two characters of
145 # "\u2267\u0338foo", but only the first character of
146 # "\u2267foo".
147 #
148 # In this step, we build two sets of characters that
149 # _eventually_ need to go into the regular expression. But
150 # we won't know exactly what the regular expression needs
151 # to look like until we've gone through the entire list of
152 # named entities.
153 if len(character) == 1:
154 short_entities.add(character)
155 else:
156 long_entities_by_first_character[character[0]].add(character)
157
158 # Now that we've been through the entire list of entities, we
159 # can create a regular expression that matches any of them.
160 particles = set()
161 for short in short_entities:
162 long_versions = long_entities_by_first_character[short]
163 if not long_versions:
164 particles.add(short)
165 else:
166 ignore = "".join([x[1] for x in long_versions])
167 # This finds, e.g. \u2267 but only if it is _not_
168 # followed by \u0338.
169 particles.add("%s(?![%s])" % (short, ignore))
170
171 for long_entities in list(long_entities_by_first_character.values()):
172 for long_entity in long_entities:
173 particles.add(long_entity)
174
175 re_definition = "(%s)" % "|".join(particles)
176
177 # If an entity shows up in both html5 and codepoint2name, it's
178 # likely that HTML5 gives it several different names, such as
179 # 'rsquo' and 'rsquor'. When converting Unicode characters to
180 # named entities, the codepoint2name name should take
181 # precedence where possible, since that's the more easily
182 # recognizable one.
52 for codepoint, name in list(codepoint2name.items()): 183 for codepoint, name in list(codepoint2name.items()):
53 character = chr(codepoint) 184 character = chr(codepoint)
54 if codepoint != 34: 185 unicode_to_name[character] = name
55 # There's no point in turning the quotation mark into 186
56 # &quot;, unless it happens within an attribute value, which 187 return unicode_to_name, name_to_unicode, re.compile(re_definition)
57 # is handled elsewhere.
58 characters_for_re.append(character)
59 lookup[character] = name
60 # But we do want to turn &quot; into the quotation mark.
61 reverse_lookup[name] = character
62 re_definition = "[%s]" % "".join(characters_for_re)
63 return lookup, reverse_lookup, re.compile(re_definition)
64 (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER, 188 (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
65 CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables() 189 CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()
66 190
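The lookahead construction those comments describe is easiest to see in isolation; a standalone sketch (not code from the patch) using the same &GreaterFullEqual;/&NotGreaterFullEqual; pair:

    import re

    # '\u2267' is &GreaterFullEqual;; '\u2267\u0338' is &NotGreaterFullEqual;.
    # The negative lookahead keeps the one-character entity from claiming
    # the first character of the two-character one.
    pattern = re.compile("(\u2267(?![\u0338])|\u2267\u0338)")

    print(pattern.match("\u2267foo").group(0) == "\u2267")              # True
    print(pattern.match("\u2267\u0338foo").group(0) == "\u2267\u0338")  # True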
@@ -72,21 +196,23 @@ class EntitySubstitution(object):
72 ">": "gt", 196 ">": "gt",
73 } 197 }
74 198
75 BARE_AMPERSAND_OR_BRACKET = re.compile(r"([<>]|" 199 BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
76 r"&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" 200 "&(?!#\\d+;|#x[0-9a-fA-F]+;|\\w+;)"
77 r")") 201 ")")
78 202
79 AMPERSAND_OR_BRACKET = re.compile(r"([<>&])") 203 AMPERSAND_OR_BRACKET = re.compile("([<>&])")
80 204
81 @classmethod 205 @classmethod
82 def _substitute_html_entity(cls, matchobj): 206 def _substitute_html_entity(cls, matchobj):
207 """Used with a regular expression to substitute the
208 appropriate HTML entity for a special character string."""
83 entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0)) 209 entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
84 return "&%s;" % entity 210 return "&%s;" % entity
85 211
86 @classmethod 212 @classmethod
87 def _substitute_xml_entity(cls, matchobj): 213 def _substitute_xml_entity(cls, matchobj):
88 """Used with a regular expression to substitute the 214 """Used with a regular expression to substitute the
89 appropriate XML entity for an XML special character.""" 215 appropriate XML entity for a special character string."""
90 entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)] 216 entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
91 return "&%s;" % entity 217 return "&%s;" % entity
92 218
@@ -181,6 +307,8 @@ class EntitySubstitution(object):
181 containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that 307 containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
182 character with "&eacute;" will make it more readable to some 308 character with "&eacute;" will make it more readable to some
183 people. 309 people.
310
311 :param s: A Unicode string.
184 """ 312 """
185 return cls.CHARACTER_TO_HTML_ENTITY_RE.sub( 313 return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
186 cls._substitute_html_entity, s) 314 cls._substitute_html_entity, s)
@@ -192,23 +320,65 @@ class EncodingDetector:
192 Order of precedence: 320 Order of precedence:
193 321
194 1. Encodings you specifically tell EncodingDetector to try first 322 1. Encodings you specifically tell EncodingDetector to try first
195 (the override_encodings argument to the constructor). 323 (the known_definite_encodings argument to the constructor).
324
325 2. An encoding determined by sniffing the document's byte-order mark.
326
327 3. Encodings you specifically tell EncodingDetector to try if
328 byte-order mark sniffing fails (the user_encodings argument to the
329 constructor).
196 330
197 2. An encoding declared within the bytestring itself, either in an 331 4. An encoding declared within the bytestring itself, either in an
198 XML declaration (if the bytestring is to be interpreted as an XML 332 XML declaration (if the bytestring is to be interpreted as an XML
199 document), or in a <meta> tag (if the bytestring is to be 333 document), or in a <meta> tag (if the bytestring is to be
200 interpreted as an HTML document). 334 interpreted as an HTML document).
201 335
202 3. An encoding detected through textual analysis by chardet, 336 5. An encoding detected through textual analysis by chardet,
203 cchardet, or a similar external library. 337 cchardet, or a similar external library.
204 338
205 4. UTF-8. 339 6. UTF-8.
206 340
207 5. Windows-1252. 341 7. Windows-1252.
342
208 """ 343 """
209 def __init__(self, markup, override_encodings=None, is_html=False, 344 def __init__(self, markup, known_definite_encodings=None,
210 exclude_encodings=None): 345 is_html=False, exclude_encodings=None,
211 self.override_encodings = override_encodings or [] 346 user_encodings=None, override_encodings=None):
347 """Constructor.
348
349 :param markup: Some markup in an unknown encoding.
350
351 :param known_definite_encodings: When determining the encoding
352 of `markup`, these encodings will be tried first, in
353 order. In HTML terms, this corresponds to the "known
354 definite encoding" step defined here:
355 https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding
356
357 :param user_encodings: These encodings will be tried after the
358 `known_definite_encodings` have been tried and failed, and
359 after an attempt to sniff the encoding by looking at a
360 byte order mark has failed. In HTML terms, this
361 corresponds to the step "user has explicitly instructed
362 the user agent to override the document's character
363 encoding", defined here:
364 https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding
365
366 :param override_encodings: A deprecated alias for
367 known_definite_encodings. Any encodings here will be tried
368 immediately after the encodings in
369 known_definite_encodings.
370
371 :param is_html: If True, this markup is considered to be
372 HTML. Otherwise it's assumed to be XML.
373
374 :param exclude_encodings: These encodings will not be tried,
375 even if they otherwise would be.
376
377 """
378 self.known_definite_encodings = list(known_definite_encodings or [])
379 if override_encodings:
380 self.known_definite_encodings += override_encodings
381 self.user_encodings = user_encodings or []
212 exclude_encodings = exclude_encodings or [] 382 exclude_encodings = exclude_encodings or []
213 self.exclude_encodings = set([x.lower() for x in exclude_encodings]) 383 self.exclude_encodings = set([x.lower() for x in exclude_encodings])
214 self.chardet_encoding = None 384 self.chardet_encoding = None
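A usage sketch for the revised detector (assuming the vendored copy matches upstream; the exact candidates yielded depend on which detector library is installed):

    from bs4.dammit import EncodingDetector

    markup = b'<?xml version="1.0" encoding="ISO-8859-1"?><doc>caf\xe9</doc>'
    detector = EncodingDetector(markup, known_definite_encodings=["utf-8"])

    # Candidates come out in the documented order of precedence: 'utf-8'
    # (known definite), then 'iso-8859-1' (declared in the document), then
    # any chardet guess, then the fallbacks.
    for encoding in detector.encodings:
        print(encoding)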
@@ -219,6 +389,12 @@ class EncodingDetector:
219 self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup) 389 self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)
220 390
221 def _usable(self, encoding, tried): 391 def _usable(self, encoding, tried):
392 """Should we even bother to try this encoding?
393
394 :param encoding: Name of an encoding.
395 :param tried: Encodings that have already been tried. This will be modified
396 as a side effect.
397 """
222 if encoding is not None: 398 if encoding is not None:
223 encoding = encoding.lower() 399 encoding = encoding.lower()
224 if encoding in self.exclude_encodings: 400 if encoding in self.exclude_encodings:
@@ -230,9 +406,14 @@ class EncodingDetector:
230 406
231 @property 407 @property
232 def encodings(self): 408 def encodings(self):
233 """Yield a number of encodings that might work for this markup.""" 409 """Yield a number of encodings that might work for this markup.
410
411 :yield: A sequence of strings.
412 """
234 tried = set() 413 tried = set()
235 for e in self.override_encodings: 414
415 # First, try the known definite encodings
416 for e in self.known_definite_encodings:
236 if self._usable(e, tried): 417 if self._usable(e, tried):
237 yield e 418 yield e
238 419
@@ -241,6 +422,12 @@ class EncodingDetector:
241 if self._usable(self.sniffed_encoding, tried): 422 if self._usable(self.sniffed_encoding, tried):
242 yield self.sniffed_encoding 423 yield self.sniffed_encoding
243 424
425 # Sniffing the byte-order mark did nothing; try the user
426 # encodings.
427 for e in self.user_encodings:
428 if self._usable(e, tried):
429 yield e
430
244 # Look within the document for an XML or HTML encoding 431 # Look within the document for an XML or HTML encoding
245 # declaration. 432 # declaration.
246 if self.declared_encoding is None: 433 if self.declared_encoding is None:
@@ -263,7 +450,11 @@ class EncodingDetector:
263 450
264 @classmethod 451 @classmethod
265 def strip_byte_order_mark(cls, data): 452 def strip_byte_order_mark(cls, data):
266 """If a byte-order mark is present, strip it and return the encoding it implies.""" 453 """If a byte-order mark is present, strip it and return the encoding it implies.
454
455 :param data: Some markup.
456 :return: A 2-tuple (modified data, implied encoding)
457 """
267 encoding = None 458 encoding = None
268 if isinstance(data, str): 459 if isinstance(data, str):
269 # Unicode data cannot have a byte-order mark. 460 # Unicode data cannot have a byte-order mark.
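For example, under the documented behavior:

    from bs4.dammit import EncodingDetector

    data, encoding = EncodingDetector.strip_byte_order_mark(b"\xef\xbb\xbfhello")
    print(data)      # b'hello'
    print(encoding)  # utf-8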
@@ -295,21 +486,36 @@ class EncodingDetector:
295 486
296 An HTML encoding is declared in a <meta> tag, hopefully near the 487 An HTML encoding is declared in a <meta> tag, hopefully near the
297 beginning of the document. 488 beginning of the document.
489
490 :param markup: Some markup.
491 :param is_html: If True, this markup is considered to be HTML. Otherwise
492 it's assumed to be XML.
493 :param search_entire_document: Since an encoding is supposed to be declared near the beginning
494 of the document, most of the time it's only necessary to search a few kilobytes of data.
495 Set this to True to force this method to search the entire document.
298 """ 496 """
299 if search_entire_document: 497 if search_entire_document:
300 xml_endpos = html_endpos = len(markup) 498 xml_endpos = html_endpos = len(markup)
301 else: 499 else:
302 xml_endpos = 1024 500 xml_endpos = 1024
303 html_endpos = max(2048, int(len(markup) * 0.05)) 501 html_endpos = max(2048, int(len(markup) * 0.05))
304 502
503 if isinstance(markup, bytes):
504 res = encoding_res[bytes]
505 else:
506 res = encoding_res[str]
507
508 xml_re = res['xml']
509 html_re = res['html']
305 declared_encoding = None 510 declared_encoding = None
306 declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos) 511 declared_encoding_match = xml_re.search(markup, endpos=xml_endpos)
307 if not declared_encoding_match and is_html: 512 if not declared_encoding_match and is_html:
308 declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos) 513 declared_encoding_match = html_re.search(markup, endpos=html_endpos)
309 if declared_encoding_match is not None: 514 if declared_encoding_match is not None:
310 declared_encoding = declared_encoding_match.groups()[0].decode( 515 declared_encoding = declared_encoding_match.groups()[0]
311 'ascii', 'replace')
312 if declared_encoding: 516 if declared_encoding:
517 if isinstance(declared_encoding, bytes):
518 declared_encoding = declared_encoding.decode('ascii', 'replace')
313 return declared_encoding.lower() 519 return declared_encoding.lower()
314 return None 520 return None
315 521
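With the new dual regexes, the classmethod accepts either bytes or str; a sketch:

    from bs4.dammit import EncodingDetector

    print(EncodingDetector.find_declared_encoding(
        b'<?xml version="1.0" encoding="ISO-8859-1"?><doc/>'))  # iso-8859-1
    print(EncodingDetector.find_declared_encoding(
        '<meta charset="utf-8">', is_html=True))                # utf-8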
@@ -332,15 +538,53 @@ class UnicodeDammit:
332 "iso-8859-2", 538 "iso-8859-2",
333 ] 539 ]
334 540
335 def __init__(self, markup, override_encodings=[], 541 def __init__(self, markup, known_definite_encodings=[],
336 smart_quotes_to=None, is_html=False, exclude_encodings=[]): 542 smart_quotes_to=None, is_html=False, exclude_encodings=[],
543 user_encodings=None, override_encodings=None
544 ):
545 """Constructor.
546
547 :param markup: A bytestring representing markup in an unknown encoding.
548
549 :param known_definite_encodings: When determining the encoding
550 of `markup`, these encodings will be tried first, in
551 order. In HTML terms, this corresponds to the "known
552 definite encoding" step defined here:
553 https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding
554
555 :param user_encodings: These encodings will be tried after the
556 `known_definite_encodings` have been tried and failed, and
557 after an attempt to sniff the encoding by looking at a
558 byte order mark has failed. In HTML terms, this
559 corresponds to the step "user has explicitly instructed
560 the user agent to override the document's character
561 encoding", defined here:
562 https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding
563
564 :param override_encodings: A deprecated alias for
565 known_definite_encodings. Any encodings here will be tried
566 immediately after the encodings in
567 known_definite_encodings.
568
569 :param smart_quotes_to: By default, Microsoft smart quotes will, like all other characters, be converted
570 to Unicode characters. Setting this to 'ascii' will convert them to ASCII quotes instead.
571 Setting it to 'xml' will convert them to XML entity references, and setting it to 'html'
572 will convert them to HTML entity references.
573 :param is_html: If True, this markup is considered to be HTML. Otherwise
574 it's assumed to be XML.
575 :param exclude_encodings: These encodings will not be considered, even
576 if the sniffing code thinks they might make sense.
577
578 """
337 self.smart_quotes_to = smart_quotes_to 579 self.smart_quotes_to = smart_quotes_to
338 self.tried_encodings = [] 580 self.tried_encodings = []
339 self.contains_replacement_characters = False 581 self.contains_replacement_characters = False
340 self.is_html = is_html 582 self.is_html = is_html
341 583 self.log = logging.getLogger(__name__)
342 self.detector = EncodingDetector( 584 self.detector = EncodingDetector(
343 markup, override_encodings, is_html, exclude_encodings) 585 markup, known_definite_encodings, is_html, exclude_encodings,
586 user_encodings, override_encodings
587 )
344 588
345 # Short-circuit if the data is in Unicode to begin with. 589 # Short-circuit if the data is in Unicode to begin with.
346 if isinstance(markup, str) or markup == '': 590 if isinstance(markup, str) or markup == '':
@@ -368,9 +612,10 @@ class UnicodeDammit:
368 if encoding != "ascii": 612 if encoding != "ascii":
369 u = self._convert_from(encoding, "replace") 613 u = self._convert_from(encoding, "replace")
370 if u is not None: 614 if u is not None:
371 logging.warning( 615 self.log.warning(
372 "Some characters could not be decoded, and were " 616 "Some characters could not be decoded, and were "
373 "replaced with REPLACEMENT CHARACTER.") 617 "replaced with REPLACEMENT CHARACTER."
618 )
374 self.contains_replacement_characters = True 619 self.contains_replacement_characters = True
375 break 620 break
376 621
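The classic UnicodeDammit example from the upstream documentation still applies; the first positional argument is now known_definite_encodings:

    from bs4 import UnicodeDammit

    dammit = UnicodeDammit(b"Sacr\xe9 bleu!", ["latin-1", "iso-8859-1"])
    print(dammit.unicode_markup)     # Sacré bleu!
    print(dammit.original_encoding)  # latin-1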
@@ -399,6 +644,10 @@ class UnicodeDammit:
399 return sub 644 return sub
400 645
401 def _convert_from(self, proposed, errors="strict"): 646 def _convert_from(self, proposed, errors="strict"):
647 """Attempt to convert the markup to the proposed encoding.
648
649 :param proposed: The name of a character encoding.
650 """
402 proposed = self.find_codec(proposed) 651 proposed = self.find_codec(proposed)
403 if not proposed or (proposed, errors) in self.tried_encodings: 652 if not proposed or (proposed, errors) in self.tried_encodings:
404 return None 653 return None
@@ -413,30 +662,40 @@ class UnicodeDammit:
413 markup = smart_quotes_compiled.sub(self._sub_ms_char, markup) 662 markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)
414 663
415 try: 664 try:
416 #print "Trying to convert document to %s (errors=%s)" % ( 665 #print("Trying to convert document to %s (errors=%s)" % (
417 # proposed, errors) 666 # proposed, errors))
418 u = self._to_unicode(markup, proposed, errors) 667 u = self._to_unicode(markup, proposed, errors)
419 self.markup = u 668 self.markup = u
420 self.original_encoding = proposed 669 self.original_encoding = proposed
421 except Exception as e: 670 except Exception as e:
422 #print "That didn't work!" 671 #print("That didn't work!")
423 #print e 672 #print(e)
424 return None 673 return None
425 #print "Correct encoding: %s" % proposed 674 #print("Correct encoding: %s" % proposed)
426 return self.markup 675 return self.markup
427 676
428 def _to_unicode(self, data, encoding, errors="strict"): 677 def _to_unicode(self, data, encoding, errors="strict"):
429 '''Given a string and its encoding, decodes the string into Unicode. 678 """Given a string and its encoding, decodes the string into Unicode.
430 %encoding is a string recognized by encodings.aliases''' 679
680 :param encoding: The name of an encoding.
681 """
431 return str(data, encoding, errors) 682 return str(data, encoding, errors)
432 683
433 @property 684 @property
434 def declared_html_encoding(self): 685 def declared_html_encoding(self):
686 """If the markup is an HTML document, returns the encoding declared _within_
687 the document.
688 """
435 if not self.is_html: 689 if not self.is_html:
436 return None 690 return None
437 return self.detector.declared_encoding 691 return self.detector.declared_encoding
438 692
439 def find_codec(self, charset): 693 def find_codec(self, charset):
694 """Convert the name of a character set to a codec name.
695
696 :param charset: The name of a character set.
697 :return: The name of a codec.
698 """
440 value = (self._codec(self.CHARSET_ALIASES.get(charset, charset)) 699 value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
441 or (charset and self._codec(charset.replace("-", ""))) 700 or (charset and self._codec(charset.replace("-", "")))
442 or (charset and self._codec(charset.replace("-", "_"))) 701 or (charset and self._codec(charset.replace("-", "_")))
@@ -726,7 +985,7 @@ class UnicodeDammit:
726 0xde : b'\xc3\x9e', # Þ 985 0xde : b'\xc3\x9e', # Þ
727 0xdf : b'\xc3\x9f', # ß 986 0xdf : b'\xc3\x9f', # ß
728 0xe0 : b'\xc3\xa0', # à 987 0xe0 : b'\xc3\xa0', # à
729 0xe1 : b'\xa1', # á 988 0xe1 : b'\xc3\xa1', # á
730 0xe2 : b'\xc3\xa2', # â 989 0xe2 : b'\xc3\xa2', # â
731 0xe3 : b'\xc3\xa3', # ã 990 0xe3 : b'\xc3\xa3', # ã
732 0xe4 : b'\xc3\xa4', # ä 991 0xe4 : b'\xc3\xa4', # ä
@@ -775,12 +1034,16 @@ class UnicodeDammit:
775 Currently the only situation supported is Windows-1252 (or its 1034 Currently the only situation supported is Windows-1252 (or its
776 subset ISO-8859-1), embedded in UTF-8. 1035 subset ISO-8859-1), embedded in UTF-8.
777 1036
778 The input must be a bytestring. If you've already converted 1037 :param in_bytes: A bytestring that you suspect contains
779 the document to Unicode, you're too late. 1038 characters from multiple encodings. Note that this _must_
780 1039 be a bytestring. If you've already converted the document
781 The output is a bytestring in which `embedded_encoding` 1040 to Unicode, you're too late.
782 characters have been converted to their `main_encoding` 1041 :param main_encoding: The primary encoding of `in_bytes`.
783 equivalents. 1042 :param embedded_encoding: The encoding that was used to embed characters
1043 in the main document.
1044 :return: A bytestring in which `embedded_encoding`
1045 characters have been converted to their `main_encoding`
1046 equivalents.
784 """ 1047 """
785 if embedded_encoding.replace('_', '-').lower() not in ( 1048 if embedded_encoding.replace('_', '-').lower() not in (
786 'windows-1252', 'windows_1252'): 1049 'windows-1252', 'windows_1252'):
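detwingle() is easiest to understand from the upstream documentation's example, reproduced here as a sketch:

    from bs4 import UnicodeDammit

    snowmen = "\N{SNOWMAN}" * 3
    quote = "\N{LEFT DOUBLE QUOTATION MARK}Hi!\N{RIGHT DOUBLE QUOTATION MARK}"
    doc = snowmen.encode("utf8") + quote.encode("windows_1252")

    # The mixed UTF-8/Windows-1252 bytestring would decode as mojibake;
    # detwingle() first converts the embedded Windows-1252 bytes to UTF-8.
    print(UnicodeDammit.detwingle(doc).decode("utf8"))  # ☃☃☃“Hi!”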
diff --git a/bitbake/lib/bs4/diagnose.py b/bitbake/lib/bs4/diagnose.py
index 083395fb46..4692795340 100644
--- a/bitbake/lib/bs4/diagnose.py
+++ b/bitbake/lib/bs4/diagnose.py
@@ -1,9 +1,10 @@
1"""Diagnostic functions, mainly for use when doing tech support.""" 1"""Diagnostic functions, mainly for use when doing tech support."""
2 2
3# Use of this source code is governed by the MIT license.
3__license__ = "MIT" 4__license__ = "MIT"
4 5
5import cProfile 6import cProfile
6from io import StringIO 7from io import BytesIO
7from html.parser import HTMLParser 8from html.parser import HTMLParser
8import bs4 9import bs4
9from bs4 import BeautifulSoup, __version__ 10from bs4 import BeautifulSoup, __version__
@@ -16,12 +17,15 @@ import tempfile
16import time 17import time
17import traceback 18import traceback
18import sys 19import sys
19import cProfile
20 20
21def diagnose(data): 21def diagnose(data):
22 """Diagnostic suite for isolating common problems.""" 22 """Diagnostic suite for isolating common problems.
23 print("Diagnostic running on Beautiful Soup %s" % __version__) 23
24 print("Python version %s" % sys.version) 24 :param data: A string containing markup that needs to be explained.
25 :return: None; diagnostics are printed to standard output.
26 """
27 print(("Diagnostic running on Beautiful Soup %s" % __version__))
28 print(("Python version %s" % sys.version))
25 29
26 basic_parsers = ["html.parser", "html5lib", "lxml"] 30 basic_parsers = ["html.parser", "html5lib", "lxml"]
27 for name in basic_parsers: 31 for name in basic_parsers:
@@ -35,61 +39,70 @@ def diagnose(data):
35 name)) 39 name))
36 40
37 if 'lxml' in basic_parsers: 41 if 'lxml' in basic_parsers:
38 basic_parsers.append(["lxml", "xml"]) 42 basic_parsers.append("lxml-xml")
39 try: 43 try:
40 from lxml import etree 44 from lxml import etree
41 print("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))) 45 print(("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))))
42 except ImportError as e: 46 except ImportError as e:
43 print ( 47 print(
44 "lxml is not installed or couldn't be imported.") 48 "lxml is not installed or couldn't be imported.")
45 49
46 50
47 if 'html5lib' in basic_parsers: 51 if 'html5lib' in basic_parsers:
48 try: 52 try:
49 import html5lib 53 import html5lib
50 print("Found html5lib version %s" % html5lib.__version__) 54 print(("Found html5lib version %s" % html5lib.__version__))
51 except ImportError as e: 55 except ImportError as e:
52 print ( 56 print(
53 "html5lib is not installed or couldn't be imported.") 57 "html5lib is not installed or couldn't be imported.")
54 58
55 if hasattr(data, 'read'): 59 if hasattr(data, 'read'):
56 data = data.read() 60 data = data.read()
57 elif os.path.exists(data):
58 print('"%s" looks like a filename. Reading data from the file.' % data)
59 data = open(data).read()
60 elif data.startswith("http:") or data.startswith("https:"):
61 print('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data)
62 print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.")
63 return
64 print()
65 61
66 for parser in basic_parsers: 62 for parser in basic_parsers:
67 print("Trying to parse your markup with %s" % parser) 63 print(("Trying to parse your markup with %s" % parser))
68 success = False 64 success = False
69 try: 65 try:
70 soup = BeautifulSoup(data, parser) 66 soup = BeautifulSoup(data, features=parser)
71 success = True 67 success = True
72 except Exception as e: 68 except Exception as e:
73 print("%s could not parse the markup." % parser) 69 print(("%s could not parse the markup." % parser))
74 traceback.print_exc() 70 traceback.print_exc()
75 if success: 71 if success:
76 print("Here's what %s did with the markup:" % parser) 72 print(("Here's what %s did with the markup:" % parser))
77 print(soup.prettify()) 73 print((soup.prettify()))
78 74
79 print("-" * 80) 75 print(("-" * 80))
80 76
81def lxml_trace(data, html=True, **kwargs): 77def lxml_trace(data, html=True, **kwargs):
82 """Print out the lxml events that occur during parsing. 78 """Print out the lxml events that occur during parsing.
83 79
84 This lets you see how lxml parses a document when no Beautiful 80 This lets you see how lxml parses a document when no Beautiful
85 Soup code is running. 81 Soup code is running. You can use this to determine whether
82 an lxml-specific problem is in Beautiful Soup's lxml tree builders
83 or in lxml itself.
84
85 :param data: Some markup.
86 :param html: If True, markup will be parsed with lxml's HTML parser.
87 If False, lxml's XML parser will be used.
86 """ 88 """
87 from lxml import etree 89 from lxml import etree
88 for event, element in etree.iterparse(StringIO(data), html=html, **kwargs): 90 recover = kwargs.pop('recover', True)
91 if isinstance(data, str):
92 data = data.encode("utf8")
93 reader = BytesIO(data)
94 for event, element in etree.iterparse(
95 reader, html=html, recover=recover, **kwargs
96 ):
89 print(("%s, %4s, %s" % (event, element.tag, element.text))) 97 print(("%s, %4s, %s" % (event, element.tag, element.text)))
90 98
91class AnnouncingParser(HTMLParser): 99class AnnouncingParser(HTMLParser):
92 """Announces HTMLParser parse events, without doing anything else.""" 100 """Subclass of HTMLParser that announces parse events, without doing
101 anything else.
102
103 You can use this to get a picture of how html.parser sees a given
104 document. The easiest way to do this is to call `htmlparser_trace`.
105 """
93 106
94 def _p(self, s): 107 def _p(self, s):
95 print(s) 108 print(s)
@@ -126,6 +139,8 @@ def htmlparser_trace(data):
126 139
127 This lets you see how HTMLParser parses a document when no 140 This lets you see how HTMLParser parses a document when no
128 Beautiful Soup code is running. 141 Beautiful Soup code is running.
142
143 :param data: Some markup.
129 """ 144 """
130 parser = AnnouncingParser() 145 parser = AnnouncingParser()
131 parser.feed(data) 146 parser.feed(data)
@@ -168,9 +183,9 @@ def rdoc(num_elements=1000):
168 183
169def benchmark_parsers(num_elements=100000): 184def benchmark_parsers(num_elements=100000):
170 """Very basic head-to-head performance benchmark.""" 185 """Very basic head-to-head performance benchmark."""
171 print("Comparative parser benchmark on Beautiful Soup %s" % __version__) 186 print(("Comparative parser benchmark on Beautiful Soup %s" % __version__))
172 data = rdoc(num_elements) 187 data = rdoc(num_elements)
173 print("Generated a large invalid HTML document (%d bytes)." % len(data)) 188 print(("Generated a large invalid HTML document (%d bytes)." % len(data)))
174 189
175 for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]: 190 for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
176 success = False 191 success = False
@@ -180,26 +195,26 @@ def benchmark_parsers(num_elements=100000):
180 b = time.time() 195 b = time.time()
181 success = True 196 success = True
182 except Exception as e: 197 except Exception as e:
183 print("%s could not parse the markup." % parser) 198 print(("%s could not parse the markup." % parser))
184 traceback.print_exc() 199 traceback.print_exc()
185 if success: 200 if success:
186 print("BS4+%s parsed the markup in %.2fs." % (parser, b-a)) 201 print(("BS4+%s parsed the markup in %.2fs." % (parser, b-a)))
187 202
188 from lxml import etree 203 from lxml import etree
189 a = time.time() 204 a = time.time()
190 etree.HTML(data) 205 etree.HTML(data)
191 b = time.time() 206 b = time.time()
192 print("Raw lxml parsed the markup in %.2fs." % (b-a)) 207 print(("Raw lxml parsed the markup in %.2fs." % (b-a)))
193 208
194 import html5lib 209 import html5lib
195 parser = html5lib.HTMLParser() 210 parser = html5lib.HTMLParser()
196 a = time.time() 211 a = time.time()
197 parser.parse(data) 212 parser.parse(data)
198 b = time.time() 213 b = time.time()
199 print("Raw html5lib parsed the markup in %.2fs." % (b-a)) 214 print(("Raw html5lib parsed the markup in %.2fs." % (b-a)))
200 215
201def profile(num_elements=100000, parser="lxml"): 216def profile(num_elements=100000, parser="lxml"):
202 217 """Use Python's profiler on a randomly generated document."""
203 filehandle = tempfile.NamedTemporaryFile() 218 filehandle = tempfile.NamedTemporaryFile()
204 filename = filehandle.name 219 filename = filehandle.name
205 220
@@ -212,5 +227,6 @@ def profile(num_elements=100000, parser="lxml"):
212 stats.sort_stats("cumulative") 227 stats.sort_stats("cumulative")
213 stats.print_stats('_html5lib|bs4', 50) 228 stats.print_stats('_html5lib|bs4', 50)
214 229
230# If this file is run as a script, standard input is diagnosed.
215if __name__ == '__main__': 231if __name__ == '__main__':
216 diagnose(sys.stdin.read()) 232 diagnose(sys.stdin.read())
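Typical use of the diagnostic entry point, against any markup string or open file:

    from bs4.diagnose import diagnose

    # Prints which parsers are available and how each one parses the markup.
    diagnose("<p>Some <b>bad <i>markup")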
diff --git a/bitbake/lib/bs4/element.py b/bitbake/lib/bs4/element.py
index 68be42d138..0aefe734b2 100644
--- a/bitbake/lib/bs4/element.py
+++ b/bitbake/lib/bs4/element.py
@@ -1,14 +1,27 @@
1# Use of this source code is governed by the MIT license.
1__license__ = "MIT" 2__license__ = "MIT"
2 3
3import collections.abc 4try:
5 from collections.abc import Callable # Python 3.6
6except ImportError as e:
7 from collections import Callable
4import re 8import re
5import sys 9import sys
6import warnings 10import warnings
7from bs4.dammit import EntitySubstitution 11
12from bs4.css import CSS
13from bs4.formatter import (
14 Formatter,
15 HTMLFormatter,
16 XMLFormatter,
17)
8 18
9DEFAULT_OUTPUT_ENCODING = "utf-8" 19DEFAULT_OUTPUT_ENCODING = "utf-8"
10PY3K = (sys.version_info[0] > 2)
11 20
21nonwhitespace_re = re.compile(r"\S+")
22
23# NOTE: This isn't used as of 4.7.0. I'm leaving it for a little bit on
24# the off chance someone imported it for their own use.
12whitespace_re = re.compile(r"\s+") 25whitespace_re = re.compile(r"\s+")
13 26
14def _alias(attr): 27def _alias(attr):
@@ -23,12 +36,49 @@ def _alias(attr):
23 return alias 36 return alias
24 37
25 38
39# These encodings are recognized by Python (so PageElement.encode
40# could theoretically support them) but XML and HTML don't recognize
41# them (so they should not show up in an XML or HTML document as that
42# document's encoding).
43#
44# If an XML document is encoded in one of these encodings, no encoding
45# will be mentioned in the XML declaration. If an HTML document is
46# encoded in one of these encodings, and the HTML document has a
47# <meta> tag that mentions an encoding, the encoding will be given as
48# the empty string.
49#
50# Source:
51# https://docs.python.org/3/library/codecs.html#python-specific-encodings
52PYTHON_SPECIFIC_ENCODINGS = set([
53 "idna",
54 "mbcs",
55 "oem",
56 "palmos",
57 "punycode",
58 "raw_unicode_escape",
59 "undefined",
60 "unicode_escape",
61 "raw-unicode-escape",
62 "unicode-escape",
63 "string-escape",
64 "string_escape",
65])
66
67
26class NamespacedAttribute(str): 68class NamespacedAttribute(str):
69 """A namespaced string (e.g. 'xml:lang') that remembers the namespace
70 ('xml') and the name ('lang') that were used to create it.
71 """
27 72
28 def __new__(cls, prefix, name, namespace=None): 73 def __new__(cls, prefix, name=None, namespace=None):
29 if name is None: 74 if not name:
75 # This is the default namespace. Its name "has no value"
76 # per https://www.w3.org/TR/xml-names/#defaulting
77 name = None
78
79 if not name:
30 obj = str.__new__(cls, prefix) 80 obj = str.__new__(cls, prefix)
31 elif prefix is None: 81 elif not prefix:
32 # Not really namespaced. 82 # Not really namespaced.
33 obj = str.__new__(cls, name) 83 obj = str.__new__(cls, name)
34 else: 84 else:
@@ -54,6 +104,11 @@ class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution):
54 return obj 104 return obj
55 105
56 def encode(self, encoding): 106 def encode(self, encoding):
107 """When an HTML document is being encoded to a given encoding, the
108 value of a meta tag's 'charset' is the name of the encoding.
109 """
110 if encoding in PYTHON_SPECIFIC_ENCODINGS:
111 return ''
57 return encoding 112 return encoding
58 113
59 114
@@ -79,118 +134,44 @@ class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution):
79 return obj 134 return obj
80 135
81 def encode(self, encoding): 136 def encode(self, encoding):
137 if encoding in PYTHON_SPECIFIC_ENCODINGS:
138 return ''
82 def rewrite(match): 139 def rewrite(match):
83 return match.group(1) + encoding 140 return match.group(1) + encoding
84 return self.CHARSET_RE.sub(rewrite, self.original_value) 141 return self.CHARSET_RE.sub(rewrite, self.original_value)
85 142
86class HTMLAwareEntitySubstitution(EntitySubstitution):
87
88 """Entity substitution rules that are aware of some HTML quirks.
89 143
90 Specifically, the contents of <script> and <style> tags should not 144class PageElement(object):
91 undergo entity substitution. 145 """Contains the navigational information for some part of the page:
146 that is, its current location in the parse tree.
92 147
93 Incoming NavigableString objects are checked to see if they're the 148 NavigableString, Tag, etc. are all subclasses of PageElement.
94 direct children of a <script> or <style> tag.
95 """ 149 """
96 150
97 cdata_containing_tags = set(["script", "style"]) 151 # In general, we can't tell just by looking at an element whether
152 # it's contained in an XML document or an HTML document. But for
153 # Tags (q.v.) we can store this information at parse time.
154 known_xml = None
98 155
99 preformatted_tags = set(["pre"]) 156 def setup(self, parent=None, previous_element=None, next_element=None,
100 157 previous_sibling=None, next_sibling=None):
101 @classmethod 158 """Sets up the initial relations between this element and
102 def _substitute_if_appropriate(cls, ns, f): 159 other elements.
103 if (isinstance(ns, NavigableString)
104 and ns.parent is not None
105 and ns.parent.name in cls.cdata_containing_tags):
106 # Do nothing.
107 return ns
108 # Substitute.
109 return f(ns)
110 160
111 @classmethod 161 :param parent: The parent of this element.
112 def substitute_html(cls, ns):
113 return cls._substitute_if_appropriate(
114 ns, EntitySubstitution.substitute_html)
115 162
116 @classmethod 163 :param previous_element: The element parsed immediately before
117 def substitute_xml(cls, ns): 164 this one.
118 return cls._substitute_if_appropriate(
119 ns, EntitySubstitution.substitute_xml)
120 165
121class PageElement(object): 166 :param next_element: The element parsed immediately after
122 """Contains the navigational information for some part of the page 167 this one.
123 (either a tag or a piece of text)"""
124
125 # There are five possible values for the "formatter" argument passed in
126 # to methods like encode() and prettify():
127 #
128 # "html" - All Unicode characters with corresponding HTML entities
129 # are converted to those entities on output.
130 # "minimal" - Bare ampersands and angle brackets are converted to
131 # XML entities: &amp; &lt; &gt;
132 # None - The null formatter. Unicode characters are never
133 # converted to entities. This is not recommended, but it's
134 # faster than "minimal".
135 # A function - This function will be called on every string that
136 # needs to undergo entity substitution.
137 #
138
139 # In an HTML document, the default "html" and "minimal" functions
140 # will leave the contents of <script> and <style> tags alone. For
141 # an XML document, all tags will be given the same treatment.
142
143 HTML_FORMATTERS = {
144 "html" : HTMLAwareEntitySubstitution.substitute_html,
145 "minimal" : HTMLAwareEntitySubstitution.substitute_xml,
146 None : None
147 }
148
149 XML_FORMATTERS = {
150 "html" : EntitySubstitution.substitute_html,
151 "minimal" : EntitySubstitution.substitute_xml,
152 None : None
153 }
154
155 def format_string(self, s, formatter='minimal'):
156 """Format the given string using the given formatter."""
157 if not isinstance(formatter, collections.abc.Callable):
158 formatter = self._formatter_for_name(formatter)
159 if formatter is None:
160 output = s
161 else:
162 output = formatter(s)
163 return output
164 168
165 @property 169 :param previous_sibling: The most recently encountered element
166 def _is_xml(self): 170 on the same level of the parse tree as this one.
167 """Is this element part of an XML tree or an HTML tree?
168 171
169 This is used when mapping a formatter name ("minimal") to an 172 :param next_sibling: The next element to be encountered
170 appropriate function (one that performs entity-substitution on 173 on the same level of the parse tree as this one.
171 the contents of <script> and <style> tags, or not). It's
172 inefficient, but it should be called very rarely.
173 """ 174 """
174 if self.parent is None:
175 # This is the top-level object. It should have .is_xml set
176 # from tree creation. If not, take a guess--BS is usually
177 # used on HTML markup.
178 return getattr(self, 'is_xml', False)
179 return self.parent._is_xml
180
181 def _formatter_for_name(self, name):
182 "Look up a formatter function based on its name and the tree."
183 if self._is_xml:
184 return self.XML_FORMATTERS.get(
185 name, EntitySubstitution.substitute_xml)
186 else:
187 return self.HTML_FORMATTERS.get(
188 name, HTMLAwareEntitySubstitution.substitute_xml)
189
190 def setup(self, parent=None, previous_element=None, next_element=None,
191 previous_sibling=None, next_sibling=None):
192 """Sets up the initial relations between this element and
193 other elements."""
194 self.parent = parent 175 self.parent = parent
195 176
196 self.previous_element = previous_element 177 self.previous_element = previous_element
@@ -198,48 +179,156 @@ class PageElement(object):
198 self.previous_element.next_element = self 179 self.previous_element.next_element = self
199 180
200 self.next_element = next_element 181 self.next_element = next_element
201 if self.next_element: 182 if self.next_element is not None:
202 self.next_element.previous_element = self 183 self.next_element.previous_element = self
203 184
204 self.next_sibling = next_sibling 185 self.next_sibling = next_sibling
205 if self.next_sibling: 186 if self.next_sibling is not None:
206 self.next_sibling.previous_sibling = self 187 self.next_sibling.previous_sibling = self
207 188
208 if (not previous_sibling 189 if (previous_sibling is None
209 and self.parent is not None and self.parent.contents): 190 and self.parent is not None and self.parent.contents):
210 previous_sibling = self.parent.contents[-1] 191 previous_sibling = self.parent.contents[-1]
211 192
212 self.previous_sibling = previous_sibling 193 self.previous_sibling = previous_sibling
213 if previous_sibling: 194 if previous_sibling is not None:
214 self.previous_sibling.next_sibling = self 195 self.previous_sibling.next_sibling = self
215 196
197 def format_string(self, s, formatter):
198 """Format the given string using the given formatter.
199
200 :param s: A string.
201 :param formatter: A Formatter object, or a string naming one of the standard formatters.
202 """
203 if formatter is None:
204 return s
205 if not isinstance(formatter, Formatter):
206 formatter = self.formatter_for_name(formatter)
207 output = formatter.substitute(s)
208 return output
209
210 def formatter_for_name(self, formatter):
211 """Look up or create a Formatter for the given identifier,
212 if necessary.
213
214 :param formatter: Can be a Formatter object (used as-is), a
215 function (used as the entity substitution hook for an
216 XMLFormatter or HTMLFormatter), or a string (used to look
217 up an XMLFormatter or HTMLFormatter in the appropriate
218 registry).
219 """
220 if isinstance(formatter, Formatter):
221 return formatter
222 if self._is_xml:
223 c = XMLFormatter
224 else:
225 c = HTMLFormatter
226 if isinstance(formatter, Callable):
227 return c(entity_substitution=formatter)
228 return c.REGISTRY[formatter]
229
230 @property
231 def _is_xml(self):
232 """Is this element part of an XML tree or an HTML tree?
233
234 This is used in formatter_for_name, when deciding whether an
235 XMLFormatter or HTMLFormatter is more appropriate. It can be
236 inefficient, but it should be called very rarely.
237 """
238 if self.known_xml is not None:
239 # Most of the time we will have determined this when the
240 # document is parsed.
241 return self.known_xml
242
243 # Otherwise, it's likely that this element was created by
244 # direct invocation of the constructor from within the user's
245 # Python code.
246 if self.parent is None:
247 # This is the top-level object. It should have .known_xml set
248 # from tree creation. If not, take a guess--BS is usually
249 # used on HTML markup.
250 return getattr(self, 'is_xml', False)
251 return self.parent._is_xml
252
216 nextSibling = _alias("next_sibling") # BS3 253 nextSibling = _alias("next_sibling") # BS3
217 previousSibling = _alias("previous_sibling") # BS3 254 previousSibling = _alias("previous_sibling") # BS3
218 255
219 def replace_with(self, replace_with): 256 default = object()
220 if not self.parent: 257 def _all_strings(self, strip=False, types=default):
258 """Yield all strings of certain classes, possibly stripping them.
259
260 This is implemented differently in Tag and NavigableString.
261 """
262 raise NotImplementedError()
263
264 @property
265 def stripped_strings(self):
266 """Yield all strings in this PageElement, stripping them first.
267
268 :yield: A sequence of stripped strings.
269 """
270 for string in self._all_strings(True):
271 yield string
272
273 def get_text(self, separator="", strip=False,
274 types=default):
275 """Get all child strings of this PageElement, concatenated using the
276 given separator.
277
278 :param separator: Strings will be concatenated using this separator.
279
280 :param strip: If True, strings will be stripped before being
281 concatenated.
282
283 :param types: A tuple of NavigableString subclasses. Any
284 strings of a subclass not found in this list will be
285 ignored. Although there are exceptions, the default
286 behavior in most cases is to consider only NavigableString
287 and CData objects. That means no comments, processing
288 instructions, etc.
289
290 :return: A string.
291 """
292 return separator.join([s for s in self._all_strings(
293 strip, types=types)])
294 getText = get_text
295 text = property(get_text)
296
297 def replace_with(self, *args):
298 """Replace this PageElement with one or more PageElements, keeping the
299 rest of the tree the same.
300
301 :param args: One or more PageElements.
302 :return: `self`, no longer part of the tree.
303 """
304 if self.parent is None:
221 raise ValueError( 305 raise ValueError(
222 "Cannot replace one element with another when the" 306 "Cannot replace one element with another when the "
223 "element to be replaced is not part of a tree.") 307 "element to be replaced is not part of a tree.")
224 if replace_with is self: 308 if len(args) == 1 and args[0] is self:
225 return 309 return
226 if replace_with is self.parent: 310 if any(x is self.parent for x in args):
227 raise ValueError("Cannot replace a Tag with its parent.") 311 raise ValueError("Cannot replace a Tag with its parent.")
228 old_parent = self.parent 312 old_parent = self.parent
229 my_index = self.parent.index(self) 313 my_index = self.parent.index(self)
230 self.extract() 314 self.extract(_self_index=my_index)
231 old_parent.insert(my_index, replace_with) 315 for idx, replace_with in enumerate(args, start=my_index):
316 old_parent.insert(idx, replace_with)
232 return self 317 return self
233 replaceWith = replace_with # BS3 318 replaceWith = replace_with # BS3
234 319
235 def unwrap(self): 320 def unwrap(self):
321 """Replace this PageElement with its contents.
322
323 :return: `self`, no longer part of the tree.
324 """
236 my_parent = self.parent 325 my_parent = self.parent
237 if not self.parent: 326 if self.parent is None:
238 raise ValueError( 327 raise ValueError(
239 "Cannot replace an element with its contents when that" 328 "Cannot replace an element with its contents when that"
240 "element is not part of a tree.") 329 "element is not part of a tree.")
241 my_index = self.parent.index(self) 330 my_index = self.parent.index(self)
242 self.extract() 331 self.extract(_self_index=my_index)
243 for child in reversed(self.contents[:]): 332 for child in reversed(self.contents[:]):
244 my_parent.insert(my_index, child) 333 my_parent.insert(my_index, child)
245 return self 334 return self
@@ -247,14 +336,29 @@ class PageElement(object):
247 replaceWithChildren = unwrap # BS3 336 replaceWithChildren = unwrap # BS3
248 337
249 def wrap(self, wrap_inside): 338 def wrap(self, wrap_inside):
339 """Wrap this PageElement inside another one.
340
341 :param wrap_inside: A PageElement.
342 :return: `wrap_inside`, occupying the position in the tree that used
343 to be occupied by `self`, and with `self` inside it.
344 """
250 me = self.replace_with(wrap_inside) 345 me = self.replace_with(wrap_inside)
251 wrap_inside.append(me) 346 wrap_inside.append(me)
252 return wrap_inside 347 return wrap_inside
253 348
254 def extract(self): 349 def extract(self, _self_index=None):
255 """Destructively rips this element out of the tree.""" 350 """Destructively rips this element out of the tree.
351
352 :param _self_index: The location of this element in its parent's
353 .contents, if known. Passing this in allows for a performance
354 optimization.
355
356 :return: `self`, no longer part of the tree.
357 """
256 if self.parent is not None: 358 if self.parent is not None:
257 del self.parent.contents[self.parent.index(self)] 359 if _self_index is None:
360 _self_index = self.parent.index(self)
361 del self.parent.contents[_self_index]
258 362
259 #Find the two elements that would be next to each other if 363 #Find the two elements that would be next to each other if
260 #this element (and any children) hadn't been parsed. Connect 364 #this element (and any children) hadn't been parsed. Connect
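wrap() and extract() in action, following the docstrings above:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>hello</p>", "html.parser")
    soup.p.string.wrap(soup.new_tag("b"))
    print(soup)               # <p><b>hello</b></p>

    b_tag = soup.b.extract()  # returns <b>hello</b>, detached from the tree
    print(soup)               # <p></p>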
@@ -281,8 +385,13 @@ class PageElement(object):
281 return self 385 return self
282 386
283 def _last_descendant(self, is_initialized=True, accept_self=True): 387 def _last_descendant(self, is_initialized=True, accept_self=True):
284 "Finds the last element beneath this object to be parsed." 388 """Finds the last element beneath this object to be parsed.
285 if is_initialized and self.next_sibling: 389
390 :param is_initialized: Has `setup` been called on this PageElement
391 yet?
392 :param accept_self: Is `self` an acceptable answer to the question?
393 """
394 if is_initialized and self.next_sibling is not None:
286 last_child = self.next_sibling.previous_element 395 last_child = self.next_sibling.previous_element
287 else: 396 else:
288 last_child = self 397 last_child = self
@@ -295,6 +404,14 @@ class PageElement(object):
295 _lastRecursiveChild = _last_descendant 404 _lastRecursiveChild = _last_descendant
296 405
297 def insert(self, position, new_child): 406 def insert(self, position, new_child):
407 """Insert a new PageElement in the list of this PageElement's children.
408
409 This works the same way as `list.insert`.
410
411 :param position: The numeric position that should be occupied
412 in `self.children` by the new PageElement.
413 :param new_child: A PageElement.
414 """
298 if new_child is None: 415 if new_child is None:
299 raise ValueError("Cannot insert None into a tag.") 416 raise ValueError("Cannot insert None into a tag.")
300 if new_child is self: 417 if new_child is self:
@@ -303,6 +420,14 @@ class PageElement(object):
303 and not isinstance(new_child, NavigableString)): 420 and not isinstance(new_child, NavigableString)):
304 new_child = NavigableString(new_child) 421 new_child = NavigableString(new_child)
305 422
423 from bs4 import BeautifulSoup
424 if isinstance(new_child, BeautifulSoup):
425 # We don't want to end up with a situation where one BeautifulSoup
426 # object contains another. Insert the children one at a time.
427 for subchild in list(new_child.contents):
428 self.insert(position, subchild)
429 position += 1
430 return
306 position = min(position, len(self.contents)) 431 position = min(position, len(self.contents))
307 if hasattr(new_child, 'parent') and new_child.parent is not None: 432 if hasattr(new_child, 'parent') and new_child.parent is not None:
308 # We're 'inserting' an element that's already one 433 # We're 'inserting' an element that's already one
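The new BeautifulSoup check means inserting a whole soup splices in its children one at a time, so one soup never ends up nested inside another; for example:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<div></div>", "html.parser")
    fragment = BeautifulSoup("<p>one</p><p>two</p>", "html.parser")

    soup.div.insert(0, fragment)
    print(soup)  # <div><p>one</p><p>two</p></div>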
@@ -361,160 +486,326 @@ class PageElement(object):
361 self.contents.insert(position, new_child) 486 self.contents.insert(position, new_child)
362 487
363 def append(self, tag): 488 def append(self, tag):
364 """Appends the given tag to the contents of this tag.""" 489 """Appends the given PageElement to the contents of this one.
490
491 :param tag: A PageElement.
492 """
365 self.insert(len(self.contents), tag) 493 self.insert(len(self.contents), tag)
366 494
367 def insert_before(self, predecessor): 495 def extend(self, tags):
368 """Makes the given element the immediate predecessor of this one. 496 """Appends the given PageElements to this one's contents.
369 497
370 The two elements will have the same parent, and the given element 498 :param tags: A list of PageElements. If a single Tag is
499 provided instead, this PageElement's contents will be extended
500 with that Tag's contents.
501 """
502 if isinstance(tags, Tag):
503 tags = tags.contents
504 if isinstance(tags, list):
505 # Moving items around the tree may change their position in
506 # the original list. Make a list that won't change.
507 tags = list(tags)
508 for tag in tags:
509 self.append(tag)
510
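A short sketch of the new extend() (a hypothetical snippet, not part of
the diff): passing a Tag moves that tag's children, passing a list
appends each element in turn:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup(
        "<ul><li>a</li></ul><div><li>b</li><li>c</li></div>", "html.parser"
    )
    # Extending with a Tag moves that tag's contents into the <ul>.
    soup.ul.extend(soup.div)
    print([li.get_text() for li in soup.ul.find_all("li")])  # ['a', 'b', 'c']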
511 def insert_before(self, *args):
512 """Makes the given element(s) the immediate predecessor of this one.
513
514 All the elements will have the same parent, and the given elements
371 will be immediately before this one. 515 will be immediately before this one.
516
517 :param args: One or more PageElements.
372 """ 518 """
373 if self is predecessor:
374 raise ValueError("Can't insert an element before itself.")
375 parent = self.parent 519 parent = self.parent
376 if parent is None: 520 if parent is None:
377 raise ValueError( 521 raise ValueError(
378 "Element has no parent, so 'before' has no meaning.") 522 "Element has no parent, so 'before' has no meaning.")
379 # Extract first so that the index won't be screwed up if they 523 if any(x is self for x in args):
380 # are siblings. 524 raise ValueError("Can't insert an element before itself.")
381 if isinstance(predecessor, PageElement): 525 for predecessor in args:
382 predecessor.extract() 526 # Extract first so that the index won't be screwed up if they
383 index = parent.index(self) 527 # are siblings.
384 parent.insert(index, predecessor) 528 if isinstance(predecessor, PageElement):
385 529 predecessor.extract()
386 def insert_after(self, successor): 530 index = parent.index(self)
387 """Makes the given element the immediate successor of this one. 531 parent.insert(index, predecessor)
388 532
389 The two elements will have the same parent, and the given element 533 def insert_after(self, *args):
534 """Makes the given element(s) the immediate successor of this one.
535
536 The elements will have the same parent, and the given elements
390 will be immediately after this one. 537 will be immediately after this one.
538
539 :param args: One or more PageElements.
391 """ 540 """
392 if self is successor: 541 # Do all error checking before modifying the tree.
393 raise ValueError("Can't insert an element after itself.")
394 parent = self.parent 542 parent = self.parent
395 if parent is None: 543 if parent is None:
396 raise ValueError( 544 raise ValueError(
397 "Element has no parent, so 'after' has no meaning.") 545 "Element has no parent, so 'after' has no meaning.")
398 # Extract first so that the index won't be screwed up if they 546 if any(x is self for x in args):
399 # are siblings. 547 raise ValueError("Can't insert an element after itself.")
400 if isinstance(successor, PageElement): 548
401 successor.extract() 549 offset = 0
402 index = parent.index(self) 550 for successor in args:
403 parent.insert(index+1, successor) 551 # Extract first so that the index won't be screwed up if they
404 552 # are siblings.
405 def find_next(self, name=None, attrs={}, text=None, **kwargs): 553 if isinstance(successor, PageElement):
406 """Returns the first item that matches the given criteria and 554 successor.extract()
407 appears after this Tag in the document.""" 555 index = parent.index(self)
408 return self._find_one(self.find_all_next, name, attrs, text, **kwargs) 556 parent.insert(index+1+offset, successor)
557 offset += 1
558
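Both methods now accept several elements at once; the offset bookkeeping
above keeps them in argument order. A minimal sketch (html.parser assumed):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p><b>x</b></p>", "html.parser")
    tag1 = soup.new_tag("i")
    tag2 = soup.new_tag("u")
    soup.b.insert_after(tag1, tag2)   # both land after <b>, in order
    print(soup.p.contents)            # [<b>x</b>, <i></i>, <u></u>]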
559 def find_next(self, name=None, attrs={}, string=None, **kwargs):
560 """Find the first PageElement that matches the given criteria and
561 appears later in the document than this PageElement.
562
563 All find_* methods take a common set of arguments. See the online
564 documentation for detailed explanations.
565
566 :param name: A filter on tag name.
567 :param attrs: A dictionary of filters on attribute values.
568 :param string: A filter for a NavigableString with specific text.
569 :kwargs: A dictionary of filters on attribute values.
570 :return: A PageElement.
571 :rtype: bs4.element.Tag | bs4.element.NavigableString
572 """
573 return self._find_one(self.find_all_next, name, attrs, string, **kwargs)
409 findNext = find_next # BS3 574 findNext = find_next # BS3
410 575
411 def find_all_next(self, name=None, attrs={}, text=None, limit=None, 576 def find_all_next(self, name=None, attrs={}, string=None, limit=None,
412 **kwargs): 577 **kwargs):
413 """Returns all items that match the given criteria and appear 578 """Find all PageElements that match the given criteria and appear
414 after this Tag in the document.""" 579 later in the document than this PageElement.
415 return self._find_all(name, attrs, text, limit, self.next_elements, 580
416 **kwargs) 581 All find_* methods take a common set of arguments. See the online
582 documentation for detailed explanations.
583
584 :param name: A filter on tag name.
585 :param attrs: A dictionary of filters on attribute values.
586 :param string: A filter for a NavigableString with specific text.
587 :param limit: Stop looking after finding this many results.
588 :kwargs: A dictionary of filters on attribute values.
589 :return: A ResultSet containing PageElements.
590 """
591 _stacklevel = kwargs.pop('_stacklevel', 2)
592 return self._find_all(name, attrs, string, limit, self.next_elements,
593 _stacklevel=_stacklevel+1, **kwargs)
417 findAllNext = find_all_next # BS3 594 findAllNext = find_all_next # BS3
418 595
419 def find_next_sibling(self, name=None, attrs={}, text=None, **kwargs): 596 def find_next_sibling(self, name=None, attrs={}, string=None, **kwargs):
420 """Returns the closest sibling to this Tag that matches the 597 """Find the closest sibling to this PageElement that matches the
421 given criteria and appears after this Tag in the document.""" 598 given criteria and appears later in the document.
422 return self._find_one(self.find_next_siblings, name, attrs, text, 599
600 All find_* methods take a common set of arguments. See the
601 online documentation for detailed explanations.
602
603 :param name: A filter on tag name.
604 :param attrs: A dictionary of filters on attribute values.
605 :param string: A filter for a NavigableString with specific text.
606 :kwargs: A dictionary of filters on attribute values.
607 :return: A PageElement.
608 :rtype: bs4.element.Tag | bs4.element.NavigableString
609 """
610 return self._find_one(self.find_next_siblings, name, attrs, string,
423 **kwargs) 611 **kwargs)
424 findNextSibling = find_next_sibling # BS3 612 findNextSibling = find_next_sibling # BS3
425 613
426 def find_next_siblings(self, name=None, attrs={}, text=None, limit=None, 614 def find_next_siblings(self, name=None, attrs={}, string=None, limit=None,
427 **kwargs): 615 **kwargs):
428 """Returns the siblings of this Tag that match the given 616 """Find all siblings of this PageElement that match the given criteria
429 criteria and appear after this Tag in the document.""" 617 and appear later in the document.
430 return self._find_all(name, attrs, text, limit, 618
431 self.next_siblings, **kwargs) 619 All find_* methods take a common set of arguments. See the online
620 documentation for detailed explanations.
621
622 :param name: A filter on tag name.
623 :param attrs: A dictionary of filters on attribute values.
624 :param string: A filter for a NavigableString with specific text.
625 :param limit: Stop looking after finding this many results.
626 :kwargs: A dictionary of filters on attribute values.
627 :return: A ResultSet of PageElements.
628 :rtype: bs4.element.ResultSet
629 """
630 _stacklevel = kwargs.pop('_stacklevel', 2)
631 return self._find_all(
632 name, attrs, string, limit,
633 self.next_siblings, _stacklevel=_stacklevel+1, **kwargs
634 )
432 findNextSiblings = find_next_siblings # BS3 635 findNextSiblings = find_next_siblings # BS3
433 fetchNextSiblings = find_next_siblings # BS2 636 fetchNextSiblings = find_next_siblings # BS2
434 637
435 def find_previous(self, name=None, attrs={}, text=None, **kwargs): 638 def find_previous(self, name=None, attrs={}, string=None, **kwargs):
436 """Returns the first item that matches the given criteria and 639 """Look backwards in the document from this PageElement and find the
437 appears before this Tag in the document.""" 640 first PageElement that matches the given criteria.
641
642 All find_* methods take a common set of arguments. See the online
643 documentation for detailed explanations.
644
645 :param name: A filter on tag name.
646 :param attrs: A dictionary of filters on attribute values.
647 :param string: A filter for a NavigableString with specific text.
648 :kwargs: A dictionary of filters on attribute values.
649 :return: A PageElement.
650 :rtype: bs4.element.Tag | bs4.element.NavigableString
651 """
438 return self._find_one( 652 return self._find_one(
439 self.find_all_previous, name, attrs, text, **kwargs) 653 self.find_all_previous, name, attrs, string, **kwargs)
440 findPrevious = find_previous # BS3 654 findPrevious = find_previous # BS3
441 655
442 def find_all_previous(self, name=None, attrs={}, text=None, limit=None, 656 def find_all_previous(self, name=None, attrs={}, string=None, limit=None,
443 **kwargs): 657 **kwargs):
444 """Returns all items that match the given criteria and appear 658 """Look backwards in the document from this PageElement and find all
445 before this Tag in the document.""" 659 PageElements that match the given criteria.
446 return self._find_all(name, attrs, text, limit, self.previous_elements, 660
447 **kwargs) 661 All find_* methods take a common set of arguments. See the online
662 documentation for detailed explanations.
663
664 :param name: A filter on tag name.
665 :param attrs: A dictionary of filters on attribute values.
666 :param string: A filter for a NavigableString with specific text.
667 :param limit: Stop looking after finding this many results.
668 :kwargs: A dictionary of filters on attribute values.
669 :return: A ResultSet of PageElements.
670 :rtype: bs4.element.ResultSet
671 """
672 _stacklevel = kwargs.pop('_stacklevel', 2)
673 return self._find_all(
674 name, attrs, string, limit, self.previous_elements,
675 _stacklevel=_stacklevel+1, **kwargs
676 )
448 findAllPrevious = find_all_previous # BS3 677 findAllPrevious = find_all_previous # BS3
449 fetchPrevious = find_all_previous # BS2 678 fetchPrevious = find_all_previous # BS2
450 679
451 def find_previous_sibling(self, name=None, attrs={}, text=None, **kwargs): 680 def find_previous_sibling(self, name=None, attrs={}, string=None, **kwargs):
452 """Returns the closest sibling to this Tag that matches the 681 """Returns the closest sibling to this PageElement that matches the
453 given criteria and appears before this Tag in the document.""" 682 given criteria and appears earlier in the document.
454 return self._find_one(self.find_previous_siblings, name, attrs, text, 683
684 All find_* methods take a common set of arguments. See the online
685 documentation for detailed explanations.
686
687 :param name: A filter on tag name.
688 :param attrs: A dictionary of filters on attribute values.
689 :param string: A filter for a NavigableString with specific text.
690 :kwargs: A dictionary of filters on attribute values.
691 :return: A PageElement.
692 :rtype: bs4.element.Tag | bs4.element.NavigableString
693 """
694 return self._find_one(self.find_previous_siblings, name, attrs, string,
455 **kwargs) 695 **kwargs)
456 findPreviousSibling = find_previous_sibling # BS3 696 findPreviousSibling = find_previous_sibling # BS3
457 697
458 def find_previous_siblings(self, name=None, attrs={}, text=None, 698 def find_previous_siblings(self, name=None, attrs={}, string=None,
459 limit=None, **kwargs): 699 limit=None, **kwargs):
460 """Returns the siblings of this Tag that match the given 700 """Returns all siblings to this PageElement that match the
461 criteria and appear before this Tag in the document.""" 701 given criteria and appear earlier in the document.
462 return self._find_all(name, attrs, text, limit, 702
463 self.previous_siblings, **kwargs) 703 All find_* methods take a common set of arguments. See the online
704 documentation for detailed explanations.
705
706 :param name: A filter on tag name.
707 :param attrs: A dictionary of filters on attribute values.
708 :param string: A filter for a NavigableString with specific text.
709 :param limit: Stop looking after finding this many results.
710 :kwargs: A dictionary of filters on attribute values.
711 :return: A ResultSet of PageElements.
712 :rtype: bs4.element.ResultSet
713 """
714 _stacklevel = kwargs.pop('_stacklevel', 2)
715 return self._find_all(
716 name, attrs, string, limit,
717 self.previous_siblings, _stacklevel=_stacklevel+1, **kwargs
718 )
464 findPreviousSiblings = find_previous_siblings # BS3 719 findPreviousSiblings = find_previous_siblings # BS3
465 fetchPreviousSiblings = find_previous_siblings # BS2 720 fetchPreviousSiblings = find_previous_siblings # BS2
466 721
467 def find_parent(self, name=None, attrs={}, **kwargs): 722 def find_parent(self, name=None, attrs={}, **kwargs):
468 """Returns the closest parent of this Tag that matches the given 723 """Find the closest parent of this PageElement that matches the given
469 criteria.""" 724 criteria.
725
726 All find_* methods take a common set of arguments. See the online
727 documentation for detailed explanations.
728
729 :param name: A filter on tag name.
730 :param attrs: A dictionary of filters on attribute values.
731 :kwargs: A dictionary of filters on attribute values.
732
733 :return: A PageElement.
734 :rtype: bs4.element.Tag | bs4.element.NavigableString
735 """
470 # NOTE: We can't use _find_one because findParents takes a different 736 # NOTE: We can't use _find_one because findParents takes a different
471 # set of arguments. 737 # set of arguments.
472 r = None 738 r = None
473 l = self.find_parents(name, attrs, 1, **kwargs) 739 l = self.find_parents(name, attrs, 1, _stacklevel=3, **kwargs)
474 if l: 740 if l:
475 r = l[0] 741 r = l[0]
476 return r 742 return r
477 findParent = find_parent # BS3 743 findParent = find_parent # BS3
478 744
479 def find_parents(self, name=None, attrs={}, limit=None, **kwargs): 745 def find_parents(self, name=None, attrs={}, limit=None, **kwargs):
480 """Returns the parents of this Tag that match the given 746 """Find all parents of this PageElement that match the given criteria.
481 criteria.""" 747
748 All find_* methods take a common set of arguments. See the online
749 documentation for detailed explanations.
482 750
751 :param name: A filter on tag name.
752 :param attrs: A dictionary of filters on attribute values.
753 :param limit: Stop looking after finding this many results.
754 :kwargs: A dictionary of filters on attribute values.
755
756 :return: A ResultSet of PageElements.
757 :rtype: bs4.element.ResultSet
758 """
759 _stacklevel = kwargs.pop('_stacklevel', 2)
483 return self._find_all(name, attrs, None, limit, self.parents, 760 return self._find_all(name, attrs, None, limit, self.parents,
484 **kwargs) 761 _stacklevel=_stacklevel+1, **kwargs)
485 findParents = find_parents # BS3 762 findParents = find_parents # BS3
486 fetchParents = find_parents # BS2 763 fetchParents = find_parents # BS2
487 764
488 @property 765 @property
489 def next(self): 766 def next(self):
767 """The PageElement, if any, that was parsed just after this one.
768
769 :return: A PageElement.
770 :rtype: bs4.element.Tag | bs4.element.NavigableString
771 """
490 return self.next_element 772 return self.next_element
491 773
492 @property 774 @property
493 def previous(self): 775 def previous(self):
776 """The PageElement, if any, that was parsed just before this one.
777
778 :return: A PageElement.
779 :rtype: bs4.element.Tag | bs4.element.NavigableString
780 """
494 return self.previous_element 781 return self.previous_element
495 782
496 #These methods do the real heavy lifting. 783 #These methods do the real heavy lifting.
497 784
498 def _find_one(self, method, name, attrs, text, **kwargs): 785 def _find_one(self, method, name, attrs, string, **kwargs):
499 r = None 786 r = None
500 l = method(name, attrs, text, 1, **kwargs) 787 l = method(name, attrs, string, 1, _stacklevel=4, **kwargs)
501 if l: 788 if l:
502 r = l[0] 789 r = l[0]
503 return r 790 return r
504 791
505 def _find_all(self, name, attrs, text, limit, generator, **kwargs): 792 def _find_all(self, name, attrs, string, limit, generator, **kwargs):
506 "Iterates over a generator looking for things that match." 793 "Iterates over a generator looking for things that match."
794 _stacklevel = kwargs.pop('_stacklevel', 3)
507 795
508 if text is None and 'string' in kwargs: 796 if string is None and 'text' in kwargs:
509 text = kwargs['string'] 797 string = kwargs.pop('text')
510 del kwargs['string'] 798 warnings.warn(
799 "The 'text' argument to find()-type methods is deprecated. Use 'string' instead.",
800 DeprecationWarning, stacklevel=_stacklevel
801 )
511 802
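The effect of the deprecation shim above, sketched through find_all()
(defined later in this file); a hedged example assuming the html.parser
builder:

    import warnings
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>one</p><p>two</p>", "html.parser")
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        soup.find_all("p", text="one")          # old spelling still works...
    print(caught[0].category)                   # <class 'DeprecationWarning'>
    print(soup.find_all("p", string="one"))     # ...but 'string' is preferred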
512 if isinstance(name, SoupStrainer): 803 if isinstance(name, SoupStrainer):
513 strainer = name 804 strainer = name
514 else: 805 else:
515 strainer = SoupStrainer(name, attrs, text, **kwargs) 806 strainer = SoupStrainer(name, attrs, string, **kwargs)
516 807
517 if text is None and not limit and not attrs and not kwargs: 808 if string is None and not limit and not attrs and not kwargs:
518 if name is True or name is None: 809 if name is True or name is None:
519 # Optimization to find all tags. 810 # Optimization to find all tags.
520 result = (element for element in generator 811 result = (element for element in generator
@@ -522,9 +813,23 @@ class PageElement(object):
522 return ResultSet(strainer, result) 813 return ResultSet(strainer, result)
523 elif isinstance(name, str): 814 elif isinstance(name, str):
524 # Optimization to find all tags with a given name. 815 # Optimization to find all tags with a given name.
816 if name.count(':') == 1:
817 # This is a name with a prefix. If this is a namespace-aware document,
818 # we need to match the local name against tag.name. If not,
819 # we need to match the fully-qualified name against tag.name.
820 prefix, local_name = name.split(':', 1)
821 else:
822 prefix = None
823 local_name = name
525 result = (element for element in generator 824 result = (element for element in generator
526 if isinstance(element, Tag) 825 if isinstance(element, Tag)
527 and element.name == name) 826 and (
827 element.name == name
828 ) or (
829 element.name == local_name
830 and (prefix is None or element.prefix == prefix)
831 )
832 )
528 return ResultSet(strainer, result) 833 return ResultSet(strainer, result)
529 results = ResultSet(strainer) 834 results = ResultSet(strainer)
530 while True: 835 while True:
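How the prefix/local-name split above plays out; an illustrative sketch
only (the namespace-aware comparison in the comment assumes a builder
such as lxml's "xml", which is not exercised here):

    from bs4 import BeautifulSoup

    # html.parser is not namespace-aware: the whole prefixed name is the
    # tag's .name, so the first branch of the generator matches.
    soup = BeautifulSoup("<svg:rect></svg:rect>", "html.parser")
    print(soup.find_all("svg:rect"))  # [<svg:rect></svg:rect>]
    # With a namespace-aware parser, .name would be 'rect' and .prefix
    # 'svg', and the second branch would match instead.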
@@ -544,6 +849,10 @@ class PageElement(object):
544 #NavigableStrings and Tags. 849 #NavigableStrings and Tags.
545 @property 850 @property
546 def next_elements(self): 851 def next_elements(self):
852 """All PageElements that were parsed after this one.
853
854 :yield: A sequence of PageElements.
855 """
547 i = self.next_element 856 i = self.next_element
548 while i is not None: 857 while i is not None:
549 yield i 858 yield i
@@ -551,6 +860,11 @@ class PageElement(object):
551 860
552 @property 861 @property
553 def next_siblings(self): 862 def next_siblings(self):
863 """All PageElements that are siblings of this one but were parsed
864 later.
865
866 :yield: A sequence of PageElements.
867 """
554 i = self.next_sibling 868 i = self.next_sibling
555 while i is not None: 869 while i is not None:
556 yield i 870 yield i
@@ -558,6 +872,10 @@ class PageElement(object):
558 872
559 @property 873 @property
560 def previous_elements(self): 874 def previous_elements(self):
875 """All PageElements that were parsed before this one.
876
877 :yield: A sequence of PageElements.
878 """
561 i = self.previous_element 879 i = self.previous_element
562 while i is not None: 880 while i is not None:
563 yield i 881 yield i
@@ -565,6 +883,11 @@ class PageElement(object):
565 883
566 @property 884 @property
567 def previous_siblings(self): 885 def previous_siblings(self):
886 """All PageElements that are siblings of this one but were parsed
887 earlier.
888
889 :yield: A sequence of PageElements.
890 """
568 i = self.previous_sibling 891 i = self.previous_sibling
569 while i is not None: 892 while i is not None:
570 yield i 893 yield i
@@ -572,87 +895,23 @@ class PageElement(object):
572 895
573 @property 896 @property
574 def parents(self): 897 def parents(self):
898 """All PageElements that are parents of this PageElement.
899
900 :yield: A sequence of PageElements.
901 """
575 i = self.parent 902 i = self.parent
576 while i is not None: 903 while i is not None:
577 yield i 904 yield i
578 i = i.parent 905 i = i.parent
579 906
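The generator properties in this block compose naturally; a small sketch
(html.parser assumed):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<div><p><b>hi</b></p><p>bye</p></div>", "html.parser")
    b = soup.b
    print([t.name for t in b.parents])               # ['p', 'div', '[document]']
    print([t.name for t in b.parent.next_siblings])  # ['p']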
580 # Methods for supporting CSS selectors. 907 @property
581 908 def decomposed(self):
582 tag_name_re = re.compile(r'^[a-zA-Z0-9][-.a-zA-Z0-9:_]*$') 909 """Check whether a PageElement has been decomposed.
583
584 # /^([a-zA-Z0-9][-.a-zA-Z0-9:_]*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
585 # \---------------------------/ \---/\-------------/ \-------/
586 # | | | |
587 # | | | The value
588 # | | ~,|,^,$,* or =
589 # | Attribute
590 # Tag
591 attribselect_re = re.compile(
592 r'^(?P<tag>[a-zA-Z0-9][-.a-zA-Z0-9:_]*)?\[(?P<attribute>[\w-]+)(?P<operator>[=~\|\^\$\*]?)' +
593 r'=?"?(?P<value>[^\]"]*)"?\]$'
594 )
595
596 def _attr_value_as_string(self, value, default=None):
597 """Force an attribute value into a string representation.
598 910
599 A multi-valued attribute will be converted into a 911 :rtype: bool
600 space-separated string.
601 """ 912 """
602 value = self.get(value, default) 913 return getattr(self, '_decomposed', False) or False
603 if isinstance(value, list) or isinstance(value, tuple): 914
604 value =" ".join(value)
605 return value
606
607 def _tag_name_matches_and(self, function, tag_name):
608 if not tag_name:
609 return function
610 else:
611 def _match(tag):
612 return tag.name == tag_name and function(tag)
613 return _match
614
615 def _attribute_checker(self, operator, attribute, value=''):
616 """Create a function that performs a CSS selector operation.
617
618 Takes an operator, attribute and optional value. Returns a
619 function that will return True for elements that match that
620 combination.
621 """
622 if operator == '=':
623 # string representation of `attribute` is equal to `value`
624 return lambda el: el._attr_value_as_string(attribute) == value
625 elif operator == '~':
626 # space-separated list representation of `attribute`
627 # contains `value`
628 def _includes_value(element):
629 attribute_value = element.get(attribute, [])
630 if not isinstance(attribute_value, list):
631 attribute_value = attribute_value.split()
632 return value in attribute_value
633 return _includes_value
634 elif operator == '^':
635 # string representation of `attribute` starts with `value`
636 return lambda el: el._attr_value_as_string(
637 attribute, '').startswith(value)
638 elif operator == '$':
639 # string representation of `attribute` ends with `value`
640 return lambda el: el._attr_value_as_string(
641 attribute, '').endswith(value)
642 elif operator == '*':
643 # string representation of `attribute` contains `value`
644 return lambda el: value in el._attr_value_as_string(attribute, '')
645 elif operator == '|':
646 # string representation of `attribute` is either exactly
647 # `value` or starts with `value` and then a dash.
648 def _is_or_starts_with_dash(element):
649 attribute_value = element._attr_value_as_string(attribute, '')
650 return (attribute_value == value or attribute_value.startswith(
651 value + '-'))
652 return _is_or_starts_with_dash
653 else:
654 return lambda el: el.has_attr(attribute)
655
656 # Old non-property versions of the generators, for backwards 915 # Old non-property versions of the generators, for backwards
657 # compatibility with BS3. 916 # compatibility with BS3.
658 def nextGenerator(self): 917 def nextGenerator(self):
@@ -672,6 +931,11 @@ class PageElement(object):
672 931
673 932
674class NavigableString(str, PageElement): 933class NavigableString(str, PageElement):
934 """A Python Unicode string that is part of a parse tree.
935
936 When Beautiful Soup parses the markup <b>penguin</b>, it will
937 create a NavigableString for the string "penguin".
938 """
675 939
676 PREFIX = '' 940 PREFIX = ''
677 SUFFIX = '' 941 SUFFIX = ''
@@ -691,12 +955,22 @@ class NavigableString(str, PageElement):
691 u.setup() 955 u.setup()
692 return u 956 return u
693 957
694 def __copy__(self): 958 def __deepcopy__(self, memo, recursive=False):
695 """A copy of a NavigableString has the same contents and class 959 """A copy of a NavigableString has the same contents and class
696 as the original, but it is not connected to the parse tree. 960 as the original, but it is not connected to the parse tree.
961
962 :param recursive: This parameter is ignored; it's only defined
963 so that NavigableString.__deepcopy__ implements the same
964 signature as Tag.__deepcopy__.
697 """ 965 """
698 return type(self)(self) 966 return type(self)(self)
699 967
968 def __copy__(self):
969 """A copy of a NavigableString can only be a deep copy, because
970 only one PageElement can occupy a given place in a parse tree.
971 """
972 return self.__deepcopy__({})
973
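A sketch of why __copy__ delegates to __deepcopy__ above: the copy has to
leave the tree, since two elements cannot share one position in it:

    import copy
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>penguin</p>", "html.parser")
    s = soup.p.string
    s2 = copy.copy(s)       # routed through __deepcopy__
    print(s2, s2.parent)    # penguin None -- detached from the tree
    print(s.parent.name)    # p -- the original is untouched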
700 def __getnewargs__(self): 974 def __getnewargs__(self):
701 return (str(self),) 975 return (str(self),)
702 976
@@ -712,55 +986,146 @@ class NavigableString(str, PageElement):
712 self.__class__.__name__, attr)) 986 self.__class__.__name__, attr))
713 987
714 def output_ready(self, formatter="minimal"): 988 def output_ready(self, formatter="minimal"):
989 """Run the string through the provided formatter.
990
991 :param formatter: A Formatter object, or a string naming one of the standard formatters.
992 """
715 output = self.format_string(self, formatter) 993 output = self.format_string(self, formatter)
716 return self.PREFIX + output + self.SUFFIX 994 return self.PREFIX + output + self.SUFFIX
717 995
718 @property 996 @property
719 def name(self): 997 def name(self):
998 """Since a NavigableString is not a Tag, it has no .name.
999
1000 This property is implemented so that code like this doesn't crash
1001 when run on a mixture of Tag and NavigableString objects:
1002 [x.name for x in tag.children]
1003 """
720 return None 1004 return None
721 1005
722 @name.setter 1006 @name.setter
723 def name(self, name): 1007 def name(self, name):
1008 """Prevent NavigableString.name from ever being set."""
724 raise AttributeError("A NavigableString cannot be given a name.") 1009 raise AttributeError("A NavigableString cannot be given a name.")
725 1010
1011 def _all_strings(self, strip=False, types=PageElement.default):
1012 """Yield all strings of certain classes, possibly stripping them.
1013
1014 This makes it easy for NavigableString to implement methods
1015 like get_text() as conveniences, creating a consistent
1016 text-extraction API across all PageElements.
1017
1018 :param strip: If True, all strings will be stripped before being
1019 yielded.
1020
1021 :param types: A tuple of NavigableString subclasses. If this
1022 NavigableString isn't one of those subclasses, the
1023 sequence will be empty. By default, the subclasses
1024 considered are NavigableString and CData objects. That
1025 means no comments, processing instructions, etc.
1026
1027 :yield: A sequence that either contains this string, or is empty.
1028
1029 """
1030 if types is self.default:
1031 # This is kept in Tag because it's full of subclasses of
1032 # this class, which aren't defined until later in the file.
1033 types = Tag.DEFAULT_INTERESTING_STRING_TYPES
1034
1035 # Do nothing if the caller is looking for specific types of
1036 # string, and we're of a different type.
1037 #
1038 # We check specific types instead of using isinstance(self,
1039 # types) because all of these classes subclass
1040 # NavigableString. Anyone who's using this feature probably
1041 # wants generic NavigableStrings but not other stuff.
1042 my_type = type(self)
1043 if types is not None:
1044 if isinstance(types, type):
1045 # Looking for a single type.
1046 if my_type is not types:
1047 return
1048 elif my_type not in types:
1049 # Looking for one of a list of types.
1050 return
1051
1052 value = self
1053 if strip:
1054 value = value.strip()
1055 if len(value) > 0:
1056 yield value
1057 strings = property(_all_strings)
1058
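With the shim above, a lone NavigableString answers the same
text-extraction calls as a Tag; a minimal sketch (get_text() is assumed
to be defined elsewhere on PageElement in this version):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>  spacious  </p>", "html.parser")
    s = soup.p.string
    print(list(s.strings))          # ['  spacious  ']
    print(s.get_text(strip=True))   # 'spacious'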
726class PreformattedString(NavigableString): 1059class PreformattedString(NavigableString):
727 """A NavigableString not subject to the normal formatting rules. 1060 """A NavigableString not subject to the normal formatting rules.
728 1061
729 The string will be passed into the formatter (to trigger side effects), 1062 This is an abstract class used for special kinds of strings such
730 but the return value will be ignored. 1063 as comments (the Comment class) and CDATA blocks (the CData
1064 class).
731 """ 1065 """
732 1066
733 def output_ready(self, formatter="minimal"): 1067 PREFIX = ''
734 """CData strings are passed into the formatter. 1068 SUFFIX = ''
735 But the return value is ignored.""" 1069
736 self.format_string(self, formatter) 1070 def output_ready(self, formatter=None):
1071 """Make this string ready for output by adding any subclass-specific
1072 prefix or suffix.
1073
1074 :param formatter: A Formatter object, or a string naming one
1075 of the standard formatters. The string will be passed into the
1076 Formatter, but only to trigger any side effects: the return
1077 value is ignored.
1078
1079 :return: The string, with any subclass-specific prefix and
1080 suffix added on.
1081 """
1082 if formatter is not None:
1083 ignore = self.format_string(self, formatter)
737 return self.PREFIX + self + self.SUFFIX 1084 return self.PREFIX + self + self.SUFFIX
738 1085
739class CData(PreformattedString): 1086class CData(PreformattedString):
740 1087 """A CDATA block."""
741 PREFIX = '<![CDATA[' 1088 PREFIX = '<![CDATA['
742 SUFFIX = ']]>' 1089 SUFFIX = ']]>'
743 1090
744class ProcessingInstruction(PreformattedString): 1091class ProcessingInstruction(PreformattedString):
1092 """A SGML processing instruction."""
745 1093
746 PREFIX = '<?' 1094 PREFIX = '<?'
747 SUFFIX = '>' 1095 SUFFIX = '>'
748 1096
749class Comment(PreformattedString): 1097class XMLProcessingInstruction(ProcessingInstruction):
1098 """An XML processing instruction."""
1099 PREFIX = '<?'
1100 SUFFIX = '?>'
750 1101
1102class Comment(PreformattedString):
1103 """An HTML or XML comment."""
751 PREFIX = '<!--' 1104 PREFIX = '<!--'
752 SUFFIX = '-->' 1105 SUFFIX = '-->'
753 1106
754 1107
755class Declaration(PreformattedString): 1108class Declaration(PreformattedString):
1109 """An XML declaration."""
756 PREFIX = '<?' 1110 PREFIX = '<?'
757 SUFFIX = '?>' 1111 SUFFIX = '?>'
758 1112
759 1113
760class Doctype(PreformattedString): 1114class Doctype(PreformattedString):
761 1115 """A document type declaration."""
762 @classmethod 1116 @classmethod
763 def for_name_and_ids(cls, name, pub_id, system_id): 1117 def for_name_and_ids(cls, name, pub_id, system_id):
1118 """Generate an appropriate document type declaration for a given
1119 public ID and system ID.
1120
1121 :param name: The name of the document's root element, e.g. 'html'.
1122 :param pub_id: The Formal Public Identifier for this document type,
1123 e.g. '-//W3C//DTD XHTML 1.1//EN'
1124 :param system_id: The system identifier for this document type,
1125 e.g. 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'
1126
1127 :return: A Doctype.
1128 """
764 value = name or '' 1129 value = name or ''
765 if pub_id is not None: 1130 if pub_id is not None:
766 value += ' PUBLIC "%s"' % pub_id 1131 value += ' PUBLIC "%s"' % pub_id
@@ -775,14 +1140,105 @@ class Doctype(PreformattedString):
775 SUFFIX = '>\n' 1140 SUFFIX = '>\n'
776 1141
777 1142
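Round-trip behavior of the PreformattedString subclasses above, sketched
(Comment and CData are importable from the bs4 package):

    from bs4 import BeautifulSoup, CData, Comment

    soup = BeautifulSoup("<p><!--hidden--></p>", "html.parser")
    c = soup.p.string
    print(isinstance(c, Comment))  # True
    print(c.output_ready())        # <!--hidden--> -- PREFIX/SUFFIX restored
    c.replace_with(CData("raw & unescaped"))
    print(soup.p)                  # <p><![CDATA[raw & unescaped]]></p>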
1143class Stylesheet(NavigableString):
1144 """A NavigableString representing an stylesheet (probably
1145 CSS).
1146
1147 Used to distinguish embedded stylesheets from textual content.
1148 """
1149 pass
1150
1151
1152class Script(NavigableString):
1153 """A NavigableString representing an executable script (probably
1154 Javascript).
1155
1156 Used to distinguish executable code from textual content.
1157 """
1158 pass
1159
1160
1161class TemplateString(NavigableString):
1162 """A NavigableString representing a string found inside an HTML
1163 template embedded in a larger document.
1164
1165 Used to distinguish such strings from the main body of the document.
1166 """
1167 pass
1168
1169
1170class RubyTextString(NavigableString):
1171 """A NavigableString representing the contents of the <rt> HTML
1172 element.
1173
1174 https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rt-element
1175
1176 Can be used to distinguish such strings from the strings they're
1177 annotating.
1178 """
1179 pass
1180
1181
1182class RubyParenthesisString(NavigableString):
1183 """A NavigableString representing the contents of the <rp> HTML
1184 element.
1185
1186 https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rp-element
1187 """
1188 pass
1189
1190
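These classes come into play through the builder's string_containers map
(see the Tag constructor below); a hedged sketch of the intended effect,
assuming the stock html.parser builder registers <style> and <script>
there, as bs4 of this era does:

    from bs4 import BeautifulSoup
    from bs4.element import Script, Stylesheet

    soup = BeautifulSoup(
        "<style>b {}</style><script>alert(1)</script>hi", "html.parser"
    )
    print(type(soup.style.string) is Stylesheet)  # True
    print(type(soup.script.string) is Script)     # True
    # Neither is an "interesting" string type, so text extraction skips both.
    print(soup.get_text())                        # hi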
778class Tag(PageElement): 1191class Tag(PageElement):
1192 """Represents an HTML or XML tag that is part of a parse tree, along
1193 with its attributes and contents.
779 1194
780 """Represents a found HTML tag with its attributes and contents.""" 1195 When Beautiful Soup parses the markup <b>penguin</b>, it will
1196 create a Tag object representing the <b> tag.
1197 """
781 1198
782 def __init__(self, parser=None, builder=None, name=None, namespace=None, 1199 def __init__(self, parser=None, builder=None, name=None, namespace=None,
783 prefix=None, attrs=None, parent=None, previous=None): 1200 prefix=None, attrs=None, parent=None, previous=None,
784 "Basic constructor." 1201 is_xml=None, sourceline=None, sourcepos=None,
785 1202 can_be_empty_element=None, cdata_list_attributes=None,
1203 preserve_whitespace_tags=None,
1204 interesting_string_types=None,
1205 namespaces=None
1206 ):
1207 """Basic constructor.
1208
1209 :param parser: A BeautifulSoup object.
1210 :param builder: A TreeBuilder.
1211 :param name: The name of the tag.
1212 :param namespace: The URI of this Tag's XML namespace, if any.
1213 :param prefix: The prefix for this Tag's XML namespace, if any.
1214 :param attrs: A dictionary of this Tag's attribute values.
1215 :param parent: The PageElement to use as this Tag's parent.
1216 :param previous: The PageElement that was parsed immediately before
1217 this tag.
1218 :param is_xml: If True, this is an XML tag. Otherwise, this is an
1219 HTML tag.
1220 :param sourceline: The line number where this tag was found in its
1221 source document.
1222 :param sourcepos: The character position within `sourceline` where this
1223 tag was found.
1224 :param can_be_empty_element: If True, this tag should be
1225 represented as <tag/>. If False, this tag should be represented
1226 as <tag></tag>.
1227 :param cdata_list_attributes: A list of attributes whose values should
1228 be treated as CDATA if they ever show up on this tag.
1229 :param preserve_whitespace_tags: A list of tag names whose contents
1230 should have their whitespace preserved.
1231 :param interesting_string_types: This is a NavigableString
1232 subclass or a tuple of them. When iterating over this
1233 Tag's strings in methods like Tag.strings or Tag.get_text,
1234 these are the types of strings that are interesting enough
1235 to be considered. The default is to consider
1236 NavigableString and CData the only interesting string
1237 subtypes.
1238 :param namespaces: A dictionary mapping currently active
1239 namespace prefixes to URIs. This can be used later to
1240 construct CSS selectors.
1241 """
786 if parser is None: 1242 if parser is None:
787 self.parser_class = None 1243 self.parser_class = None
788 else: 1244 else:
@@ -793,7 +1249,12 @@ class Tag(PageElement):
793 raise ValueError("No value provided for new tag's name.") 1249 raise ValueError("No value provided for new tag's name.")
794 self.name = name 1250 self.name = name
795 self.namespace = namespace 1251 self.namespace = namespace
1252 self._namespaces = namespaces or {}
796 self.prefix = prefix 1253 self.prefix = prefix
1254 if ((not builder or builder.store_line_numbers)
1255 and (sourceline is not None or sourcepos is not None)):
1256 self.sourceline = sourceline
1257 self.sourcepos = sourcepos
797 if attrs is None: 1258 if attrs is None:
798 attrs = {} 1259 attrs = {}
799 elif attrs: 1260 elif attrs:
@@ -804,32 +1265,109 @@ class Tag(PageElement):
804 attrs = dict(attrs) 1265 attrs = dict(attrs)
805 else: 1266 else:
806 attrs = dict(attrs) 1267 attrs = dict(attrs)
1268
1269 # If possible, determine ahead of time whether this tag is an
1270 # XML tag.
1271 if builder:
1272 self.known_xml = builder.is_xml
1273 else:
1274 self.known_xml = is_xml
807 self.attrs = attrs 1275 self.attrs = attrs
808 self.contents = [] 1276 self.contents = []
809 self.setup(parent, previous) 1277 self.setup(parent, previous)
810 self.hidden = False 1278 self.hidden = False
811 1279
812 # Set up any substitutions, such as the charset in a META tag. 1280 if builder is None:
813 if builder is not None: 1281 # In the absence of a TreeBuilder, use whatever values were
1282 # passed in here. They're probably None, unless this is a copy of some
1283 # other tag.
1284 self.can_be_empty_element = can_be_empty_element
1285 self.cdata_list_attributes = cdata_list_attributes
1286 self.preserve_whitespace_tags = preserve_whitespace_tags
1287 self.interesting_string_types = interesting_string_types
1288 else:
1289 # Set up any substitutions for this tag, such as the charset in a META tag.
814 builder.set_up_substitutions(self) 1290 builder.set_up_substitutions(self)
1291
1292 # Ask the TreeBuilder whether this tag might be an empty-element tag.
815 self.can_be_empty_element = builder.can_be_empty_element(name) 1293 self.can_be_empty_element = builder.can_be_empty_element(name)
816 else: 1294
817 self.can_be_empty_element = False 1295 # Keep track of the list of attributes of this tag that
1296 # might need to be treated as a list.
1297 #
1298 # For performance reasons, we store the whole data structure
1299 # rather than asking the question of every tag. Asking would
1300 # require building a new data structure every time, and
1301 # (unlike can_be_empty_element), we almost never need
1302 # to check this.
1303 self.cdata_list_attributes = builder.cdata_list_attributes
1304
1305 # Keep track of the names that might cause this tag to be treated as a
1306 # whitespace-preserved tag.
1307 self.preserve_whitespace_tags = builder.preserve_whitespace_tags
1308
1309 if self.name in builder.string_containers:
1310 # This sort of tag uses a special string container
1311 # subclass for most of its strings, so use that subclass here.
1312 self.interesting_string_types = builder.string_containers[self.name]
1313 else:
1314 self.interesting_string_types = self.DEFAULT_INTERESTING_STRING_TYPES
818 1315
819 parserClass = _alias("parser_class") # BS3 1316 parserClass = _alias("parser_class") # BS3
820 1317
821 def __copy__(self): 1318 def __deepcopy__(self, memo, recursive=True):
822 """A copy of a Tag is a new Tag, unconnected to the parse tree. 1319 """A deepcopy of a Tag is a new Tag, unconnected to the parse tree.
823 Its contents are a copy of the old Tag's contents. 1320 Its contents are a copy of the old Tag's contents.
824 """ 1321 """
825 clone = type(self)(None, self.builder, self.name, self.namespace, 1322 clone = self._clone()
826 self.nsprefix, self.attrs) 1323
1324 if recursive:
1325 # Clone this tag's descendants recursively, but without
1326 # making any recursive function calls.
1327 tag_stack = [clone]
1328 for event, element in self._event_stream(self.descendants):
1329 if event is Tag.END_ELEMENT_EVENT:
1330 # Stop appending incoming Tags to the Tag that was
1331 # just closed.
1332 tag_stack.pop()
1333 else:
1334 descendant_clone = element.__deepcopy__(
1335 memo, recursive=False
1336 )
1337 # Add to its parent's .contents
1338 tag_stack[-1].append(descendant_clone)
1339
1340 if event is Tag.START_ELEMENT_EVENT:
1341 # Add the Tag itself to the stack so that its
1342 # children will be .appended to it.
1343 tag_stack.append(descendant_clone)
1344 return clone
1345
1346 def __copy__(self):
1347 """A copy of a Tag must always be a deep copy, because a Tag's
1348 children can only have one parent at a time.
1349 """
1350 return self.__deepcopy__({})
1351
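A sketch of the copy semantics above: any copy of a Tag is a detached
deep copy (illustrative snippet, html.parser assumed):

    import copy
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<div><p>one</p><p>two</p></div>", "html.parser")
    clone = copy.copy(soup.div)   # __copy__ delegates to __deepcopy__
    print(clone.parent)           # None -- unconnected to the original tree
    print(clone == soup.div)      # True -- equal by name, attrs and contents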
1352 def _clone(self):
1353 """Create a new Tag just like this one, but with no
1354 contents and unattached to any parse tree.
1355
1356 This is the first step in the deepcopy process.
1357 """
1358 clone = type(self)(
1359 None, None, self.name, self.namespace,
1360 self.prefix, self.attrs, is_xml=self._is_xml,
1361 sourceline=self.sourceline, sourcepos=self.sourcepos,
1362 can_be_empty_element=self.can_be_empty_element,
1363 cdata_list_attributes=self.cdata_list_attributes,
1364 preserve_whitespace_tags=self.preserve_whitespace_tags,
1365 interesting_string_types=self.interesting_string_types
1366 )
827 for attr in ('can_be_empty_element', 'hidden'): 1367 for attr in ('can_be_empty_element', 'hidden'):
828 setattr(clone, attr, getattr(self, attr)) 1368 setattr(clone, attr, getattr(self, attr))
829 for child in self.contents:
830 clone.append(child.__copy__())
831 return clone 1369 return clone
832 1370
833 @property 1371 @property
834 def is_empty_element(self): 1372 def is_empty_element(self):
835 """Is this tag an empty-element tag? (aka a self-closing tag) 1373 """Is this tag an empty-element tag? (aka a self-closing tag)
@@ -850,13 +1388,17 @@ class Tag(PageElement):
850 1388
851 @property 1389 @property
852 def string(self): 1390 def string(self):
853 """Convenience property to get the single string within this tag. 1391 """Convenience property to get the single string within this
1392 PageElement.
854 1393
855 :Return: If this tag has a single string child, return value 1394 TODO It might make sense to have NavigableString.string return
856 is that string. If this tag has no children, or more than one 1395 itself.
857 child, return value is None. If this tag has one child tag, 1396
1397 :return: If this element has a single string child, return
1398 value is that string. If this element has one child tag,
858 return value is the 'string' attribute of the child tag, 1399 return value is the 'string' attribute of the child tag,
859 recursively. 1400 recursively. If this element is itself a string, has no
1401 children, or has more than one child, return value is None.
860 """ 1402 """
861 if len(self.contents) != 1: 1403 if len(self.contents) != 1:
862 return None 1404 return None
@@ -867,57 +1409,75 @@ class Tag(PageElement):
867 1409
868 @string.setter 1410 @string.setter
869 def string(self, string): 1411 def string(self, string):
1412 """Replace this PageElement's contents with `string`."""
870 self.clear() 1413 self.clear()
871 self.append(string.__class__(string)) 1414 self.append(string.__class__(string))
872 1415
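The .string property above in action; a minimal sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<b><i>text</i></b><p>a<br/>b</p>", "html.parser")
    print(soup.b.string)    # text -- recurses through the single child
    print(soup.p.string)    # None -- more than one child
    soup.b.string = "new"   # setter: clear(), then append a fresh string
    print(soup.b)           # <b>new</b>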
873 def _all_strings(self, strip=False, types=(NavigableString, CData)): 1416 DEFAULT_INTERESTING_STRING_TYPES = (NavigableString, CData)
1417 def _all_strings(self, strip=False, types=PageElement.default):
874 """Yield all strings of certain classes, possibly stripping them. 1418 """Yield all strings of certain classes, possibly stripping them.
875 1419
876 By default, yields only NavigableString and CData objects. So 1420 :param strip: If True, all strings will be stripped before being
877 no comments, processing instructions, etc. 1421 yielded.
1422
1423 :param types: A tuple of NavigableString subclasses. Any strings of
1424 a subclass not found in this list will be ignored. By
1425 default, the subclasses considered are the ones found in
1426 self.interesting_string_types. If that's not specified,
1427 only NavigableString and CData objects will be
1428 considered. That means no comments, processing
1429 instructions, etc.
1430
1431 :yield: A sequence of strings.
1432
878 """ 1433 """
1434 if types is self.default:
1435 types = self.interesting_string_types
1436
879 for descendant in self.descendants: 1437 for descendant in self.descendants:
880 if ( 1438 if (types is None and not isinstance(descendant, NavigableString)):
881 (types is None and not isinstance(descendant, NavigableString)) 1439 continue
882 or 1440 descendant_type = type(descendant)
883 (types is not None and type(descendant) not in types)): 1441 if isinstance(types, type):
1442 if descendant_type is not types:
1443 # We're not interested in strings of this type.
1444 continue
1445 elif types is not None and descendant_type not in types:
1446 # We're not interested in strings of this type.
884 continue 1447 continue
885 if strip: 1448 if strip:
886 descendant = descendant.strip() 1449 descendant = descendant.strip()
887 if len(descendant) == 0: 1450 if len(descendant) == 0:
888 continue 1451 continue
889 yield descendant 1452 yield descendant
890
891 strings = property(_all_strings) 1453 strings = property(_all_strings)
892 1454
893 @property 1455 def decompose(self):
894 def stripped_strings(self): 1456 """Recursively destroys this PageElement and its children.
895 for string in self._all_strings(True):
896 yield string
897 1457
898 def get_text(self, separator="", strip=False, 1458 This element will be removed from the tree and wiped out; so
899 types=(NavigableString, CData)): 1459 will everything beneath it.
900 """
901 Get all child strings, concatenated using the given separator.
902 """
903 return separator.join([s for s in self._all_strings(
904 strip, types=types)])
905 getText = get_text
906 text = property(get_text)
907 1460
908 def decompose(self): 1461 The behavior of a decomposed PageElement is undefined and you
909 """Recursively destroys the contents of this tree.""" 1462 should never use one for anything, but if you need to _check_
1463 whether an element has been decomposed, you can use the
1464 `decomposed` property.
1465 """
910 self.extract() 1466 self.extract()
911 i = self 1467 i = self
912 while i is not None: 1468 while i is not None:
913 next = i.next_element 1469 n = i.next_element
914 i.__dict__.clear() 1470 i.__dict__.clear()
915 i.contents = [] 1471 i.contents = []
916 i = next 1472 i._decomposed = True
1473 i = n
917 1474
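A sketch of decompose() and the _decomposed flag set above:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<div><p>gone</p><p>kept</p></div>", "html.parser")
    p = soup.p
    p.decompose()
    print(p.decomposed)   # True -- the one safe question to ask afterwards
    print(soup.div)       # <div><p>kept</p></div>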
918 def clear(self, decompose=False): 1475 def clear(self, decompose=False):
919 """ 1476 """Wipe out all children of this PageElement by calling extract()
920 Extract all children. If decompose is True, decompose instead. 1477 on them.
1478
1479 :param decompose: If this is True, decompose() (a more
1480 destructive method) will be called instead of extract().
921 """ 1481 """
922 if decompose: 1482 if decompose:
923 for element in self.contents[:]: 1483 for element in self.contents[:]:
@@ -929,10 +1489,51 @@ class Tag(PageElement):
929 for element in self.contents[:]: 1489 for element in self.contents[:]:
930 element.extract() 1490 element.extract()
931 1491
932 def index(self, element): 1492 def smooth(self):
1493 """Smooth out this element's children by consolidating consecutive
1494 strings.
1495
1496 This makes pretty-printed output look more natural following a
1497 lot of operations that modified the tree.
933 """ 1498 """
934 Find the index of a child by identity, not value. Avoids issues with 1499 # Mark the first position of every pair of children that need
935 tag.contents.index(element) getting the index of equal elements. 1500 # to be consolidated. Do this rather than making a copy of
1501 # self.contents, since in most cases very few strings will be
1502 # affected.
1503 marked = []
1504 for i, a in enumerate(self.contents):
1505 if isinstance(a, Tag):
1506 # Recursively smooth children.
1507 a.smooth()
1508 if i == len(self.contents)-1:
1509 # This is the last item in .contents, and it's not a
1510 # tag. There's no chance it needs any work.
1511 continue
1512 b = self.contents[i+1]
1513 if (isinstance(a, NavigableString)
1514 and isinstance(b, NavigableString)
1515 and not isinstance(a, PreformattedString)
1516 and not isinstance(b, PreformattedString)
1517 ):
1518 marked.append(i)
1519
1520 # Go over the marked positions in reverse order, so that
1521 # removing items from .contents won't affect the remaining
1522 # positions.
1523 for i in reversed(marked):
1524 a = self.contents[i]
1525 b = self.contents[i+1]
1526 b.extract()
1527 n = NavigableString(a+b)
1528 a.replace_with(n)
1529
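A minimal sketch of smooth() consolidating the adjacent strings that tree
surgery leaves behind:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>one</p>", "html.parser")
    soup.p.append("two")
    print(soup.p.contents)  # ['one', 'two'] -- two adjacent strings
    soup.smooth()
    print(soup.p.contents)  # ['onetwo'] -- consolidated into one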
1530 def index(self, element):
1531 """Find the index of a child by identity, not value.
1532
1533 Avoids issues with tag.contents.index(element) getting the
1534 index of equal elements.
1535
1536 :param element: Look for this PageElement in `self.contents`.
936 """ 1537 """
937 for i, child in enumerate(self.contents): 1538 for i, child in enumerate(self.contents):
938 if child is element: 1539 if child is element:
@@ -945,23 +1546,38 @@ class Tag(PageElement):
945 attribute.""" 1546 attribute."""
946 return self.attrs.get(key, default) 1547 return self.attrs.get(key, default)
947 1548
1549 def get_attribute_list(self, key, default=None):
1550 """The same as get(), but always returns a list.
1551
1552 :param key: The attribute to look for.
1553 :param default: Use this value if the attribute is not present
1554 on this PageElement.
1555 :return: A list of values, probably containing only a single
1556 value.
1557 """
1558 value = self.get(key, default)
1559 if not isinstance(value, list):
1560 value = [value]
1561 return value
1562
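The difference from get(), sketched ('class' is already list-valued under
html.parser's cdata-list handling):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p id="intro" class="a b">hi</p>', "html.parser")
    print(soup.p.get("id"))                     # intro
    print(soup.p.get_attribute_list("id"))      # ['intro'] -- always a list
    print(soup.p.get_attribute_list("class"))   # ['a', 'b']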
948 def has_attr(self, key): 1563 def has_attr(self, key):
1564 """Does this PageElement have an attribute with the given name?"""
949 return key in self.attrs 1565 return key in self.attrs
950 1566
951 def __hash__(self): 1567 def __hash__(self):
952 return str(self).__hash__() 1568 return str(self).__hash__()
953 1569
954 def __getitem__(self, key): 1570 def __getitem__(self, key):
955 """tag[key] returns the value of the 'key' attribute for the tag, 1571 """tag[key] returns the value of the 'key' attribute for the Tag,
956 and throws an exception if it's not there.""" 1572 and throws an exception if it's not there."""
957 return self.attrs[key] 1573 return self.attrs[key]
958 1574
959 def __iter__(self): 1575 def __iter__(self):
960 "Iterating over a tag iterates over its contents." 1576 "Iterating over a Tag iterates over its contents."
961 return iter(self.contents) 1577 return iter(self.contents)
962 1578
963 def __len__(self): 1579 def __len__(self):
964 "The length of a tag is the length of its list of contents." 1580 "The length of a Tag is the length of its list of contents."
965 return len(self.contents) 1581 return len(self.contents)
966 1582
967 def __contains__(self, x): 1583 def __contains__(self, x):
@@ -981,29 +1597,33 @@ class Tag(PageElement):
981 self.attrs.pop(key, None) 1597 self.attrs.pop(key, None)
982 1598
983 def __call__(self, *args, **kwargs): 1599 def __call__(self, *args, **kwargs):
984 """Calling a tag like a function is the same as calling its 1600 """Calling a Tag like a function is the same as calling its
985 find_all() method. Eg. tag('a') returns a list of all the A tags 1601 find_all() method. Eg. tag('a') returns a list of all the A tags
986 found within this tag.""" 1602 found within this tag."""
987 return self.find_all(*args, **kwargs) 1603 return self.find_all(*args, **kwargs)
988 1604
989 def __getattr__(self, tag): 1605 def __getattr__(self, tag):
990 #print "Getattr %s.%s" % (self.__class__, tag) 1606 """Calling tag.subtag is the same as calling tag.find(name="subtag")"""
1607 #print("Getattr %s.%s" % (self.__class__, tag))
991 if len(tag) > 3 and tag.endswith('Tag'): 1608 if len(tag) > 3 and tag.endswith('Tag'):
992 # BS3: soup.aTag -> "soup.find("a") 1609 # BS3: soup.aTag -> "soup.find("a")
993 tag_name = tag[:-3] 1610 tag_name = tag[:-3]
994 warnings.warn( 1611 warnings.warn(
995 '.%sTag is deprecated, use .find("%s") instead.' % ( 1612 '.%(name)sTag is deprecated, use .find("%(name)s") instead. If you really were looking for a tag called %(name)sTag, use .find("%(name)sTag")' % dict(
996 tag_name, tag_name)) 1613 name=tag_name
1614 ),
1615 DeprecationWarning, stacklevel=2
1616 )
997 return self.find(tag_name) 1617 return self.find(tag_name)
998 # We special case contents to avoid recursion. 1618 # We special case contents to avoid recursion.
999 elif not tag.startswith("__") and not tag=="contents": 1619 elif not tag.startswith("__") and not tag == "contents":
1000 return self.find(tag) 1620 return self.find(tag)
1001 raise AttributeError( 1621 raise AttributeError(
1002 "'%s' object has no attribute '%s'" % (self.__class__, tag)) 1622 "'%s' object has no attribute '%s'" % (self.__class__, tag))
1003 1623
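The attribute shorthand this implements, sketched:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<div><b>bold</b></div>", "html.parser")
    print(soup.div.b)   # <b>bold</b> -- same as soup.div.find("b")
    print(soup.div.q)   # None -- find() misses return None, not errors
    # soup.div.bTag still works but now triggers a DeprecationWarning.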
1004 def __eq__(self, other): 1624 def __eq__(self, other):
1005 """Returns true iff this tag has the same name, the same attributes, 1625 """Returns true iff this Tag has the same name, the same attributes,
1006 and the same contents (recursively) as the given tag.""" 1626 and the same contents (recursively) as `other`."""
1007 if self is other: 1627 if self is other:
1008 return True 1628 return True
1009 if (not hasattr(other, 'name') or 1629 if (not hasattr(other, 'name') or
@@ -1019,69 +1639,235 @@ class Tag(PageElement):
1019 return True 1639 return True
1020 1640
1021 def __ne__(self, other): 1641 def __ne__(self, other):
1022 """Returns true iff this tag is not identical to the other tag, 1642 """Returns true iff this Tag is not identical to `other`,
1023 as defined in __eq__.""" 1643 as defined in __eq__."""
1024 return not self == other 1644 return not self == other
1025 1645
1026 def __repr__(self, encoding="unicode-escape"): 1646 def __repr__(self, encoding="unicode-escape"):
1027 """Renders this tag as a string.""" 1647 """Renders this PageElement as a string.
1028 if PY3K:
1029 # "The return value must be a string object", i.e. Unicode
1030 return self.decode()
1031 else:
1032 # "The return value must be a string object", i.e. a bytestring.
1033 # By convention, the return value of __repr__ should also be
1034 # an ASCII string.
1035 return self.encode(encoding)
1036 1648
1037 def __unicode__(self): 1649 :param encoding: The encoding to use (Python 2 only).
1650 TODO: This is now ignored and a warning should be issued
1651 if a value is provided.
1652 :return: A (Unicode) string.
1653 """
1654 # "The return value must be a string object", i.e. Unicode
1038 return self.decode() 1655 return self.decode()
1039 1656
1040 def __str__(self): 1657 def __unicode__(self):
1041 if PY3K: 1658 """Renders this PageElement as a Unicode string."""
1042 return self.decode() 1659 return self.decode()
1043 else:
1044 return self.encode()
1045 1660
1046 if PY3K: 1661 __str__ = __repr__ = __unicode__
1047 __str__ = __repr__ = __unicode__
1048 1662
1049 def encode(self, encoding=DEFAULT_OUTPUT_ENCODING, 1663 def encode(self, encoding=DEFAULT_OUTPUT_ENCODING,
1050 indent_level=None, formatter="minimal", 1664 indent_level=None, formatter="minimal",
1051 errors="xmlcharrefreplace"): 1665 errors="xmlcharrefreplace"):
1666 """Render a bytestring representation of this PageElement and its
1667 contents.
1668
1669 :param encoding: The destination encoding.
1670 :param indent_level: Each line of the rendering will be
1671 indented this many levels. (The formatter decides what a
1672 'level' means in terms of spaces or other characters
1673 output.) Used internally in recursive calls while
1674 pretty-printing.
1675 :param formatter: A Formatter object, or a string naming one of
1676 the standard formatters.
1677 :param errors: An error handling strategy such as
1678 'xmlcharrefreplace'. This value is passed along into
1679 encode() and its value should be one of the constants
1680 defined by Python.
1681 :return: A bytestring.
1682
1683 """
1052 # Turn the data structure into Unicode, then encode the 1684 # Turn the data structure into Unicode, then encode the
1053 # Unicode. 1685 # Unicode.
1054 u = self.decode(indent_level, encoding, formatter) 1686 u = self.decode(indent_level, encoding, formatter)
1055 return u.encode(encoding, errors) 1687 return u.encode(encoding, errors)
1056 1688
1057 def _should_pretty_print(self, indent_level):
1058 """Should this tag be pretty-printed?"""
1059 return (
1060 indent_level is not None and
1061 (self.name not in HTMLAwareEntitySubstitution.preformatted_tags
1062 or self._is_xml))
1063
1064 def decode(self, indent_level=None, 1689 def decode(self, indent_level=None,
1065 eventual_encoding=DEFAULT_OUTPUT_ENCODING, 1690 eventual_encoding=DEFAULT_OUTPUT_ENCODING,
1066 formatter="minimal"): 1691 formatter="minimal",
1067 """Returns a Unicode representation of this tag and its contents. 1692 iterator=None):
1693 pieces = []
1694 # First off, turn a non-Formatter `formatter` into a Formatter
1695 # object. This will stop the lookup from happening over and
1696 # over again.
1697 if not isinstance(formatter, Formatter):
1698 formatter = self.formatter_for_name(formatter)
1699
1700 if indent_level is True:
1701 indent_level = 0
1702
1703 # The currently active tag that put us into string literal
1704 # mode. Until this element is closed, children will be treated
1705 # as string literals and not pretty-printed. String literal
1706 # mode is turned on immediately after this tag begins, and
1707 # turned off immediately before it's closed. This means there
1708 # will be whitespace before and after the tag itself.
1709 string_literal_tag = None
1710
1711 for event, element in self._event_stream(iterator):
1712 if event in (Tag.START_ELEMENT_EVENT, Tag.EMPTY_ELEMENT_EVENT):
1713 piece = element._format_tag(
1714 eventual_encoding, formatter, opening=True
1715 )
1716 elif event is Tag.END_ELEMENT_EVENT:
1717 piece = element._format_tag(
1718 eventual_encoding, formatter, opening=False
1719 )
1720 if indent_level is not None:
1721 indent_level -= 1
1722 else:
1723 piece = element.output_ready(formatter)
1724
1725 # Now we need to apply the 'prettiness' -- extra
1726 # whitespace before and/or after this tag. This can get
1727 # complicated because certain tags, like <pre> and
1728 # <script>, can't be prettified, since adding whitespace would
1729 # change the meaning of the content.
1730
1731 # The default behavior is to add whitespace before and
1732 # after an element when string literal mode is off, and to
1733 # leave things as they are when string literal mode is on.
1734 if string_literal_tag:
1735 indent_before = indent_after = False
1736 else:
1737 indent_before = indent_after = True
1738
1739 # The only time the behavior is more complex than that is
1740 # when we encounter an opening or closing tag that might
1741 # put us into or out of string literal mode.
1742 if (event is Tag.START_ELEMENT_EVENT
1743 and not string_literal_tag
1744 and not element._should_pretty_print()):
1745 # We are about to enter string literal mode. Add
1746 # whitespace before this tag, but not after. We
1747 # will stay in string literal mode until this tag
1748 # is closed.
1749 indent_before = True
1750 indent_after = False
1751 string_literal_tag = element
1752 elif (event is Tag.END_ELEMENT_EVENT
1753 and element is string_literal_tag):
1754 # We are about to exit string literal mode by closing
1755 # the tag that sent us into that mode. Add whitespace
1756 # after this tag, but not before.
1757 indent_before = False
1758 indent_after = True
1759 string_literal_tag = None
1760
1761 # Now we know whether to add whitespace before and/or
1762 # after this element.
1763 if indent_level is not None:
1764 if (indent_before or indent_after):
1765 if isinstance(element, NavigableString):
1766 piece = piece.strip()
1767 if piece:
1768 piece = self._indent_string(
1769 piece, indent_level, formatter,
1770 indent_before, indent_after
1771 )
1772 if event == Tag.START_ELEMENT_EVENT:
1773 indent_level += 1
1774 pieces.append(piece)
1775 return "".join(pieces)
1776
1777 # Names for the different events yielded by _event_stream
1778 START_ELEMENT_EVENT = object()
1779 END_ELEMENT_EVENT = object()
1780 EMPTY_ELEMENT_EVENT = object()
1781 STRING_ELEMENT_EVENT = object()
1782
1783 def _event_stream(self, iterator=None):
1784 """Yield a sequence of events that can be used to reconstruct the DOM
1785 for this element.
1786
1787 This lets us recreate the nested structure of this element
1788 (e.g. when formatting it as a string) without using recursive
1789 method calls.
1790
1791 This is similar in concept to the SAX API, but it's a simpler
1792 interface designed for internal use. The events are different
1793 from SAX and the arguments associated with the events are Tags
1794 and other Beautiful Soup objects.
1795
1796 :param iterator: An alternate iterator to use when traversing
1797 the tree.
1798 """
1799 tag_stack = []
1068 1800
1069 :param eventual_encoding: The tag is destined to be 1801 iterator = iterator or self.self_and_descendants
1070 encoded into this encoding. This method is _not_ 1802
1071 responsible for performing that encoding. This information 1803 for c in iterator:
1072 is passed in so that it can be substituted in if the 1804 # If the parent of the element we're about to yield is not
1073 document contains a <META> tag that mentions the document's 1805 # the tag currently on the stack, it means that the tag on
1074 encoding. 1806 # the stack closed before this element appeared.
1807 while tag_stack and c.parent != tag_stack[-1]:
1808 now_closed_tag = tag_stack.pop()
1809 yield Tag.END_ELEMENT_EVENT, now_closed_tag
1810
1811 if isinstance(c, Tag):
1812 if c.is_empty_element:
1813 yield Tag.EMPTY_ELEMENT_EVENT, c
1814 else:
1815 yield Tag.START_ELEMENT_EVENT, c
1816 tag_stack.append(c)
1817 continue
1818 else:
1819 yield Tag.STRING_ELEMENT_EVENT, c
1820
1821 while tag_stack:
1822 now_closed_tag = tag_stack.pop()
1823 yield Tag.END_ELEMENT_EVENT, now_closed_tag
1824
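To make the event-driven rendering concrete, this sketch walks _event_stream() on a small tree. It is a private helper introduced above, so treat the call as illustrative rather than a stable API:

    from bs4 import BeautifulSoup
    from bs4.element import Tag
    soup = BeautifulSoup("<div><p>hi</p><br/></div>", "html.parser")
    labels = {Tag.START_ELEMENT_EVENT: "START", Tag.END_ELEMENT_EVENT: "END",
              Tag.EMPTY_ELEMENT_EVENT: "EMPTY", Tag.STRING_ELEMENT_EVENT: "STRING"}
    for event, element in soup.div._event_stream():
        name = element.name if isinstance(element, Tag) else repr(element)
        print(labels[event], name)
    # START div / START p / STRING 'hi' / END p / EMPTY br / END div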
1825 def _indent_string(self, s, indent_level, formatter,
1826 indent_before, indent_after):
1827 """Add indentation whitespace before and/or after a string.
1828
1829 :param s: The string to amend with whitespace.
1830 :param indent_level: The indentation level; affects how much
1831 whitespace goes before the string.
1832 :param indent_before: Whether or not to add whitespace
1833 before the string.
1834 :param indent_after: Whether or not to add whitespace
1835 (a newline) after the string.
1075 """ 1836 """
1837 space_before = ''
1838 if indent_before and indent_level:
1839 space_before = (formatter.indent * indent_level)
1076 1840
1077 # First off, turn a string formatter into a function. This 1841 space_after = ''
1078 # will stop the lookup from happening over and over again. 1842 if indent_after:
1079 if not isinstance(formatter, collections.abc.Callable): 1843 space_after = "\n"
1080 formatter = self._formatter_for_name(formatter)
1081 1844
1082 attrs = [] 1845 return space_before + s + space_after
1083 if self.attrs: 1846
1084 for key, val in sorted(self.attrs.items()): 1847 def _format_tag(self, eventual_encoding, formatter, opening):
1848 if self.hidden:
1849 # A hidden tag is invisible, although its contents
1850 # are visible.
1851 return ''
1852
1853 # A tag starts with the < character (see below).
1854
1855 # Then the / character, if this is a closing tag.
1856 closing_slash = ''
1857 if not opening:
1858 closing_slash = '/'
1859
1860 # Then an optional namespace prefix.
1861 prefix = ''
1862 if self.prefix:
1863 prefix = self.prefix + ":"
1864
1865 # Then a list of attribute values, if this is an opening tag.
1866 attribute_string = ''
1867 if opening:
1868 attributes = formatter.attributes(self)
1869 attrs = []
1870 for key, val in attributes:
1085 if val is None: 1871 if val is None:
1086 decoded = key 1872 decoded = key
1087 else: 1873 else:
@@ -1090,71 +1876,52 @@ class Tag(PageElement):
1090 elif not isinstance(val, str): 1876 elif not isinstance(val, str):
1091 val = str(val) 1877 val = str(val)
1092 elif ( 1878 elif (
1093 isinstance(val, AttributeValueWithCharsetSubstitution) 1879 isinstance(val, AttributeValueWithCharsetSubstitution)
1094 and eventual_encoding is not None): 1880 and eventual_encoding is not None
1881 ):
1095 val = val.encode(eventual_encoding) 1882 val = val.encode(eventual_encoding)
1096 1883
1097 text = self.format_string(val, formatter) 1884 text = formatter.attribute_value(val)
1098 decoded = ( 1885 decoded = (
1099 str(key) + '=' 1886 str(key) + '='
1100 + EntitySubstitution.quoted_attribute_value(text)) 1887 + formatter.quoted_attribute_value(text))
1101 attrs.append(decoded) 1888 attrs.append(decoded)
1102 close = '' 1889 if attrs:
1103 closeTag = '' 1890 attribute_string = ' ' + ' '.join(attrs)
1104
1105 prefix = ''
1106 if self.prefix:
1107 prefix = self.prefix + ":"
1108 1891
1892 # Then an optional closing slash (for a void element in an
1893 # XML document).
1894 void_element_closing_slash = ''
1109 if self.is_empty_element: 1895 if self.is_empty_element:
1110 close = '/' 1896 void_element_closing_slash = formatter.void_element_close_prefix or ''
1111 else:
1112 closeTag = '</%s%s>' % (prefix, self.name)
1113
1114 pretty_print = self._should_pretty_print(indent_level)
1115 space = ''
1116 indent_space = ''
1117 if indent_level is not None:
1118 indent_space = (' ' * (indent_level - 1))
1119 if pretty_print:
1120 space = indent_space
1121 indent_contents = indent_level + 1
1122 else:
1123 indent_contents = None
1124 contents = self.decode_contents(
1125 indent_contents, eventual_encoding, formatter)
1126 1897
1127 if self.hidden: 1898 # Put it all together.
1128 # This is the 'document root' object. 1899 return '<' + closing_slash + prefix + self.name + attribute_string + void_element_closing_slash + '>'
1129 s = contents 1900
1130 else: 1901 def _should_pretty_print(self, indent_level=1):
1131 s = [] 1902 """Should this tag be pretty-printed?
1132 attribute_string = '' 1903
1133 if attrs: 1904 Most of them should, but some (such as <pre> in HTML
1134 attribute_string = ' ' + ' '.join(attrs) 1905 documents) should not.
1135 if indent_level is not None: 1906 """
1136 # Even if this particular tag is not pretty-printed, 1907 return (
1137 # we should indent up to the start of the tag. 1908 indent_level is not None
1138 s.append(indent_space) 1909 and (
1139 s.append('<%s%s%s%s>' % ( 1910 not self.preserve_whitespace_tags
1140 prefix, self.name, attribute_string, close)) 1911 or self.name not in self.preserve_whitespace_tags
1141 if pretty_print: 1912 )
1142 s.append("\n") 1913 )
1143 s.append(contents)
1144 if pretty_print and contents and contents[-1] != "\n":
1145 s.append("\n")
1146 if pretty_print and closeTag:
1147 s.append(space)
1148 s.append(closeTag)
1149 if indent_level is not None and closeTag and self.next_sibling:
1150 # Even if this particular tag is not pretty-printed,
1151 # we're now done with the tag, and we should add a
1152 # newline if appropriate.
1153 s.append("\n")
1154 s = ''.join(s)
1155 return s
1156 1914
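The observable effect of _should_pretty_print() and string literal mode, sketched with html.parser (whose preserve_whitespace_tags include pre and textarea):

    from bs4 import BeautifulSoup
    soup = BeautifulSoup("<div><pre>  keep   this  </pre></div>", "html.parser")
    print(soup.prettify())
    # The <div> is indented normally; the <pre> element and its contents
    # are emitted verbatim, with whitespace added only before and after it.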
1157 def prettify(self, encoding=None, formatter="minimal"): 1915 def prettify(self, encoding=None, formatter="minimal"):
1916 """Pretty-print this PageElement as a string.
1917
1918 :param encoding: The eventual encoding of the string. If this is None,
1919 a Unicode string will be returned.
1920 :param formatter: A Formatter object, or a string naming one of
1921 the standard formatters.
1922 :return: A Unicode string (if encoding==None) or a bytestring
1923 (otherwise).
1924 """
1158 if encoding is None: 1925 if encoding is None:
1159 return self.decode(True, formatter=formatter) 1926 return self.decode(True, formatter=formatter)
1160 else: 1927 else:
@@ -1166,62 +1933,50 @@ class Tag(PageElement):
1166 """Renders the contents of this tag as a Unicode string. 1933 """Renders the contents of this tag as a Unicode string.
1167 1934
1168 :param indent_level: Each line of the rendering will be 1935 :param indent_level: Each line of the rendering will be
1169 indented this many spaces. 1936 indented this many levels. (The formatter decides what a
1937 'level' means in terms of spaces or other characters
1938 output.) Used internally in recursive calls while
1939 pretty-printing.
1170 1940
1171 :param eventual_encoding: The tag is destined to be 1941 :param eventual_encoding: The tag is destined to be
1172 encoded into this encoding. This method is _not_ 1942 encoded into this encoding. decode_contents() is _not_
1173 responsible for performing that encoding. This information 1943 responsible for performing that encoding. This information
1174 is passed in so that it can be substituted in if the 1944 is passed in so that it can be substituted in if the
1175 document contains a <META> tag that mentions the document's 1945 document contains a <META> tag that mentions the document's
1176 encoding. 1946 encoding.
1177 1947
1178 :param formatter: The output formatter responsible for converting 1948 :param formatter: A Formatter object, or a string naming one of
1179 entities to Unicode characters. 1949 the standard Formatters.
1180 """ 1950
1181 # First off, turn a string formatter into a function. This 1951 """
1182 # will stop the lookup from happening over and over again. 1952 return self.decode(indent_level, eventual_encoding, formatter,
1183 if not isinstance(formatter, collections.abc.Callable): 1953 iterator=self.descendants)
1184 formatter = self._formatter_for_name(formatter)
1185
1186 pretty_print = (indent_level is not None)
1187 s = []
1188 for c in self:
1189 text = None
1190 if isinstance(c, NavigableString):
1191 text = c.output_ready(formatter)
1192 elif isinstance(c, Tag):
1193 s.append(c.decode(indent_level, eventual_encoding,
1194 formatter))
1195 if text and indent_level and not self.name == 'pre':
1196 text = text.strip()
1197 if text:
1198 if pretty_print and not self.name == 'pre':
1199 s.append(" " * (indent_level - 1))
1200 s.append(text)
1201 if pretty_print and not self.name == 'pre':
1202 s.append("\n")
1203 return ''.join(s)
1204 1954
1205 def encode_contents( 1955 def encode_contents(
1206 self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING, 1956 self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING,
1207 formatter="minimal"): 1957 formatter="minimal"):
1208 """Renders the contents of this tag as a bytestring. 1958 """Renders the contents of this PageElement as a bytestring.
1209 1959
1210 :param indent_level: Each line of the rendering will be 1960 :param indent_level: Each line of the rendering will be
1211 indented this many spaces. 1961 indented this many levels. (The formatter decides what a
1962 'level' means in terms of spaces or other characters
1963 output.) Used internally in recursive calls while
1964 pretty-printing.
1212 1965
1213 :param eventual_encoding: The bytestring will be in this encoding. 1966 :param eventual_encoding: The bytestring will be in this encoding.
1214 1967
1215 :param formatter: The output formatter responsible for converting 1968 :param formatter: A Formatter object, or a string naming one of
1216 entities to Unicode characters. 1969 the standard Formatters.
1217 """
1218 1970
1971 :return: A bytestring.
1972 """
1219 contents = self.decode_contents(indent_level, encoding, formatter) 1973 contents = self.decode_contents(indent_level, encoding, formatter)
1220 return contents.encode(encoding) 1974 return contents.encode(encoding)
1221 1975
1222 # Old method for BS3 compatibility 1976 # Old method for BS3 compatibility
1223 def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, 1977 def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
1224 prettyPrint=False, indentLevel=0): 1978 prettyPrint=False, indentLevel=0):
1979 """Deprecated method for BS3 compatibility."""
1225 if not prettyPrint: 1980 if not prettyPrint:
1226 indentLevel = None 1981 indentLevel = None
1227 return self.encode_contents( 1982 return self.encode_contents(
@@ -1229,44 +1984,88 @@ class Tag(PageElement):
1229 1984
1230 #Soup methods 1985 #Soup methods
1231 1986
1232 def find(self, name=None, attrs={}, recursive=True, text=None, 1987 def find(self, name=None, attrs={}, recursive=True, string=None,
1233 **kwargs): 1988 **kwargs):
1234 """Return only the first child of this Tag matching the given 1989 """Look in the children of this PageElement and find the first
1235 criteria.""" 1990 PageElement that matches the given criteria.
1991
1992 All find_* methods take a common set of arguments. See the online
1993 documentation for detailed explanations.
1994
1995 :param name: A filter on tag name.
1996 :param attrs: A dictionary of filters on attribute values.
1997 :param recursive: If this is True, find() will perform a
1998 recursive search of this PageElement's children. Otherwise,
1999 only the direct children will be considered.
 2000 :param string: A filter for a NavigableString with specific text.
2001 :kwargs: A dictionary of filters on attribute values.
2002 :return: A PageElement.
2003 :rtype: bs4.element.Tag | bs4.element.NavigableString
2004 """
1236 r = None 2005 r = None
1237 l = self.find_all(name, attrs, recursive, text, 1, **kwargs) 2006 l = self.find_all(name, attrs, recursive, string, 1, _stacklevel=3,
2007 **kwargs)
1238 if l: 2008 if l:
1239 r = l[0] 2009 r = l[0]
1240 return r 2010 return r
1241 findChild = find 2011 findChild = find #BS2
1242 2012
1243 def find_all(self, name=None, attrs={}, recursive=True, text=None, 2013 def find_all(self, name=None, attrs={}, recursive=True, string=None,
1244 limit=None, **kwargs): 2014 limit=None, **kwargs):
1245 """Extracts a list of Tag objects that match the given 2015 """Look in the children of this PageElement and find all
1246 criteria. You can specify the name of the Tag and any 2016 PageElements that match the given criteria.
1247 attributes you want the Tag to have. 2017
1248 2018 All find_* methods take a common set of arguments. See the online
1249 The value of a key-value pair in the 'attrs' map can be a 2019 documentation for detailed explanations.
1250 string, a list of strings, a regular expression object, or a 2020
1251 callable that takes a string and returns whether or not the 2021 :param name: A filter on tag name.
1252 string matches for some custom definition of 'matches'. The 2022 :param attrs: A dictionary of filters on attribute values.
1253 same is true of the tag name.""" 2023 :param recursive: If this is True, find_all() will perform a
1254 2024 recursive search of this PageElement's children. Otherwise,
2025 only the direct children will be considered.
2026 :param limit: Stop looking after finding this many results.
2027 :kwargs: A dictionary of filters on attribute values.
2028 :return: A ResultSet of PageElements.
2029 :rtype: bs4.element.ResultSet
2030 """
1255 generator = self.descendants 2031 generator = self.descendants
1256 if not recursive: 2032 if not recursive:
1257 generator = self.children 2033 generator = self.children
1258 return self._find_all(name, attrs, text, limit, generator, **kwargs) 2034 _stacklevel = kwargs.pop('_stacklevel', 2)
2035 return self._find_all(name, attrs, string, limit, generator,
2036 _stacklevel=_stacklevel+1, **kwargs)
1259 findAll = find_all # BS3 2037 findAll = find_all # BS3
1260 findChildren = find_all # BS2 2038 findChildren = find_all # BS2
1261 2039
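A minimal usage sketch of the two finders (html_doc is a stand-in markup string):

    from bs4 import BeautifulSoup
    html_doc = "<body><p>a</p><div class='note'><a href='#x'>x</a></div></body>"
    soup = BeautifulSoup(html_doc, "html.parser")
    first_link = soup.find("a", href=True)                  # a Tag, or None
    notes = soup.find_all("div", class_="note", limit=5)    # a ResultSet
    top_level = soup.body.find_all("p", recursive=False)    # direct children only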
1262 #Generator methods 2040 #Generator methods
1263 @property 2041 @property
1264 def children(self): 2042 def children(self):
2043 """Iterate over all direct children of this PageElement.
2044
2045 :yield: A sequence of PageElements.
2046 """
1265 # return iter() to make the purpose of the method clear 2047 # return iter() to make the purpose of the method clear
1266 return iter(self.contents) # XXX This seems to be untested. 2048 return iter(self.contents) # XXX This seems to be untested.
1267 2049
1268 @property 2050 @property
2051 def self_and_descendants(self):
2052 """Iterate over this PageElement and its children in a
2053 breadth-first sequence.
2054
2055 :yield: A sequence of PageElements.
2056 """
2057 if not self.hidden:
2058 yield self
2059 for i in self.descendants:
2060 yield i
2061
2062 @property
1269 def descendants(self): 2063 def descendants(self):
2064 """Iterate over all children of this PageElement in a
 2065 depth-first sequence, i.e. document order.
2066
2067 :yield: A sequence of PageElements.
2068 """
1270 if not len(self.contents): 2069 if not len(self.contents):
1271 return 2070 return
1272 stopNode = self._last_descendant().next_element 2071 stopNode = self._last_descendant().next_element
@@ -1276,262 +2075,102 @@ class Tag(PageElement):
1276 current = current.next_element 2075 current = current.next_element
1277 2076
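The difference between the two generators, sketched:

    from bs4 import BeautifulSoup
    from bs4.element import Tag
    soup = BeautifulSoup("<div><p>hi</p></div>", "html.parser")
    as_name = lambda e: e.name if isinstance(e, Tag) else str(e)
    print([as_name(e) for e in soup.div.descendants])           # ['p', 'hi']
    print([as_name(e) for e in soup.div.self_and_descendants])  # ['div', 'p', 'hi']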
1278 # CSS selector code 2077 # CSS selector code
2078 def select_one(self, selector, namespaces=None, **kwargs):
2079 """Perform a CSS selection operation on the current element.
1279 2080
1280 _selector_combinators = ['>', '+', '~'] 2081 :param selector: A CSS selector.
1281 _select_debug = False
1282 def select_one(self, selector):
1283 """Perform a CSS selection operation on the current element."""
1284 value = self.select(selector, limit=1)
1285 if value:
1286 return value[0]
1287 return None
1288 2082
1289 def select(self, selector, _candidate_generator=None, limit=None): 2083 :param namespaces: A dictionary mapping namespace prefixes
1290 """Perform a CSS selection operation on the current element.""" 2084 used in the CSS selector to namespace URIs. By default,
1291 2085 Beautiful Soup will use the prefixes it encountered while
1292 # Handle grouping selectors if ',' exists, ie: p,a 2086 parsing the document.
1293 if ',' in selector:
1294 context = []
1295 for partial_selector in selector.split(','):
1296 partial_selector = partial_selector.strip()
1297 if partial_selector == '':
1298 raise ValueError('Invalid group selection syntax: %s' % selector)
1299 candidates = self.select(partial_selector, limit=limit)
1300 for candidate in candidates:
1301 if candidate not in context:
1302 context.append(candidate)
1303
1304 if limit and len(context) >= limit:
1305 break
1306 return context
1307 2087
1308 tokens = selector.split() 2088 :param kwargs: Keyword arguments to be passed into Soup Sieve's
1309 current_context = [self] 2089 soupsieve.select() method.
1310 2090
1311 if tokens[-1] in self._selector_combinators: 2091 :return: A Tag.
1312 raise ValueError( 2092 :rtype: bs4.element.Tag
1313 'Final combinator "%s" is missing an argument.' % tokens[-1]) 2093 """
2094 return self.css.select_one(selector, namespaces, **kwargs)
1314 2095
1315 if self._select_debug: 2096 def select(self, selector, namespaces=None, limit=None, **kwargs):
1316 print('Running CSS selector "%s"' % selector) 2097 """Perform a CSS selection operation on the current element.
1317 2098
1318 for index, token in enumerate(tokens): 2099 This uses the SoupSieve library.
1319 new_context = []
1320 new_context_ids = set([])
1321 2100
1322 if tokens[index-1] in self._selector_combinators: 2101 :param selector: A string containing a CSS selector.
1323 # This token was consumed by the previous combinator. Skip it.
1324 if self._select_debug:
1325 print(' Token was consumed by the previous combinator.')
1326 continue
1327 2102
1328 if self._select_debug: 2103 :param namespaces: A dictionary mapping namespace prefixes
1329 print(' Considering token "%s"' % token) 2104 used in the CSS selector to namespace URIs. By default,
1330 recursive_candidate_generator = None 2105 Beautiful Soup will use the prefixes it encountered while
1331 tag_name = None 2106 parsing the document.
1332 2107
1333 # Each operation corresponds to a checker function, a rule 2108 :param limit: After finding this number of results, stop looking.
1334 # for determining whether a candidate matches the 2109
1335 # selector. Candidates are generated by the active 2110 :param kwargs: Keyword arguments to be passed into SoupSieve's
1336 # iterator. 2111 soupsieve.select() method.
1337 checker = None 2112
1338 2113 :return: A ResultSet of Tags.
1339 m = self.attribselect_re.match(token) 2114 :rtype: bs4.element.ResultSet
1340 if m is not None: 2115 """
1341 # Attribute selector 2116 return self.css.select(selector, namespaces, limit, **kwargs)
1342 tag_name, attribute, operator, value = m.groups() 2117
1343 checker = self._attribute_checker(operator, attribute, value) 2118 @property
1344 2119 def css(self):
1345 elif '#' in token: 2120 """Return an interface to the CSS selector API."""
1346 # ID selector 2121 return CSS(self)
1347 tag_name, tag_id = token.split('#', 1)
1348 def id_matches(tag):
1349 return tag.get('id', None) == tag_id
1350 checker = id_matches
1351
1352 elif '.' in token:
1353 # Class selector
1354 tag_name, klass = token.split('.', 1)
1355 classes = set(klass.split('.'))
1356 def classes_match(candidate):
1357 return classes.issubset(candidate.get('class', []))
1358 checker = classes_match
1359
1360 elif ':' in token:
1361 # Pseudo-class
1362 tag_name, pseudo = token.split(':', 1)
1363 if tag_name == '':
1364 raise ValueError(
1365 "A pseudo-class must be prefixed with a tag name.")
1366 pseudo_attributes = re.match(r'([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo)
1367 found = []
1368 if pseudo_attributes is None:
1369 pseudo_type = pseudo
1370 pseudo_value = None
1371 else:
1372 pseudo_type, pseudo_value = pseudo_attributes.groups()
1373 if pseudo_type == 'nth-of-type':
1374 try:
1375 pseudo_value = int(pseudo_value)
1376 except:
1377 raise NotImplementedError(
1378 'Only numeric values are currently supported for the nth-of-type pseudo-class.')
1379 if pseudo_value < 1:
1380 raise ValueError(
1381 'nth-of-type pseudo-class value must be at least 1.')
1382 class Counter(object):
1383 def __init__(self, destination):
1384 self.count = 0
1385 self.destination = destination
1386
1387 def nth_child_of_type(self, tag):
1388 self.count += 1
1389 if self.count == self.destination:
1390 return True
1391 if self.count > self.destination:
1392 # Stop the generator that's sending us
1393 # these things.
1394 raise StopIteration()
1395 return False
1396 checker = Counter(pseudo_value).nth_child_of_type
1397 else:
1398 raise NotImplementedError(
1399 'Only the following pseudo-classes are implemented: nth-of-type.')
1400
1401 elif token == '*':
1402 # Star selector -- matches everything
1403 pass
1404 elif token == '>':
1405 # Run the next token as a CSS selector against the
1406 # direct children of each tag in the current context.
1407 recursive_candidate_generator = lambda tag: tag.children
1408 elif token == '~':
1409 # Run the next token as a CSS selector against the
1410 # siblings of each tag in the current context.
1411 recursive_candidate_generator = lambda tag: tag.next_siblings
1412 elif token == '+':
1413 # For each tag in the current context, run the next
1414 # token as a CSS selector against the tag's next
1415 # sibling that's a tag.
1416 def next_tag_sibling(tag):
1417 yield tag.find_next_sibling(True)
1418 recursive_candidate_generator = next_tag_sibling
1419
1420 elif self.tag_name_re.match(token):
1421 # Just a tag name.
1422 tag_name = token
1423 else:
1424 raise ValueError(
1425 'Unsupported or invalid CSS selector: "%s"' % token)
1426 if recursive_candidate_generator:
1427 # This happens when the selector looks like "> foo".
1428 #
1429 # The generator calls select() recursively on every
1430 # member of the current context, passing in a different
1431 # candidate generator and a different selector.
1432 #
1433 # In the case of "> foo", the candidate generator is
1434 # one that yields a tag's direct children (">"), and
1435 # the selector is "foo".
1436 next_token = tokens[index+1]
1437 def recursive_select(tag):
1438 if self._select_debug:
1439 print(' Calling select("%s") recursively on %s %s' % (next_token, tag.name, tag.attrs))
1440 print('-' * 40)
1441 for i in tag.select(next_token, recursive_candidate_generator):
1442 if self._select_debug:
1443 print('(Recursive select picked up candidate %s %s)' % (i.name, i.attrs))
1444 yield i
1445 if self._select_debug:
1446 print('-' * 40)
1447 _use_candidate_generator = recursive_select
1448 elif _candidate_generator is None:
1449 # By default, a tag's candidates are all of its
1450 # children. If tag_name is defined, only yield tags
1451 # with that name.
1452 if self._select_debug:
1453 if tag_name:
1454 check = "[any]"
1455 else:
1456 check = tag_name
1457 print(' Default candidate generator, tag name="%s"' % check)
1458 if self._select_debug:
1459 # This is redundant with later code, but it stops
1460 # a bunch of bogus tags from cluttering up the
1461 # debug log.
1462 def default_candidate_generator(tag):
1463 for child in tag.descendants:
1464 if not isinstance(child, Tag):
1465 continue
1466 if tag_name and not child.name == tag_name:
1467 continue
1468 yield child
1469 _use_candidate_generator = default_candidate_generator
1470 else:
1471 _use_candidate_generator = lambda tag: tag.descendants
1472 else:
1473 _use_candidate_generator = _candidate_generator
1474
1475 count = 0
1476 for tag in current_context:
1477 if self._select_debug:
1478 print(" Running candidate generator on %s %s" % (
1479 tag.name, repr(tag.attrs)))
1480 for candidate in _use_candidate_generator(tag):
1481 if not isinstance(candidate, Tag):
1482 continue
1483 if tag_name and candidate.name != tag_name:
1484 continue
1485 if checker is not None:
1486 try:
1487 result = checker(candidate)
1488 except StopIteration:
1489 # The checker has decided we should no longer
1490 # run the generator.
1491 break
1492 if checker is None or result:
1493 if self._select_debug:
1494 print(" SUCCESS %s %s" % (candidate.name, repr(candidate.attrs)))
1495 if id(candidate) not in new_context_ids:
1496 # If a tag matches a selector more than once,
1497 # don't include it in the context more than once.
1498 new_context.append(candidate)
1499 new_context_ids.add(id(candidate))
1500 if limit and len(new_context) >= limit:
1501 break
1502 elif self._select_debug:
1503 print(" FAILURE %s %s" % (candidate.name, repr(candidate.attrs)))
1504
1505
1506 current_context = new_context
1507
1508 if self._select_debug:
1509 print("Final verdict:")
1510 for i in current_context:
1511 print(" %s %s" % (i.name, i.attrs))
1512 return current_context
1513 2122
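A sketch of the SoupSieve-backed API that replaces the hand-rolled selector engine removed above (the soupsieve package must be importable):

    from bs4 import BeautifulSoup
    soup = BeautifulSoup("<div id='main'><p class='intro'>a</p><p>b</p></div>",
                         "html.parser")
    print(soup.select("div > p.intro"))               # ResultSet of Tags
    print(soup.select_one("#main p:nth-of-type(2)"))  # second <p>, or None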
1514 # Old names for backwards compatibility 2123 # Old names for backwards compatibility
1515 def childGenerator(self): 2124 def childGenerator(self):
2125 """Deprecated generator."""
1516 return self.children 2126 return self.children
1517 2127
1518 def recursiveChildGenerator(self): 2128 def recursiveChildGenerator(self):
2129 """Deprecated generator."""
1519 return self.descendants 2130 return self.descendants
1520 2131
1521 def has_key(self, key): 2132 def has_key(self, key):
1522 """This was kind of misleading because has_key() (attributes) 2133 """Deprecated method. This was kind of misleading because has_key()
1523 was different from __in__ (contents). has_key() is gone in 2134 (attributes) was different from __in__ (contents).
1524 Python 3, anyway.""" 2135
1525 warnings.warn('has_key is deprecated. Use has_attr("%s") instead.' % ( 2136 has_key() is gone in Python 3, anyway.
1526 key)) 2137 """
2138 warnings.warn(
2139 'has_key is deprecated. Use has_attr(key) instead.',
2140 DeprecationWarning, stacklevel=2
2141 )
1527 return self.has_attr(key) 2142 return self.has_attr(key)
1528 2143
1529# Next, a couple classes to represent queries and their results. 2144# Next, a couple classes to represent queries and their results.
1530class SoupStrainer(object): 2145class SoupStrainer(object):
1531 """Encapsulates a number of ways of matching a markup element (tag or 2146 """Encapsulates a number of ways of matching a markup element (tag or
1532 text).""" 2147 string).
2148
2149 This is primarily used to underpin the find_* methods, but you can
2150 create one yourself and pass it in as `parse_only` to the
2151 `BeautifulSoup` constructor, to parse a subset of a large
2152 document.
2153 """
2154
2155 def __init__(self, name=None, attrs={}, string=None, **kwargs):
2156 """Constructor.
2157
2158 The SoupStrainer constructor takes the same arguments passed
2159 into the find_* methods. See the online documentation for
2160 detailed explanations.
2161
2162 :param name: A filter on tag name.
2163 :param attrs: A dictionary of filters on attribute values.
2164 :param string: A filter for a NavigableString with specific text.
2165 :kwargs: A dictionary of filters on attribute values.
2166 """
2167 if string is None and 'text' in kwargs:
2168 string = kwargs.pop('text')
2169 warnings.warn(
2170 "The 'text' argument to the SoupStrainer constructor is deprecated. Use 'string' instead.",
2171 DeprecationWarning, stacklevel=2
2172 )
1533 2173
1534 def __init__(self, name=None, attrs={}, text=None, **kwargs):
1535 self.name = self._normalize_search_value(name) 2174 self.name = self._normalize_search_value(name)
1536 if not isinstance(attrs, dict): 2175 if not isinstance(attrs, dict):
1537 # Treat a non-dict value for attrs as a search for the 'class' 2176 # Treat a non-dict value for attrs as a search for the 'class'
@@ -1556,12 +2195,15 @@ class SoupStrainer(object):
1556 normalized_attrs[key] = self._normalize_search_value(value) 2195 normalized_attrs[key] = self._normalize_search_value(value)
1557 2196
1558 self.attrs = normalized_attrs 2197 self.attrs = normalized_attrs
1559 self.text = self._normalize_search_value(text) 2198 self.string = self._normalize_search_value(string)
2199
2200 # DEPRECATED but just in case someone is checking this.
2201 self.text = self.string
1560 2202
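As the class docstring notes, the main external use is parse_only; a minimal sketch:

    from bs4 import BeautifulSoup, SoupStrainer
    only_links = SoupStrainer("a", href=True)
    soup = BeautifulSoup("<p>x <a href='#1'>one</a> <b>y</b></p>",
                         "html.parser", parse_only=only_links)
    print(soup)   # only the <a href='#1'>one</a> element was kept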
1561 def _normalize_search_value(self, value): 2203 def _normalize_search_value(self, value):
1562 # Leave it alone if it's a Unicode string, a callable, a 2204 # Leave it alone if it's a Unicode string, a callable, a
1563 # regular expression, a boolean, or None. 2205 # regular expression, a boolean, or None.
1564 if (isinstance(value, str) or isinstance(value, collections.abc.Callable) or hasattr(value, 'match') 2206 if (isinstance(value, str) or isinstance(value, Callable) or hasattr(value, 'match')
1565 or isinstance(value, bool) or value is None): 2207 or isinstance(value, bool) or value is None):
1566 return value 2208 return value
1567 2209
@@ -1589,19 +2231,40 @@ class SoupStrainer(object):
1589 return str(str(value)) 2231 return str(str(value))
1590 2232
1591 def __str__(self): 2233 def __str__(self):
1592 if self.text: 2234 """A human-readable representation of this SoupStrainer."""
1593 return self.text 2235 if self.string:
2236 return self.string
1594 else: 2237 else:
1595 return "%s|%s" % (self.name, self.attrs) 2238 return "%s|%s" % (self.name, self.attrs)
1596 2239
1597 def search_tag(self, markup_name=None, markup_attrs={}): 2240 def search_tag(self, markup_name=None, markup_attrs={}):
2241 """Check whether a Tag with the given name and attributes would
2242 match this SoupStrainer.
2243
2244 Used prospectively to decide whether to even bother creating a Tag
2245 object.
2246
2247 :param markup_name: A tag name as found in some markup.
2248 :param markup_attrs: A dictionary of attributes as found in some markup.
2249
2250 :return: True if the prospective tag would match this SoupStrainer;
2251 False otherwise.
2252 """
1598 found = None 2253 found = None
1599 markup = None 2254 markup = None
1600 if isinstance(markup_name, Tag): 2255 if isinstance(markup_name, Tag):
1601 markup = markup_name 2256 markup = markup_name
1602 markup_attrs = markup 2257 markup_attrs = markup
2258
2259 if isinstance(self.name, str):
2260 # Optimization for a very common case where the user is
2261 # searching for a tag with one specific name, and we're
2262 # looking at a tag with a different name.
2263 if markup and not markup.prefix and self.name != markup.name:
2264 return False
2265
1603 call_function_with_tag_data = ( 2266 call_function_with_tag_data = (
1604 isinstance(self.name, collections.abc.Callable) 2267 isinstance(self.name, Callable)
1605 and not isinstance(markup_name, Tag)) 2268 and not isinstance(markup_name, Tag))
1606 2269
1607 if ((not self.name) 2270 if ((not self.name)
@@ -1630,13 +2293,22 @@ class SoupStrainer(object):
1630 found = markup 2293 found = markup
1631 else: 2294 else:
1632 found = markup_name 2295 found = markup_name
1633 if found and self.text and not self._matches(found.string, self.text): 2296 if found and self.string and not self._matches(found.string, self.string):
1634 found = None 2297 found = None
1635 return found 2298 return found
2299
2300 # For BS3 compatibility.
1636 searchTag = search_tag 2301 searchTag = search_tag
1637 2302
1638 def search(self, markup): 2303 def search(self, markup):
1639 # print 'looking for %s in %s' % (self, markup) 2304 """Find all items in `markup` that match this SoupStrainer.
2305
2306 Used by the core _find_all() method, which is ultimately
2307 called by all find_* methods.
2308
2309 :param markup: A PageElement or a list of them.
2310 """
2311 # print('looking for %s in %s' % (self, markup))
1640 found = None 2312 found = None
1641 # If given a list of items, scan it for a text element that 2313 # If given a list of items, scan it for a text element that
1642 # matches. 2314 # matches.
@@ -1649,49 +2321,44 @@ class SoupStrainer(object):
1649 # If it's a Tag, make sure its name or attributes match. 2321 # If it's a Tag, make sure its name or attributes match.
1650 # Don't bother with Tags if we're searching for text. 2322 # Don't bother with Tags if we're searching for text.
1651 elif isinstance(markup, Tag): 2323 elif isinstance(markup, Tag):
1652 if not self.text or self.name or self.attrs: 2324 if not self.string or self.name or self.attrs:
1653 found = self.search_tag(markup) 2325 found = self.search_tag(markup)
1654 # If it's text, make sure the text matches. 2326 # If it's text, make sure the text matches.
1655 elif isinstance(markup, NavigableString) or \ 2327 elif isinstance(markup, NavigableString) or \
1656 isinstance(markup, str): 2328 isinstance(markup, str):
1657 if not self.name and not self.attrs and self._matches(markup, self.text): 2329 if not self.name and not self.attrs and self._matches(markup, self.string):
1658 found = markup 2330 found = markup
1659 else: 2331 else:
1660 raise Exception( 2332 raise Exception(
1661 "I don't know how to match against a %s" % markup.__class__) 2333 "I don't know how to match against a %s" % markup.__class__)
1662 return found 2334 return found
1663 2335
1664 def _matches(self, markup, match_against): 2336 def _matches(self, markup, match_against, already_tried=None):
1665 # print u"Matching %s against %s" % (markup, match_against) 2337 # print(u"Matching %s against %s" % (markup, match_against))
1666 result = False 2338 result = False
1667 if isinstance(markup, list) or isinstance(markup, tuple): 2339 if isinstance(markup, list) or isinstance(markup, tuple):
1668 # This should only happen when searching a multi-valued attribute 2340 # This should only happen when searching a multi-valued attribute
1669 # like 'class'. 2341 # like 'class'.
1670 if (isinstance(match_against, str) 2342 for item in markup:
1671 and ' ' in match_against): 2343 if self._matches(item, match_against):
1672 # A bit of a special case. If they try to match "foo 2344 return True
1673 # bar" on a multivalue attribute's value, only accept 2345 # We didn't match any particular value of the multivalue
1674 # the literal value "foo bar" 2346 # attribute, but maybe we match the attribute value when
1675 # 2347 # considered as a string.
1676 # XXX This is going to be pretty slow because we keep 2348 if self._matches(' '.join(markup), match_against):
1677 # splitting match_against. But it shouldn't come up 2349 return True
1678 # too often. 2350 return False
1679 return (whitespace_re.split(match_against) == markup)
1680 else:
1681 for item in markup:
1682 if self._matches(item, match_against):
1683 return True
1684 return False
1685 2351
1686 if match_against is True: 2352 if match_against is True:
1687 # True matches any non-None value. 2353 # True matches any non-None value.
1688 return markup is not None 2354 return markup is not None
1689 2355
1690 if isinstance(match_against, collections.abc.Callable): 2356 if isinstance(match_against, Callable):
1691 return match_against(markup) 2357 return match_against(markup)
1692 2358
1693 # Custom callables take the tag as an argument, but all 2359 # Custom callables take the tag as an argument, but all
1694 # other ways of matching match the tag name as a string. 2360 # other ways of matching match the tag name as a string.
2361 original_markup = markup
1695 if isinstance(markup, Tag): 2362 if isinstance(markup, Tag):
1696 markup = markup.name 2363 markup = markup.name
1697 2364
@@ -1702,23 +2369,67 @@ class SoupStrainer(object):
1702 # None matches None, False, an empty string, an empty list, and so on. 2369 # None matches None, False, an empty string, an empty list, and so on.
1703 return not match_against 2370 return not match_against
1704 2371
1705 if isinstance(match_against, str): 2372 if (hasattr(match_against, '__iter__')
2373 and not isinstance(match_against, str)):
2374 # We're asked to match against an iterable of items.
 2375 # The markup must match at least one item in the
2376 # iterable. We'll try each one in turn.
2377 #
2378 # To avoid infinite recursion we need to keep track of
2379 # items we've already seen.
2380 if not already_tried:
2381 already_tried = set()
2382 for item in match_against:
2383 if item.__hash__:
2384 key = item
2385 else:
2386 key = id(item)
2387 if key in already_tried:
2388 continue
2389 else:
2390 already_tried.add(key)
2391 if self._matches(original_markup, item, already_tried):
2392 return True
2393 else:
2394 return False
2395
2396 # Beyond this point we might need to run the test twice: once against
2397 # the tag's name and once against its prefixed name.
2398 match = False
2399
2400 if not match and isinstance(match_against, str):
1706 # Exact string match 2401 # Exact string match
1707 return markup == match_against 2402 match = markup == match_against
1708 2403
1709 if hasattr(match_against, 'match'): 2404 if not match and hasattr(match_against, 'search'):
1710 # Regexp match 2405 # Regexp match
1711 return match_against.search(markup) 2406 return match_against.search(markup)
1712 2407
1713 if hasattr(match_against, '__iter__'): 2408 if (not match
1714 # The markup must be an exact match against something 2409 and isinstance(original_markup, Tag)
1715 # in the iterable. 2410 and original_markup.prefix):
1716 return markup in match_against 2411 # Try the whole thing again with the prefixed tag name.
2412 return self._matches(
2413 original_markup.prefix + ':' + original_markup.name, match_against
2414 )
2415
2416 return match
1717 2417
1718 2418
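The branches above cover the value types accepted by every find_* filter; in user code they look like this:

    import re
    from bs4 import BeautifulSoup
    soup = BeautifulSoup("<h1>t</h1><a id='x'>l</a><img/>", "html.parser")
    soup.find_all("a")                              # exact string match
    soup.find_all(re.compile("^h[1-6]$"))           # regexp, matched with .search()
    soup.find_all(["a", "img"])                     # iterable: match any item
    soup.find_all(lambda tag: tag.has_attr("id"))   # callable taking the Tag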
1719class ResultSet(list): 2419class ResultSet(list):
1720 """A ResultSet is just a list that keeps track of the SoupStrainer 2420 """A ResultSet is just a list that keeps track of the SoupStrainer
1721 that created it.""" 2421 that created it."""
1722 def __init__(self, source, result=()): 2422 def __init__(self, source, result=()):
2423 """Constructor.
2424
2425 :param source: A SoupStrainer.
2426 :param result: A list of PageElements.
2427 """
1723 super(ResultSet, self).__init__(result) 2428 super(ResultSet, self).__init__(result)
1724 self.source = source 2429 self.source = source
2430
2431 def __getattr__(self, key):
2432 """Raise a helpful exception to explain a common code fix."""
2433 raise AttributeError(
2434 "ResultSet object has no attribute '%s'. You're probably treating a list of elements like a single element. Did you call find_all() when you meant to call find()?" % key
2435 )
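The new __getattr__ turns a very common mistake into an actionable message; with the soup object from any of the sketches above:

    links = soup.find_all("a")
    links.get("href")      # raises the AttributeError defined above
    links[0].get("href")   # what the caller usually meant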
diff --git a/bitbake/lib/bs4/formatter.py b/bitbake/lib/bs4/formatter.py
new file mode 100644
index 0000000000..9fa1b57cb6
--- /dev/null
+++ b/bitbake/lib/bs4/formatter.py
@@ -0,0 +1,185 @@
1from bs4.dammit import EntitySubstitution
2
3class Formatter(EntitySubstitution):
4 """Describes a strategy to use when outputting a parse tree to a string.
5
6 Some parts of this strategy come from the distinction between
7 HTML4, HTML5, and XML. Others are configurable by the user.
8
9 Formatters are passed in as the `formatter` argument to methods
10 like `PageElement.encode`. Most people won't need to think about
11 formatters, and most people who need to think about them can pass
12 in one of these predefined strings as `formatter` rather than
13 making a new Formatter object:
14
15 For HTML documents:
16 * 'html' - HTML entity substitution for generic HTML documents. (default)
17 * 'html5' - HTML entity substitution for HTML5 documents, as
18 well as some optimizations in the way tags are rendered.
19 * 'minimal' - Only make the substitutions necessary to guarantee
20 valid HTML.
21 * None - Do not perform any substitution. This will be faster
22 but may result in invalid markup.
23
24 For XML documents:
25 * 'html' - Entity substitution for XHTML documents.
26 * 'minimal' - Only make the substitutions necessary to guarantee
27 valid XML. (default)
28 * None - Do not perform any substitution. This will be faster
29 but may result in invalid markup.
30 """
31 # Registries of XML and HTML formatters.
32 XML_FORMATTERS = {}
33 HTML_FORMATTERS = {}
34
35 HTML = 'html'
36 XML = 'xml'
37
38 HTML_DEFAULTS = dict(
39 cdata_containing_tags=set(["script", "style"]),
40 )
41
42 def _default(self, language, value, kwarg):
43 if value is not None:
44 return value
45 if language == self.XML:
46 return set()
47 return self.HTML_DEFAULTS[kwarg]
48
49 def __init__(
50 self, language=None, entity_substitution=None,
51 void_element_close_prefix='/', cdata_containing_tags=None,
52 empty_attributes_are_booleans=False, indent=1,
53 ):
54 r"""Constructor.
55
56 :param language: This should be Formatter.XML if you are formatting
57 XML markup and Formatter.HTML if you are formatting HTML markup.
58
59 :param entity_substitution: A function to call to replace special
60 characters with XML/HTML entities. For examples, see
61 bs4.dammit.EntitySubstitution.substitute_html and substitute_xml.
62 :param void_element_close_prefix: By default, void elements
63 are represented as <tag/> (XML rules) rather than <tag>
64 (HTML rules). To get <tag>, pass in the empty string.
65 :param cdata_containing_tags: The list of tags that are defined
66 as containing CDATA in this dialect. For example, in HTML,
67 <script> and <style> tags are defined as containing CDATA,
68 and their contents should not be formatted.
 69 :param empty_attributes_are_booleans: Render attributes whose value
70 is the empty string as HTML-style boolean attributes.
71 (Attributes whose value is None are always rendered this way.)
72
73 :param indent: If indent is a non-negative integer or string,
74 then the contents of elements will be indented
75 appropriately when pretty-printing. An indent level of 0,
76 negative, or "" will only insert newlines. Using a
77 positive integer indent indents that many spaces per
78 level. If indent is a string (such as "\t"), that string
79 is used to indent each level. The default behavior is to
80 indent one space per level.
81 """
82 self.language = language
83 self.entity_substitution = entity_substitution
84 self.void_element_close_prefix = void_element_close_prefix
85 self.cdata_containing_tags = self._default(
86 language, cdata_containing_tags, 'cdata_containing_tags'
87 )
88 self.empty_attributes_are_booleans=empty_attributes_are_booleans
89 if indent is None:
90 indent = 0
91 if isinstance(indent, int):
92 if indent < 0:
93 indent = 0
94 indent = ' ' * indent
95 elif isinstance(indent, str):
96 indent = indent
97 else:
98 indent = ' '
99 self.indent = indent
100
101 def substitute(self, ns):
102 """Process a string that needs to undergo entity substitution.
103 This may be a string encountered in an attribute value or as
104 text.
105
106 :param ns: A string.
107 :return: A string with certain characters replaced by named
108 or numeric entities.
109 """
110 if not self.entity_substitution:
111 return ns
112 from .element import NavigableString
113 if (isinstance(ns, NavigableString)
114 and ns.parent is not None
115 and ns.parent.name in self.cdata_containing_tags):
116 # Do nothing.
117 return ns
118 # Substitute.
119 return self.entity_substitution(ns)
120
121 def attribute_value(self, value):
122 """Process the value of an attribute.
123
 124 :param value: A string.
125 :return: A string with certain characters replaced by named
126 or numeric entities.
127 """
128 return self.substitute(value)
129
130 def attributes(self, tag):
131 """Reorder a tag's attributes however you want.
132
133 By default, attributes are sorted alphabetically. This makes
134 behavior consistent between Python 2 and Python 3, and preserves
135 backwards compatibility with older versions of Beautiful Soup.
136
 137 If `empty_attributes_are_booleans` is True, then attributes whose
138 values are set to the empty string will be treated as boolean
139 attributes.
140 """
141 if tag.attrs is None:
142 return []
143 return sorted(
144 (k, (None if self.empty_attributes_are_booleans and v == '' else v))
145 for k, v in list(tag.attrs.items())
146 )
147
148class HTMLFormatter(Formatter):
149 """A generic Formatter for HTML."""
150 REGISTRY = {}
151 def __init__(self, *args, **kwargs):
152 super(HTMLFormatter, self).__init__(self.HTML, *args, **kwargs)
153
154
155class XMLFormatter(Formatter):
156 """A generic Formatter for XML."""
157 REGISTRY = {}
158 def __init__(self, *args, **kwargs):
159 super(XMLFormatter, self).__init__(self.XML, *args, **kwargs)
160
161
162# Set up aliases for the default formatters.
163HTMLFormatter.REGISTRY['html'] = HTMLFormatter(
164 entity_substitution=EntitySubstitution.substitute_html
165)
166HTMLFormatter.REGISTRY["html5"] = HTMLFormatter(
167 entity_substitution=EntitySubstitution.substitute_html,
168 void_element_close_prefix=None,
169 empty_attributes_are_booleans=True,
170)
171HTMLFormatter.REGISTRY["minimal"] = HTMLFormatter(
172 entity_substitution=EntitySubstitution.substitute_xml
173)
174HTMLFormatter.REGISTRY[None] = HTMLFormatter(
175 entity_substitution=None
176)
177XMLFormatter.REGISTRY["html"] = XMLFormatter(
178 entity_substitution=EntitySubstitution.substitute_html
179)
180XMLFormatter.REGISTRY["minimal"] = XMLFormatter(
181 entity_substitution=EntitySubstitution.substitute_xml
182)
 183XMLFormatter.REGISTRY[None] = XMLFormatter(
 184 entity_substitution=None
 185)
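Beyond the predefined registry entries, callers can hand a Formatter instance directly to the rendering methods; the subclass below is hypothetical user code, not part of this file:

    from bs4 import BeautifulSoup
    from bs4.formatter import HTMLFormatter

    class UnsortedAttributes(HTMLFormatter):
        def attributes(self, tag):
            # Keep source order instead of the default alphabetical sort.
            return list(tag.attrs.items())

    soup = BeautifulSoup('<p z="1" a="2">hi</p>', "html.parser")
    print(soup.p.encode(formatter=UnsortedAttributes()))  # attributes in source order
    print(soup.prettify(formatter="html5"))               # registry lookup by name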
diff --git a/bitbake/lib/bs4/testing.py b/bitbake/lib/bs4/testing.py
deleted file mode 100644
index 6584ecf303..0000000000
--- a/bitbake/lib/bs4/testing.py
+++ /dev/null
@@ -1,686 +0,0 @@
1"""Helper classes for tests."""
2
3__license__ = "MIT"
4
5import pickle
6import copy
7import unittest
8from unittest import TestCase
9from bs4 import BeautifulSoup
10from bs4.element import (
11 CharsetMetaAttributeValue,
12 Comment,
13 ContentMetaAttributeValue,
14 Doctype,
15 SoupStrainer,
16)
17
18from bs4.builder._htmlparser import HTMLParserTreeBuilder
19default_builder = HTMLParserTreeBuilder
20
21
22class SoupTest(unittest.TestCase):
23
24 @property
25 def default_builder(self):
26 return default_builder()
27
28 def soup(self, markup, **kwargs):
29 """Build a Beautiful Soup object from markup."""
30 builder = kwargs.pop('builder', self.default_builder)
31 return BeautifulSoup(markup, builder=builder, **kwargs)
32
33 def document_for(self, markup):
34 """Turn an HTML fragment into a document.
35
36 The details depend on the builder.
37 """
38 return self.default_builder.test_fragment_to_document(markup)
39
40 def assertSoupEquals(self, to_parse, compare_parsed_to=None):
41 builder = self.default_builder
42 obj = BeautifulSoup(to_parse, builder=builder)
43 if compare_parsed_to is None:
44 compare_parsed_to = to_parse
45
46 self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))
47
48 def assertConnectedness(self, element):
49 """Ensure that next_element and previous_element are properly
50 set for all descendants of the given element.
51 """
52 earlier = None
53 for e in element.descendants:
54 if earlier:
55 self.assertEqual(e, earlier.next_element)
56 self.assertEqual(earlier, e.previous_element)
57 earlier = e
58
59class HTMLTreeBuilderSmokeTest(SoupTest):
60
61 """A basic test of a treebuilder's competence.
62
63 Any HTML treebuilder, present or future, should be able to pass
64 these tests. With invalid markup, there's room for interpretation,
65 and different parsers can handle it differently. But with the
66 markup in these tests, there's not much room for interpretation.
67 """
68
69 def test_pickle_and_unpickle_identity(self):
70 # Pickling a tree, then unpickling it, yields a tree identical
71 # to the original.
72 tree = self.soup("<a><b>foo</a>")
73 dumped = pickle.dumps(tree, 2)
74 loaded = pickle.loads(dumped)
75 self.assertEqual(loaded.__class__, BeautifulSoup)
76 self.assertEqual(loaded.decode(), tree.decode())
77
78 def assertDoctypeHandled(self, doctype_fragment):
79 """Assert that a given doctype string is handled correctly."""
80 doctype_str, soup = self._document_with_doctype(doctype_fragment)
81
82 # Make sure a Doctype object was created.
83 doctype = soup.contents[0]
84 self.assertEqual(doctype.__class__, Doctype)
85 self.assertEqual(doctype, doctype_fragment)
86 self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)
87
88 # Make sure that the doctype was correctly associated with the
89 # parse tree and that the rest of the document parsed.
90 self.assertEqual(soup.p.contents[0], 'foo')
91
92 def _document_with_doctype(self, doctype_fragment):
93 """Generate and parse a document with the given doctype."""
94 doctype = '<!DOCTYPE %s>' % doctype_fragment
95 markup = doctype + '\n<p>foo</p>'
96 soup = self.soup(markup)
97 return doctype, soup
98
99 def test_normal_doctypes(self):
100 """Make sure normal, everyday HTML doctypes are handled correctly."""
101 self.assertDoctypeHandled("html")
102 self.assertDoctypeHandled(
103 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')
104
105 def test_empty_doctype(self):
106 soup = self.soup("<!DOCTYPE>")
107 doctype = soup.contents[0]
108 self.assertEqual("", doctype.strip())
109
110 def test_public_doctype_with_url(self):
111 doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
112 self.assertDoctypeHandled(doctype)
113
114 def test_system_doctype(self):
115 self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')
116
117 def test_namespaced_system_doctype(self):
118 # We can handle a namespaced doctype with a system ID.
119 self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')
120
121 def test_namespaced_public_doctype(self):
122 # Test a namespaced doctype with a public id.
123 self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')
124
125 def test_real_xhtml_document(self):
126 """A real XHTML document should come out more or less the same as it went in."""
127 markup = b"""<?xml version="1.0" encoding="utf-8"?>
128<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
129<html xmlns="http://www.w3.org/1999/xhtml">
130<head><title>Hello.</title></head>
131<body>Goodbye.</body>
132</html>"""
133 soup = self.soup(markup)
134 self.assertEqual(
135 soup.encode("utf-8").replace(b"\n", b""),
136 markup.replace(b"\n", b""))
137
138 def test_processing_instruction(self):
139 markup = b"""<?PITarget PIContent?>"""
140 soup = self.soup(markup)
141 self.assertEqual(markup, soup.encode("utf8"))
142
143 def test_deepcopy(self):
144 """Make sure you can copy the tree builder.
145
146 This is important because the builder is part of a
147 BeautifulSoup object, and we want to be able to copy that.
148 """
149 copy.deepcopy(self.default_builder)
150
151 def test_p_tag_is_never_empty_element(self):
152 """A <p> tag is never designated as an empty-element tag.
153
154 Even if the markup shows it as an empty-element tag, it
155 shouldn't be presented that way.
156 """
157 soup = self.soup("<p/>")
158 self.assertFalse(soup.p.is_empty_element)
159 self.assertEqual(str(soup.p), "<p></p>")
160
161 def test_unclosed_tags_get_closed(self):
162 """A tag that's not closed by the end of the document should be closed.
163
164 This applies to all tags except empty-element tags.
165 """
166 self.assertSoupEquals("<p>", "<p></p>")
167 self.assertSoupEquals("<b>", "<b></b>")
168
169 self.assertSoupEquals("<br>", "<br/>")
170
171 def test_br_is_always_empty_element_tag(self):
172 """A <br> tag is designated as an empty-element tag.
173
174 Some parsers treat <br></br> as one <br/> tag, some parsers as
175 two tags, but it should always be an empty-element tag.
176 """
177 soup = self.soup("<br></br>")
178 self.assertTrue(soup.br.is_empty_element)
179 self.assertEqual(str(soup.br), "<br/>")
180
181 def test_nested_formatting_elements(self):
182 self.assertSoupEquals("<em><em></em></em>")
183
184 def test_double_head(self):
185 html = '''<!DOCTYPE html>
186<html>
187<head>
188<title>Ordinary HEAD element test</title>
189</head>
190<script type="text/javascript">
191alert("Help!");
192</script>
193<body>
194Hello, world!
195</body>
196</html>
197'''
198 soup = self.soup(html)
199 self.assertEqual("text/javascript", soup.find('script')['type'])
200
201 def test_comment(self):
202 # Comments are represented as Comment objects.
203 markup = "<p>foo<!--foobar-->baz</p>"
204 self.assertSoupEquals(markup)
205
206 soup = self.soup(markup)
207 comment = soup.find(text="foobar")
208 self.assertEqual(comment.__class__, Comment)
209
210 # The comment is properly integrated into the tree.
211 foo = soup.find(text="foo")
212 self.assertEqual(comment, foo.next_element)
213 baz = soup.find(text="baz")
214 self.assertEqual(comment, baz.previous_element)
215
216 def test_preserved_whitespace_in_pre_and_textarea(self):
217 """Whitespace must be preserved in <pre> and <textarea> tags."""
218 self.assertSoupEquals("<pre> </pre>")
219 self.assertSoupEquals("<textarea> woo </textarea>")
220
221 def test_nested_inline_elements(self):
222 """Inline elements can be nested indefinitely."""
223 b_tag = "<b>Inside a B tag</b>"
224 self.assertSoupEquals(b_tag)
225
226 nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
227 self.assertSoupEquals(nested_b_tag)
228
229 double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
230        self.assertSoupEquals(double_nested_b_tag)
231
232 def test_nested_block_level_elements(self):
233 """Block elements can be nested."""
234 soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
235 blockquote = soup.blockquote
236 self.assertEqual(blockquote.p.b.string, 'Foo')
237 self.assertEqual(blockquote.b.string, 'Foo')
238
239 def test_correctly_nested_tables(self):
240 """One table can go inside another one."""
241 markup = ('<table id="1">'
242 '<tr>'
243 "<td>Here's another table:"
244 '<table id="2">'
245 '<tr><td>foo</td></tr>'
246 '</table></td>')
247
248 self.assertSoupEquals(
249 markup,
250 '<table id="1"><tr><td>Here\'s another table:'
251 '<table id="2"><tr><td>foo</td></tr></table>'
252 '</td></tr></table>')
253
254 self.assertSoupEquals(
255 "<table><thead><tr><td>Foo</td></tr></thead>"
256 "<tbody><tr><td>Bar</td></tr></tbody>"
257 "<tfoot><tr><td>Baz</td></tr></tfoot></table>")
258
259 def test_deeply_nested_multivalued_attribute(self):
260 # html5lib can set the attributes of the same tag many times
261 # as it rearranges the tree. This has caused problems with
262 # multivalued attributes.
263 markup = '<table><div><div class="css"></div></div></table>'
264 soup = self.soup(markup)
265 self.assertEqual(["css"], soup.div.div['class'])
266
267 def test_multivalued_attribute_on_html(self):
268        # html5lib uses a different API to set the attributes of the
269 # <html> tag. This has caused problems with multivalued
270 # attributes.
271 markup = '<html class="a b"></html>'
272 soup = self.soup(markup)
273 self.assertEqual(["a", "b"], soup.html['class'])
274
275 def test_angle_brackets_in_attribute_values_are_escaped(self):
276 self.assertSoupEquals('<a b="<a>"></a>', '<a b="&lt;a&gt;"></a>')
277
278 def test_entities_in_attributes_converted_to_unicode(self):
279 expect = '<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
280 self.assertSoupEquals('<p id="pi&#241;ata"></p>', expect)
281 self.assertSoupEquals('<p id="pi&#xf1;ata"></p>', expect)
282 self.assertSoupEquals('<p id="pi&#Xf1;ata"></p>', expect)
283 self.assertSoupEquals('<p id="pi&ntilde;ata"></p>', expect)
284
285 def test_entities_in_text_converted_to_unicode(self):
286 expect = '<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
287 self.assertSoupEquals("<p>pi&#241;ata</p>", expect)
288 self.assertSoupEquals("<p>pi&#xf1;ata</p>", expect)
289 self.assertSoupEquals("<p>pi&#Xf1;ata</p>", expect)
290 self.assertSoupEquals("<p>pi&ntilde;ata</p>", expect)
291
292 def test_quot_entity_converted_to_quotation_mark(self):
293 self.assertSoupEquals("<p>I said &quot;good day!&quot;</p>",
294 '<p>I said "good day!"</p>')
295
296 def test_out_of_range_entity(self):
297 expect = "\N{REPLACEMENT CHARACTER}"
298 self.assertSoupEquals("&#10000000000000;", expect)
299 self.assertSoupEquals("&#x10000000000000;", expect)
300 self.assertSoupEquals("&#1000000000;", expect)
301
302 def test_multipart_strings(self):
303 "Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
304 soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
305 self.assertEqual("p", soup.h2.string.next_element.name)
306 self.assertEqual("p", soup.p.name)
307 self.assertConnectedness(soup)
308
309 def test_head_tag_between_head_and_body(self):
310 "Prevent recurrence of a bug in the html5lib treebuilder."
311 content = """<html><head></head>
312 <link></link>
313 <body>foo</body>
314</html>
315"""
316 soup = self.soup(content)
317 self.assertNotEqual(None, soup.html.body)
318 self.assertConnectedness(soup)
319
320 def test_multiple_copies_of_a_tag(self):
321 "Prevent recurrence of a bug in the html5lib treebuilder."
322 content = """<!DOCTYPE html>
323<html>
324 <body>
325 <article id="a" >
326 <div><a href="1"></div>
327 <footer>
328 <a href="2"></a>
329 </footer>
330 </article>
331 </body>
332</html>
333"""
334 soup = self.soup(content)
335 self.assertConnectedness(soup.article)
336
337 def test_basic_namespaces(self):
338 """Parsers don't need to *understand* namespaces, but at the
339 very least they should not choke on namespaces or lose
340 data."""
341
342 markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
343 soup = self.soup(markup)
344 self.assertEqual(markup, soup.encode())
345 html = soup.html
346 self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
347 self.assertEqual(
348 'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
349 self.assertEqual(
350 'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])
351
352 def test_multivalued_attribute_value_becomes_list(self):
353 markup = b'<a class="foo bar">'
354 soup = self.soup(markup)
355 self.assertEqual(['foo', 'bar'], soup.a['class'])
356
357 #
358 # Generally speaking, tests below this point are more tests of
359 # Beautiful Soup than tests of the tree builders. But parsers are
360 # weird, so we run these tests separately for every tree builder
361 # to detect any differences between them.
362 #
363
364 def test_can_parse_unicode_document(self):
365 # A seemingly innocuous document... but it's in Unicode! And
366 # it contains characters that can't be represented in the
367 # encoding found in the declaration! The horror!
368 markup = '<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
369 soup = self.soup(markup)
370 self.assertEqual('Sacr\xe9 bleu!', soup.body.string)
371
372 def test_soupstrainer(self):
373 """Parsers should be able to work with SoupStrainers."""
374 strainer = SoupStrainer("b")
375 soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
376 parse_only=strainer)
377 self.assertEqual(soup.decode(), "<b>bold</b>")
378
379 def test_single_quote_attribute_values_become_double_quotes(self):
380 self.assertSoupEquals("<foo attr='bar'></foo>",
381 '<foo attr="bar"></foo>')
382
383 def test_attribute_values_with_nested_quotes_are_left_alone(self):
384 text = """<foo attr='bar "brawls" happen'>a</foo>"""
385 self.assertSoupEquals(text)
386
387 def test_attribute_values_with_double_nested_quotes_get_quoted(self):
388 text = """<foo attr='bar "brawls" happen'>a</foo>"""
389 soup = self.soup(text)
390 soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
391 self.assertSoupEquals(
392 soup.foo.decode(),
393 """<foo attr="Brawls happen at &quot;Bob\'s Bar&quot;">a</foo>""")
394
395 def test_ampersand_in_attribute_value_gets_escaped(self):
396 self.assertSoupEquals('<this is="really messed up & stuff"></this>',
397 '<this is="really messed up &amp; stuff"></this>')
398
399 self.assertSoupEquals(
400 '<a href="http://example.org?a=1&b=2;3">foo</a>',
401 '<a href="http://example.org?a=1&amp;b=2;3">foo</a>')
402
403 def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
404 self.assertSoupEquals('<a href="http://example.org?a=1&amp;b=2;3"></a>')
405
406 def test_entities_in_strings_converted_during_parsing(self):
407 # Both XML and HTML entities are converted to Unicode characters
408 # during parsing.
409 text = "<p>&lt;&lt;sacr&eacute;&#32;bleu!&gt;&gt;</p>"
410 expected = "<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>"
411 self.assertSoupEquals(text, expected)
412
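# --- Hedged aside, not part of the original test file: a minimal,
# self-contained sketch of the entity round trip exercised above,
# assuming bs4 with the stdlib "html.parser" builder.
from bs4 import BeautifulSoup

soup = BeautifulSoup("<p>pi&ntilde;ata &amp; fun</p>", "html.parser")
# Entities become real Unicode characters in the parse tree...
assert soup.p.string == "pi\u00f1ata & fun"
# ...and only markup-significant characters are re-escaped on output.
assert str(soup.p) == "<p>pi\u00f1ata &amp; fun</p>"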
413 def test_smart_quotes_converted_on_the_way_in(self):
414 # Microsoft smart quotes are converted to Unicode characters during
415 # parsing.
416 quote = b"<p>\x91Foo\x92</p>"
417 soup = self.soup(quote)
418 self.assertEqual(
419 soup.p.string,
420 "\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")
421
422 def test_non_breaking_spaces_converted_on_the_way_in(self):
423 soup = self.soup("<a>&nbsp;&nbsp;</a>")
424 self.assertEqual(soup.a.string, "\N{NO-BREAK SPACE}" * 2)
425
426 def test_entities_converted_on_the_way_out(self):
427 text = "<p>&lt;&lt;sacr&eacute;&#32;bleu!&gt;&gt;</p>"
428 expected = "<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>".encode("utf-8")
429 soup = self.soup(text)
430 self.assertEqual(soup.p.encode("utf-8"), expected)
431
432 def test_real_iso_latin_document(self):
433 # Smoke test of interrelated functionality, using an
434 # easy-to-understand document.
435
436 # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
437 unicode_html = '<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'
438
439 # That's because we're going to encode it into ISO-Latin-1, and use
440 # that to test.
441 iso_latin_html = unicode_html.encode("iso-8859-1")
442
443 # Parse the ISO-Latin-1 HTML.
444 soup = self.soup(iso_latin_html)
445 # Encode it to UTF-8.
446 result = soup.encode("utf-8")
447
448 # What do we expect the result to look like? Well, it would
449 # look like unicode_html, except that the META tag would say
450 # UTF-8 instead of ISO-Latin-1.
451 expected = unicode_html.replace("ISO-Latin-1", "utf-8")
452
453 # And, of course, it would be in UTF-8, not Unicode.
454 expected = expected.encode("utf-8")
455
456 # Ta-da!
457 self.assertEqual(result, expected)
458
459 def test_real_shift_jis_document(self):
460 # Smoke test to make sure the parser can handle a document in
461 # Shift-JIS encoding, without choking.
462 shift_jis_html = (
463 b'<html><head></head><body><pre>'
464 b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
465 b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
466 b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
467 b'</pre></body></html>')
468 unicode_html = shift_jis_html.decode("shift-jis")
469 soup = self.soup(unicode_html)
470
471 # Make sure the parse tree is correctly encoded to various
472 # encodings.
473 self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
474 self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))
475
476 def test_real_hebrew_document(self):
477        # A real-world test to make sure we can convert ISO-8859-8 (a
478 # Hebrew encoding) to UTF-8.
479 hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
480 soup = self.soup(
481 hebrew_document, from_encoding="iso8859-8")
482 self.assertEqual(soup.original_encoding, 'iso8859-8')
483 self.assertEqual(
484 soup.encode('utf-8'),
485 hebrew_document.decode("iso8859-8").encode("utf-8"))
486
487 def test_meta_tag_reflects_current_encoding(self):
488 # Here's the <meta> tag saying that a document is
489 # encoded in Shift-JIS.
490 meta_tag = ('<meta content="text/html; charset=x-sjis" '
491 'http-equiv="Content-type"/>')
492
493 # Here's a document incorporating that meta tag.
494 shift_jis_html = (
495 '<html><head>\n%s\n'
496 '<meta http-equiv="Content-language" content="ja"/>'
497 '</head><body>Shift-JIS markup goes here.') % meta_tag
498 soup = self.soup(shift_jis_html)
499
500 # Parse the document, and the charset is seemingly unaffected.
501 parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
502 content = parsed_meta['content']
503 self.assertEqual('text/html; charset=x-sjis', content)
504
505 # But that value is actually a ContentMetaAttributeValue object.
506 self.assertTrue(isinstance(content, ContentMetaAttributeValue))
507
508 # And it will take on a value that reflects its current
509 # encoding.
510 self.assertEqual('text/html; charset=utf8', content.encode("utf8"))
511
512 # For the rest of the story, see TestSubstitutions in
513 # test_tree.py.
514
515 def test_html5_style_meta_tag_reflects_current_encoding(self):
516 # Here's the <meta> tag saying that a document is
517 # encoded in Shift-JIS.
518 meta_tag = ('<meta id="encoding" charset="x-sjis" />')
519
520 # Here's a document incorporating that meta tag.
521 shift_jis_html = (
522 '<html><head>\n%s\n'
523 '<meta http-equiv="Content-language" content="ja"/>'
524 '</head><body>Shift-JIS markup goes here.') % meta_tag
525 soup = self.soup(shift_jis_html)
526
527 # Parse the document, and the charset is seemingly unaffected.
528 parsed_meta = soup.find('meta', id="encoding")
529 charset = parsed_meta['charset']
530 self.assertEqual('x-sjis', charset)
531
532 # But that value is actually a CharsetMetaAttributeValue object.
533 self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))
534
535 # And it will take on a value that reflects its current
536 # encoding.
537 self.assertEqual('utf8', charset.encode("utf8"))
538
539 def test_tag_with_no_attributes_can_have_attributes_added(self):
540 data = self.soup("<a>text</a>")
541 data.a['foo'] = 'bar'
542 self.assertEqual('<a foo="bar">text</a>', data.a.decode())
543
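# --- Hedged aside, not part of the original test file: a small sketch
# of the charset substitution the two meta-tag tests above rely on,
# assuming bs4 with the stdlib "html.parser" builder.
from bs4 import BeautifulSoup

soup = BeautifulSoup('<meta charset="x-sjis"/>', "html.parser")
# The parsed attribute keeps its original value...
assert soup.meta["charset"] == "x-sjis"
# ...but re-encoding the document rewrites it to the target encoding.
assert soup.meta.encode("utf8") == b'<meta charset="utf8"/>'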
544class XMLTreeBuilderSmokeTest(SoupTest):
545
546 def test_pickle_and_unpickle_identity(self):
547 # Pickling a tree, then unpickling it, yields a tree identical
548 # to the original.
549 tree = self.soup("<a><b>foo</a>")
550 dumped = pickle.dumps(tree, 2)
551 loaded = pickle.loads(dumped)
552 self.assertEqual(loaded.__class__, BeautifulSoup)
553 self.assertEqual(loaded.decode(), tree.decode())
554
555 def test_docstring_generated(self):
556 soup = self.soup("<root/>")
557 self.assertEqual(
558 soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')
559
560 def test_xml_declaration(self):
561 markup = b"""<?xml version="1.0" encoding="utf8"?>\n<foo/>"""
562 soup = self.soup(markup)
563 self.assertEqual(markup, soup.encode("utf8"))
564
565 def test_real_xhtml_document(self):
566 """A real XHTML document should come out *exactly* the same as it went in."""
567 markup = b"""<?xml version="1.0" encoding="utf-8"?>
568<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
569<html xmlns="http://www.w3.org/1999/xhtml">
570<head><title>Hello.</title></head>
571<body>Goodbye.</body>
572</html>"""
573 soup = self.soup(markup)
574 self.assertEqual(
575 soup.encode("utf-8"), markup)
576
577 def test_formatter_processes_script_tag_for_xml_documents(self):
578 doc = """
579 <script type="text/javascript">
580 </script>
581"""
582 soup = BeautifulSoup(doc, "lxml-xml")
583 # lxml would have stripped this while parsing, but we can add
584 # it later.
585 soup.script.string = 'console.log("< < hey > > ");'
586 encoded = soup.encode()
587 self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded)
588
589 def test_can_parse_unicode_document(self):
590 markup = '<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
591 soup = self.soup(markup)
592 self.assertEqual('Sacr\xe9 bleu!', soup.root.string)
593
594 def test_popping_namespaced_tag(self):
595 markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
596 soup = self.soup(markup)
597 self.assertEqual(
598 str(soup.rss), markup)
599
600 def test_docstring_includes_correct_encoding(self):
601 soup = self.soup("<root/>")
602 self.assertEqual(
603 soup.encode("latin1"),
604 b'<?xml version="1.0" encoding="latin1"?>\n<root/>')
605
606 def test_large_xml_document(self):
607 """A large XML document should come out the same as it went in."""
608 markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
609 + b'0' * (2**12)
610 + b'</root>')
611 soup = self.soup(markup)
612 self.assertEqual(soup.encode("utf-8"), markup)
613
614
615 def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
616 self.assertSoupEquals("<p>", "<p/>")
617 self.assertSoupEquals("<p>foo</p>")
618
619 def test_namespaces_are_preserved(self):
620 markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
621 soup = self.soup(markup)
622 root = soup.root
623 self.assertEqual("http://example.com/", root['xmlns:a'])
624 self.assertEqual("http://example.net/", root['xmlns:b'])
625
626 def test_closing_namespaced_tag(self):
627 markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
628 soup = self.soup(markup)
629 self.assertEqual(str(soup.p), markup)
630
631 def test_namespaced_attributes(self):
632 markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
633 soup = self.soup(markup)
634 self.assertEqual(str(soup.foo), markup)
635
636 def test_namespaced_attributes_xml_namespace(self):
637 markup = '<foo xml:lang="fr">bar</foo>'
638 soup = self.soup(markup)
639 self.assertEqual(str(soup.foo), markup)
640
641class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
642 """Smoke test for a tree builder that supports HTML5."""
643
644 def test_real_xhtml_document(self):
645 # Since XHTML is not HTML5, HTML5 parsers are not tested to handle
646 # XHTML documents in any particular way.
647 pass
648
649 def test_html_tags_have_namespace(self):
650 markup = "<a>"
651 soup = self.soup(markup)
652 self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)
653
654 def test_svg_tags_have_namespace(self):
655 markup = '<svg><circle/></svg>'
656 soup = self.soup(markup)
657 namespace = "http://www.w3.org/2000/svg"
658 self.assertEqual(namespace, soup.svg.namespace)
659 self.assertEqual(namespace, soup.circle.namespace)
660
661
662 def test_mathml_tags_have_namespace(self):
663 markup = '<math><msqrt>5</msqrt></math>'
664 soup = self.soup(markup)
665 namespace = 'http://www.w3.org/1998/Math/MathML'
666 self.assertEqual(namespace, soup.math.namespace)
667 self.assertEqual(namespace, soup.msqrt.namespace)
668
669 def test_xml_declaration_becomes_comment(self):
670 markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
671 soup = self.soup(markup)
672 self.assertTrue(isinstance(soup.contents[0], Comment))
673 self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
674 self.assertEqual("html", soup.contents[0].next_element.name)
675
676def skipIf(condition, reason):
677 def nothing(test, *args, **kwargs):
678 return None
679
680 def decorator(test_item):
681 if condition:
682 return nothing
683 else:
684 return test_item
685
686 return decorator
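# --- Hedged usage sketch, not from the original file: it relies on the
# skipIf helper defined just above. When the condition is true, the
# decorated test method is replaced by a no-op, so the module still
# imports and the rest of the suite runs.
import unittest

HAS_FEATURE = False  # assumption for illustration only

class SkipIfExample(unittest.TestCase):
    @skipIf(not HAS_FEATURE, "feature missing, skipping its tests")
    def test_feature(self):
        self.fail("never runs while HAS_FEATURE is False")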
diff --git a/bitbake/lib/bs4/tests/__init__.py b/bitbake/lib/bs4/tests/__init__.py
deleted file mode 100644
index 142c8cc3f1..0000000000
--- a/bitbake/lib/bs4/tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
1"The beautifulsoup tests."
diff --git a/bitbake/lib/bs4/tests/test_builder_registry.py b/bitbake/lib/bs4/tests/test_builder_registry.py
deleted file mode 100644
index 90cad82933..0000000000
--- a/bitbake/lib/bs4/tests/test_builder_registry.py
+++ /dev/null
@@ -1,147 +0,0 @@
1"""Tests of the builder registry."""
2
3import unittest
4import warnings
5
6from bs4 import BeautifulSoup
7from bs4.builder import (
8 builder_registry as registry,
9 HTMLParserTreeBuilder,
10 TreeBuilderRegistry,
11)
12
13try:
14 from bs4.builder import HTML5TreeBuilder
15 HTML5LIB_PRESENT = True
16except ImportError:
17 HTML5LIB_PRESENT = False
18
19try:
20 from bs4.builder import (
21 LXMLTreeBuilderForXML,
22 LXMLTreeBuilder,
23 )
24 LXML_PRESENT = True
25except ImportError:
26 LXML_PRESENT = False
27
28
29class BuiltInRegistryTest(unittest.TestCase):
30 """Test the built-in registry with the default builders registered."""
31
32 def test_combination(self):
33 if LXML_PRESENT:
34 self.assertEqual(registry.lookup('fast', 'html'),
35 LXMLTreeBuilder)
36
37 if LXML_PRESENT:
38 self.assertEqual(registry.lookup('permissive', 'xml'),
39 LXMLTreeBuilderForXML)
40 self.assertEqual(registry.lookup('strict', 'html'),
41 HTMLParserTreeBuilder)
42 if HTML5LIB_PRESENT:
43 self.assertEqual(registry.lookup('html5lib', 'html'),
44 HTML5TreeBuilder)
45
46 def test_lookup_by_markup_type(self):
47 if LXML_PRESENT:
48 self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
49 self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
50 else:
51 self.assertEqual(registry.lookup('xml'), None)
52 if HTML5LIB_PRESENT:
53 self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
54 else:
55 self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)
56
57 def test_named_library(self):
58 if LXML_PRESENT:
59 self.assertEqual(registry.lookup('lxml', 'xml'),
60 LXMLTreeBuilderForXML)
61 self.assertEqual(registry.lookup('lxml', 'html'),
62 LXMLTreeBuilder)
63 if HTML5LIB_PRESENT:
64 self.assertEqual(registry.lookup('html5lib'),
65 HTML5TreeBuilder)
66
67 self.assertEqual(registry.lookup('html.parser'),
68 HTMLParserTreeBuilder)
69
70 def test_beautifulsoup_constructor_does_lookup(self):
71
72 with warnings.catch_warnings(record=True) as w:
73 # This will create a warning about not explicitly
74 # specifying a parser, but we'll ignore it.
75
76 # You can pass in a string.
77 BeautifulSoup("", features="html")
78 # Or a list of strings.
79 BeautifulSoup("", features=["html", "fast"])
80
81 # You'll get an exception if BS can't find an appropriate
82 # builder.
83 self.assertRaises(ValueError, BeautifulSoup,
84 "", features="no-such-feature")
85
86class RegistryTest(unittest.TestCase):
87 """Test the TreeBuilderRegistry class in general."""
88
89 def setUp(self):
90 self.registry = TreeBuilderRegistry()
91
92 def builder_for_features(self, *feature_list):
93 cls = type('Builder_' + '_'.join(feature_list),
94 (object,), {'features' : feature_list})
95
96 self.registry.register(cls)
97 return cls
98
99 def test_register_with_no_features(self):
100 builder = self.builder_for_features()
101
102 # Since the builder advertises no features, you can't find it
103 # by looking up features.
104 self.assertEqual(self.registry.lookup('foo'), None)
105
106 # But you can find it by doing a lookup with no features, if
107 # this happens to be the only registered builder.
108 self.assertEqual(self.registry.lookup(), builder)
109
110 def test_register_with_features_makes_lookup_succeed(self):
111 builder = self.builder_for_features('foo', 'bar')
112 self.assertEqual(self.registry.lookup('foo'), builder)
113 self.assertEqual(self.registry.lookup('bar'), builder)
114
115 def test_lookup_fails_when_no_builder_implements_feature(self):
116 builder = self.builder_for_features('foo', 'bar')
117 self.assertEqual(self.registry.lookup('baz'), None)
118
119 def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
120 builder1 = self.builder_for_features('foo')
121 builder2 = self.builder_for_features('bar')
122 self.assertEqual(self.registry.lookup(), builder2)
123
124 def test_lookup_fails_when_no_tree_builders_registered(self):
125 self.assertEqual(self.registry.lookup(), None)
126
127 def test_lookup_gets_most_recent_builder_supporting_all_features(self):
128 has_one = self.builder_for_features('foo')
129 has_the_other = self.builder_for_features('bar')
130 has_both_early = self.builder_for_features('foo', 'bar', 'baz')
131 has_both_late = self.builder_for_features('foo', 'bar', 'quux')
132 lacks_one = self.builder_for_features('bar')
133 has_the_other = self.builder_for_features('foo')
134
135 # There are two builders featuring 'foo' and 'bar', but
136 # the one that also features 'quux' was registered later.
137 self.assertEqual(self.registry.lookup('foo', 'bar'),
138 has_both_late)
139
140 # There is only one builder featuring 'foo', 'bar', and 'baz'.
141 self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
142 has_both_early)
143
144 def test_lookup_fails_when_cannot_reconcile_requested_features(self):
145 builder1 = self.builder_for_features('foo', 'bar')
146 builder2 = self.builder_for_features('foo', 'baz')
147 self.assertEqual(self.registry.lookup('bar', 'baz'), None)
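# --- Hedged aside, not from the deleted file: a condensed sketch of the
# feature-lookup behaviour the tests above exercise, using only the
# public TreeBuilderRegistry API (register() reads a .features list).
from bs4.builder import TreeBuilderRegistry

registry = TreeBuilderRegistry()

class FastBuilder:
    features = ["html", "fast"]

class PermissiveBuilder:
    features = ["html", "permissive"]

registry.register(FastBuilder)
registry.register(PermissiveBuilder)

assert registry.lookup("fast") is FastBuilder
# With several candidates, the most recently registered match wins.
assert registry.lookup("html") is PermissiveBuilder
assert registry.lookup("no-such-feature") is None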
diff --git a/bitbake/lib/bs4/tests/test_docs.py b/bitbake/lib/bs4/tests/test_docs.py
deleted file mode 100644
index d1d76a33bf..0000000000
--- a/bitbake/lib/bs4/tests/test_docs.py
+++ /dev/null
@@ -1,32 +0,0 @@
1"Test harness for doctests."
2
3# pylint: disable-msg=E0611,W0142
4
5__metaclass__ = type
6__all__ = [
7 'additional_tests',
8 ]
9
10import doctest
11#from pkg_resources import (
12# resource_filename, resource_exists, resource_listdir, cleanup_resources)
13
14DOCTEST_FLAGS = (
15 doctest.ELLIPSIS |
16 doctest.NORMALIZE_WHITESPACE |
17 doctest.REPORT_NDIFF)
18
19# def additional_tests():
20# "Run the doc tests (README.txt and docs/*, if any exist)"
21# doctest_files = [
22# os.path.abspath(resource_filename('bs4', 'README.txt'))]
23# if resource_exists('bs4', 'docs'):
24# for name in resource_listdir('bs4', 'docs'):
25# if name.endswith('.txt'):
26# doctest_files.append(
27# os.path.abspath(
28# resource_filename('bs4', 'docs/%s' % name)))
29# kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
30# atexit.register(cleanup_resources)
31# return unittest.TestSuite((
32# doctest.DocFileSuite(*doctest_files, **kwargs)))
diff --git a/bitbake/lib/bs4/tests/test_html5lib.py b/bitbake/lib/bs4/tests/test_html5lib.py
deleted file mode 100644
index a7494ca5ba..0000000000
--- a/bitbake/lib/bs4/tests/test_html5lib.py
+++ /dev/null
@@ -1,98 +0,0 @@
1"""Tests to ensure that the html5lib tree builder generates good trees."""
2
3import warnings
4
5try:
6 from bs4.builder import HTML5TreeBuilder
7 HTML5LIB_PRESENT = True
8except ImportError as e:
9 HTML5LIB_PRESENT = False
10from bs4.element import SoupStrainer
11from bs4.testing import (
12 HTML5TreeBuilderSmokeTest,
13 SoupTest,
14 skipIf,
15)
16
17@skipIf(
18 not HTML5LIB_PRESENT,
19 "html5lib seems not to be present, not testing its tree builder.")
20class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
21 """See ``HTML5TreeBuilderSmokeTest``."""
22
23 @property
24 def default_builder(self):
25 return HTML5TreeBuilder()
26
27 def test_soupstrainer(self):
28 # The html5lib tree builder does not support SoupStrainers.
29 strainer = SoupStrainer("b")
30 markup = "<p>A <b>bold</b> statement.</p>"
31 with warnings.catch_warnings(record=True) as w:
32 soup = self.soup(markup, parse_only=strainer)
33 self.assertEqual(
34 soup.decode(), self.document_for(markup))
35
36 self.assertTrue(
37 "the html5lib tree builder doesn't support parse_only" in
38 str(w[0].message))
39
40 def test_correctly_nested_tables(self):
41 """html5lib inserts <tbody> tags where other parsers don't."""
42 markup = ('<table id="1">'
43 '<tr>'
44 "<td>Here's another table:"
45 '<table id="2">'
46 '<tr><td>foo</td></tr>'
47 '</table></td>')
48
49 self.assertSoupEquals(
50 markup,
51 '<table id="1"><tbody><tr><td>Here\'s another table:'
52 '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
53 '</td></tr></tbody></table>')
54
55 self.assertSoupEquals(
56 "<table><thead><tr><td>Foo</td></tr></thead>"
57 "<tbody><tr><td>Bar</td></tr></tbody>"
58 "<tfoot><tr><td>Baz</td></tr></tfoot></table>")
59
60 def test_xml_declaration_followed_by_doctype(self):
61 markup = '''<?xml version="1.0" encoding="utf-8"?>
62<!DOCTYPE html>
63<html>
64 <head>
65 </head>
66 <body>
67 <p>foo</p>
68 </body>
69</html>'''
70 soup = self.soup(markup)
71 # Verify that we can reach the <p> tag; this means the tree is connected.
72 self.assertEqual(b"<p>foo</p>", soup.p.encode())
73
74 def test_reparented_markup(self):
75 markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
76 soup = self.soup(markup)
77 self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
78 self.assertEqual(2, len(soup.find_all('p')))
79
80
81 def test_reparented_markup_ends_with_whitespace(self):
82 markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
83 soup = self.soup(markup)
84 self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
85 self.assertEqual(2, len(soup.find_all('p')))
86
87 def test_processing_instruction(self):
88 """Processing instructions become comments."""
89 markup = b"""<?PITarget PIContent?>"""
90 soup = self.soup(markup)
91 assert str(soup).startswith("<!--?PITarget PIContent?-->")
92
93 def test_cloned_multivalue_node(self):
94 markup = b"""<a class="my_class"><p></a>"""
95 soup = self.soup(markup)
96 a1, a2 = soup.find_all('a')
97 self.assertEqual(a1, a2)
98 assert a1 is not a2
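# --- Hedged aside, not from the deleted file: the <tbody> difference
# noted in test_correctly_nested_tables above, assuming both html5lib
# and the stdlib parser are installed.
from bs4 import BeautifulSoup

markup = "<table><tr><td>foo</td></tr></table>"
# html.parser keeps the markup as written...
assert "tbody" not in str(BeautifulSoup(markup, "html.parser"))
# ...while html5lib inserts the <tbody> implied by the HTML5 spec.
assert "<tbody>" in str(BeautifulSoup(markup, "html5lib"))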
diff --git a/bitbake/lib/bs4/tests/test_htmlparser.py b/bitbake/lib/bs4/tests/test_htmlparser.py
deleted file mode 100644
index 30a25e6709..0000000000
--- a/bitbake/lib/bs4/tests/test_htmlparser.py
+++ /dev/null
@@ -1,31 +0,0 @@
1"""Tests to ensure that the html.parser tree builder generates good
2trees."""
3
4import pickle
5from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
6from bs4.builder import HTMLParserTreeBuilder
7
8class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
9
10 @property
11 def default_builder(self):
12 return HTMLParserTreeBuilder()
13
14 def test_namespaced_system_doctype(self):
15 # html.parser can't handle namespaced doctypes, so skip this one.
16 pass
17
18 def test_namespaced_public_doctype(self):
19 # html.parser can't handle namespaced doctypes, so skip this one.
20 pass
21
22 def test_builder_is_pickled(self):
23        """Unlike most tree builders, HTMLParserTreeBuilder is pickled
24        along with the tree and will be restored after unpickling.
25 """
26 tree = self.soup("<a><b>foo</a>")
27 dumped = pickle.dumps(tree, 2)
28 loaded = pickle.loads(dumped)
29 self.assertTrue(isinstance(loaded.builder, type(tree.builder)))
30
31
diff --git a/bitbake/lib/bs4/tests/test_lxml.py b/bitbake/lib/bs4/tests/test_lxml.py
deleted file mode 100644
index 6b6cdd07cb..0000000000
--- a/bitbake/lib/bs4/tests/test_lxml.py
+++ /dev/null
@@ -1,70 +0,0 @@
1"""Tests to ensure that the lxml tree builder generates good trees."""
2
3import warnings
4
5try:
6 import lxml.etree
7 LXML_PRESENT = True
8 LXML_VERSION = lxml.etree.LXML_VERSION
9except ImportError as e:
10 LXML_PRESENT = False
11 LXML_VERSION = (0,)
12
13if LXML_PRESENT:
14 from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
15
16from bs4 import BeautifulStoneSoup
17from bs4.testing import skipIf
18from bs4.testing import (
19 HTMLTreeBuilderSmokeTest,
20 XMLTreeBuilderSmokeTest,
21 SoupTest,
22 skipIf,
23)
24
25@skipIf(
26 not LXML_PRESENT,
27 "lxml seems not to be present, not testing its tree builder.")
28class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
29 """See ``HTMLTreeBuilderSmokeTest``."""
30
31 @property
32 def default_builder(self):
33 return LXMLTreeBuilder()
34
35 def test_out_of_range_entity(self):
36 self.assertSoupEquals(
37 "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
38 self.assertSoupEquals(
39 "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
40 self.assertSoupEquals(
41 "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")
42
43 # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
44 # test if an old version of lxml is installed.
45
46 @skipIf(
47 not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
48 "Skipping doctype test for old version of lxml to avoid segfault.")
49 def test_empty_doctype(self):
50 soup = self.soup("<!DOCTYPE>")
51 doctype = soup.contents[0]
52 self.assertEqual("", doctype.strip())
53
54 def test_beautifulstonesoup_is_xml_parser(self):
55 # Make sure that the deprecated BSS class uses an xml builder
56 # if one is installed.
57 with warnings.catch_warnings(record=True) as w:
58 soup = BeautifulStoneSoup("<b />")
59 self.assertEqual("<b/>", str(soup.b))
60 self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))
61
62@skipIf(
63 not LXML_PRESENT,
64 "lxml seems not to be present, not testing its XML tree builder.")
65class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
66    """See ``XMLTreeBuilderSmokeTest``."""
67
68 @property
69 def default_builder(self):
70 return LXMLTreeBuilderForXML()
diff --git a/bitbake/lib/bs4/tests/test_soup.py b/bitbake/lib/bs4/tests/test_soup.py
deleted file mode 100644
index 6ad3cb3765..0000000000
--- a/bitbake/lib/bs4/tests/test_soup.py
+++ /dev/null
@@ -1,479 +0,0 @@
1# -*- coding: utf-8 -*-
2"""Tests of Beautiful Soup as a whole."""
3
4import logging
5import unittest
6import sys
7import tempfile
8
9from bs4 import BeautifulSoup
10from bs4.element import (
11 CharsetMetaAttributeValue,
12 ContentMetaAttributeValue,
13 SoupStrainer,
14 NamespacedAttribute,
15 )
16import bs4.dammit
17from bs4.dammit import (
18 EntitySubstitution,
19 UnicodeDammit,
20 EncodingDetector,
21)
22from bs4.testing import (
23 SoupTest,
24 skipIf,
25)
26import warnings
27
28try:
29 from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
30 LXML_PRESENT = True
31except ImportError as e:
32 LXML_PRESENT = False
33
34PYTHON_2_PRE_2_7 = (sys.version_info < (2,7))
35PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))
36
37class TestConstructor(SoupTest):
38
39 def test_short_unicode_input(self):
40 data = "<h1>éé</h1>"
41 soup = self.soup(data)
42 self.assertEqual("éé", soup.h1.string)
43
44 def test_embedded_null(self):
45 data = "<h1>foo\0bar</h1>"
46 soup = self.soup(data)
47 self.assertEqual("foo\0bar", soup.h1.string)
48
49 def test_exclude_encodings(self):
50 utf8_data = "Räksmörgås".encode("utf-8")
51 soup = self.soup(utf8_data, exclude_encodings=["utf-8"])
52 self.assertEqual("windows-1252", soup.original_encoding)
53
54
55class TestWarnings(SoupTest):
56
57    def _assert_no_parser_specified(self, s, is_there=True):
58 v = s.startswith(BeautifulSoup.NO_PARSER_SPECIFIED_WARNING[:80])
59 self.assertTrue(v)
60
61 def test_warning_if_no_parser_specified(self):
62 with warnings.catch_warnings(record=True) as w:
63 soup = self.soup("<a><b></b></a>")
64 msg = str(w[0].message)
65 self._assert_no_parser_specified(msg)
66
67 def test_warning_if_parser_specified_too_vague(self):
68 with warnings.catch_warnings(record=True) as w:
69 soup = self.soup("<a><b></b></a>", "html")
70 msg = str(w[0].message)
71 self._assert_no_parser_specified(msg)
72
73 def test_no_warning_if_explicit_parser_specified(self):
74 with warnings.catch_warnings(record=True) as w:
75 soup = self.soup("<a><b></b></a>", "html.parser")
76 self.assertEqual([], w)
77
78 def test_parseOnlyThese_renamed_to_parse_only(self):
79 with warnings.catch_warnings(record=True) as w:
80 soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
81 msg = str(w[0].message)
82 self.assertTrue("parseOnlyThese" in msg)
83 self.assertTrue("parse_only" in msg)
84 self.assertEqual(b"<b></b>", soup.encode())
85
86 def test_fromEncoding_renamed_to_from_encoding(self):
87 with warnings.catch_warnings(record=True) as w:
88 utf8 = b"\xc3\xa9"
89 soup = self.soup(utf8, fromEncoding="utf8")
90 msg = str(w[0].message)
91 self.assertTrue("fromEncoding" in msg)
92 self.assertTrue("from_encoding" in msg)
93 self.assertEqual("utf8", soup.original_encoding)
94
95 def test_unrecognized_keyword_argument(self):
96 self.assertRaises(
97 TypeError, self.soup, "<a>", no_such_argument=True)
98
99class TestFilenameAndURLWarnings(SoupTest):
100
101 def test_disk_file_warning(self):
102 filehandle = tempfile.NamedTemporaryFile()
103 filename = filehandle.name
104 try:
105 with warnings.catch_warnings(record=True) as w:
106 soup = self.soup(filename)
107 msg = str(w[0].message)
108 self.assertTrue("looks like a filename" in msg)
109 finally:
110 filehandle.close()
111
112 # The file no longer exists, so Beautiful Soup will no longer issue the warning.
113 with warnings.catch_warnings(record=True) as w:
114 soup = self.soup(filename)
115 self.assertEqual(0, len(w))
116
117 def test_url_warning(self):
118 with warnings.catch_warnings(record=True) as w:
119 soup = self.soup("http://www.crummy.com/")
120 msg = str(w[0].message)
121 self.assertTrue("looks like a URL" in msg)
122
123 with warnings.catch_warnings(record=True) as w:
124 soup = self.soup("http://www.crummy.com/ is great")
125 self.assertEqual(0, len(w))
126
127class TestSelectiveParsing(SoupTest):
128
129 def test_parse_with_soupstrainer(self):
130 markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
131 strainer = SoupStrainer("b")
132 soup = self.soup(markup, parse_only=strainer)
133 self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>")
134
135
136class TestEntitySubstitution(unittest.TestCase):
137 """Standalone tests of the EntitySubstitution class."""
138 def setUp(self):
139 self.sub = EntitySubstitution
140
141 def test_simple_html_substitution(self):
142        # Unicode characters corresponding to named HTML entities
143 # are substituted, and no others.
144 s = "foo\u2200\N{SNOWMAN}\u00f5bar"
145 self.assertEqual(self.sub.substitute_html(s),
146 "foo&forall;\N{SNOWMAN}&otilde;bar")
147
148 def test_smart_quote_substitution(self):
149 # MS smart quotes are a common source of frustration, so we
150 # give them a special test.
151 quotes = b"\x91\x92foo\x93\x94"
152 dammit = UnicodeDammit(quotes)
153 self.assertEqual(self.sub.substitute_html(dammit.markup),
154 "&lsquo;&rsquo;foo&ldquo;&rdquo;")
155
156    def test_xml_conversion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
157 s = 'Welcome to "my bar"'
158 self.assertEqual(self.sub.substitute_xml(s, False), s)
159
160 def test_xml_attribute_quoting_normally_uses_double_quotes(self):
161 self.assertEqual(self.sub.substitute_xml("Welcome", True),
162 '"Welcome"')
163 self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
164 '"Bob\'s Bar"')
165
166 def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
167 s = 'Welcome to "my bar"'
168 self.assertEqual(self.sub.substitute_xml(s, True),
169 "'Welcome to \"my bar\"'")
170
171 def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
172 s = 'Welcome to "Bob\'s Bar"'
173 self.assertEqual(
174 self.sub.substitute_xml(s, True),
175 '"Welcome to &quot;Bob\'s Bar&quot;"')
176
177 def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
178 quoted = 'Welcome to "Bob\'s Bar"'
179 self.assertEqual(self.sub.substitute_xml(quoted), quoted)
180
181 def test_xml_quoting_handles_angle_brackets(self):
182 self.assertEqual(
183 self.sub.substitute_xml("foo<bar>"),
184 "foo&lt;bar&gt;")
185
186 def test_xml_quoting_handles_ampersands(self):
187 self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")
188
189 def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
190 self.assertEqual(
191 self.sub.substitute_xml("&Aacute;T&T"),
192 "&amp;Aacute;T&amp;T")
193
194 def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
195 self.assertEqual(
196 self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
197 "&Aacute;T&amp;T")
198
199 def test_quotes_not_html_substituted(self):
200 """There's no need to do this except inside attribute values."""
201 text = 'Bob\'s "bar"'
202 self.assertEqual(self.sub.substitute_html(text), text)
203
204
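# --- Hedged aside, not from the deleted file: the two EntitySubstitution
# behaviours above, restated as standalone assertions (assumes bs4.dammit).
from bs4.dammit import EntitySubstitution

# Characters with named HTML entities are substituted; others pass through.
assert EntitySubstitution.substitute_html(
    "\u2200 \N{SNOWMAN}") == "&forall; \N{SNOWMAN}"
# Attribute quoting falls back to single quotes around embedded double quotes.
assert EntitySubstitution.substitute_xml('a "b"', True) == '\'a "b"\''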
205class TestEncodingConversion(SoupTest):
206 # Test Beautiful Soup's ability to decode and encode from various
207 # encodings.
208
209 def setUp(self):
210 super(TestEncodingConversion, self).setUp()
211 self.unicode_data = '<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
212 self.utf8_data = self.unicode_data.encode("utf-8")
213 # Just so you know what it looks like.
214 self.assertEqual(
215 self.utf8_data,
216 b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')
217
218 def test_ascii_in_unicode_out(self):
219 # ASCII input is converted to Unicode. The original_encoding
220 # attribute is set to 'utf-8', a superset of ASCII.
221 chardet = bs4.dammit.chardet_dammit
222 logging.disable(logging.WARNING)
223 try:
224 def noop(str):
225 return None
226 # Disable chardet, which will realize that the ASCII is ASCII.
227 bs4.dammit.chardet_dammit = noop
228 ascii = b"<foo>a</foo>"
229 soup_from_ascii = self.soup(ascii)
230 unicode_output = soup_from_ascii.decode()
231 self.assertTrue(isinstance(unicode_output, str))
232 self.assertEqual(unicode_output, self.document_for(ascii.decode()))
233 self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
234 finally:
235 logging.disable(logging.NOTSET)
236 bs4.dammit.chardet_dammit = chardet
237
238 def test_unicode_in_unicode_out(self):
239 # Unicode input is left alone. The original_encoding attribute
240 # is not set.
241 soup_from_unicode = self.soup(self.unicode_data)
242 self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
243 self.assertEqual(soup_from_unicode.foo.string, 'Sacr\xe9 bleu!')
244 self.assertEqual(soup_from_unicode.original_encoding, None)
245
246 def test_utf8_in_unicode_out(self):
247 # UTF-8 input is converted to Unicode. The original_encoding
248 # attribute is set.
249 soup_from_utf8 = self.soup(self.utf8_data)
250 self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
251 self.assertEqual(soup_from_utf8.foo.string, 'Sacr\xe9 bleu!')
252
253 def test_utf8_out(self):
254 # The internal data structures can be encoded as UTF-8.
255 soup_from_unicode = self.soup(self.unicode_data)
256 self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)
257
258 @skipIf(
259 PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2,
260 "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
261 def test_attribute_name_containing_unicode_characters(self):
262 markup = '<div><a \N{SNOWMAN}="snowman"></a></div>'
263 self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))
264
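# --- Hedged aside, not from the deleted file: the in/out encoding round
# trip the class above verifies, assuming the stdlib "html.parser" builder.
from bs4 import BeautifulSoup

utf8_data = "<p>Sacr\u00e9 bleu!</p>".encode("utf-8")
soup = BeautifulSoup(utf8_data, "html.parser")
assert soup.p.string == "Sacr\u00e9 bleu!"   # decoded to Unicode on the way in
assert soup.encode("utf-8") == utf8_data     # re-encoded intact on the way out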
265class TestUnicodeDammit(unittest.TestCase):
266 """Standalone tests of UnicodeDammit."""
267
268 def test_unicode_input(self):
269 markup = "I'm already Unicode! \N{SNOWMAN}"
270 dammit = UnicodeDammit(markup)
271 self.assertEqual(dammit.unicode_markup, markup)
272
273 def test_smart_quotes_to_unicode(self):
274 markup = b"<foo>\x91\x92\x93\x94</foo>"
275 dammit = UnicodeDammit(markup)
276 self.assertEqual(
277 dammit.unicode_markup, "<foo>\u2018\u2019\u201c\u201d</foo>")
278
279 def test_smart_quotes_to_xml_entities(self):
280 markup = b"<foo>\x91\x92\x93\x94</foo>"
281 dammit = UnicodeDammit(markup, smart_quotes_to="xml")
282 self.assertEqual(
283 dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")
284
285 def test_smart_quotes_to_html_entities(self):
286 markup = b"<foo>\x91\x92\x93\x94</foo>"
287 dammit = UnicodeDammit(markup, smart_quotes_to="html")
288 self.assertEqual(
289 dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")
290
291 def test_smart_quotes_to_ascii(self):
292 markup = b"<foo>\x91\x92\x93\x94</foo>"
293 dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
294 self.assertEqual(
295 dammit.unicode_markup, """<foo>''""</foo>""")
296
297 def test_detect_utf8(self):
298 utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83"
299 dammit = UnicodeDammit(utf8)
300 self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
301 self.assertEqual(dammit.unicode_markup, 'Sacr\xe9 bleu! \N{SNOWMAN}')
302
303
304 def test_convert_hebrew(self):
305 hebrew = b"\xed\xe5\xec\xf9"
306 dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
307 self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
308 self.assertEqual(dammit.unicode_markup, '\u05dd\u05d5\u05dc\u05e9')
309
310 def test_dont_see_smart_quotes_where_there_are_none(self):
311 utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
312 dammit = UnicodeDammit(utf_8)
313 self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
314 self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)
315
316 def test_ignore_inappropriate_codecs(self):
317 utf8_data = "Räksmörgås".encode("utf-8")
318 dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
319 self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
320
321 def test_ignore_invalid_codecs(self):
322 utf8_data = "Räksmörgås".encode("utf-8")
323 for bad_encoding in ['.utf8', '...', 'utF---16.!']:
324 dammit = UnicodeDammit(utf8_data, [bad_encoding])
325 self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
326
327 def test_exclude_encodings(self):
328 # This is UTF-8.
329 utf8_data = "Räksmörgås".encode("utf-8")
330
331 # But if we exclude UTF-8 from consideration, the guess is
332 # Windows-1252.
333 dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"])
334 self.assertEqual(dammit.original_encoding.lower(), 'windows-1252')
335
336 # And if we exclude that, there is no valid guess at all.
337 dammit = UnicodeDammit(
338 utf8_data, exclude_encodings=["utf-8", "windows-1252"])
339 self.assertEqual(dammit.original_encoding, None)
340
341 def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self):
342 detected = EncodingDetector(
343 b'<?xml version="1.0" encoding="UTF-\xdb" ?>')
344 encodings = list(detected.encodings)
345 assert 'utf-\N{REPLACEMENT CHARACTER}' in encodings
346
347 def test_detect_html5_style_meta_tag(self):
348
349 for data in (
350 b'<html><meta charset="euc-jp" /></html>',
351 b"<html><meta charset='euc-jp' /></html>",
352 b"<html><meta charset=euc-jp /></html>",
353 b"<html><meta charset=euc-jp/></html>"):
354 dammit = UnicodeDammit(data, is_html=True)
355 self.assertEqual(
356 "euc-jp", dammit.original_encoding)
357
358 def test_last_ditch_entity_replacement(self):
359 # This is a UTF-8 document that contains bytestrings
360 # completely incompatible with UTF-8 (ie. encoded with some other
361 # encoding).
362 #
363 # Since there is no consistent encoding for the document,
364 # Unicode, Dammit will eventually encode the document as UTF-8
365 # and encode the incompatible characters as REPLACEMENT
366 # CHARACTER.
367 #
368 # If chardet is installed, it will detect that the document
369 # can be converted into ISO-8859-1 without errors. This happens
370 # to be the wrong encoding, but it is a consistent encoding, so the
371 # code we're testing here won't run.
372 #
373 # So we temporarily disable chardet if it's present.
374 doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
375<html><b>\330\250\330\252\330\261</b>
376<i>\310\322\321\220\312\321\355\344</i></html>"""
377 chardet = bs4.dammit.chardet_dammit
378 logging.disable(logging.WARNING)
379 try:
380 def noop(str):
381 return None
382 bs4.dammit.chardet_dammit = noop
383 dammit = UnicodeDammit(doc)
384 self.assertEqual(True, dammit.contains_replacement_characters)
385 self.assertTrue("\ufffd" in dammit.unicode_markup)
386
387 soup = BeautifulSoup(doc, "html.parser")
388 self.assertTrue(soup.contains_replacement_characters)
389 finally:
390 logging.disable(logging.NOTSET)
391 bs4.dammit.chardet_dammit = chardet
392
393 def test_byte_order_mark_removed(self):
394 # A document written in UTF-16LE will have its byte order marker stripped.
395 data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
396 dammit = UnicodeDammit(data)
397 self.assertEqual("<a>áé</a>", dammit.unicode_markup)
398 self.assertEqual("utf-16le", dammit.original_encoding)
399
400 def test_detwingle(self):
401 # Here's a UTF8 document.
402 utf8 = ("\N{SNOWMAN}" * 3).encode("utf8")
403
404 # Here's a Windows-1252 document.
405 windows_1252 = (
406 "\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
407 "\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")
408
409 # Through some unholy alchemy, they've been stuck together.
410 doc = utf8 + windows_1252 + utf8
411
412 # The document can't be turned into UTF-8:
413 self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")
414
415 # Unicode, Dammit thinks the whole document is Windows-1252,
416        # and decodes it into "â˜ƒâ˜ƒâ˜ƒ“Hi, I like Windows!”â˜ƒâ˜ƒâ˜ƒ"
417
418 # But if we run it through fix_embedded_windows_1252, it's fixed:
419
420 fixed = UnicodeDammit.detwingle(doc)
421 self.assertEqual(
422            "☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))
423
424 def test_detwingle_ignores_multibyte_characters(self):
425 # Each of these characters has a UTF-8 representation ending
426 # in \x93. \x93 is a smart quote if interpreted as
427 # Windows-1252. But our code knows to skip over multibyte
428 # UTF-8 characters, so they'll survive the process unscathed.
429 for tricky_unicode_char in (
430 "\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
431 "\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
432 "\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
433 ):
434 input = tricky_unicode_char.encode("utf8")
435 self.assertTrue(input.endswith(b'\x93'))
436 output = UnicodeDammit.detwingle(input)
437 self.assertEqual(output, input)
438
439class TestNamespacedAttribute(SoupTest):
440
441 def test_name_may_be_none(self):
442 a = NamespacedAttribute("xmlns", None)
443 self.assertEqual(a, "xmlns")
444
445 def test_attribute_is_equivalent_to_colon_separated_string(self):
446 a = NamespacedAttribute("a", "b")
447 self.assertEqual("a:b", a)
448
449 def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
450 a = NamespacedAttribute("a", "b", "c")
451 b = NamespacedAttribute("a", "b", "c")
452 self.assertEqual(a, b)
453
454 # The actual namespace is not considered.
455 c = NamespacedAttribute("a", "b", None)
456 self.assertEqual(a, c)
457
458 # But name and prefix are important.
459 d = NamespacedAttribute("a", "z", "c")
460 self.assertNotEqual(a, d)
461
462 e = NamespacedAttribute("z", "b", "c")
463 self.assertNotEqual(a, e)
464
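# --- Hedged aside, not from the deleted file: NamespacedAttribute is a
# str subclass, so equality is plain string equality on "prefix:name" and
# the namespace URI plays no part, as the tests above demonstrate.
from bs4.element import NamespacedAttribute

a = NamespacedAttribute("xsi", "schemaLocation",
                        "http://www.w3.org/2001/XMLSchema-instance")
b = NamespacedAttribute("xsi", "schemaLocation", None)
assert a == b == "xsi:schemaLocation"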
465
466class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):
467
468    def test_charset_meta_attribute_value(self):
469 value = CharsetMetaAttributeValue("euc-jp")
470 self.assertEqual("euc-jp", value)
471 self.assertEqual("euc-jp", value.original_value)
472 self.assertEqual("utf8", value.encode("utf8"))
473
474
475 def test_content_meta_attribute_value(self):
476 value = ContentMetaAttributeValue("text/html; charset=euc-jp")
477 self.assertEqual("text/html; charset=euc-jp", value)
478 self.assertEqual("text/html; charset=euc-jp", value.original_value)
479 self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
diff --git a/bitbake/lib/bs4/tests/test_tree.py b/bitbake/lib/bs4/tests/test_tree.py
deleted file mode 100644
index 8e5c66426e..0000000000
--- a/bitbake/lib/bs4/tests/test_tree.py
+++ /dev/null
@@ -1,2004 +0,0 @@
1# -*- coding: utf-8 -*-
2"""Tests for Beautiful Soup's tree traversal methods.
3
4The tree traversal methods are the main advantage of using Beautiful
5Soup over just using a parser.
6
7Different parsers will build different Beautiful Soup trees given the
8same markup, but all Beautiful Soup trees can be traversed with the
9methods tested here.
10"""
11
12import copy
13import pickle
14import re
15import warnings
16from bs4 import BeautifulSoup
17from bs4.builder import builder_registry
18from bs4.element import (
19 PY3K,
20 CData,
21 Comment,
22 Declaration,
23 Doctype,
24 NavigableString,
25 SoupStrainer,
26 Tag,
27)
28from bs4.testing import SoupTest
29
30XML_BUILDER_PRESENT = (builder_registry.lookup("xml") is not None)
31LXML_PRESENT = (builder_registry.lookup("lxml") is not None)
32
33class TreeTest(SoupTest):
34
35 def assertSelects(self, tags, should_match):
36 """Make sure that the given tags have the correct text.
37
38 This is used in tests that define a bunch of tags, each
39 containing a single string, and then select certain strings by
40 some mechanism.
41 """
42 self.assertEqual([tag.string for tag in tags], should_match)
43
44 def assertSelectsIDs(self, tags, should_match):
45 """Make sure that the given tags have the correct IDs.
46
47 This is used in tests that define a bunch of tags, each
48 containing a single string, and then select certain strings by
49 some mechanism.
50 """
51 self.assertEqual([tag['id'] for tag in tags], should_match)
52
53
54class TestFind(TreeTest):
55 """Basic tests of the find() method.
56
57 find() just calls find_all() with limit=1, so it's not tested all
58    that thoroughly here.
59 """
60
61 def test_find_tag(self):
62 soup = self.soup("<a>1</a><b>2</b><a>3</a><b>4</b>")
63 self.assertEqual(soup.find("b").string, "2")
64
65 def test_unicode_text_find(self):
66 soup = self.soup('<h1>Räksmörgås</h1>')
67 self.assertEqual(soup.find(string='Räksmörgås'), 'Räksmörgås')
68
69 def test_unicode_attribute_find(self):
70 soup = self.soup('<h1 id="Räksmörgås">here it is</h1>')
71 str(soup)
72 self.assertEqual("here it is", soup.find(id='Räksmörgås').text)
73
74
75 def test_find_everything(self):
76 """Test an optimization that finds all tags."""
77 soup = self.soup("<a>foo</a><b>bar</b>")
78 self.assertEqual(2, len(soup.find_all()))
79
80 def test_find_everything_with_name(self):
81 """Test an optimization that finds all tags with a given name."""
82 soup = self.soup("<a>foo</a><b>bar</b><a>baz</a>")
83 self.assertEqual(2, len(soup.find_all('a')))
84
85class TestFindAll(TreeTest):
86 """Basic tests of the find_all() method."""
87
88 def test_find_all_text_nodes(self):
89 """You can search the tree for text nodes."""
90 soup = self.soup("<html>Foo<b>bar</b>\xbb</html>")
91 # Exact match.
92 self.assertEqual(soup.find_all(string="bar"), ["bar"])
93 self.assertEqual(soup.find_all(text="bar"), ["bar"])
94 # Match any of a number of strings.
95 self.assertEqual(
96 soup.find_all(text=["Foo", "bar"]), ["Foo", "bar"])
97 # Match a regular expression.
98 self.assertEqual(soup.find_all(text=re.compile('.*')),
99 ["Foo", "bar", '\xbb'])
100 # Match anything.
101 self.assertEqual(soup.find_all(text=True),
102 ["Foo", "bar", '\xbb'])
103
104 def test_find_all_limit(self):
105 """You can limit the number of items returned by find_all."""
106 soup = self.soup("<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a>")
107 self.assertSelects(soup.find_all('a', limit=3), ["1", "2", "3"])
108 self.assertSelects(soup.find_all('a', limit=1), ["1"])
109 self.assertSelects(
110 soup.find_all('a', limit=10), ["1", "2", "3", "4", "5"])
111
112 # A limit of 0 means no limit.
113 self.assertSelects(
114 soup.find_all('a', limit=0), ["1", "2", "3", "4", "5"])
115
116 def test_calling_a_tag_is_calling_findall(self):
117 soup = self.soup("<a>1</a><b>2<a id='foo'>3</a></b>")
118 self.assertSelects(soup('a', limit=1), ["1"])
119 self.assertSelects(soup.b(id="foo"), ["3"])
120
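# --- Hedged aside, not from the deleted file: the two find_all shortcuts
# the tests above rely on, assuming the stdlib "html.parser" builder.
from bs4 import BeautifulSoup

soup = BeautifulSoup("<a>1</a><a>2</a><a>3</a>", "html.parser")
# Calling the soup (or any tag) is shorthand for find_all()...
assert [t.string for t in soup("a", limit=2)] == ["1", "2"]
# ...and limit=0 means "no limit".
assert len(soup.find_all("a", limit=0)) == 3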
121 def test_find_all_with_self_referential_data_structure_does_not_cause_infinite_recursion(self):
122 soup = self.soup("<a></a>")
123 # Create a self-referential list.
124 l = []
125 l.append(l)
126
127 # Without special code in _normalize_search_value, this would cause infinite
128 # recursion.
129 self.assertEqual([], soup.find_all(l))
130
131 def test_find_all_resultset(self):
132 """All find_all calls return a ResultSet"""
133 soup = self.soup("<a></a>")
134 result = soup.find_all("a")
135 self.assertTrue(hasattr(result, "source"))
136
137 result = soup.find_all(True)
138 self.assertTrue(hasattr(result, "source"))
139
140 result = soup.find_all(text="foo")
141 self.assertTrue(hasattr(result, "source"))
142
143
144class TestFindAllBasicNamespaces(TreeTest):
145
146 def test_find_by_namespaced_name(self):
147 soup = self.soup('<mathml:msqrt>4</mathml:msqrt><a svg:fill="red">')
148 self.assertEqual("4", soup.find("mathml:msqrt").string)
149 self.assertEqual("a", soup.find(attrs= { "svg:fill" : "red" }).name)
150
151
152class TestFindAllByName(TreeTest):
153 """Test ways of finding tags by tag name."""
154
155 def setUp(self):
156 super(TreeTest, self).setUp()
157 self.tree = self.soup("""<a>First tag.</a>
158 <b>Second tag.</b>
159 <c>Third <a>Nested tag.</a> tag.</c>""")
160
161 def test_find_all_by_tag_name(self):
162 # Find all the <a> tags.
163 self.assertSelects(
164 self.tree.find_all('a'), ['First tag.', 'Nested tag.'])
165
166 def test_find_all_by_name_and_text(self):
167 self.assertSelects(
168 self.tree.find_all('a', text='First tag.'), ['First tag.'])
169
170 self.assertSelects(
171 self.tree.find_all('a', text=True), ['First tag.', 'Nested tag.'])
172
173 self.assertSelects(
174 self.tree.find_all('a', text=re.compile("tag")),
175 ['First tag.', 'Nested tag.'])
176
177
178 def test_find_all_on_non_root_element(self):
179 # You can call find_all on any node, not just the root.
180 self.assertSelects(self.tree.c.find_all('a'), ['Nested tag.'])
181
182 def test_calling_element_invokes_find_all(self):
183 self.assertSelects(self.tree('a'), ['First tag.', 'Nested tag.'])
184
185 def test_find_all_by_tag_strainer(self):
186 self.assertSelects(
187 self.tree.find_all(SoupStrainer('a')),
188 ['First tag.', 'Nested tag.'])
189
190 def test_find_all_by_tag_names(self):
191 self.assertSelects(
192 self.tree.find_all(['a', 'b']),
193 ['First tag.', 'Second tag.', 'Nested tag.'])
194
195 def test_find_all_by_tag_dict(self):
196 self.assertSelects(
197 self.tree.find_all({'a': True, 'b': True}),
198 ['First tag.', 'Second tag.', 'Nested tag.'])
199
200 def test_find_all_by_tag_re(self):
201 self.assertSelects(
202 self.tree.find_all(re.compile('^[ab]$')),
203 ['First tag.', 'Second tag.', 'Nested tag.'])
204
205 def test_find_all_with_tags_matching_method(self):
206 # You can define an oracle method that determines whether
207 # a tag matches the search.
208 def id_matches_name(tag):
209 return tag.name == tag.get('id')
210
211 tree = self.soup("""<a id="a">Match 1.</a>
212 <a id="1">Does not match.</a>
213 <b id="b">Match 2.</b>""")
214
215 self.assertSelects(
216 tree.find_all(id_matches_name), ["Match 1.", "Match 2."])
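        # Any one-argument callable can serve as a matcher. An
        # illustrative (untested) example against the markup above:
        #
        #   tree.find_all(lambda tag: len(tag.attrs) == 1)
        #
        # would match all three tags, since each carries exactly one
        # attribute.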
217
218
219class TestFindAllByAttribute(TreeTest):
220
221 def test_find_all_by_attribute_name(self):
222 # You can pass in keyword arguments to find_all to search by
223 # attribute.
224 tree = self.soup("""
225 <a id="first">Matching a.</a>
226 <a id="second">
227 Non-matching <b id="first">Matching b.</b>a.
228 </a>""")
229 self.assertSelects(tree.find_all(id='first'),
230 ["Matching a.", "Matching b."])
231
232 def test_find_all_by_utf8_attribute_value(self):
233 peace = "םולש".encode("utf8")
234 data = '<a title="םולש"></a>'.encode("utf8")
235 soup = self.soup(data)
236 self.assertEqual([soup.a], soup.find_all(title=peace))
237 self.assertEqual([soup.a], soup.find_all(title=peace.decode("utf8")))
238 self.assertEqual([soup.a], soup.find_all(title=[peace, "something else"]))
239
240 def test_find_all_by_attribute_dict(self):
241 # You can pass in a dictionary as the argument 'attrs'. This
242 # lets you search for attributes like 'name' (a fixed argument
243 # to find_all) and 'class' (a reserved word in Python.)
244 tree = self.soup("""
245 <a name="name1" class="class1">Name match.</a>
246 <a name="name2" class="class2">Class match.</a>
247 <a name="name3" class="class3">Non-match.</a>
248 <name1>A tag called 'name1'.</name1>
249 """)
250
251 # This doesn't do what you want.
252 self.assertSelects(tree.find_all(name='name1'),
253 ["A tag called 'name1'."])
254 # This does what you want.
255 self.assertSelects(tree.find_all(attrs={'name': 'name1'}),
256 ["Name match."])
257
258 self.assertSelects(tree.find_all(attrs={'class': 'class2'}),
259 ["Class match."])
260
261 def test_find_all_by_class(self):
262 tree = self.soup("""
263 <a class="1">Class 1.</a>
264 <a class="2">Class 2.</a>
265 <b class="1">Class 1.</b>
266 <c class="3 4">Class 3 and 4.</c>
267 """)
268
269 # Passing in the class_ keyword argument will search against
270 # the 'class' attribute.
271 self.assertSelects(tree.find_all('a', class_='1'), ['Class 1.'])
272 self.assertSelects(tree.find_all('c', class_='3'), ['Class 3 and 4.'])
273 self.assertSelects(tree.find_all('c', class_='4'), ['Class 3 and 4.'])
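        # ('class' is a cdata-list attribute, so "3 4" is stored as the
        # list ["3", "4"] and class_ is compared against each value
        # separately--which is why both class_='3' and class_='4' match.)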
274
275 # Passing in a string to 'attrs' will also search the CSS class.
276 self.assertSelects(tree.find_all('a', '1'), ['Class 1.'])
277 self.assertSelects(tree.find_all(attrs='1'), ['Class 1.', 'Class 1.'])
278 self.assertSelects(tree.find_all('c', '3'), ['Class 3 and 4.'])
279 self.assertSelects(tree.find_all('c', '4'), ['Class 3 and 4.'])
280
281 def test_find_by_class_when_multiple_classes_present(self):
282 tree = self.soup("<gar class='foo bar'>Found it</gar>")
283
284 f = tree.find_all("gar", class_=re.compile("o"))
285 self.assertSelects(f, ["Found it"])
286
287 f = tree.find_all("gar", class_=re.compile("a"))
288 self.assertSelects(f, ["Found it"])
289
290 # Since the class is not the string "foo bar", but the two
291 # strings "foo" and "bar", this will not find anything.
292 f = tree.find_all("gar", class_=re.compile("o b"))
293 self.assertSelects(f, [])
294
295 def test_find_all_with_non_dictionary_for_attrs_finds_by_class(self):
296 soup = self.soup("<a class='bar'>Found it</a>")
297
298 self.assertSelects(soup.find_all("a", re.compile("ba")), ["Found it"])
299
300 def big_attribute_value(value):
301 return len(value) > 3
302
303 self.assertSelects(soup.find_all("a", big_attribute_value), [])
304
305 def small_attribute_value(value):
306 return len(value) <= 3
307
308 self.assertSelects(
309 soup.find_all("a", small_attribute_value), ["Found it"])
310
311 def test_find_all_with_string_for_attrs_finds_multiple_classes(self):
312 soup = self.soup('<a class="foo bar"></a><a class="foo"></a>')
313 a, a2 = soup.find_all("a")
314 self.assertEqual([a, a2], soup.find_all("a", "foo"))
315 self.assertEqual([a], soup.find_all("a", "bar"))
316
317 # If you specify the class as a string that contains a
318 # space, only that specific value will be found.
319 self.assertEqual([a], soup.find_all("a", class_="foo bar"))
320 self.assertEqual([a], soup.find_all("a", "foo bar"))
321 self.assertEqual([], soup.find_all("a", "bar foo"))
322
323 def test_find_all_by_attribute_soupstrainer(self):
324 tree = self.soup("""
325 <a id="first">Match.</a>
326 <a id="second">Non-match.</a>""")
327
328 strainer = SoupStrainer(attrs={'id': 'first'})
329 self.assertSelects(tree.find_all(strainer), ['Match.'])
330
331 def test_find_all_with_missing_attribute(self):
332 # You can pass in None as the value of an attribute to find_all.
333 # This will match tags that do not have that attribute set.
334 tree = self.soup("""<a id="1">ID present.</a>
335 <a>No ID present.</a>
336 <a id="">ID is empty.</a>""")
337 self.assertSelects(tree.find_all('a', id=None), ["No ID present."])
338
339 def test_find_all_with_defined_attribute(self):
340 # You can pass in None as the value of an attribute to find_all.
341 # This will match tags that have that attribute set to any value.
342 tree = self.soup("""<a id="1">ID present.</a>
343 <a>No ID present.</a>
344 <a id="">ID is empty.</a>""")
345 self.assertSelects(
346 tree.find_all(id=True), ["ID present.", "ID is empty."])
347
348 def test_find_all_with_numeric_attribute(self):
349 # If you search for a number, it's treated as a string.
350 tree = self.soup("""<a id=1>Unquoted attribute.</a>
351 <a id="1">Quoted attribute.</a>""")
352
353 expected = ["Unquoted attribute.", "Quoted attribute."]
354 self.assertSelects(tree.find_all(id=1), expected)
355 self.assertSelects(tree.find_all(id="1"), expected)
356
357 def test_find_all_with_list_attribute_values(self):
358 # You can pass a list of attribute values instead of just one,
359 # and you'll get tags that match any of the values.
360 tree = self.soup("""<a id="1">1</a>
361 <a id="2">2</a>
362 <a id="3">3</a>
363 <a>No ID.</a>""")
364 self.assertSelects(tree.find_all(id=["1", "3", "4"]),
365 ["1", "3"])
366
367 def test_find_all_with_regular_expression_attribute_value(self):
368 # You can pass a regular expression as an attribute value, and
369 # you'll get tags whose values for that attribute match the
370 # regular expression.
371 tree = self.soup("""<a id="a">One a.</a>
372 <a id="aa">Two as.</a>
373 <a id="ab">Mixed as and bs.</a>
374 <a id="b">One b.</a>
375 <a>No ID.</a>""")
376
377 self.assertSelects(tree.find_all(id=re.compile("^a+$")),
378 ["One a.", "Two as."])
379
380 def test_find_by_name_and_containing_string(self):
381 soup = self.soup("<b>foo</b><b>bar</b><a>foo</a>")
382 a = soup.a
383
384 self.assertEqual([a], soup.find_all("a", text="foo"))
385 self.assertEqual([], soup.find_all("a", text="bar"))
387
388 def test_find_by_name_and_containing_string_when_string_is_buried(self):
389 soup = self.soup("<a>foo</a><a><b><c>foo</c></b></a>")
390 self.assertEqual(soup.find_all("a"), soup.find_all("a", text="foo"))
391
392 def test_find_by_attribute_and_containing_string(self):
393 soup = self.soup('<b id="1">foo</b><a id="2">foo</a>')
394 a = soup.a
395
396 self.assertEqual([a], soup.find_all(id=2, text="foo"))
397 self.assertEqual([], soup.find_all(id=1, text="bar"))
398
399
400
402class TestIndex(TreeTest):
403 """Test Tag.index"""
404 def test_index(self):
405 tree = self.soup("""<div>
406 <a>Identical</a>
407 <b>Not identical</b>
408 <a>Identical</a>
409
410 <c><d>Identical with child</d></c>
411 <b>Also not identical</b>
412 <c><d>Identical with child</d></c>
413 </div>""")
414 div = tree.div
415 for i, element in enumerate(div.contents):
416 self.assertEqual(i, div.index(element))
417 self.assertRaises(ValueError, tree.index, 1)
418
419
420class TestParentOperations(TreeTest):
421 """Test navigation and searching through an element's parents."""
422
423 def setUp(self):
424 super(TestParentOperations, self).setUp()
425 self.tree = self.soup('''<ul id="empty"></ul>
426 <ul id="top">
427 <ul id="middle">
428 <ul id="bottom">
429 <b>Start here</b>
430 </ul>
431 </ul>''')
432 self.start = self.tree.b
433
434
435 def test_parent(self):
436 self.assertEqual(self.start.parent['id'], 'bottom')
437 self.assertEqual(self.start.parent.parent['id'], 'middle')
438 self.assertEqual(self.start.parent.parent.parent['id'], 'top')
439
440 def test_parent_of_top_tag_is_soup_object(self):
441 top_tag = self.tree.contents[0]
442 self.assertEqual(top_tag.parent, self.tree)
443
444 def test_soup_object_has_no_parent(self):
445 self.assertEqual(None, self.tree.parent)
446
447 def test_find_parents(self):
448 self.assertSelectsIDs(
449 self.start.find_parents('ul'), ['bottom', 'middle', 'top'])
450 self.assertSelectsIDs(
451 self.start.find_parents('ul', id="middle"), ['middle'])
452
453 def test_find_parent(self):
454 self.assertEqual(self.start.find_parent('ul')['id'], 'bottom')
455 self.assertEqual(self.start.find_parent('ul', id='top')['id'], 'top')
456
457 def test_parent_of_text_element(self):
458 text = self.tree.find(text="Start here")
459 self.assertEqual(text.parent.name, 'b')
460
461 def test_text_element_find_parent(self):
462 text = self.tree.find(text="Start here")
463 self.assertEqual(text.find_parent('ul')['id'], 'bottom')
464
465 def test_parent_generator(self):
466 parents = [parent['id'] for parent in self.start.parents
467 if parent is not None and 'id' in parent.attrs]
468 self.assertEqual(parents, ['bottom', 'middle', 'top'])
469
470
471class ProximityTest(TreeTest):
472
473 def setUp(self):
474 super(ProximityTest, self).setUp()
475 self.tree = self.soup(
476 '<html id="start"><head></head><body><b id="1">One</b><b id="2">Two</b><b id="3">Three</b></body></html>')
477
478
479class TestNextOperations(ProximityTest):
480
481 def setUp(self):
482 super(TestNextOperations, self).setUp()
483 self.start = self.tree.b
484
485 def test_next(self):
486 self.assertEqual(self.start.next_element, "One")
487 self.assertEqual(self.start.next_element.next_element['id'], "2")
488
489 def test_next_of_last_item_is_none(self):
490 last = self.tree.find(text="Three")
491 self.assertEqual(last.next_element, None)
492
493 def test_next_of_root_is_none(self):
494 # The document root is outside the next/previous chain.
495 self.assertEqual(self.tree.next_element, None)
496
497 def test_find_all_next(self):
498 self.assertSelects(self.start.find_all_next('b'), ["Two", "Three"])
500 self.assertSelects(self.start.find_all_next(id=3), ["Three"])
501
502 def test_find_next(self):
503 self.assertEqual(self.start.find_next('b')['id'], '2')
504 self.assertEqual(self.start.find_next(text="Three"), "Three")
505
506 def test_find_next_for_text_element(self):
507 text = self.tree.find(text="One")
508 self.assertEqual(text.find_next("b").string, "Two")
509 self.assertSelects(text.find_all_next("b"), ["Two", "Three"])
510
511 def test_next_generator(self):
512 start = self.tree.find(text="Two")
513 successors = list(start.next_elements)
514 # There are two successors: the final <b> tag and its text contents.
515 tag, contents = successors
516 self.assertEqual(tag['id'], '3')
517 self.assertEqual(contents, "Three")
518
519class TestPreviousOperations(ProximityTest):
520
521 def setUp(self):
522 super(TestPreviousOperations, self).setUp()
523 self.end = self.tree.find(text="Three")
524
525 def test_previous(self):
526 self.assertEqual(self.end.previous_element['id'], "3")
527 self.assertEqual(self.end.previous_element.previous_element, "Two")
528
529 def test_previous_of_first_item_is_none(self):
530 first = self.tree.find('html')
531 self.assertEqual(first.previous_element, None)
532
533 def test_previous_of_root_is_none(self):
534 # The document root is outside the next/previous chain.
535 # XXX This is broken!
536 #self.assertEqual(self.tree.previous_element, None)
537 pass
538
539 def test_find_all_previous(self):
540 # The <b> tag containing the "Three" node is the predecessor
541 # of the "Three" node itself, which is why "Three" shows up
542 # here.
543 self.assertSelects(
544 self.end.find_all_previous('b'), ["Three", "Two", "One"])
545 self.assertSelects(self.end.find_all_previous(id=1), ["One"])
546
547 def test_find_previous(self):
548 self.assertEqual(self.end.find_previous('b')['id'], '3')
549 self.assertEqual(self.end.find_previous(text="One"), "One")
550
551 def test_find_previous_for_text_element(self):
552 text = self.tree.find(text="Three")
553 self.assertEqual(text.find_previous("b").string, "Three")
554 self.assertSelects(
555 text.find_all_previous("b"), ["Three", "Two", "One"])
556
557 def test_previous_generator(self):
558 start = self.tree.find(text="One")
559 predecessors = list(start.previous_elements)
560
561 # There are four predecessors: the <b> tag containing "One",
562 # the <body> tag, the <head> tag, and the <html> tag.
563 b, body, head, html = predecessors
564 self.assertEqual(b['id'], '1')
565 self.assertEqual(body.name, "body")
566 self.assertEqual(head.name, "head")
567 self.assertEqual(html.name, "html")
568
569
570class SiblingTest(TreeTest):
571
572 def setUp(self):
573 super(SiblingTest, self).setUp()
574 markup = '''<html>
575 <span id="1">
576 <span id="1.1"></span>
577 </span>
578 <span id="2">
579 <span id="2.1"></span>
580 </span>
581 <span id="3">
582 <span id="3.1"></span>
583 </span>
584 <span id="4"></span>
585 </html>'''
586 # All that whitespace looks good but makes the tests more
587 # difficult. Get rid of it.
588 markup = re.compile(r"\n\s*").sub("", markup)
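        # markup is now a single line:
        # '<html><span id="1"><span id="1.1"></span></span>...</html>'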
589 self.tree = self.soup(markup)
590
591
592class TestNextSibling(SiblingTest):
593
594 def setUp(self):
595 super(TestNextSibling, self).setUp()
596 self.start = self.tree.find(id="1")
597
598 def test_next_sibling_of_root_is_none(self):
599 self.assertEqual(self.tree.next_sibling, None)
600
601 def test_next_sibling(self):
602 self.assertEqual(self.start.next_sibling['id'], '2')
603 self.assertEqual(self.start.next_sibling.next_sibling['id'], '3')
604
605 # Note the difference between next_sibling and next_element.
606 self.assertEqual(self.start.next_element['id'], '1.1')
607
608 def test_next_sibling_may_not_exist(self):
609 self.assertEqual(self.tree.html.next_sibling, None)
610
611 nested_span = self.tree.find(id="1.1")
612 self.assertEqual(nested_span.next_sibling, None)
613
614 last_span = self.tree.find(id="4")
615 self.assertEqual(last_span.next_sibling, None)
616
617 def test_find_next_sibling(self):
618 self.assertEqual(self.start.find_next_sibling('span')['id'], '2')
619
620 def test_next_siblings(self):
621 self.assertSelectsIDs(self.start.find_next_siblings("span"),
622 ['2', '3', '4'])
623
624 self.assertSelectsIDs(self.start.find_next_siblings(id='3'), ['3'])
625
626 def test_next_sibling_for_text_element(self):
627 soup = self.soup("Foo<b>bar</b>baz")
628 start = soup.find(text="Foo")
629 self.assertEqual(start.next_sibling.name, 'b')
630 self.assertEqual(start.next_sibling.next_sibling, 'baz')
631
632 self.assertSelects(start.find_next_siblings('b'), ['bar'])
633 self.assertEqual(start.find_next_sibling(text="baz"), "baz")
634 self.assertEqual(start.find_next_sibling(text="nonesuch"), None)
635
636
637class TestPreviousSibling(SiblingTest):
638
639 def setUp(self):
640 super(TestPreviousSibling, self).setUp()
641 self.end = self.tree.find(id="4")
642
643 def test_previous_sibling_of_root_is_none(self):
644 self.assertEqual(self.tree.previous_sibling, None)
645
646 def test_previous_sibling(self):
647 self.assertEqual(self.end.previous_sibling['id'], '3')
648 self.assertEqual(self.end.previous_sibling.previous_sibling['id'], '2')
649
650 # Note the difference between previous_sibling and previous_element.
651 self.assertEqual(self.end.previous_element['id'], '3.1')
652
653 def test_previous_sibling_may_not_exist(self):
654 self.assertEqual(self.tree.html.previous_sibling, None)
655
656 nested_span = self.tree.find(id="1.1")
657 self.assertEqual(nested_span.previous_sibling, None)
658
659 first_span = self.tree.find(id="1")
660 self.assertEqual(first_span.previous_sibling, None)
661
662 def test_find_previous_sibling(self):
663 self.assertEqual(self.end.find_previous_sibling('span')['id'], '3')
664
665 def test_previous_siblings(self):
666 self.assertSelectsIDs(self.end.find_previous_siblings("span"),
667 ['3', '2', '1'])
668
669 self.assertSelectsIDs(self.end.find_previous_siblings(id='1'), ['1'])
670
671 def test_previous_sibling_for_text_element(self):
672 soup = self.soup("Foo<b>bar</b>baz")
673 start = soup.find(text="baz")
674 self.assertEqual(start.previous_sibling.name, 'b')
675 self.assertEqual(start.previous_sibling.previous_sibling, 'Foo')
676
677 self.assertSelects(start.find_previous_siblings('b'), ['bar'])
678 self.assertEqual(start.find_previous_sibling(text="Foo"), "Foo")
679 self.assertEqual(start.find_previous_sibling(text="nonesuch"), None)
680
681
682class TestTagCreation(SoupTest):
683 """Test the ability to create new tags."""
684 def test_new_tag(self):
685 soup = self.soup("")
686 new_tag = soup.new_tag("foo", bar="baz")
687 self.assertTrue(isinstance(new_tag, Tag))
688 self.assertEqual("foo", new_tag.name)
689 self.assertEqual(dict(bar="baz"), new_tag.attrs)
690 self.assertEqual(None, new_tag.parent)
691
692 def test_tag_inherits_self_closing_rules_from_builder(self):
693 if XML_BUILDER_PRESENT:
694 xml_soup = BeautifulSoup("", "lxml-xml")
695 xml_br = xml_soup.new_tag("br")
696 xml_p = xml_soup.new_tag("p")
697
698 # Both the <br> and <p> tags are empty-element tags, just
699 # because they have no contents.
700 self.assertEqual(b"<br/>", xml_br.encode())
701 self.assertEqual(b"<p/>", xml_p.encode())
702
703 html_soup = BeautifulSoup("", "html.parser")
704 html_br = html_soup.new_tag("br")
705 html_p = html_soup.new_tag("p")
706
707 # The HTML builder uses HTML's rules about which tags are
708 # empty-element tags, and the new tags reflect these rules.
709 self.assertEqual(b"<br/>", html_br.encode())
710 self.assertEqual(b"<p></p>", html_p.encode())
711
712 def test_new_string_creates_navigablestring(self):
713 soup = self.soup("")
714 s = soup.new_string("foo")
715 self.assertEqual("foo", s)
716 self.assertTrue(isinstance(s, NavigableString))
717
718 def test_new_string_can_create_navigablestring_subclass(self):
719 soup = self.soup("")
720 s = soup.new_string("foo", Comment)
721 self.assertEqual("foo", s)
722 self.assertTrue(isinstance(s, Comment))
723
724class TestTreeModification(SoupTest):
725
726 def test_attribute_modification(self):
727 soup = self.soup('<a id="1"></a>')
728 soup.a['id'] = 2
729 self.assertEqual(soup.decode(), self.document_for('<a id="2"></a>'))
730 del soup.a['id']
731 self.assertEqual(soup.decode(), self.document_for('<a></a>'))
732 soup.a['id2'] = 'foo'
733 self.assertEqual(soup.decode(), self.document_for('<a id2="foo"></a>'))
734
735 def test_new_tag_creation(self):
736 builder = builder_registry.lookup('html')()
737 soup = self.soup("<body></body>", builder=builder)
738 a = Tag(soup, builder, 'a')
739 ol = Tag(soup, builder, 'ol')
740 a['href'] = 'http://foo.com/'
741 soup.body.insert(0, a)
742 soup.body.insert(1, ol)
743 self.assertEqual(
744 soup.body.encode(),
745 b'<body><a href="http://foo.com/"></a><ol></ol></body>')
746
747 def test_append_to_contents_moves_tag(self):
748 doc = """<p id="1">Don't leave me <b>here</b>.</p>
749 <p id="2">Don\'t leave!</p>"""
750 soup = self.soup(doc)
751 second_para = soup.find(id='2')
752 bold = soup.b
753
754 # Move the <b> tag to the end of the second paragraph.
755 soup.find(id='2').append(soup.b)
756
757 # The <b> tag is now a child of the second paragraph.
758 self.assertEqual(bold.parent, second_para)
759
760 self.assertEqual(
761 soup.decode(), self.document_for(
762 '<p id="1">Don\'t leave me .</p>\n'
763 '<p id="2">Don\'t leave!<b>here</b></p>'))
764
765 def test_replace_with_returns_thing_that_was_replaced(self):
766 text = "<a></a><b><c></c></b>"
767 soup = self.soup(text)
768 a = soup.a
769 new_a = a.replace_with(soup.c)
770 self.assertEqual(a, new_a)
771
772 def test_unwrap_returns_thing_that_was_replaced(self):
773 text = "<a><b></b><c></c></a>"
774 soup = self.soup(text)
775 a = soup.a
776 new_a = a.unwrap()
777 self.assertEqual(a, new_a)
778
779 def test_replace_with_and_unwrap_give_useful_exception_when_tag_has_no_parent(self):
780 soup = self.soup("<a><b>Foo</b></a><c>Bar</c>")
781 a = soup.a
782 a.extract()
783 self.assertEqual(None, a.parent)
784 self.assertRaises(ValueError, a.unwrap)
785 self.assertRaises(ValueError, a.replace_with, soup.c)
786
787 def test_replace_tag_with_itself(self):
788 text = "<a><b></b><c>Foo<d></d></c></a><a><e></e></a>"
789 soup = self.soup(text)
790 c = soup.c
791 soup.c.replace_with(c)
792 self.assertEqual(soup.decode(), self.document_for(text))
793
794 def test_replace_tag_with_its_parent_raises_exception(self):
795 text = "<a><b></b></a>"
796 soup = self.soup(text)
797 self.assertRaises(ValueError, soup.b.replace_with, soup.a)
798
799 def test_insert_tag_into_itself_raises_exception(self):
800 text = "<a><b></b></a>"
801 soup = self.soup(text)
802 self.assertRaises(ValueError, soup.a.insert, 0, soup.a)
803
804 def test_replace_with_maintains_next_element_throughout(self):
805 soup = self.soup('<p><a>one</a><b>three</b></p>')
806 a = soup.a
807 b = a.contents[0]
808 # Make it so the <a> tag has two text children.
809 a.insert(1, "two")
810
811 # Now replace each one with the empty string.
812 left, right = a.contents
813 left.replaceWith('')
814 right.replaceWith('')
815
816 # The <b> tag is still connected to the tree.
817 self.assertEqual("three", soup.b.string)
818
819 def test_replace_final_node(self):
820 soup = self.soup("<b>Argh!</b>")
821 soup.find(text="Argh!").replace_with("Hooray!")
822 new_text = soup.find(text="Hooray!")
823 b = soup.b
824 self.assertEqual(new_text.previous_element, b)
825 self.assertEqual(new_text.parent, b)
826 self.assertEqual(new_text.previous_element.next_element, new_text)
827 self.assertEqual(new_text.next_element, None)
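        # (Beautiful Soup threads every element through a doubly-linked
        # next_element/previous_element chain; replace_with() splices the
        # new node into that chain, which is what these assertions check.)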
828
829 def test_consecutive_text_nodes(self):
830 # A builder should never create two consecutive text nodes,
831 # but if you insert one next to another, Beautiful Soup will
832 # handle it correctly.
833 soup = self.soup("<a><b>Argh!</b><c></c></a>")
834 soup.b.insert(1, "Hooray!")
835
836 self.assertEqual(
837 soup.decode(), self.document_for(
838 "<a><b>Argh!Hooray!</b><c></c></a>"))
839
840 new_text = soup.find(text="Hooray!")
841 self.assertEqual(new_text.previous_element, "Argh!")
842 self.assertEqual(new_text.previous_element.next_element, new_text)
843
844 self.assertEqual(new_text.previous_sibling, "Argh!")
845 self.assertEqual(new_text.previous_sibling.next_sibling, new_text)
846
847 self.assertEqual(new_text.next_sibling, None)
848 self.assertEqual(new_text.next_element, soup.c)
849
850 def test_insert_string(self):
851 soup = self.soup("<a></a>")
852 soup.a.insert(0, "bar")
853 soup.a.insert(0, "foo")
854 # The strings were added to the tag.
855 self.assertEqual(["foo", "bar"], soup.a.contents)
856 # And they were converted to NavigableStrings.
857 self.assertEqual(soup.a.contents[0].next_element, "bar")
858
859 def test_insert_tag(self):
860 builder = self.default_builder
861 soup = self.soup(
862 "<a><b>Find</b><c>lady!</c><d></d></a>", builder=builder)
863 magic_tag = Tag(soup, builder, 'magictag')
864 magic_tag.insert(0, "the")
865 soup.a.insert(1, magic_tag)
866
867 self.assertEqual(
868 soup.decode(), self.document_for(
869 "<a><b>Find</b><magictag>the</magictag><c>lady!</c><d></d></a>"))
870
871 # Make sure all the relationships are hooked up correctly.
872 b_tag = soup.b
873 self.assertEqual(b_tag.next_sibling, magic_tag)
874 self.assertEqual(magic_tag.previous_sibling, b_tag)
875
876 find = b_tag.find(text="Find")
877 self.assertEqual(find.next_element, magic_tag)
878 self.assertEqual(magic_tag.previous_element, find)
879
880 c_tag = soup.c
881 self.assertEqual(magic_tag.next_sibling, c_tag)
882 self.assertEqual(c_tag.previous_sibling, magic_tag)
883
884 the = magic_tag.find(text="the")
885 self.assertEqual(the.parent, magic_tag)
886 self.assertEqual(the.next_element, c_tag)
887 self.assertEqual(c_tag.previous_element, the)
888
889 def test_append_child_thats_already_at_the_end(self):
890 data = "<a><b></b></a>"
891 soup = self.soup(data)
892 soup.a.append(soup.b)
893 self.assertEqual(data, soup.decode())
894
895 def test_move_tag_to_beginning_of_parent(self):
896 data = "<a><b></b><c></c><d></d></a>"
897 soup = self.soup(data)
898 soup.a.insert(0, soup.d)
899 self.assertEqual("<a><d></d><b></b><c></c></a>", soup.decode())
900
901 def test_insert_works_on_empty_element_tag(self):
902 # This is a little strange, since most HTML parsers don't allow
903 # markup like this to come through. But in general, we don't
904 # know what the parser would or wouldn't have allowed, so
905 # I'm letting this succeed for now.
906 soup = self.soup("<br/>")
907 soup.br.insert(1, "Contents")
908 self.assertEqual(str(soup.br), "<br>Contents</br>")
909
910 def test_insert_before(self):
911 soup = self.soup("<a>foo</a><b>bar</b>")
912 soup.b.insert_before("BAZ")
913 soup.a.insert_before("QUUX")
914 self.assertEqual(
915 soup.decode(), self.document_for("QUUX<a>foo</a>BAZ<b>bar</b>"))
916
917 soup.a.insert_before(soup.b)
918 self.assertEqual(
919 soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
920
921 def test_insert_after(self):
922 soup = self.soup("<a>foo</a><b>bar</b>")
923 soup.b.insert_after("BAZ")
924 soup.a.insert_after("QUUX")
925 self.assertEqual(
926 soup.decode(), self.document_for("<a>foo</a>QUUX<b>bar</b>BAZ"))
927 soup.b.insert_after(soup.a)
928 self.assertEqual(
929 soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
930
931 def test_insert_after_raises_exception_if_after_has_no_meaning(self):
932 soup = self.soup("")
933 tag = soup.new_tag("a")
934 string = soup.new_string("")
935 self.assertRaises(ValueError, string.insert_after, tag)
936 self.assertRaises(NotImplementedError, soup.insert_after, tag)
937 self.assertRaises(ValueError, tag.insert_after, tag)
938
939 def test_insert_before_raises_notimplementederror_if_before_has_no_meaning(self):
940 soup = self.soup("")
941 tag = soup.new_tag("a")
942 string = soup.new_string("")
943 self.assertRaises(ValueError, string.insert_before, tag)
944 self.assertRaises(NotImplementedError, soup.insert_before, tag)
945 self.assertRaises(ValueError, tag.insert_before, tag)
946
947 def test_replace_with(self):
948 soup = self.soup(
949 "<p>There's <b>no</b> business like <b>show</b> business</p>")
950 no, show = soup.find_all('b')
951 show.replace_with(no)
952 self.assertEqual(
953 soup.decode(),
954 self.document_for(
955 "<p>There's business like <b>no</b> business</p>"))
956
957 self.assertEqual(show.parent, None)
958 self.assertEqual(no.parent, soup.p)
959 self.assertEqual(no.next_element, "no")
960 self.assertEqual(no.next_sibling, " business")
961
962 def test_replace_first_child(self):
963 data = "<a><b></b><c></c></a>"
964 soup = self.soup(data)
965 soup.b.replace_with(soup.c)
966 self.assertEqual("<a><c></c></a>", soup.decode())
967
968 def test_replace_last_child(self):
969 data = "<a><b></b><c></c></a>"
970 soup = self.soup(data)
971 soup.c.replace_with(soup.b)
972 self.assertEqual("<a><b></b></a>", soup.decode())
973
974 def test_nested_tag_replace_with(self):
975 soup = self.soup(
976 """<a>We<b>reserve<c>the</c><d>right</d></b></a><e>to<f>refuse</f><g>service</g></e>""")
977
978 # Replace the entire <b> tag and its contents ("reserve the
979 # right") with the <f> tag ("refuse").
980 remove_tag = soup.b
981 move_tag = soup.f
982 remove_tag.replace_with(move_tag)
983
984 self.assertEqual(
985 soup.decode(), self.document_for(
986 "<a>We<f>refuse</f></a><e>to<g>service</g></e>"))
987
988 # The <b> tag is now an orphan.
989 self.assertEqual(remove_tag.parent, None)
990 self.assertEqual(remove_tag.find(text="right").next_element, None)
991 self.assertEqual(remove_tag.previous_element, None)
992 self.assertEqual(remove_tag.next_sibling, None)
993 self.assertEqual(remove_tag.previous_sibling, None)
994
995 # The <f> tag is now connected to the <a> tag.
996 self.assertEqual(move_tag.parent, soup.a)
997 self.assertEqual(move_tag.previous_element, "We")
998 self.assertEqual(move_tag.next_element.next_element, soup.e)
999 self.assertEqual(move_tag.next_sibling, None)
1000
1001 # The gap where the <f> tag used to be has been mended, and
1002 # the word "to" is now connected to the <g> tag.
1003 to_text = soup.find(text="to")
1004 g_tag = soup.g
1005 self.assertEqual(to_text.next_element, g_tag)
1006 self.assertEqual(to_text.next_sibling, g_tag)
1007 self.assertEqual(g_tag.previous_element, to_text)
1008 self.assertEqual(g_tag.previous_sibling, to_text)
1009
1010 def test_unwrap(self):
1011 tree = self.soup("""
1012 <p>Unneeded <em>formatting</em> is unneeded</p>
1013 """)
1014 tree.em.unwrap()
1015 self.assertEqual(tree.em, None)
1016 self.assertEqual(tree.p.text, "Unneeded formatting is unneeded")
1017
1018 def test_wrap(self):
1019 soup = self.soup("I wish I was bold.")
1020 value = soup.string.wrap(soup.new_tag("b"))
1021 self.assertEqual(value.decode(), "<b>I wish I was bold.</b>")
1022 self.assertEqual(
1023 soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
1024
1025 def test_wrap_extracts_tag_from_elsewhere(self):
1026 soup = self.soup("<b></b>I wish I was bold.")
1027 soup.b.next_sibling.wrap(soup.b)
1028 self.assertEqual(
1029 soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
1030
1031 def test_wrap_puts_new_contents_at_the_end(self):
1032 soup = self.soup("<b>I like being bold.</b>I wish I was bold.")
1033 soup.b.next_sibling.wrap(soup.b)
1034 self.assertEqual(2, len(soup.b.contents))
1035 self.assertEqual(
1036 soup.decode(), self.document_for(
1037 "<b>I like being bold.I wish I was bold.</b>"))
1038
1039 def test_extract(self):
1040 soup = self.soup(
1041 '<html><body>Some content. <div id="nav">Nav crap</div> More content.</body></html>')
1042
1043 self.assertEqual(len(soup.body.contents), 3)
1044 extracted = soup.find(id="nav").extract()
1045
1046 self.assertEqual(
1047 soup.decode(), "<html><body>Some content. More content.</body></html>")
1048 self.assertEqual(extracted.decode(), '<div id="nav">Nav crap</div>')
1049
1050 # The extracted tag is now an orphan.
1051 self.assertEqual(len(soup.body.contents), 2)
1052 self.assertEqual(extracted.parent, None)
1053 self.assertEqual(extracted.previous_element, None)
1054 self.assertEqual(extracted.next_element.next_element, None)
1055
1056 # The gap where the extracted tag used to be has been mended.
1057 content_1 = soup.find(text="Some content. ")
1058 content_2 = soup.find(text=" More content.")
1059 self.assertEqual(content_1.next_element, content_2)
1060 self.assertEqual(content_1.next_sibling, content_2)
1061 self.assertEqual(content_2.previous_element, content_1)
1062 self.assertEqual(content_2.previous_sibling, content_1)
1063
1064 def test_extract_distinguishes_between_identical_strings(self):
1065 soup = self.soup("<a>foo</a><b>bar</b>")
1066 foo_1 = soup.a.string
1067 bar_1 = soup.b.string
1068 foo_2 = soup.new_string("foo")
1069 bar_2 = soup.new_string("bar")
1070 soup.a.append(foo_2)
1071 soup.b.append(bar_2)
1072
1073 # Now there are two identical strings in the <a> tag, and two
1074 # in the <b> tag. Let's remove the first "foo" and the second
1075 # "bar".
1076 foo_1.extract()
1077 bar_2.extract()
1078 self.assertEqual(foo_2, soup.a.string)
1079 self.assertEqual(bar_2, soup.b.string)
1080
1081 def test_extract_multiples_of_same_tag(self):
1082 soup = self.soup("""
1083<html>
1084<head>
1085<script>foo</script>
1086</head>
1087<body>
1088 <script>bar</script>
1089 <a></a>
1090</body>
1091<script>baz</script>
1092</html>""")
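        # Detach every <script> from the tree; extract() removes the tag
        # and returns it.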
1093 for script in soup.find_all("script"): script.extract()
1094 self.assertEqual("<body>\n\n<a></a>\n</body>", str(soup.body))
1095
1096
1097 def test_extract_works_when_element_is_surrounded_by_identical_strings(self):
1098 soup = self.soup(
1099 '<html>\n'
1100 '<body>hi</body>\n'
1101 '</html>')
1102 soup.find('body').extract()
1103 self.assertEqual(None, soup.find('body'))
1104
1105
1106 def test_clear(self):
1107 """Tag.clear()"""
1108 soup = self.soup("<p><a>String <em>Italicized</em></a> and another</p>")
1109 # clear using extract()
1110 a = soup.a
1111 soup.p.clear()
1112 self.assertEqual(len(soup.p.contents), 0)
1113 self.assertTrue(hasattr(a, "contents"))
1114
1115 # clear using decompose()
1116 em = a.em
1117 a.clear(decompose=True)
1118 self.assertEqual(0, len(em.contents))
1119
1120 def test_string_set(self):
1121 """Tag.string = 'string'"""
1122 soup = self.soup("<a></a> <b><c></c></b>")
1123 soup.a.string = "foo"
1124 self.assertEqual(soup.a.contents, ["foo"])
1125 soup.b.string = "bar"
1126 self.assertEqual(soup.b.contents, ["bar"])
1127
1128 def test_string_set_does_not_affect_original_string(self):
1129 soup = self.soup("<a><b>foo</b><c>bar</c>")
1130 soup.b.string = soup.c.string
1131 self.assertEqual(soup.a.encode(), b"<a><b>bar</b><c>bar</c></a>")
1132
1133 def test_set_string_preserves_class_of_string(self):
1134 soup = self.soup("<a></a>")
1135 cdata = CData("foo")
1136 soup.a.string = cdata
1137 self.assertTrue(isinstance(soup.a.string, CData))
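        # (Assigning to .string wraps the value in the value's own class--
        # roughly string.__class__(string)--so a CData instance stays
        # CData instead of being downgraded to a plain NavigableString.)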
1138
1139class TestElementObjects(SoupTest):
1140 """Test various features of element objects."""
1141
1142 def test_len(self):
1143 """The length of an element is its number of children."""
1144 soup = self.soup("<top>1<b>2</b>3</top>")
1145
1146 # The BeautifulSoup object itself contains one element: the
1147 # <top> tag.
1148 self.assertEqual(len(soup.contents), 1)
1149 self.assertEqual(len(soup), 1)
1150
1151 # The <top> tag contains three elements: the text node "1", the
1152 # <b> tag, and the text node "3".
1153 self.assertEqual(len(soup.top), 3)
1154 self.assertEqual(len(soup.top.contents), 3)
1155
1156 def test_member_access_invokes_find(self):
1157 """Accessing a Python member .foo invokes find('foo')"""
1158 soup = self.soup('<b><i></i></b>')
1159 self.assertEqual(soup.b, soup.find('b'))
1160 self.assertEqual(soup.b.i, soup.find('b').find('i'))
1161 self.assertEqual(soup.a, None)
1162
1163 def test_deprecated_member_access(self):
1164 soup = self.soup('<b><i></i></b>')
1165 with warnings.catch_warnings(record=True) as w:
1166 tag = soup.bTag
1167 self.assertEqual(soup.b, tag)
1168 self.assertEqual(
1169 '.bTag is deprecated, use .find("b") instead.',
1170 str(w[0].message))
1171
1172 def test_has_attr(self):
1173 """has_attr() checks for the presence of an attribute.
1174
1175 Please note: has_attr() is different from
1176 __contains__. has_attr() checks the tag's attributes;
1177 __contains__ (the "in" operator) checks the tag's children.
1178 """
1179 soup = self.soup("<foo attr='bar'>")
1180 self.assertTrue(soup.foo.has_attr('attr'))
1181 self.assertFalse(soup.foo.has_attr('attr2'))
1182
1183
1184 def test_attributes_come_out_in_alphabetical_order(self):
1185 markup = '<b a="1" z="5" m="3" f="2" y="4"></b>'
1186 self.assertSoupEquals(markup, '<b a="1" f="2" m="3" y="4" z="5"></b>')
1187
1188 def test_string(self):
1189 # A tag that contains only a text node makes that node
1190 # available as .string.
1191 soup = self.soup("<b>foo</b>")
1192 self.assertEqual(soup.b.string, 'foo')
1193
1194 def test_empty_tag_has_no_string(self):
1195 # A tag with no children has no .string.
1196 soup = self.soup("<b></b>")
1197 self.assertEqual(soup.b.string, None)
1198
1199 def test_tag_with_multiple_children_has_no_string(self):
1200 # A tag with multiple children has no .string.
1201 soup = self.soup("<a>foo<b></b><b></b></a>")
1202 self.assertEqual(soup.b.string, None)
1203
1204 soup = self.soup("<a>foo<b></b>bar</a>")
1205 self.assertEqual(soup.b.string, None)
1206
1207 # Even if all the children are strings, .string is still
1208 # None--though making this case work would be a good optimization.
1209 soup = self.soup("<a>foo</a>")
1210 soup.a.insert(1, "bar")
1211 self.assertEqual(soup.a.string, None)
1212
1213 def test_tag_with_recursive_string_has_string(self):
1214 # A tag with a single child which has a .string inherits that
1215 # .string.
1216 soup = self.soup("<a><b>foo</b></a>")
1217 self.assertEqual(soup.a.string, "foo")
1218 self.assertEqual(soup.string, "foo")
1219
1220 def test_lack_of_string(self):
1221 """Only a tag containing a single text node has a .string."""
1222 soup = self.soup("<b>f<i>e</i>o</b>")
1223 self.assertFalse(soup.b.string)
1224
1225 soup = self.soup("<b></b>")
1226 self.assertFalse(soup.b.string)
1227
1228 def test_all_text(self):
1229 """Tag.text and Tag.get_text(sep=u"") -> all child text, concatenated"""
1230 soup = self.soup("<a>a<b>r</b> <r> t </r></a>")
1231 self.assertEqual(soup.a.text, "ar t ")
1232 self.assertEqual(soup.a.get_text(strip=True), "art")
1233 self.assertEqual(soup.a.get_text(","), "a,r, , t ")
1234 self.assertEqual(soup.a.get_text(",", strip=True), "a,r,t")
1235
1236 def test_get_text_ignores_comments(self):
1237 soup = self.soup("foo<!--IGNORE-->bar")
1238 self.assertEqual(soup.get_text(), "foobar")
1239
1240 self.assertEqual(
1241 soup.get_text(types=(NavigableString, Comment)), "fooIGNOREbar")
1242 self.assertEqual(
1243 soup.get_text(types=None), "fooIGNOREbar")
1244
1245 def test_all_strings_ignores_comments(self):
1246 soup = self.soup("foo<!--IGNORE-->bar")
1247 self.assertEqual(['foo', 'bar'], list(soup.strings))
1248
1249class TestCDataListAttributes(SoupTest):
1250
1251 """Testing cdata-list attributes like 'class'."""
1253 def test_single_value_becomes_list(self):
1254 soup = self.soup("<a class='foo'>")
1255 self.assertEqual(["foo"],soup.a['class'])
1256
1257 def test_multiple_values_becomes_list(self):
1258 soup = self.soup("<a class='foo bar'>")
1259 self.assertEqual(["foo", "bar"], soup.a['class'])
1260
1261 def test_multiple_values_separated_by_weird_whitespace(self):
1262 soup = self.soup("<a class='foo\tbar\nbaz'>")
1263 self.assertEqual(["foo", "bar", "baz"],soup.a['class'])
1264
1265 def test_attributes_joined_into_string_on_output(self):
1266 soup = self.soup("<a class='foo\tbar'>")
1267 self.assertEqual(b'<a class="foo bar"></a>', soup.a.encode())
1268
1269 def test_accept_charset(self):
1270 soup = self.soup('<form accept-charset="ISO-8859-1 UTF-8">')
1271 self.assertEqual(['ISO-8859-1', 'UTF-8'], soup.form['accept-charset'])
1272
1273 def test_cdata_attribute_applying_only_to_one_tag(self):
1274 data = '<a accept-charset="ISO-8859-1 UTF-8"></a>'
1275 soup = self.soup(data)
1276 # We saw in another test that accept-charset is a cdata-list
1277 # attribute for the <form> tag. But it's not a cdata-list
1278 # attribute for any other tag.
1279 self.assertEqual('ISO-8859-1 UTF-8', soup.a['accept-charset'])
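        # (The tree builder keeps a per-tag table of cdata-list
        # attributes--the builder's cdata_list_attributes map lists
        # accept-charset for <form> only--so the same attribute on an
        # <a> tag stays a plain string.)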
1280
1281 def test_string_has_immutable_name_property(self):
1282 string = self.soup("s").string
1283 self.assertEqual(None, string.name)
1284 def t():
1285 string.name = 'foo'
1286 self.assertRaises(AttributeError, t)
1287
1288class TestPersistence(SoupTest):
1289 "Testing features like pickle and deepcopy."
1290
1291 def setUp(self):
1292 super(TestPersistence, self).setUp()
1293 self.page = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"
1294"http://www.w3.org/TR/REC-html40/transitional.dtd">
1295<html>
1296<head>
1297<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
1298<title>Beautiful Soup: We called him Tortoise because he taught us.</title>
1299<link rev="made" href="mailto:leonardr@segfault.org">
1300<meta name="Description" content="Beautiful Soup: an HTML parser optimized for screen-scraping.">
1301<meta name="generator" content="Markov Approximation 1.4 (module: leonardr)">
1302<meta name="author" content="Leonard Richardson">
1303</head>
1304<body>
1305<a href="foo">foo</a>
1306<a href="foo"><b>bar</b></a>
1307</body>
1308</html>"""
1309 self.tree = self.soup(self.page)
1310
1311 def test_pickle_and_unpickle_identity(self):
1312 # Pickling a tree, then unpickling it, yields a tree identical
1313 # to the original.
1314 dumped = pickle.dumps(self.tree, 2)
1315 loaded = pickle.loads(dumped)
1316 self.assertEqual(loaded.__class__, BeautifulSoup)
1317 self.assertEqual(loaded.decode(), self.tree.decode())
1318
1319 def test_deepcopy_identity(self):
1320 # Making a deepcopy of a tree yields an identical tree.
1321 copied = copy.deepcopy(self.tree)
1322 self.assertEqual(copied.decode(), self.tree.decode())
1323
1324 def test_unicode_pickle(self):
1325 # A tree containing Unicode characters can be pickled.
1326 html = "<b>\N{SNOWMAN}</b>"
1327 soup = self.soup(html)
1328 dumped = pickle.dumps(soup, pickle.HIGHEST_PROTOCOL)
1329 loaded = pickle.loads(dumped)
1330 self.assertEqual(loaded.decode(), soup.decode())
1331
1332 def test_copy_navigablestring_is_not_attached_to_tree(self):
1333 html = "<b>Foo<a></a></b><b>Bar</b>"
1334 soup = self.soup(html)
1335 s1 = soup.find(string="Foo")
1336 s2 = copy.copy(s1)
1337 self.assertEqual(s1, s2)
1338 self.assertEqual(None, s2.parent)
1339 self.assertEqual(None, s2.next_element)
1340 self.assertNotEqual(None, s1.next_sibling)
1341 self.assertEqual(None, s2.next_sibling)
1342 self.assertEqual(None, s2.previous_element)
1343
1344 def test_copy_navigablestring_subclass_has_same_type(self):
1345 html = "<b><!--Foo--></b>"
1346 soup = self.soup(html)
1347 s1 = soup.string
1348 s2 = copy.copy(s1)
1349 self.assertEqual(s1, s2)
1350 self.assertTrue(isinstance(s2, Comment))
1351
1352 def test_copy_entire_soup(self):
1353 html = "<div><b>Foo<a></a></b><b>Bar</b></div>end"
1354 soup = self.soup(html)
1355 soup_copy = copy.copy(soup)
1356 self.assertEqual(soup, soup_copy)
1357
1358 def test_copy_tag_copies_contents(self):
1359 html = "<div><b>Foo<a></a></b><b>Bar</b></div>end"
1360 soup = self.soup(html)
1361 div = soup.div
1362 div_copy = copy.copy(div)
1363
1364 # The two tags look the same, and evaluate to equal.
1365 self.assertEqual(str(div), str(div_copy))
1366 self.assertEqual(div, div_copy)
1367
1368 # But they're not the same object.
1369 self.assertFalse(div is div_copy)
1370
1371 # And they don't have the same relation to the parse tree. The
1372 # copy is not associated with a parse tree at all.
1373 self.assertEqual(None, div_copy.parent)
1374 self.assertEqual(None, div_copy.previous_element)
1375 self.assertEqual(None, div_copy.find(string='Bar').next_element)
1376 self.assertNotEqual(None, div.find(string='Bar').next_element)
1377
1378class TestSubstitutions(SoupTest):
1379
1380 def test_default_formatter_is_minimal(self):
1381 markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
1382 soup = self.soup(markup)
1383 decoded = soup.decode(formatter="minimal")
1384 # The < is converted back into &lt; but the e-with-acute is left alone.
1385 self.assertEqual(
1386 decoded,
1387 self.document_for(
1388 "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
1389
1390 def test_formatter_html(self):
1391 markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
1392 soup = self.soup(markup)
1393 decoded = soup.decode(formatter="html")
1394 self.assertEqual(
1395 decoded,
1396 self.document_for("<b>&lt;&lt;Sacr&eacute; bleu!&gt;&gt;</b>"))
1397
1398 def test_formatter_minimal(self):
1399 markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
1400 soup = self.soup(markup)
1401 decoded = soup.decode(formatter="minimal")
1402 # The < is converted back into &lt; but the e-with-acute is left alone.
1403 self.assertEqual(
1404 decoded,
1405 self.document_for(
1406 "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
1407
1408 def test_formatter_null(self):
1409 markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
1410 soup = self.soup(markup)
1411 decoded = soup.decode(formatter=None)
1412 # Neither the angle brackets nor the e-with-acute are converted.
1413 # This is not valid HTML, but it's what the user wanted.
1414 self.assertEqual(decoded,
1415 self.document_for("<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"))
1416
1417 def test_formatter_custom(self):
1418 markup = "<b>&lt;foo&gt;</b><b>bar</b>"
1419 soup = self.soup(markup)
1420 decoded = soup.decode(formatter=lambda x: x.upper())
1421 # Instead of normal entity conversion code, the custom
1422 # callable is called on every string.
1423 self.assertEqual(
1424 decoded,
1425 self.document_for("<b><FOO></b><b>BAR</b>"))
1426
1427 def test_formatter_is_run_on_attribute_values(self):
1428 markup = '<a href="http://a.com?a=b&c=é">e</a>'
1429 soup = self.soup(markup)
1430 a = soup.a
1431
1432 expect_minimal = '<a href="http://a.com?a=b&amp;c=é">e</a>'
1433
1434 self.assertEqual(expect_minimal, a.decode())
1435 self.assertEqual(expect_minimal, a.decode(formatter="minimal"))
1436
1437 expect_html = '<a href="http://a.com?a=b&amp;c=&eacute;">e</a>'
1438 self.assertEqual(expect_html, a.decode(formatter="html"))
1439
1440 self.assertEqual(markup, a.decode(formatter=None))
1441 expect_upper = '<a href="HTTP://A.COM?A=B&C=É">E</a>'
1442 self.assertEqual(expect_upper, a.decode(formatter=lambda x: x.upper()))
1443
1444 def test_formatter_skips_script_tag_for_html_documents(self):
1445 doc = """
1446 <script type="text/javascript">
1447 console.log("< < hey > > ");
1448 </script>
1449"""
1450 encoded = BeautifulSoup(doc, 'html.parser').encode()
1451 self.assertTrue(b"< < hey > >" in encoded)
1452
1453 def test_formatter_skips_style_tag_for_html_documents(self):
1454 doc = """
1455 <style type="text/css">
1456 console.log("< < hey > > ");
1457 </style>
1458"""
1459 encoded = BeautifulSoup(doc, 'html.parser').encode()
1460 self.assertTrue(b"< < hey > >" in encoded)
1461
1462 def test_prettify_leaves_preformatted_text_alone(self):
1463 soup = self.soup("<div> foo <pre> \tbar\n \n </pre> baz ")
1464 # Everything outside the <pre> tag is reformatted, but everything
1465 # inside is left alone.
1466 self.assertEqual(
1467 '<div>\n foo\n <pre> \tbar\n \n </pre>\n baz\n</div>',
1468 soup.div.prettify())
1469
1470 def test_prettify_accepts_formatter(self):
1471 soup = BeautifulSoup("<html><body>foo</body></html>", 'html.parser')
1472 pretty = soup.prettify(formatter=lambda x: x.upper())
1473 self.assertTrue("FOO" in pretty)
1474
1475 def test_prettify_outputs_unicode_by_default(self):
1476 soup = self.soup("<a></a>")
1477 self.assertEqual(str, type(soup.prettify()))
1478
1479 def test_prettify_can_encode_data(self):
1480 soup = self.soup("<a></a>")
1481 self.assertEqual(bytes, type(soup.prettify("utf-8")))
1482
1483 def test_html_entity_substitution_off_by_default(self):
1484 markup = "<b>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</b>"
1485 soup = self.soup(markup)
1486 encoded = soup.b.encode("utf-8")
1487 self.assertEqual(encoded, markup.encode('utf-8'))
1488
1489 def test_encoding_substitution(self):
1490 # Here's the <meta> tag saying that a document is
1491 # encoded in Shift-JIS.
1492 meta_tag = ('<meta content="text/html; charset=x-sjis" '
1493 'http-equiv="Content-type"/>')
1494 soup = self.soup(meta_tag)
1495
1496 # Parse the document, and the charset appears unchanged.
1497 self.assertEqual(soup.meta['content'], 'text/html; charset=x-sjis')
1498
1499 # Encode the document into some encoding, and the encoding is
1500 # substituted into the meta tag.
1501 utf_8 = soup.encode("utf-8")
1502 self.assertTrue(b"charset=utf-8" in utf_8)
1503
1504 euc_jp = soup.encode("euc_jp")
1505 self.assertTrue(b"charset=euc_jp" in euc_jp)
1506
1507 shift_jis = soup.encode("shift-jis")
1508 self.assertTrue(b"charset=shift-jis" in shift_jis)
1509
1510 utf_16_u = soup.encode("utf-16").decode("utf-16")
1511 self.assertTrue("charset=utf-16" in utf_16_u)
1512
1513 def test_encoding_substitution_doesnt_happen_if_tag_is_strained(self):
1514 markup = ('<head><meta content="text/html; charset=x-sjis" '
1515 'http-equiv="Content-type"/></head><pre>foo</pre>')
1516
1517 # Beautiful Soup used to try to rewrite the meta tag even if the
1518 # meta tag got filtered out by the strainer. This test makes
1519 # sure that doesn't happen.
1520 strainer = SoupStrainer('pre')
1521 soup = self.soup(markup, parse_only=strainer)
1522 self.assertEqual(soup.contents[0].name, 'pre')
1523
1524class TestEncoding(SoupTest):
1525 """Test the ability to encode objects into strings."""
1526
1527 def test_unicode_string_can_be_encoded(self):
1528 html = "<b>\N{SNOWMAN}</b>"
1529 soup = self.soup(html)
1530 self.assertEqual(soup.b.string.encode("utf-8"),
1531 "\N{SNOWMAN}".encode("utf-8"))
1532
1533 def test_tag_containing_unicode_string_can_be_encoded(self):
1534 html = "<b>\N{SNOWMAN}</b>"
1535 soup = self.soup(html)
1536 self.assertEqual(
1537 soup.b.encode("utf-8"), html.encode("utf-8"))
1538
1539 def test_encoding_substitutes_unrecognized_characters_by_default(self):
1540 html = "<b>\N{SNOWMAN}</b>"
1541 soup = self.soup(html)
1542 self.assertEqual(soup.b.encode("ascii"), b"<b>&#9731;</b>")
1543
1544 def test_encoding_can_be_made_strict(self):
1545 html = "<b>\N{SNOWMAN}</b>"
1546 soup = self.soup(html)
1547 self.assertRaises(
1548 UnicodeEncodeError, soup.encode, "ascii", errors="strict")
1549
1550 def test_decode_contents(self):
1551 html = "<b>\N{SNOWMAN}</b>"
1552 soup = self.soup(html)
1553 self.assertEqual("\N{SNOWMAN}", soup.b.decode_contents())
1554
1555 def test_encode_contents(self):
1556 html = "<b>\N{SNOWMAN}</b>"
1557 soup = self.soup(html)
1558 self.assertEqual(
1559 "\N{SNOWMAN}".encode("utf8"), soup.b.encode_contents(
1560 encoding="utf8"))
1561
1562 def test_deprecated_renderContents(self):
1563 html = "<b>\N{SNOWMAN}</b>"
1564 soup = self.soup(html)
1565 self.assertEqual(
1566 "\N{SNOWMAN}".encode("utf8"), soup.b.renderContents())
1567
1568 def test_repr(self):
1569 html = "<b>\N{SNOWMAN}</b>"
1570 soup = self.soup(html)
1571 if PY3K:
1572 self.assertEqual(html, repr(soup))
1573 else:
1574 self.assertEqual(b'<b>\\u2603</b>', repr(soup))
1575
1576class TestNavigableStringSubclasses(SoupTest):
1577
1578 def test_cdata(self):
1579 # None of the current builders turn CDATA sections into CData
1580 # objects, but you can create them manually.
1581 soup = self.soup("")
1582 cdata = CData("foo")
1583 soup.insert(1, cdata)
1584 self.assertEqual(str(soup), "<![CDATA[foo]]>")
1585 self.assertEqual(soup.find(text="foo"), "foo")
1586 self.assertEqual(soup.contents[0], "foo")
1587
1588 def test_cdata_is_never_formatted(self):
1589 """Text inside a CData object is passed into the formatter.
1590
1591 But the return value is ignored.
1592 """
1593
1594 self.count = 0
1595 def increment(*args):
1596 self.count += 1
1597 return "BITTER FAILURE"
1598
1599 soup = self.soup("")
1600 cdata = CData("<><><>")
1601 soup.insert(1, cdata)
1602 self.assertEqual(
1603 b"<![CDATA[<><><>]]>", soup.encode(formatter=increment))
1604 self.assertEqual(1, self.count)
1605
1606 def test_doctype_ends_in_newline(self):
1607 # Unlike other NavigableString subclasses, a DOCTYPE always ends
1608 # in a newline.
1609 doctype = Doctype("foo")
1610 soup = self.soup("")
1611 soup.insert(1, doctype)
1612 self.assertEqual(soup.encode(), b"<!DOCTYPE foo>\n")
1613
1614 def test_declaration(self):
1615 d = Declaration("foo")
1616 self.assertEqual("<?foo?>", d.output_ready())
1617
1618class TestSoupSelector(TreeTest):
1619
1620 HTML = """
1621<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
1622"http://www.w3.org/TR/html4/strict.dtd">
1623<html>
1624<head>
1625<title>The title</title>
1626<link rel="stylesheet" href="blah.css" type="text/css" id="l1">
1627</head>
1628<body>
1629<custom-dashed-tag class="dashed" id="dash1">Hello there.</custom-dashed-tag>
1630<div id="main" class="fancy">
1631<div id="inner">
1632<h1 id="header1">An H1</h1>
1633<p>Some text</p>
1634<p class="onep" id="p1">Some more text</p>
1635<h2 id="header2">An H2</h2>
1636<p class="class1 class2 class3" id="pmulti">Another</p>
1637<a href="http://bob.example.org/" rel="friend met" id="bob">Bob</a>
1638<h2 id="header3">Another H2</h2>
1639<a id="me" href="http://simonwillison.net/" rel="me">me</a>
1640<span class="s1">
1641<a href="#" id="s1a1">span1a1</a>
1642<a href="#" id="s1a2">span1a2 <span id="s1a2s1">test</span></a>
1643<span class="span2">
1644<a href="#" id="s2a1">span2a1</a>
1645</span>
1646<span class="span3"></span>
1647<custom-dashed-tag class="dashed" id="dash2"/>
1648<div data-tag="dashedvalue" id="data1"/>
1649</span>
1650</div>
1651<x id="xid">
1652<z id="zida"/>
1653<z id="zidab"/>
1654<z id="zidac"/>
1655</x>
1656<y id="yid">
1657<z id="zidb"/>
1658</y>
1659<p lang="en" id="lang-en">English</p>
1660<p lang="en-gb" id="lang-en-gb">English UK</p>
1661<p lang="en-us" id="lang-en-us">English US</p>
1662<p lang="fr" id="lang-fr">French</p>
1663</div>
1664
1665<div id="footer">
1666</div>
1667"""
1668
1669 def setUp(self):
1670 self.soup = BeautifulSoup(self.HTML, 'html.parser')
1671
1672 def assertSelects(self, selector, expected_ids):
1673 el_ids = [el['id'] for el in self.soup.select(selector)]
1674 el_ids.sort()
1675 expected_ids.sort()
1676 self.assertEqual(expected_ids, el_ids,
1677 "Selector %s, expected [%s], got [%s]" % (
1678 selector, ', '.join(expected_ids), ', '.join(el_ids)
1679 )
1680 )
1681
1682 assertSelect = assertSelects
1683
1684 def assertSelectMultiple(self, *tests):
1685 for selector, expected_ids in tests:
1686 self.assertSelect(selector, expected_ids)
1687
1688 def test_one_tag_one(self):
1689 els = self.soup.select('title')
1690 self.assertEqual(len(els), 1)
1691 self.assertEqual(els[0].name, 'title')
1692 self.assertEqual(els[0].contents, ['The title'])
1693
1694 def test_one_tag_many(self):
1695 els = self.soup.select('div')
1696 self.assertEqual(len(els), 4)
1697 for div in els:
1698 self.assertEqual(div.name, 'div')
1699
1700 el = self.soup.select_one('div')
1701 self.assertEqual('main', el['id'])
1702
1703 def test_select_one_returns_none_if_no_match(self):
1704 match = self.soup.select_one('nonexistenttag')
1705 self.assertEqual(None, match)
1706
1707
1708 def test_tag_in_tag_one(self):
1709 self.assertSelects('div div', ['inner', 'data1'])
1711
1712 def test_tag_in_tag_many(self):
1713 for selector in ('html div', 'html body div', 'body div'):
1714 self.assertSelects(selector, ['data1', 'main', 'inner', 'footer'])
1715
1716 def test_tag_no_match(self):
1717 self.assertEqual(len(self.soup.select('del')), 0)
1718
1719 def test_invalid_tag(self):
1720 self.assertRaises(ValueError, self.soup.select, 'tag%t')
1721
1722 def test_select_dashed_tag_ids(self):
1723 self.assertSelects('custom-dashed-tag', ['dash1', 'dash2'])
1724
1725 def test_select_dashed_by_id(self):
1726 dashed = self.soup.select('custom-dashed-tag[id="dash2"]')
1727 self.assertEqual(dashed[0].name, 'custom-dashed-tag')
1728 self.assertEqual(dashed[0]['id'], 'dash2')
1729
1730 def test_dashed_tag_text(self):
1731 self.assertEqual(self.soup.select('body > custom-dashed-tag')[0].text, 'Hello there.')
1732
1733 def test_select_dashed_matches_find_all(self):
1734 self.assertEqual(self.soup.select('custom-dashed-tag'), self.soup.find_all('custom-dashed-tag'))
1735
1736 def test_header_tags(self):
1737 self.assertSelectMultiple(
1738 ('h1', ['header1']),
1739 ('h2', ['header2', 'header3']),
1740 )
1741
1742 def test_class_one(self):
1743 for selector in ('.onep', 'p.onep', 'html p.onep'):
1744 els = self.soup.select(selector)
1745 self.assertEqual(len(els), 1)
1746 self.assertEqual(els[0].name, 'p')
1747 self.assertEqual(els[0]['class'], ['onep'])
1748
1749 def test_class_mismatched_tag(self):
1750 els = self.soup.select('div.onep')
1751 self.assertEqual(len(els), 0)
1752
1753 def test_one_id(self):
1754 for selector in ('div#inner', '#inner', 'div div#inner'):
1755 self.assertSelects(selector, ['inner'])
1756
1757 def test_bad_id(self):
1758 els = self.soup.select('#doesnotexist')
1759 self.assertEqual(len(els), 0)
1760
1761 def test_items_in_id(self):
1762 els = self.soup.select('div#inner p')
1763 self.assertEqual(len(els), 3)
1764 for el in els:
1765 self.assertEqual(el.name, 'p')
1766 self.assertEqual(els[1]['class'], ['onep'])
1767 self.assertFalse(els[0].has_attr('class'))
1768
1769 def test_a_bunch_of_emptys(self):
1770 for selector in ('div#main del', 'div#main div.oops', 'div div#main'):
1771 self.assertEqual(len(self.soup.select(selector)), 0)
1772
1773 def test_multi_class_support(self):
1774 for selector in ('.class1', 'p.class1', '.class2', 'p.class2',
1775 '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'):
1776 self.assertSelects(selector, ['pmulti'])
1777
1778 def test_multi_class_selection(self):
1779 for selector in ('.class1.class3', '.class3.class2',
1780 '.class1.class2.class3'):
1781 self.assertSelects(selector, ['pmulti'])
1782
1783 def test_child_selector(self):
1784 self.assertSelects('.s1 > a', ['s1a1', 's1a2'])
1785 self.assertSelects('.s1 > a span', ['s1a2s1'])
1786
1787 def test_child_selector_id(self):
1788 self.assertSelects('.s1 > a#s1a2 span', ['s1a2s1'])
1789
1790 def test_attribute_equals(self):
1791 self.assertSelectMultiple(
1792 ('p[class="onep"]', ['p1']),
1793 ('p[id="p1"]', ['p1']),
1794 ('[class="onep"]', ['p1']),
1795 ('[id="p1"]', ['p1']),
1796 ('link[rel="stylesheet"]', ['l1']),
1797 ('link[type="text/css"]', ['l1']),
1798 ('link[href="blah.css"]', ['l1']),
1799 ('link[href="no-blah.css"]', []),
1800 ('[rel="stylesheet"]', ['l1']),
1801 ('[type="text/css"]', ['l1']),
1802 ('[href="blah.css"]', ['l1']),
1803 ('[href="no-blah.css"]', []),
1804 ('p[href="no-blah.css"]', []),
1805 ('[href="no-blah.css"]', []),
1806 )
1807
1808 def test_attribute_tilde(self):
1809 self.assertSelectMultiple(
1810 ('p[class~="class1"]', ['pmulti']),
1811 ('p[class~="class2"]', ['pmulti']),
1812 ('p[class~="class3"]', ['pmulti']),
1813 ('[class~="class1"]', ['pmulti']),
1814 ('[class~="class2"]', ['pmulti']),
1815 ('[class~="class3"]', ['pmulti']),
1816 ('a[rel~="friend"]', ['bob']),
1817 ('a[rel~="met"]', ['bob']),
1818 ('[rel~="friend"]', ['bob']),
1819 ('[rel~="met"]', ['bob']),
1820 )
1821
1822 def test_attribute_startswith(self):
1823 self.assertSelectMultiple(
1824 ('[rel^="style"]', ['l1']),
1825 ('link[rel^="style"]', ['l1']),
1826 ('notlink[rel^="notstyle"]', []),
1827 ('[rel^="notstyle"]', []),
1828 ('link[rel^="notstyle"]', []),
1829 ('link[href^="bla"]', ['l1']),
1830 ('a[href^="http://"]', ['bob', 'me']),
1831 ('[href^="http://"]', ['bob', 'me']),
1832 ('[id^="p"]', ['pmulti', 'p1']),
1833 ('[id^="m"]', ['me', 'main']),
1834 ('div[id^="m"]', ['main']),
1835 ('a[id^="m"]', ['me']),
1836 ('div[data-tag^="dashed"]', ['data1'])
1837 )
1838
1839 def test_attribute_endswith(self):
1840 self.assertSelectMultiple(
1841 ('[href$=".css"]', ['l1']),
1842 ('link[href$=".css"]', ['l1']),
1843 ('link[id$="1"]', ['l1']),
1844 ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']),
1845 ('div[id$="1"]', ['data1']),
1846 ('[id$="noending"]', []),
1847 )
1848
1849 def test_attribute_contains(self):
1850 self.assertSelectMultiple(
1851 # From test_attribute_startswith
1852 ('[rel*="style"]', ['l1']),
1853 ('link[rel*="style"]', ['l1']),
1854 ('notlink[rel*="notstyle"]', []),
1855 ('[rel*="notstyle"]', []),
1856 ('link[rel*="notstyle"]', []),
1857 ('link[href*="bla"]', ['l1']),
1858 ('[href*="http://"]', ['bob', 'me']),
1859 ('[id*="p"]', ['pmulti', 'p1']),
1860 ('div[id*="m"]', ['main']),
1861 ('a[id*="m"]', ['me']),
1862 # From test_attribute_endswith
1863 ('[href*=".css"]', ['l1']),
1864 ('link[href*=".css"]', ['l1']),
1865 ('link[id*="1"]', ['l1']),
1866 ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']),
1867 ('div[id*="1"]', ['data1']),
1868 ('[id*="noending"]', []),
1869 # New for this test
1870 ('[href*="."]', ['bob', 'me', 'l1']),
1871 ('a[href*="."]', ['bob', 'me']),
1872 ('link[href*="."]', ['l1']),
1873 ('div[id*="n"]', ['main', 'inner']),
1874 ('div[id*="nn"]', ['inner']),
1875 ('div[data-tag*="edval"]', ['data1'])
1876 )
1877
1878    def test_attribute_exact_or_hyphen(self):
1879 self.assertSelectMultiple(
1880 ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
1881 ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
1882 ('p[lang|="fr"]', ['lang-fr']),
1883 ('p[lang|="gb"]', []),
1884 )
1885
1886 def test_attribute_exists(self):
1887 self.assertSelectMultiple(
1888 ('[rel]', ['l1', 'bob', 'me']),
1889 ('link[rel]', ['l1']),
1890 ('a[rel]', ['bob', 'me']),
1891 ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']),
1892 ('p[class]', ['p1', 'pmulti']),
1893 ('[blah]', []),
1894 ('p[blah]', []),
1895 ('div[data-tag]', ['data1'])
1896 )
1897
1898 def test_unsupported_pseudoclass(self):
1899 self.assertRaises(
1900 NotImplementedError, self.soup.select, "a:no-such-pseudoclass")
1901
1902 self.assertRaises(
1903 NotImplementedError, self.soup.select, "a:nth-of-type(a)")
1904
1905
1906 def test_nth_of_type(self):
1907 # Try to select first paragraph
1908 els = self.soup.select('div#inner p:nth-of-type(1)')
1909 self.assertEqual(len(els), 1)
1910 self.assertEqual(els[0].string, 'Some text')
1911
1912 # Try to select third paragraph
1913 els = self.soup.select('div#inner p:nth-of-type(3)')
1914 self.assertEqual(len(els), 1)
1915 self.assertEqual(els[0].string, 'Another')
1916
1917 # Try to select (non-existent!) fourth paragraph
1918 els = self.soup.select('div#inner p:nth-of-type(4)')
1919 self.assertEqual(len(els), 0)
1920
1921 # Pass in an invalid value.
1922 self.assertRaises(
1923 ValueError, self.soup.select, 'div p:nth-of-type(0)')
1924
1925 def test_nth_of_type_direct_descendant(self):
1926 els = self.soup.select('div#inner > p:nth-of-type(1)')
1927 self.assertEqual(len(els), 1)
1928 self.assertEqual(els[0].string, 'Some text')
1929
1930 def test_id_child_selector_nth_of_type(self):
1931 self.assertSelects('#inner > p:nth-of-type(2)', ['p1'])
1932
1933 def test_select_on_element(self):
1934 # Other tests operate on the tree; this operates on an element
1935 # within the tree.
1936 inner = self.soup.find("div", id="main")
1937 selected = inner.select("div")
1938 # The <div id="inner"> tag was selected. The <div id="footer">
1939 # tag was not.
1940 self.assertSelectsIDs(selected, ['inner', 'data1'])
1941
1942 def test_overspecified_child_id(self):
1943 self.assertSelects(".fancy #inner", ['inner'])
1944 self.assertSelects(".normal #inner", [])
1945
1946 def test_adjacent_sibling_selector(self):
1947 self.assertSelects('#p1 + h2', ['header2'])
1948 self.assertSelects('#p1 + h2 + p', ['pmulti'])
1949 self.assertSelects('#p1 + #header2 + .class1', ['pmulti'])
1950 self.assertEqual([], self.soup.select('#p1 + p'))
1951
1952 def test_general_sibling_selector(self):
1953 self.assertSelects('#p1 ~ h2', ['header2', 'header3'])
1954 self.assertSelects('#p1 ~ #header2', ['header2'])
1955 self.assertSelects('#p1 ~ h2 + a', ['me'])
1956 self.assertSelects('#p1 ~ h2 + [rel="me"]', ['me'])
1957 self.assertEqual([], self.soup.select('#inner ~ h2'))
1958
1959 def test_dangling_combinator(self):
1960 self.assertRaises(ValueError, self.soup.select, 'h1 >')
1961
1962 def test_sibling_combinator_wont_select_same_tag_twice(self):
1963 self.assertSelects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr'])
1964
1965 # Test the selector grouping operator (the comma)
1966 def test_multiple_select(self):
1967 self.assertSelects('x, y', ['xid', 'yid'])
1968
1969 def test_multiple_select_with_no_space(self):
1970 self.assertSelects('x,y', ['xid', 'yid'])
1971
1972 def test_multiple_select_with_more_space(self):
1973 self.assertSelects('x, y', ['xid', 'yid'])
1974
1975 def test_multiple_select_duplicated(self):
1976 self.assertSelects('x, x', ['xid'])
1977
1978 def test_multiple_select_sibling(self):
1979 self.assertSelects('x, y ~ p[lang=fr]', ['xid', 'lang-fr'])
1980
1981 def test_multiple_select_tag_and_direct_descendant(self):
1982 self.assertSelects('x, y > z', ['xid', 'zidb'])
1983
1984 def test_multiple_select_direct_descendant_and_tags(self):
1985 self.assertSelects('div > x, y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac'])
1986
1987 def test_multiple_select_indirect_descendant(self):
1988 self.assertSelects('div x,y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac'])
1989
1990 def test_invalid_multiple_select(self):
1991 self.assertRaises(ValueError, self.soup.select, ',x, y')
1992 self.assertRaises(ValueError, self.soup.select, 'x,,y')
1993
1994 def test_multiple_select_attrs(self):
1995 self.assertSelects('p[lang=en], p[lang=en-gb]', ['lang-en', 'lang-en-gb'])
1996
1997 def test_multiple_select_ids(self):
1998 self.assertSelects('x, y > z[id=zida], z[id=zidab], z[id=zidb]', ['xid', 'zidb', 'zidab'])
1999
2000 def test_multiple_select_nested(self):
2001 self.assertSelects('body > div > x, y > z', ['xid', 'zidb'])
2002
2003
2004
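The selector tests above double as a compact reference for the select() API. As a minimal standalone sketch of the calls they exercise (the HTML fixture and ids below are illustrative, not the test document used above):

    from bs4 import BeautifulSoup

    html = '<div id="main"><p class="onep" id="p1">Some text</p></div>'
    soup = BeautifulSoup(html, 'html.parser')

    # Descendant, attribute-equality and select_one lookups, as tested above
    assert [el['id'] for el in soup.select('div p')] == ['p1']
    assert soup.select('p[class="onep"]')[0]['id'] == 'p1'
    assert soup.select_one('#doesnotexist') is None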
diff --git a/bitbake/lib/codegen.py b/bitbake/lib/codegen.py
index 62a6748c47..018b283177 100644
--- a/bitbake/lib/codegen.py
+++ b/bitbake/lib/codegen.py
@@ -392,14 +392,8 @@ class SourceGenerator(NodeVisitor):
392 def visit_Name(self, node): 392 def visit_Name(self, node):
393 self.write(node.id) 393 self.write(node.id)
394 394
395 def visit_Str(self, node): 395 def visit_Constant(self, node):
396 self.write(repr(node.s)) 396 self.write(repr(node.value))
397
398 def visit_Bytes(self, node):
399 self.write(repr(node.s))
400
401 def visit_Num(self, node):
402 self.write(repr(node.n))
403 397
404 def visit_Tuple(self, node): 398 def visit_Tuple(self, node):
405 self.write('(') 399 self.write('(')
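The hunk above tracks Python's AST consolidation: since Python 3.8, string, bytes and numeric literals all parse as ast.Constant with the literal stored in .value, so a single visit_Constant handler replaces the separate visit_Str/visit_Bytes/visit_Num visitors (the old node classes were deprecated in 3.8 and removed in 3.12). A quick illustration:

    import ast

    # All three literal kinds produce the same node type on Python 3.8+
    for src in ("'text'", "b'bytes'", "42"):
        node = ast.parse(src, mode="eval").body
        assert isinstance(node, ast.Constant)
        print(type(node.value).__name__, repr(node.value))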
diff --git a/bitbake/lib/hashserv/__init__.py b/bitbake/lib/hashserv/__init__.py
index 5f2e101e52..ac891e0174 100644
--- a/bitbake/lib/hashserv/__init__.py
+++ b/bitbake/lib/hashserv/__init__.py
@@ -5,129 +5,104 @@
5 5
6import asyncio 6import asyncio
7from contextlib import closing 7from contextlib import closing
8import re
9import sqlite3
10import itertools 8import itertools
11import json 9import json
10from collections import namedtuple
11from urllib.parse import urlparse
12from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS
12 13
13UNIX_PREFIX = "unix://" 14User = namedtuple("User", ("username", "permissions"))
14
15ADDR_TYPE_UNIX = 0
16ADDR_TYPE_TCP = 1
17
18# The Python async server defaults to a 64K receive buffer, so we hardcode our
19# maximum chunk size. It would be better if the client and server reported to
20# each other what the maximum chunk sizes were, but that will slow down the
21# connection setup with a round trip delay so I'd rather not do that unless it
22# is necessary
23DEFAULT_MAX_CHUNK = 32 * 1024
24
25TABLE_DEFINITION = (
26 ("method", "TEXT NOT NULL"),
27 ("outhash", "TEXT NOT NULL"),
28 ("taskhash", "TEXT NOT NULL"),
29 ("unihash", "TEXT NOT NULL"),
30 ("created", "DATETIME"),
31
32 # Optional fields
33 ("owner", "TEXT"),
34 ("PN", "TEXT"),
35 ("PV", "TEXT"),
36 ("PR", "TEXT"),
37 ("task", "TEXT"),
38 ("outhash_siginfo", "TEXT"),
39)
40
41TABLE_COLUMNS = tuple(name for name, _ in TABLE_DEFINITION)
42
43def setup_database(database, sync=True):
44 db = sqlite3.connect(database)
45 db.row_factory = sqlite3.Row
46
47 with closing(db.cursor()) as cursor:
48 cursor.execute('''
49 CREATE TABLE IF NOT EXISTS tasks_v2 (
50 id INTEGER PRIMARY KEY AUTOINCREMENT,
51 %s
52 UNIQUE(method, outhash, taskhash)
53 )
54 ''' % " ".join("%s %s," % (name, typ) for name, typ in TABLE_DEFINITION))
55 cursor.execute('PRAGMA journal_mode = WAL')
56 cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF'))
57
58 # Drop old indexes
59 cursor.execute('DROP INDEX IF EXISTS taskhash_lookup')
60 cursor.execute('DROP INDEX IF EXISTS outhash_lookup')
61
62 # Create new indexes
63 cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v2 ON tasks_v2 (method, taskhash, created)')
64 cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v2 ON tasks_v2 (method, outhash)')
65
66 return db
67
68
69def parse_address(addr):
70 if addr.startswith(UNIX_PREFIX):
71 return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX):],))
72 else:
73 m = re.match(r'\[(?P<host>[^\]]*)\]:(?P<port>\d+)$', addr)
74 if m is not None:
75 host = m.group('host')
76 port = m.group('port')
77 else:
78 host, port = addr.split(':')
79 15
80 return (ADDR_TYPE_TCP, (host, int(port)))
81 16
17def create_server(
18 addr,
19 dbname,
20 *,
21 sync=True,
22 upstream=None,
23 read_only=False,
24 db_username=None,
25 db_password=None,
26 anon_perms=None,
27 admin_username=None,
28 admin_password=None,
29 reuseport=False,
30):
31 def sqlite_engine():
32 from .sqlite import DatabaseEngine
82 33
83def chunkify(msg, max_chunk): 34 return DatabaseEngine(dbname, sync)
84 if len(msg) < max_chunk - 1:
85 yield ''.join((msg, "\n"))
86 else:
87 yield ''.join((json.dumps({
88 'chunk-stream': None
89 }), "\n"))
90 35
91 args = [iter(msg)] * (max_chunk - 1) 36 def sqlalchemy_engine():
92 for m in map(''.join, itertools.zip_longest(*args, fillvalue='')): 37 from .sqlalchemy import DatabaseEngine
93 yield ''.join(itertools.chain(m, "\n"))
94 yield "\n"
95 38
39 return DatabaseEngine(dbname, db_username, db_password)
96 40
97def create_server(addr, dbname, *, sync=True, upstream=None, read_only=False):
98 from . import server 41 from . import server
99 db = setup_database(dbname, sync=sync) 42
100 s = server.Server(db, upstream=upstream, read_only=read_only) 43 if "://" in dbname:
44 db_engine = sqlalchemy_engine()
45 else:
46 db_engine = sqlite_engine()
47
48 if anon_perms is None:
49 anon_perms = server.DEFAULT_ANON_PERMS
50
51 s = server.Server(
52 db_engine,
53 upstream=upstream,
54 read_only=read_only,
55 anon_perms=anon_perms,
56 admin_username=admin_username,
57 admin_password=admin_password,
58 )
101 59
102 (typ, a) = parse_address(addr) 60 (typ, a) = parse_address(addr)
103 if typ == ADDR_TYPE_UNIX: 61 if typ == ADDR_TYPE_UNIX:
104 s.start_unix_server(*a) 62 s.start_unix_server(*a)
63 elif typ == ADDR_TYPE_WS:
64 url = urlparse(a[0])
65 s.start_websocket_server(url.hostname, url.port, reuseport=reuseport)
105 else: 66 else:
106 s.start_tcp_server(*a) 67 s.start_tcp_server(*a, reuseport=reuseport)
107 68
108 return s 69 return s
109 70
110 71
111def create_client(addr): 72def create_client(addr, username=None, password=None):
112 from . import client 73 from . import client
113 c = client.Client()
114 74
115 (typ, a) = parse_address(addr) 75 c = client.Client(username, password)
116 if typ == ADDR_TYPE_UNIX: 76
117 c.connect_unix(*a) 77 try:
118 else: 78 (typ, a) = parse_address(addr)
119 c.connect_tcp(*a) 79 if typ == ADDR_TYPE_UNIX:
80 c.connect_unix(*a)
81 elif typ == ADDR_TYPE_WS:
82 c.connect_websocket(*a)
83 else:
84 c.connect_tcp(*a)
85 return c
86 except Exception as e:
87 c.close()
88 raise e
120 89
121 return c
122 90
123async def create_async_client(addr): 91async def create_async_client(addr, username=None, password=None):
124 from . import client 92 from . import client
125 c = client.AsyncClient()
126 93
127 (typ, a) = parse_address(addr) 94 c = client.AsyncClient(username, password)
128 if typ == ADDR_TYPE_UNIX: 95
129 await c.connect_unix(*a) 96 try:
130 else: 97 (typ, a) = parse_address(addr)
131 await c.connect_tcp(*a) 98 if typ == ADDR_TYPE_UNIX:
99 await c.connect_unix(*a)
100 elif typ == ADDR_TYPE_WS:
101 await c.connect_websocket(*a)
102 else:
103 await c.connect_tcp(*a)
132 104
133 return c 105 return c
106 except Exception as e:
107 await c.close()
108 raise e
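The reworked factories above take optional credentials and pick the transport from the address scheme via the shared parse_address(). A sketch of typical calls, assuming bitbake's lib/ directory is on sys.path; the addresses, username and password are illustrative:

    import hashserv

    # "unix://" selects a Unix domain socket, "ws://"/"wss://" a websocket,
    # and anything else is treated as a host:port TCP address.
    c1 = hashserv.create_client("unix:///var/run/hashserv.sock")
    c2 = hashserv.create_client("localhost:8686")
    c3 = hashserv.create_client("wss://hashserv.example.com:443",
                                username="alice", password="secret-token")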
diff --git a/bitbake/lib/hashserv/client.py b/bitbake/lib/hashserv/client.py
index e05c1eb568..8cb18050a6 100644
--- a/bitbake/lib/hashserv/client.py
+++ b/bitbake/lib/hashserv/client.py
@@ -3,231 +3,378 @@
3# SPDX-License-Identifier: GPL-2.0-only 3# SPDX-License-Identifier: GPL-2.0-only
4# 4#
5 5
6import asyncio
7import json
8import logging 6import logging
9import socket 7import socket
10import os 8import asyncio
11from . import chunkify, DEFAULT_MAX_CHUNK, create_async_client 9import bb.asyncrpc
10import json
11from . import create_async_client
12 12
13 13
14logger = logging.getLogger("hashserv.client") 14logger = logging.getLogger("hashserv.client")
15 15
16 16
17class HashConnectionError(Exception): 17class Batch(object):
18 pass 18 def __init__(self):
19 self.done = False
20 self.cond = asyncio.Condition()
21 self.pending = []
22 self.results = []
23 self.sent_count = 0
19 24
25 async def recv(self, socket):
26 while True:
27 async with self.cond:
28 await self.cond.wait_for(lambda: self.pending or self.done)
20 29
21class AsyncClient(object): 30 if not self.pending:
22 MODE_NORMAL = 0 31 if self.done:
23 MODE_GET_STREAM = 1 32 return
33 continue
24 34
25 def __init__(self): 35 r = await socket.recv()
26 self.reader = None 36 self.results.append(r)
27 self.writer = None
28 self.mode = self.MODE_NORMAL
29 self.max_chunk = DEFAULT_MAX_CHUNK
30 37
31 async def connect_tcp(self, address, port): 38 async with self.cond:
32 async def connect_sock(): 39 self.pending.pop(0)
33 return await asyncio.open_connection(address, port)
34 40
35 self._connect_sock = connect_sock 41 async def send(self, socket, msgs):
42 try:
43 # In the event of a restart due to a reconnect, all in-flight
44             # messages need to be resent first to keep the result count in sync
45 for m in self.pending:
46 await socket.send(m)
36 47
37 async def connect_unix(self, path): 48 for m in msgs:
38 async def connect_sock(): 49 # Add the message to the pending list before attempting to send
39 return await asyncio.open_unix_connection(path) 50 # it so that if the send fails it will be retried
51 async with self.cond:
52 self.pending.append(m)
53 self.cond.notify()
54 self.sent_count += 1
40 55
41 self._connect_sock = connect_sock 56 await socket.send(m)
42 57
43 async def connect(self): 58 finally:
44 if self.reader is None or self.writer is None: 59 async with self.cond:
45 (self.reader, self.writer) = await self._connect_sock() 60 self.done = True
61 self.cond.notify()
62
63 async def process(self, socket, msgs):
64 await asyncio.gather(
65 self.recv(socket),
66 self.send(socket, msgs),
67 )
46 68
47 self.writer.write("OEHASHEQUIV 1.1\n\n".encode("utf-8")) 69 if len(self.results) != self.sent_count:
48 await self.writer.drain() 70 raise ValueError(
71                 f"Got result count {len(self.results)}, expected {self.sent_count}"
72 )
49 73
50 cur_mode = self.mode 74 return self.results
51 self.mode = self.MODE_NORMAL
52 await self._set_mode(cur_mode)
53 75
54 async def close(self):
55 self.reader = None
56 76
57 if self.writer is not None: 77class AsyncClient(bb.asyncrpc.AsyncClient):
58 self.writer.close() 78 MODE_NORMAL = 0
59 self.writer = None 79 MODE_GET_STREAM = 1
80 MODE_EXIST_STREAM = 2
81 MODE_MARK_STREAM = 3
60 82
61 async def _send_wrapper(self, proc): 83 def __init__(self, username=None, password=None):
62 count = 0 84 super().__init__("OEHASHEQUIV", "1.1", logger)
63 while True: 85 self.mode = self.MODE_NORMAL
64 try: 86 self.username = username
65 await self.connect() 87 self.password = password
66 return await proc() 88 self.saved_become_user = None
67 except (
68 OSError,
69 HashConnectionError,
70 json.JSONDecodeError,
71 UnicodeDecodeError,
72 ) as e:
73 logger.warning("Error talking to server: %s" % e)
74 if count >= 3:
75 if not isinstance(e, HashConnectionError):
76 raise HashConnectionError(str(e))
77 raise e
78 await self.close()
79 count += 1
80
81 async def send_message(self, msg):
82 async def get_line():
83 line = await self.reader.readline()
84 if not line:
85 raise HashConnectionError("Connection closed")
86
87 line = line.decode("utf-8")
88
89 if not line.endswith("\n"):
90 raise HashConnectionError("Bad message %r" % message)
91
92 return line
93 89
94 async def proc(): 90 async def setup_connection(self):
95 for c in chunkify(json.dumps(msg), self.max_chunk): 91 await super().setup_connection()
96 self.writer.write(c.encode("utf-8")) 92 self.mode = self.MODE_NORMAL
97 await self.writer.drain() 93 if self.username:
94 # Save off become user temporarily because auth() resets it
95 become = self.saved_become_user
96 await self.auth(self.username, self.password)
98 97
99 l = await get_line() 98 if become:
99 await self.become_user(become)
100 100
101 m = json.loads(l) 101 async def send_stream_batch(self, mode, msgs):
102 if m and "chunk-stream" in m: 102 """
103 lines = [] 103 Does a "batch" process of stream messages. This sends the query
104 while True: 104 messages as fast as possible, and simultaneously attempts to read the
105 l = (await get_line()).rstrip("\n") 105 messages back. This helps to mitigate the effects of latency to the
106                 if not l: 106         hash equivalence server by allowing multiple queries to be "in-flight"
107 break 107 at once
108 lines.append(l)
109 108
110 m = json.loads("".join(lines)) 109 The implementation does more complicated tracking using a count of sent
110         messages so that `msgs` can be a generator (i.e. its length is
111         unknown).
111 112
112 return m 113 """
113 114
114 return await self._send_wrapper(proc) 115 b = Batch()
115 116
116 async def send_stream(self, msg):
117 async def proc(): 117 async def proc():
118 self.writer.write(("%s\n" % msg).encode("utf-8")) 118 nonlocal b
119 await self.writer.drain() 119
120 l = await self.reader.readline() 120 await self._set_mode(mode)
121 if not l: 121 return await b.process(self.socket, msgs)
122 raise HashConnectionError("Connection closed")
123 return l.decode("utf-8").rstrip()
124 122
125 return await self._send_wrapper(proc) 123 return await self._send_wrapper(proc)
126 124
125 async def invoke(self, *args, skip_mode=False, **kwargs):
126 # It's OK if connection errors cause a failure here, because the mode
127 # is also reset to normal on a new connection
128 if not skip_mode:
129 await self._set_mode(self.MODE_NORMAL)
130 return await super().invoke(*args, **kwargs)
131
127 async def _set_mode(self, new_mode): 132 async def _set_mode(self, new_mode):
128 if new_mode == self.MODE_NORMAL and self.mode == self.MODE_GET_STREAM: 133 async def stream_to_normal():
129 r = await self.send_stream("END") 134 # Check if already in normal mode (e.g. due to a connection reset)
135 if self.mode == self.MODE_NORMAL:
136 return "ok"
137 await self.socket.send("END")
138 return await self.socket.recv()
139
140 async def normal_to_stream(command):
141 r = await self.invoke({command: None}, skip_mode=True)
130 if r != "ok": 142 if r != "ok":
131 raise HashConnectionError("Bad response from server %r" % r) 143 self.check_invoke_error(r)
132 elif new_mode == self.MODE_GET_STREAM and self.mode == self.MODE_NORMAL: 144 raise ConnectionError(
133 r = await self.send_message({"get-stream": None}) 145 f"Unable to transition to stream mode: Bad response from server {r!r}"
146 )
147 self.logger.debug("Mode is now %s", command)
148
149 if new_mode == self.mode:
150 return
151
152 self.logger.debug("Transitioning mode %s -> %s", self.mode, new_mode)
153
154 # Always transition to normal mode before switching to any other mode
155 if self.mode != self.MODE_NORMAL:
156 r = await self._send_wrapper(stream_to_normal)
134 if r != "ok": 157 if r != "ok":
135 raise HashConnectionError("Bad response from server %r" % r) 158 self.check_invoke_error(r)
136 elif new_mode != self.mode: 159 raise ConnectionError(
137 raise Exception( 160 f"Unable to transition to normal mode: Bad response from server {r!r}"
138 "Undefined mode transition %r -> %r" % (self.mode, new_mode) 161 )
139 ) 162 self.logger.debug("Mode is now normal")
163
164 if new_mode == self.MODE_GET_STREAM:
165 await normal_to_stream("get-stream")
166 elif new_mode == self.MODE_EXIST_STREAM:
167 await normal_to_stream("exists-stream")
168 elif new_mode == self.MODE_MARK_STREAM:
169 await normal_to_stream("gc-mark-stream")
170 elif new_mode != self.MODE_NORMAL:
171             raise Exception(f"Undefined mode transition {self.mode!r} -> {new_mode!r}")
140 172
141 self.mode = new_mode 173 self.mode = new_mode
142 174
143 async def get_unihash(self, method, taskhash): 175 async def get_unihash(self, method, taskhash):
144 await self._set_mode(self.MODE_GET_STREAM) 176 r = await self.get_unihash_batch([(method, taskhash)])
145 r = await self.send_stream("%s %s" % (method, taskhash)) 177 return r[0]
146 if not r: 178
147 return None 179 async def get_unihash_batch(self, args):
148 return r 180 result = await self.send_stream_batch(
181 self.MODE_GET_STREAM,
182 (f"{method} {taskhash}" for method, taskhash in args),
183 )
184 return [r if r else None for r in result]
149 185
150 async def report_unihash(self, taskhash, method, outhash, unihash, extra={}): 186 async def report_unihash(self, taskhash, method, outhash, unihash, extra={}):
151 await self._set_mode(self.MODE_NORMAL)
152 m = extra.copy() 187 m = extra.copy()
153 m["taskhash"] = taskhash 188 m["taskhash"] = taskhash
154 m["method"] = method 189 m["method"] = method
155 m["outhash"] = outhash 190 m["outhash"] = outhash
156 m["unihash"] = unihash 191 m["unihash"] = unihash
157 return await self.send_message({"report": m}) 192 return await self.invoke({"report": m})
158 193
159 async def report_unihash_equiv(self, taskhash, method, unihash, extra={}): 194 async def report_unihash_equiv(self, taskhash, method, unihash, extra={}):
160 await self._set_mode(self.MODE_NORMAL)
161 m = extra.copy() 195 m = extra.copy()
162 m["taskhash"] = taskhash 196 m["taskhash"] = taskhash
163 m["method"] = method 197 m["method"] = method
164 m["unihash"] = unihash 198 m["unihash"] = unihash
165 return await self.send_message({"report-equiv": m}) 199 return await self.invoke({"report-equiv": m})
166 200
167 async def get_taskhash(self, method, taskhash, all_properties=False): 201 async def get_taskhash(self, method, taskhash, all_properties=False):
168 await self._set_mode(self.MODE_NORMAL) 202 return await self.invoke(
169 return await self.send_message(
170 {"get": {"taskhash": taskhash, "method": method, "all": all_properties}} 203 {"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
171 ) 204 )
172 205
173 async def get_outhash(self, method, outhash, taskhash): 206 async def unihash_exists(self, unihash):
174 await self._set_mode(self.MODE_NORMAL) 207 r = await self.unihash_exists_batch([unihash])
175 return await self.send_message( 208 return r[0]
176 {"get-outhash": {"outhash": outhash, "taskhash": taskhash, "method": method}} 209
210 async def unihash_exists_batch(self, unihashes):
211 result = await self.send_stream_batch(self.MODE_EXIST_STREAM, unihashes)
212 return [r == "true" for r in result]
213
214 async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
215 return await self.invoke(
216 {
217 "get-outhash": {
218 "outhash": outhash,
219 "taskhash": taskhash,
220 "method": method,
221 "with_unihash": with_unihash,
222 }
223 }
177 ) 224 )
178 225
179 async def get_stats(self): 226 async def get_stats(self):
180 await self._set_mode(self.MODE_NORMAL) 227 return await self.invoke({"get-stats": None})
181 return await self.send_message({"get-stats": None})
182 228
183 async def reset_stats(self): 229 async def reset_stats(self):
184 await self._set_mode(self.MODE_NORMAL) 230 return await self.invoke({"reset-stats": None})
185 return await self.send_message({"reset-stats": None})
186 231
187 async def backfill_wait(self): 232 async def backfill_wait(self):
188 await self._set_mode(self.MODE_NORMAL) 233 return (await self.invoke({"backfill-wait": None}))["tasks"]
189 return (await self.send_message({"backfill-wait": None}))["tasks"] 234
235 async def remove(self, where):
236 return await self.invoke({"remove": {"where": where}})
237
238 async def clean_unused(self, max_age):
239 return await self.invoke({"clean-unused": {"max_age_seconds": max_age}})
240
241 async def auth(self, username, token):
242 result = await self.invoke({"auth": {"username": username, "token": token}})
243 self.username = username
244 self.password = token
245 self.saved_become_user = None
246 return result
247
248 async def refresh_token(self, username=None):
249 m = {}
250 if username:
251 m["username"] = username
252 result = await self.invoke({"refresh-token": m})
253 if (
254 self.username
255 and not self.saved_become_user
256 and result["username"] == self.username
257 ):
258 self.password = result["token"]
259 return result
190 260
261 async def set_user_perms(self, username, permissions):
262 return await self.invoke(
263 {"set-user-perms": {"username": username, "permissions": permissions}}
264 )
191 265
192class Client(object): 266 async def get_user(self, username=None):
193 def __init__(self): 267 m = {}
194 self.client = AsyncClient() 268 if username:
195 self.loop = asyncio.new_event_loop() 269 m["username"] = username
270 return await self.invoke({"get-user": m})
271
272 async def get_all_users(self):
273 return (await self.invoke({"get-all-users": {}}))["users"]
274
275 async def new_user(self, username, permissions):
276 return await self.invoke(
277 {"new-user": {"username": username, "permissions": permissions}}
278 )
279
280 async def delete_user(self, username):
281 return await self.invoke({"delete-user": {"username": username}})
282
283 async def become_user(self, username):
284 result = await self.invoke({"become-user": {"username": username}})
285 if username == self.username:
286 self.saved_become_user = None
287 else:
288 self.saved_become_user = username
289 return result
290
291 async def get_db_usage(self):
292 return (await self.invoke({"get-db-usage": {}}))["usage"]
293
294 async def get_db_query_columns(self):
295 return (await self.invoke({"get-db-query-columns": {}}))["columns"]
296
297 async def gc_status(self):
298 return await self.invoke({"gc-status": {}})
299
300 async def gc_mark(self, mark, where):
301 """
302 Starts a new garbage collection operation identified by "mark". If
303 garbage collection is already in progress with "mark", the collection
304 is continued.
305
306 All unihash entries that match the "where" clause are marked to be
307 kept. In addition, any new entries added to the database after this
308         command will be automatically marked with "mark".
309 """
310 return await self.invoke({"gc-mark": {"mark": mark, "where": where}})
311
312 async def gc_mark_stream(self, mark, rows):
313 """
314 Similar to `gc-mark`, but accepts a list of "where" key-value pair
315 conditions. It utilizes stream mode to mark hashes, which helps reduce
316 the impact of latency when communicating with the hash equivalence
317 server.
318 """
319 def row_to_dict(row):
320 pairs = row.split()
321 return dict(zip(pairs[::2], pairs[1::2]))
322
323 responses = await self.send_stream_batch(
324 self.MODE_MARK_STREAM,
325 (json.dumps({"mark": mark, "where": row_to_dict(row)}) for row in rows),
326 )
327
328 return {"count": sum(int(json.loads(r)["count"]) for r in responses)}
196 329
197 for call in ( 330 async def gc_sweep(self, mark):
331 """
332 Finishes garbage collection for "mark". All unihash entries that have
333 not been marked will be deleted.
334
335 It is recommended to clean unused outhash entries after running this to
336         clean up any dangling outhashes.
337 """
338 return await self.invoke({"gc-sweep": {"mark": mark}})
339
340
341class Client(bb.asyncrpc.Client):
342 def __init__(self, username=None, password=None):
343 self.username = username
344 self.password = password
345
346 super().__init__()
347 self._add_methods(
198 "connect_tcp", 348 "connect_tcp",
199 "close", 349 "connect_websocket",
200 "get_unihash", 350 "get_unihash",
351 "get_unihash_batch",
201 "report_unihash", 352 "report_unihash",
202 "report_unihash_equiv", 353 "report_unihash_equiv",
203 "get_taskhash", 354 "get_taskhash",
355 "unihash_exists",
356 "unihash_exists_batch",
357 "get_outhash",
204 "get_stats", 358 "get_stats",
205 "reset_stats", 359 "reset_stats",
206 "backfill_wait", 360 "backfill_wait",
207 ): 361 "remove",
208 downcall = getattr(self.client, call) 362 "clean_unused",
209 setattr(self, call, self._get_downcall_wrapper(downcall)) 363 "auth",
210 364 "refresh_token",
211 def _get_downcall_wrapper(self, downcall): 365 "set_user_perms",
212 def wrapper(*args, **kwargs): 366 "get_user",
213 return self.loop.run_until_complete(downcall(*args, **kwargs)) 367 "get_all_users",
214 368 "new_user",
215 return wrapper 369 "delete_user",
216 370 "become_user",
217 def connect_unix(self, path): 371 "get_db_usage",
218 # AF_UNIX has path length issues so chdir here to workaround 372 "get_db_query_columns",
219 cwd = os.getcwd() 373 "gc_status",
220 try: 374 "gc_mark",
221 os.chdir(os.path.dirname(path)) 375 "gc_mark_stream",
222 self.loop.run_until_complete(self.client.connect_unix(os.path.basename(path))) 376 "gc_sweep",
223 self.loop.run_until_complete(self.client.connect()) 377 )
224 finally:
225 os.chdir(cwd)
226
227 @property
228 def max_chunk(self):
229 return self.client.max_chunk
230 378
231 @max_chunk.setter 379 def _get_async_client(self):
232 def max_chunk(self, value): 380 return AsyncClient(self.username, self.password)
233 self.client.max_chunk = value
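With the wrapper rebuilt on bb.asyncrpc, callers stay synchronous while the new batch calls pipeline many lookups over one connection instead of paying a round trip per hash. A usage sketch; the host, port, method string and hashes are illustrative:

    from hashserv.client import Client

    client = Client()  # or Client(username, password) for an authenticated server
    client.connect_tcp("localhost", 8686)

    # (method, taskhash) pairs are streamed in MODE_GET_STREAM and the
    # results come back in submission order; misses are returned as None.
    unihashes = client.get_unihash_batch([("example.method", "0123abcd")])
    exists = client.unihash_exists_batch(["0123abcd"])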
diff --git a/bitbake/lib/hashserv/server.py b/bitbake/lib/hashserv/server.py
index a0dc0c170f..58f95c7bcd 100644
--- a/bitbake/lib/hashserv/server.py
+++ b/bitbake/lib/hashserv/server.py
@@ -3,20 +3,52 @@
3# SPDX-License-Identifier: GPL-2.0-only 3# SPDX-License-Identifier: GPL-2.0-only
4# 4#
5 5
6from contextlib import closing, contextmanager 6from datetime import datetime, timedelta
7from datetime import datetime
8import asyncio 7import asyncio
9import json
10import logging 8import logging
11import math 9import math
12import os
13import signal
14import socket
15import sys
16import time 10import time
17from . import chunkify, DEFAULT_MAX_CHUNK, create_async_client, TABLE_COLUMNS 11import os
12import base64
13import json
14import hashlib
15from . import create_async_client
16import bb.asyncrpc
17
18logger = logging.getLogger("hashserv.server")
19
20
21# This permission only exists to match nothing
22NONE_PERM = "@none"
23
24READ_PERM = "@read"
25REPORT_PERM = "@report"
26DB_ADMIN_PERM = "@db-admin"
27USER_ADMIN_PERM = "@user-admin"
28ALL_PERM = "@all"
29
30ALL_PERMISSIONS = {
31 READ_PERM,
32 REPORT_PERM,
33 DB_ADMIN_PERM,
34 USER_ADMIN_PERM,
35 ALL_PERM,
36}
37
38DEFAULT_ANON_PERMS = (
39 READ_PERM,
40 REPORT_PERM,
41 DB_ADMIN_PERM,
42)
43
44TOKEN_ALGORITHM = "sha256"
45
46# 48 bytes of random data will result in 64 characters when base64
47# encoded. This number also ensures that the base64 encoding won't have any
48# trailing '=' characters.
49TOKEN_SIZE = 48
18 50
19logger = logging.getLogger('hashserv.server') 51SALT_SIZE = 8
20 52
21 53
22class Measurement(object): 54class Measurement(object):
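The permission model added above is set-based: a request is allowed when the required permissions, minus those the user (or the anonymous role) holds, leave nothing over, which is exactly the subtraction user_has_permissions() performs in the next hunk. A quick illustration using the constants defined above:

    required = {"@read", "@report"}
    held = {"@read", "@report", "@db-admin"}   # the DEFAULT_ANON_PERMS set

    assert not (required - held)       # nothing missing -> allowed
    assert {"@user-admin"} - held      # missing permission -> denied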
@@ -106,522 +138,773 @@ class Stats(object):
106 return math.sqrt(self.s / (self.num - 1)) 138 return math.sqrt(self.s / (self.num - 1))
107 139
108 def todict(self): 140 def todict(self):
109 return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')} 141 return {
110 142 k: getattr(self, k)
111 143 for k in ("num", "total_time", "max_time", "average", "stdev")
112class ClientError(Exception):
113 pass
114
115class ServerError(Exception):
116 pass
117
118def insert_task(cursor, data, ignore=False):
119 keys = sorted(data.keys())
120 query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % (
121 " OR IGNORE" if ignore else "",
122 ', '.join(keys),
123 ', '.join(':' + k for k in keys))
124 cursor.execute(query, data)
125
126async def copy_from_upstream(client, db, method, taskhash):
127 d = await client.get_taskhash(method, taskhash, True)
128 if d is not None:
129 # Filter out unknown columns
130 d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
131 keys = sorted(d.keys())
132
133 with closing(db.cursor()) as cursor:
134 insert_task(cursor, d)
135 db.commit()
136
137 return d
138
139async def copy_outhash_from_upstream(client, db, method, outhash, taskhash):
140 d = await client.get_outhash(method, outhash, taskhash)
141 if d is not None:
142 # Filter out unknown columns
143 d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
144 keys = sorted(d.keys())
145
146 with closing(db.cursor()) as cursor:
147 insert_task(cursor, d)
148 db.commit()
149
150 return d
151
152class ServerClient(object):
153 FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
154 ALL_QUERY = 'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
155 OUTHASH_QUERY = '''
156 -- Find tasks with a matching outhash (that is, tasks that
157 -- are equivalent)
158 SELECT * FROM tasks_v2 WHERE method=:method AND outhash=:outhash
159
160 -- If there is an exact match on the taskhash, return it.
161 -- Otherwise return the oldest matching outhash of any
162 -- taskhash
163 ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
164 created ASC
165
166 -- Only return one row
167 LIMIT 1
168 '''
169
170 def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
171 self.reader = reader
172 self.writer = writer
173 self.db = db
174 self.request_stats = request_stats
175 self.max_chunk = DEFAULT_MAX_CHUNK
176 self.backfill_queue = backfill_queue
177 self.upstream = upstream
178
179 self.handlers = {
180 'get': self.handle_get,
181 'get-outhash': self.handle_get_outhash,
182 'get-stream': self.handle_get_stream,
183 'get-stats': self.handle_get_stats,
184 'chunk-stream': self.handle_chunk,
185 } 144 }
186 145
187 if not read_only:
188 self.handlers.update({
189 'report': self.handle_report,
190 'report-equiv': self.handle_equivreport,
191 'reset-stats': self.handle_reset_stats,
192 'backfill-wait': self.handle_backfill_wait,
193 })
194 146
195 async def process_requests(self): 147token_refresh_semaphore = asyncio.Lock()
196 if self.upstream is not None:
197 self.upstream_client = await create_async_client(self.upstream)
198 else:
199 self.upstream_client = None
200 148
201 try:
202 149
150async def new_token():
151 # Prevent malicious users from using this API to deduce the entropy
152 # pool on the server and thus be able to guess a token. *All* token
153 # refresh requests lock the same global semaphore and then sleep for a
154    # short time. This effectively rate limits the total number of requests
155    # that can be made across all clients to 10/second, which should be enough
156    # since you have to be an authenticated user to make the request in the
157    # first place.
158 async with token_refresh_semaphore:
159 await asyncio.sleep(0.1)
160 raw = os.getrandom(TOKEN_SIZE, os.GRND_NONBLOCK)
203 161
204 self.addr = self.writer.get_extra_info('peername') 162 return base64.b64encode(raw, b"._").decode("utf-8")
205 logger.debug('Client %r connected' % (self.addr,))
206 163
207 # Read protocol and version
208 protocol = await self.reader.readline()
209 if protocol is None:
210 return
211 164
212 (proto_name, proto_version) = protocol.decode('utf-8').rstrip().split() 165def new_salt():
213 if proto_name != 'OEHASHEQUIV': 166 return os.getrandom(SALT_SIZE, os.GRND_NONBLOCK).hex()
214 return
215 167
216 proto_version = tuple(int(v) for v in proto_version.split('.'))
217 if proto_version < (1, 0) or proto_version > (1, 1):
218 return
219 168
220 # Read headers. Currently, no headers are implemented, so look for 169def hash_token(algo, salt, token):
221 # an empty line to signal the end of the headers 170 h = hashlib.new(algo)
222 while True: 171 h.update(salt.encode("utf-8"))
223 line = await self.reader.readline() 172 h.update(token.encode("utf-8"))
224 if line is None: 173 return ":".join([algo, salt, h.hexdigest()])
225 return
226 174
227 line = line.decode('utf-8').rstrip()
228 if not line:
229 break
230 175
231 # Handle messages 176def permissions(*permissions, allow_anon=True, allow_self_service=False):
232 while True: 177 """
233 d = await self.read_message() 178 Function decorator that can be used to decorate an RPC function call and
234             if d is None: 179     check that the current user's permissions match the required permissions.
235 break
236 await self.dispatch_message(d)
237 await self.writer.drain()
238 except ClientError as e:
239 logger.error(str(e))
240 finally:
241 if self.upstream_client is not None:
242 await self.upstream_client.close()
243 180
244 self.writer.close() 181 If allow_anon is True, the user will also be allowed to make the RPC call
182 if the anonymous user permissions match the permissions.
245 183
246 async def dispatch_message(self, msg): 184 If allow_self_service is True, and the "username" property in the request
247 for k in self.handlers.keys(): 185 is the currently logged in user, or not specified, the user will also be
248 if k in msg: 186 allowed to make the request. This allows users to access normal privileged
249 logger.debug('Handling %s' % k) 187 API, as long as they are only modifying their own user properties (e.g.
250 if 'stream' in k: 188 users can be allowed to reset their own token without @user-admin
251 await self.handlers[k](msg[k]) 189 permissions, but not the token for any other user.
190 """
191
192 def wrapper(func):
193 async def wrap(self, request):
194 if allow_self_service and self.user is not None:
195 username = request.get("username", self.user.username)
196 if username == self.user.username:
197 request["username"] = self.user.username
198 return await func(self, request)
199
200 if not self.user_has_permissions(*permissions, allow_anon=allow_anon):
201 if not self.user:
202 username = "Anonymous user"
203 user_perms = self.server.anon_perms
252 else: 204 else:
253 with self.request_stats.start_sample() as self.request_sample, \ 205 username = self.user.username
254 self.request_sample.measure(): 206 user_perms = self.user.permissions
255 await self.handlers[k](msg[k]) 207
256 return 208 self.logger.info(
209                     "User %s with permissions %r denied from calling %s. Missing permission(s) %r",
210 username,
211 ", ".join(user_perms),
212 func.__name__,
213 ", ".join(permissions),
214 )
215 raise bb.asyncrpc.InvokeError(
216                     f"{username} is not allowed to access permission(s) {', '.join(permissions)}"
217 )
218
219 return await func(self, request)
220
221 return wrap
222
223 return wrapper
224
225
226class ServerClient(bb.asyncrpc.AsyncServerConnection):
227 def __init__(self, socket, server):
228 super().__init__(socket, "OEHASHEQUIV", server.logger)
229 self.server = server
230 self.max_chunk = bb.asyncrpc.DEFAULT_MAX_CHUNK
231 self.user = None
232
233 self.handlers.update(
234 {
235 "get": self.handle_get,
236 "get-outhash": self.handle_get_outhash,
237 "get-stream": self.handle_get_stream,
238 "exists-stream": self.handle_exists_stream,
239 "get-stats": self.handle_get_stats,
240 "get-db-usage": self.handle_get_db_usage,
241 "get-db-query-columns": self.handle_get_db_query_columns,
242 # Not always read-only, but internally checks if the server is
243 # read-only
244 "report": self.handle_report,
245 "auth": self.handle_auth,
246 "get-user": self.handle_get_user,
247 "get-all-users": self.handle_get_all_users,
248 "become-user": self.handle_become_user,
249 }
250 )
257 251
258 raise ClientError("Unrecognized command %r" % msg) 252 if not self.server.read_only:
253 self.handlers.update(
254 {
255 "report-equiv": self.handle_equivreport,
256 "reset-stats": self.handle_reset_stats,
257 "backfill-wait": self.handle_backfill_wait,
258 "remove": self.handle_remove,
259 "gc-mark": self.handle_gc_mark,
260 "gc-mark-stream": self.handle_gc_mark_stream,
261 "gc-sweep": self.handle_gc_sweep,
262 "gc-status": self.handle_gc_status,
263 "clean-unused": self.handle_clean_unused,
264 "refresh-token": self.handle_refresh_token,
265 "set-user-perms": self.handle_set_perms,
266 "new-user": self.handle_new_user,
267 "delete-user": self.handle_delete_user,
268 }
269 )
259 270
260 def write_message(self, msg): 271 def raise_no_user_error(self, username):
261 for c in chunkify(json.dumps(msg), self.max_chunk): 272 raise bb.asyncrpc.InvokeError(f"No user named '{username}' exists")
262 self.writer.write(c.encode('utf-8'))
263 273
264 async def read_message(self): 274 def user_has_permissions(self, *permissions, allow_anon=True):
265 l = await self.reader.readline() 275 permissions = set(permissions)
266 if not l: 276 if allow_anon:
267 return None 277 if ALL_PERM in self.server.anon_perms:
278 return True
268 279
269 try: 280 if not permissions - self.server.anon_perms:
270 message = l.decode('utf-8') 281 return True
271 282
272 if not message.endswith('\n'): 283 if self.user is None:
273 return None 284 return False
274 285
275 return json.loads(message) 286 if ALL_PERM in self.user.permissions:
276 except (json.JSONDecodeError, UnicodeDecodeError) as e: 287 return True
277 logger.error('Bad message from client: %r' % message)
278 raise e
279 288
280 async def handle_chunk(self, request): 289 if not permissions - self.user.permissions:
281 lines = [] 290 return True
282 try:
283 while True:
284 l = await self.reader.readline()
285 l = l.rstrip(b"\n").decode("utf-8")
286 if not l:
287 break
288 lines.append(l)
289 291
290 msg = json.loads(''.join(lines)) 292 return False
291 except (json.JSONDecodeError, UnicodeDecodeError) as e:
292 logger.error('Bad message from client: %r' % message)
293 raise e
294 293
295 if 'chunk-stream' in msg: 294 def validate_proto_version(self):
296 raise ClientError("Nested chunks are not allowed") 295 return self.proto_version > (1, 0) and self.proto_version <= (1, 1)
297 296
298 await self.dispatch_message(msg) 297 async def process_requests(self):
298 async with self.server.db_engine.connect(self.logger) as db:
299 self.db = db
300 if self.server.upstream is not None:
301 self.upstream_client = await create_async_client(self.server.upstream)
302 else:
303 self.upstream_client = None
299 304
300 async def handle_get(self, request): 305 try:
301 method = request['method'] 306 await super().process_requests()
302 taskhash = request['taskhash'] 307 finally:
308 if self.upstream_client is not None:
309 await self.upstream_client.close()
303 310
304 if request.get('all', False): 311 async def dispatch_message(self, msg):
305 row = self.query_equivalent(method, taskhash, self.ALL_QUERY) 312 for k in self.handlers.keys():
306 else: 313 if k in msg:
307 row = self.query_equivalent(method, taskhash, self.FAST_QUERY) 314 self.logger.debug("Handling %s" % k)
315 if "stream" in k:
316 return await self.handlers[k](msg[k])
317 else:
318 with self.server.request_stats.start_sample() as self.request_sample, self.request_sample.measure():
319 return await self.handlers[k](msg[k])
308 320
309 if row is not None: 321 raise bb.asyncrpc.ClientError("Unrecognized command %r" % msg)
310 logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash'])) 322
311 d = {k: row[k] for k in row.keys()} 323 @permissions(READ_PERM)
312 elif self.upstream_client is not None: 324 async def handle_get(self, request):
313 d = await copy_from_upstream(self.upstream_client, self.db, method, taskhash) 325 method = request["method"]
326 taskhash = request["taskhash"]
327 fetch_all = request.get("all", False)
328
329 return await self.get_unihash(method, taskhash, fetch_all)
330
331 async def get_unihash(self, method, taskhash, fetch_all=False):
332 d = None
333
334 if fetch_all:
335 row = await self.db.get_unihash_by_taskhash_full(method, taskhash)
336 if row is not None:
337 d = {k: row[k] for k in row.keys()}
338 elif self.upstream_client is not None:
339 d = await self.upstream_client.get_taskhash(method, taskhash, True)
340 await self.update_unified(d)
314 else: 341 else:
315 d = None 342 row = await self.db.get_equivalent(method, taskhash)
343
344 if row is not None:
345 d = {k: row[k] for k in row.keys()}
346 elif self.upstream_client is not None:
347 d = await self.upstream_client.get_taskhash(method, taskhash)
348 await self.db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
316 349
317 self.write_message(d) 350 return d
318 351
352 @permissions(READ_PERM)
319 async def handle_get_outhash(self, request): 353 async def handle_get_outhash(self, request):
320 with closing(self.db.cursor()) as cursor: 354 method = request["method"]
321 cursor.execute(self.OUTHASH_QUERY, 355 outhash = request["outhash"]
322 {k: request[k] for k in ('method', 'outhash', 'taskhash')}) 356 taskhash = request["taskhash"]
357 with_unihash = request.get("with_unihash", True)
323 358
324 row = cursor.fetchone() 359 return await self.get_outhash(method, outhash, taskhash, with_unihash)
360
361 async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
362 d = None
363 if with_unihash:
364 row = await self.db.get_unihash_by_outhash(method, outhash)
365 else:
366 row = await self.db.get_outhash(method, outhash)
325 367
326 if row is not None: 368 if row is not None:
327 logger.debug('Found equivalent outhash %s -> %s', (row['outhash'], row['unihash']))
328 d = {k: row[k] for k in row.keys()} 369 d = {k: row[k] for k in row.keys()}
329 else: 370 elif self.upstream_client is not None:
330 d = None 371 d = await self.upstream_client.get_outhash(method, outhash, taskhash)
372 await self.update_unified(d)
331 373
332 self.write_message(d) 374 return d
333 375
334 async def handle_get_stream(self, request): 376 async def update_unified(self, data):
335 self.write_message('ok') 377 if data is None:
378 return
379
380 await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
381 await self.db.insert_outhash(data)
382
383 async def _stream_handler(self, handler):
384 await self.socket.send_message("ok")
336 385
337 while True: 386 while True:
338 upstream = None 387 upstream = None
339 388
340 l = await self.reader.readline() 389 l = await self.socket.recv()
341 if not l: 390 if not l:
342 return 391 break
343 392
344 try: 393 try:
345 # This inner loop is very sensitive and must be as fast as 394 # This inner loop is very sensitive and must be as fast as
346 # possible (which is why the request sample is handled manually 395 # possible (which is why the request sample is handled manually
347 # instead of using 'with', and also why logging statements are 396 # instead of using 'with', and also why logging statements are
348 # commented out. 397 # commented out.
349 self.request_sample = self.request_stats.start_sample() 398 self.request_sample = self.server.request_stats.start_sample()
350 request_measure = self.request_sample.measure() 399 request_measure = self.request_sample.measure()
351 request_measure.start() 400 request_measure.start()
352 401
353 l = l.decode('utf-8').rstrip() 402 if l == "END":
354 if l == 'END': 403 break
355 self.writer.write('ok\n'.encode('utf-8'))
356 return
357
358 (method, taskhash) = l.split()
359 #logger.debug('Looking up %s %s' % (method, taskhash))
360 row = self.query_equivalent(method, taskhash, self.FAST_QUERY)
361 if row is not None:
362 msg = ('%s\n' % row['unihash']).encode('utf-8')
363 #logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
364 elif self.upstream_client is not None:
365 upstream = await self.upstream_client.get_unihash(method, taskhash)
366 if upstream:
367 msg = ("%s\n" % upstream).encode("utf-8")
368 else:
369 msg = "\n".encode("utf-8")
370 else:
371 msg = '\n'.encode('utf-8')
372 404
373 self.writer.write(msg) 405 msg = await handler(l)
406 await self.socket.send(msg)
374 finally: 407 finally:
375 request_measure.end() 408 request_measure.end()
376 self.request_sample.end() 409 self.request_sample.end()
377 410
378 await self.writer.drain() 411 await self.socket.send("ok")
412 return self.NO_RESPONSE
379 413
380 # Post to the backfill queue after writing the result to minimize 414 @permissions(READ_PERM)
381 # the turn around time on a request 415 async def handle_get_stream(self, request):
382 if upstream is not None: 416 async def handler(l):
383 await self.backfill_queue.put((method, taskhash)) 417 (method, taskhash) = l.split()
418 # self.logger.debug('Looking up %s %s' % (method, taskhash))
419 row = await self.db.get_equivalent(method, taskhash)
384 420
385 async def handle_report(self, data): 421 if row is not None:
386 with closing(self.db.cursor()) as cursor: 422 # self.logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
387 cursor.execute(self.OUTHASH_QUERY, 423 return row["unihash"]
388 {k: data[k] for k in ('method', 'outhash', 'taskhash')})
389
390 row = cursor.fetchone()
391
392 if row is None and self.upstream_client:
393 # Try upstream
394 row = await copy_outhash_from_upstream(self.upstream_client,
395 self.db,
396 data['method'],
397 data['outhash'],
398 data['taskhash'])
399
400 # If no matching outhash was found, or one *was* found but it
401 # wasn't an exact match on the taskhash, a new entry for this
402 # taskhash should be added
403 if row is None or row['taskhash'] != data['taskhash']:
404 # If a row matching the outhash was found, the unihash for
405 # the new taskhash should be the same as that one.
406 # Otherwise the caller provided unihash is used.
407 unihash = data['unihash']
408 if row is not None:
409 unihash = row['unihash']
410
411 insert_data = {
412 'method': data['method'],
413 'outhash': data['outhash'],
414 'taskhash': data['taskhash'],
415 'unihash': unihash,
416 'created': datetime.now()
417 }
418 424
419 for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'): 425 if self.upstream_client is not None:
420 if k in data: 426 upstream = await self.upstream_client.get_unihash(method, taskhash)
421 insert_data[k] = data[k] 427 if upstream:
-
-            insert_task(cursor, insert_data)
-            self.db.commit()
-
-            logger.info('Adding taskhash %s with unihash %s',
-                        data['taskhash'], unihash)
-
-            d = {
-                'taskhash': data['taskhash'],
-                'method': data['method'],
-                'unihash': unihash
-            }
-        else:
-            d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
-
-        self.write_message(d)
-
-    async def handle_equivreport(self, data):
-        with closing(self.db.cursor()) as cursor:
-            insert_data = {
-                'method': data['method'],
-                'outhash': "",
-                'taskhash': data['taskhash'],
-                'unihash': data['unihash'],
-                'created': datetime.now()
-            }
-
-            for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
-                if k in data:
-                    insert_data[k] = data[k]
-
-            insert_task(cursor, insert_data, ignore=True)
-            self.db.commit()
-
-            # Fetch the unihash that will be reported for the taskhash. If the
-            # unihash matches, it means this row was inserted (or the mapping
-            # was already valid)
-            row = self.query_equivalent(data['method'], data['taskhash'], self.FAST_QUERY)
-
-            if row['unihash'] == data['unihash']:
-                logger.info('Adding taskhash equivalence for %s with unihash %s',
-                            data['taskhash'], row['unihash'])
-
-            d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
-
-        self.write_message(d)
-
-
-    async def handle_get_stats(self, request):
-        d = {
-            'requests': self.request_stats.todict(),
-        }
-
-        self.write_message(d)
-
-    async def handle_reset_stats(self, request):
-        d = {
-            'requests': self.request_stats.todict(),
-        }
-
-        self.request_stats.reset()
-        self.write_message(d)
-
-    async def handle_backfill_wait(self, request):
-        d = {
-            'tasks': self.backfill_queue.qsize(),
-        }
-        await self.backfill_queue.join()
-        self.write_message(d)
-
-    def query_equivalent(self, method, taskhash, query):
-        # This is part of the inner loop and must be as fast as possible
-        try:
-            cursor = self.db.cursor()
-            cursor.execute(query, {'method': method, 'taskhash': taskhash})
-            return cursor.fetchone()
-        except:
-            cursor.close()
-
-
-class Server(object):
-    def __init__(self, db, loop=None, upstream=None, read_only=False):
-        if upstream and read_only:
-            raise ServerError("Read-only hashserv cannot pull from an upstream server")
-
-        self.request_stats = Stats()
-        self.db = db
-
-        if loop is None:
-            self.loop = asyncio.new_event_loop()
-            self.close_loop = True
-        else:
-            self.loop = loop
-            self.close_loop = False
-
-        self.upstream = upstream
-        self.read_only = read_only
-
-        self._cleanup_socket = None
-
-    def start_tcp_server(self, host, port):
-        self.server = self.loop.run_until_complete(
-            asyncio.start_server(self.handle_client, host, port, loop=self.loop)
-        )
-
-        for s in self.server.sockets:
-            logger.info('Listening on %r' % (s.getsockname(),))
-            # Newer python does this automatically. Do it manually here for
-            # maximum compatibility
-            s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
-            s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
-
-        name = self.server.sockets[0].getsockname()
-        if self.server.sockets[0].family == socket.AF_INET6:
-            self.address = "[%s]:%d" % (name[0], name[1])
-        else:
-            self.address = "%s:%d" % (name[0], name[1])
-
-    def start_unix_server(self, path):
-        def cleanup():
-            os.unlink(path)
-
-        cwd = os.getcwd()
-        try:
-            # Work around path length limits in AF_UNIX
-            os.chdir(os.path.dirname(path))
-            self.server = self.loop.run_until_complete(
-                asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop)
-            )
-        finally:
-            os.chdir(cwd)
-
-        logger.info('Listening on %r' % path)
-
-        self._cleanup_socket = cleanup
-        self.address = "unix://%s" % os.path.abspath(path)
-
-    async def handle_client(self, reader, writer):
-        # writer.transport.set_write_buffer_limits(0)
-        try:
-            client = ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream, self.read_only)
-            await client.process_requests()
-        except Exception as e:
-            import traceback
-            logger.error('Error from client: %s' % str(e), exc_info=True)
-            traceback.print_exc()
-            writer.close()
-        logger.info('Client disconnected')
-
-    @contextmanager
-    def _backfill_worker(self):
-        async def backfill_worker_task():
-            client = await create_async_client(self.upstream)
-            try:
-                while True:
-                    item = await self.backfill_queue.get()
-                    if item is None:
-                        self.backfill_queue.task_done()
-                        break
-                    method, taskhash = item
-                    await copy_from_upstream(client, self.db, method, taskhash)
-                    self.backfill_queue.task_done()
-            finally:
-                await client.close()
-
-        async def join_worker(worker):
-            await self.backfill_queue.put(None)
-            await worker
-
-        if self.upstream is not None:
-            worker = asyncio.ensure_future(backfill_worker_task())
-            try:
-                yield
-            finally:
-                self.loop.run_until_complete(join_worker(worker))
-        else:
-            yield
-
-    def serve_forever(self):
-        def signal_handler():
-            self.loop.stop()
-
-        asyncio.set_event_loop(self.loop)
-        try:
-            self.backfill_queue = asyncio.Queue()
-
-            self.loop.add_signal_handler(signal.SIGTERM, signal_handler)
-
-            with self._backfill_worker():
-                try:
-                    self.loop.run_forever()
-                except KeyboardInterrupt:
-                    pass
-
-                self.server.close()
-
-                self.loop.run_until_complete(self.server.wait_closed())
-                logger.info('Server shutting down')
-        finally:
-            if self.close_loop:
-                if sys.version_info >= (3, 6):
-                    self.loop.run_until_complete(self.loop.shutdown_asyncgens())
-                self.loop.close()
-
-            if self._cleanup_socket is not None:
-                self._cleanup_socket()
+                    await self.server.backfill_queue.put((method, taskhash))
+                    return upstream
+
+            return ""
+
+        return await self._stream_handler(handler)
+
+    @permissions(READ_PERM)
+    async def handle_exists_stream(self, request):
+        async def handler(l):
+            if await self.db.unihash_exists(l):
+                return "true"
+
+            if self.upstream_client is not None:
+                if await self.upstream_client.unihash_exists(l):
+                    return "true"
+
+            return "false"
+
+        return await self._stream_handler(handler)
+
+    async def report_readonly(self, data):
+        method = data["method"]
+        outhash = data["outhash"]
+        taskhash = data["taskhash"]
+
+        info = await self.get_outhash(method, outhash, taskhash)
+        if info:
+            unihash = info["unihash"]
+        else:
+            unihash = data["unihash"]
+
+        return {
+            "taskhash": taskhash,
+            "method": method,
+            "unihash": unihash,
+        }
+
+    # Since this can be called either read only or to report, the check to
+    # report is made inside the function
+    @permissions(READ_PERM)
+    async def handle_report(self, data):
+        if self.server.read_only or not self.user_has_permissions(REPORT_PERM):
+            return await self.report_readonly(data)
+
+        outhash_data = {
+            "method": data["method"],
+            "outhash": data["outhash"],
+            "taskhash": data["taskhash"],
+            "created": datetime.now(),
+        }
+
+        for k in ("owner", "PN", "PV", "PR", "task", "outhash_siginfo"):
+            if k in data:
+                outhash_data[k] = data[k]
+
+        if self.user:
+            outhash_data["owner"] = self.user.username
+
+        # Insert the new entry, unless it already exists
+        if await self.db.insert_outhash(outhash_data):
+            # If this row is new, check if it is equivalent to another
+            # output hash
+            row = await self.db.get_equivalent_for_outhash(
+                data["method"], data["outhash"], data["taskhash"]
+            )
+
+            if row is not None:
+                # A matching output hash was found. Set our taskhash to the
+                # same unihash since they are equivalent
+                unihash = row["unihash"]
+            else:
+                # No matching output hash was found. This is probably the
+                # first outhash to be added.
+                unihash = data["unihash"]
+
+                # Query upstream to see if it has a unihash we can use
+                if self.upstream_client is not None:
+                    upstream_data = await self.upstream_client.get_outhash(
+                        data["method"], data["outhash"], data["taskhash"]
+                    )
+                    if upstream_data is not None:
+                        unihash = upstream_data["unihash"]
+
+            await self.db.insert_unihash(data["method"], data["taskhash"], unihash)
+
+        unihash_data = await self.get_unihash(data["method"], data["taskhash"])
+        if unihash_data is not None:
+            unihash = unihash_data["unihash"]
+        else:
+            unihash = data["unihash"]
+
+        return {
+            "taskhash": data["taskhash"],
+            "method": data["method"],
+            "unihash": unihash,
+        }
+
+    @permissions(READ_PERM, REPORT_PERM)
+    async def handle_equivreport(self, data):
+        await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
+
+        # Fetch the unihash that will be reported for the taskhash. If the
+        # unihash matches, it means this row was inserted (or the mapping
+        # was already valid)
+        row = await self.db.get_equivalent(data["method"], data["taskhash"])
+
+        if row["unihash"] == data["unihash"]:
+            self.logger.info(
+                "Adding taskhash equivalence for %s with unihash %s",
+                data["taskhash"],
+                row["unihash"],
+            )
+
+        return {k: row[k] for k in ("taskhash", "method", "unihash")}
+
+    @permissions(READ_PERM)
+    async def handle_get_stats(self, request):
+        return {
+            "requests": self.server.request_stats.todict(),
+        }
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_reset_stats(self, request):
+        d = {
+            "requests": self.server.request_stats.todict(),
+        }
+
+        self.server.request_stats.reset()
+        return d
+
+    @permissions(READ_PERM)
+    async def handle_backfill_wait(self, request):
+        d = {
+            "tasks": self.server.backfill_queue.qsize(),
+        }
+        await self.server.backfill_queue.join()
+        return d
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_remove(self, request):
+        condition = request["where"]
+        if not isinstance(condition, dict):
+            raise TypeError("Bad condition type %s" % type(condition))
+
+        return {"count": await self.db.remove(condition)}
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_gc_mark(self, request):
+        condition = request["where"]
+        mark = request["mark"]
+
+        if not isinstance(condition, dict):
+            raise TypeError("Bad condition type %s" % type(condition))
+
+        if not isinstance(mark, str):
+            raise TypeError("Bad mark type %s" % type(mark))
+
+        return {"count": await self.db.gc_mark(mark, condition)}
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_gc_mark_stream(self, request):
+        async def handler(line):
+            try:
+                decoded_line = json.loads(line)
+            except json.JSONDecodeError as exc:
+                raise bb.asyncrpc.InvokeError(
+                    "Could not decode JSONL input '%s'" % line
+                ) from exc
+
+            try:
+                mark = decoded_line["mark"]
+                condition = decoded_line["where"]
+                if not isinstance(mark, str):
+                    raise TypeError("Bad mark type %s" % type(mark))
+
+                if not isinstance(condition, dict):
+                    raise TypeError("Bad condition type %s" % type(condition))
+            except KeyError as exc:
+                raise bb.asyncrpc.InvokeError(
+                    "Input line is missing key '%s' " % exc
+                ) from exc
+
+            return json.dumps({"count": await self.db.gc_mark(mark, condition)})
+
+        return await self._stream_handler(handler)
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_gc_sweep(self, request):
+        mark = request["mark"]
+
+        if not isinstance(mark, str):
+            raise TypeError("Bad mark type %s" % type(mark))
+
+        current_mark = await self.db.get_current_gc_mark()
+
+        if not current_mark or mark != current_mark:
+            raise bb.asyncrpc.InvokeError(
+                f"'{mark}' is not the current mark. Refusing to sweep"
+            )
+
+        count = await self.db.gc_sweep()
+
+        return {"count": count}
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_gc_status(self, request):
+        (keep_rows, remove_rows, current_mark) = await self.db.gc_status()
+        return {
+            "keep": keep_rows,
+            "remove": remove_rows,
+            "mark": current_mark,
+        }
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_clean_unused(self, request):
+        max_age = request["max_age_seconds"]
+        oldest = datetime.now() - timedelta(seconds=-max_age)
+        return {"count": await self.db.clean_unused(oldest)}
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_get_db_usage(self, request):
+        return {"usage": await self.db.get_usage()}
+
+    @permissions(DB_ADMIN_PERM)
+    async def handle_get_db_query_columns(self, request):
+        return {"columns": await self.db.get_query_columns()}
+
+    # The authentication API is always allowed
+    async def handle_auth(self, request):
+        username = str(request["username"])
+        token = str(request["token"])
+
+        async def fail_auth():
+            nonlocal username
+            # Rate limit bad login attempts
+            await asyncio.sleep(1)
+            raise bb.asyncrpc.InvokeError(f"Unable to authenticate as {username}")
+
+        user, db_token = await self.db.lookup_user_token(username)
+
+        if not user or not db_token:
+            await fail_auth()
+
+        try:
+            algo, salt, _ = db_token.split(":")
+        except ValueError:
+            await fail_auth()
+
+        if hash_token(algo, salt, token) != db_token:
+            await fail_auth()
+
+        self.user = user
+
+        self.logger.info("Authenticated as %s", username)
+
+        return {
+            "result": True,
+            "username": self.user.username,
+            "permissions": sorted(list(self.user.permissions)),
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+    async def handle_refresh_token(self, request):
+        username = str(request["username"])
+
+        token = await new_token()
+
+        updated = await self.db.set_user_token(
+            username,
+            hash_token(TOKEN_ALGORITHM, new_salt(), token),
+        )
+        if not updated:
+            self.raise_no_user_error(username)
+
+        return {"username": username, "token": token}
+
+    def get_perm_arg(self, arg):
+        if not isinstance(arg, list):
+            raise bb.asyncrpc.InvokeError("Unexpected type for permissions")
+
+        arg = set(arg)
+        try:
+            arg.remove(NONE_PERM)
+        except KeyError:
+            pass
+
+        unknown_perms = arg - ALL_PERMISSIONS
+        if unknown_perms:
+            raise bb.asyncrpc.InvokeError(
+                "Unknown permissions %s" % ", ".join(sorted(list(unknown_perms)))
+            )
+
+        return sorted(list(arg))
+
+    def return_perms(self, permissions):
+        if ALL_PERM in permissions:
+            return sorted(list(ALL_PERMISSIONS))
+        return sorted(list(permissions))
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_set_perms(self, request):
+        username = str(request["username"])
+        permissions = self.get_perm_arg(request["permissions"])
+
+        if not await self.db.set_user_perms(username, permissions):
+            self.raise_no_user_error(username)
+
+        return {
+            "username": username,
+            "permissions": self.return_perms(permissions),
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+    async def handle_get_user(self, request):
+        username = str(request["username"])
+
+        user = await self.db.lookup_user(username)
+        if user is None:
+            return None
+
+        return {
+            "username": user.username,
+            "permissions": self.return_perms(user.permissions),
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_get_all_users(self, request):
+        users = await self.db.get_all_users()
+        return {
+            "users": [
+                {
+                    "username": u.username,
+                    "permissions": self.return_perms(u.permissions),
+                }
+                for u in users
+            ]
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_new_user(self, request):
+        username = str(request["username"])
+        permissions = self.get_perm_arg(request["permissions"])
+
+        token = await new_token()
+
+        inserted = await self.db.new_user(
+            username,
+            permissions,
+            hash_token(TOKEN_ALGORITHM, new_salt(), token),
+        )
+        if not inserted:
+            raise bb.asyncrpc.InvokeError(f"Cannot create new user '{username}'")
+
+        return {
+            "username": username,
+            "permissions": self.return_perms(permissions),
+            "token": token,
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+    async def handle_delete_user(self, request):
+        username = str(request["username"])
+
+        if not await self.db.delete_user(username):
+            self.raise_no_user_error(username)
+
+        return {"username": username}
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_become_user(self, request):
+        username = str(request["username"])
+
+        user = await self.db.lookup_user(username)
+        if user is None:
+            raise bb.asyncrpc.InvokeError(f"User {username} doesn't exist")
+
+        self.user = user
+
+        self.logger.info("Became user %s", username)
+
+        return {
+            "username": self.user.username,
+            "permissions": self.return_perms(self.user.permissions),
+        }
+
+
+class Server(bb.asyncrpc.AsyncServer):
+    def __init__(
+        self,
+        db_engine,
+        upstream=None,
+        read_only=False,
+        anon_perms=DEFAULT_ANON_PERMS,
+        admin_username=None,
+        admin_password=None,
+    ):
+        if upstream and read_only:
+            raise bb.asyncrpc.ServerError(
+                "Read-only hashserv cannot pull from an upstream server"
+            )
+
+        disallowed_perms = set(anon_perms) - set(
+            [NONE_PERM, READ_PERM, REPORT_PERM, DB_ADMIN_PERM]
+        )
+
+        if disallowed_perms:
+            raise bb.asyncrpc.ServerError(
+                f"Permission(s) {' '.join(disallowed_perms)} are not allowed for anonymous users"
+            )
+
+        super().__init__(logger)
+
+        self.request_stats = Stats()
+        self.db_engine = db_engine
+        self.upstream = upstream
+        self.read_only = read_only
+        self.backfill_queue = None
+        self.anon_perms = set(anon_perms)
+        self.admin_username = admin_username
+        self.admin_password = admin_password
+
+        self.logger.info(
+            "Anonymous user permissions are: %s", ", ".join(self.anon_perms)
+        )
+
+    def accept_client(self, socket):
+        return ServerClient(socket, self)
+
+    async def create_admin_user(self):
+        admin_permissions = (ALL_PERM,)
+        async with self.db_engine.connect(self.logger) as db:
+            added = await db.new_user(
+                self.admin_username,
+                admin_permissions,
+                hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+            )
+            if added:
+                self.logger.info("Created admin user '%s'", self.admin_username)
+            else:
+                await db.set_user_perms(
+                    self.admin_username,
+                    admin_permissions,
+                )
+                await db.set_user_token(
+                    self.admin_username,
+                    hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+                )
+                self.logger.info("Admin user '%s' updated", self.admin_username)
+
+    async def backfill_worker_task(self):
+        async with await create_async_client(
+            self.upstream
+        ) as client, self.db_engine.connect(self.logger) as db:
+            while True:
+                item = await self.backfill_queue.get()
+                if item is None:
+                    self.backfill_queue.task_done()
+                    break
+
+                method, taskhash = item
+                d = await client.get_taskhash(method, taskhash)
+                if d is not None:
+                    await db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
+                self.backfill_queue.task_done()
+
+    def start(self):
+        tasks = super().start()
+        if self.upstream:
+            self.backfill_queue = asyncio.Queue()
+            tasks += [self.backfill_worker_task()]
+
+        self.loop.run_until_complete(self.db_engine.create())
+
+        if self.admin_username:
+            self.loop.run_until_complete(self.create_admin_user())
+
+        return tasks
+
+    async def stop(self):
+        if self.backfill_queue is not None:
+            await self.backfill_queue.put(None)
+        await super().stop()
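
The handle_gc_mark and handle_gc_sweep handlers above implement a two-phase mark-and-sweep over the unihash table: a caller first marks every row it wants to keep (the active mark is stored in the config table), then sweeps to delete every row whose gc_mark no longer matches, and the sweep refuses to run unless the caller presents the current mark. What follows is a minimal sketch of the same protocol against a plain sqlite3 database; the gc_mark/gc_sweep helper names and the database file are hypothetical, but the SQL mirrors the unihashes_v3 and config queries added by this commit.

import sqlite3

def gc_mark(db, mark, condition):
    # Phase 1: record the new mark in the config table, then tag the
    # rows selected by the condition so the sweep will keep them
    with db:
        db.execute(
            "INSERT OR REPLACE INTO config (id, name, value) VALUES "
            "((SELECT id FROM config WHERE name='gc-mark'), 'gc-mark', ?)",
            (mark,),
        )
        cur = db.execute(
            "UPDATE unihashes_v3 SET gc_mark=COALESCE("
            "(SELECT value FROM config WHERE name='gc-mark'), '') "
            "WHERE method=?",
            (condition["method"],),
        )
        return cur.rowcount

def gc_sweep(db):
    # Phase 2: delete every row that was not marked. COALESCE is
    # deliberately omitted so a NULL mark matches no rows at all,
    # then the mark is cleared to end the GC cycle
    with db:
        cur = db.execute(
            "DELETE FROM unihashes_v3 WHERE "
            "gc_mark!=(SELECT value FROM config WHERE name='gc-mark')"
        )
        db.execute("UPDATE config SET value=NULL WHERE name='gc-mark'")
        return cur.rowcount

db = sqlite3.connect("hashes.db")  # illustrative database file
kept = gc_mark(db, "mark-1", {"method": "TestMethod"})
removed = gc_sweep(db)
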
diff --git a/bitbake/lib/hashserv/sqlalchemy.py b/bitbake/lib/hashserv/sqlalchemy.py
new file mode 100644
index 0000000000..f7b0226a7a
--- /dev/null
+++ b/bitbake/lib/hashserv/sqlalchemy.py
@@ -0,0 +1,598 @@
1#! /usr/bin/env python3
2#
3# Copyright (C) 2023 Garmin Ltd.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import logging
9from datetime import datetime
10from . import User
11
12from sqlalchemy.ext.asyncio import create_async_engine
13from sqlalchemy.pool import NullPool
14from sqlalchemy import (
15 MetaData,
16 Column,
17 Table,
18 Text,
19 Integer,
20 UniqueConstraint,
21 DateTime,
22 Index,
23 select,
24 insert,
25 exists,
26 literal,
27 and_,
28 delete,
29 update,
30 func,
31 inspect,
32)
33import sqlalchemy.engine
34from sqlalchemy.orm import declarative_base
35from sqlalchemy.exc import IntegrityError
36from sqlalchemy.dialects.postgresql import insert as postgres_insert
37
38Base = declarative_base()
39
40
41class UnihashesV3(Base):
42 __tablename__ = "unihashes_v3"
43 id = Column(Integer, primary_key=True, autoincrement=True)
44 method = Column(Text, nullable=False)
45 taskhash = Column(Text, nullable=False)
46 unihash = Column(Text, nullable=False)
47 gc_mark = Column(Text, nullable=False)
48
49 __table_args__ = (
50 UniqueConstraint("method", "taskhash"),
51 Index("taskhash_lookup_v4", "method", "taskhash"),
52 Index("unihash_lookup_v1", "unihash"),
53 )
54
55
56class OuthashesV2(Base):
57 __tablename__ = "outhashes_v2"
58 id = Column(Integer, primary_key=True, autoincrement=True)
59 method = Column(Text, nullable=False)
60 taskhash = Column(Text, nullable=False)
61 outhash = Column(Text, nullable=False)
62 created = Column(DateTime)
63 owner = Column(Text)
64 PN = Column(Text)
65 PV = Column(Text)
66 PR = Column(Text)
67 task = Column(Text)
68 outhash_siginfo = Column(Text)
69
70 __table_args__ = (
71 UniqueConstraint("method", "taskhash", "outhash"),
72 Index("outhash_lookup_v3", "method", "outhash"),
73 )
74
75
76class Users(Base):
77 __tablename__ = "users"
78 id = Column(Integer, primary_key=True, autoincrement=True)
79 username = Column(Text, nullable=False)
80 token = Column(Text, nullable=False)
81 permissions = Column(Text)
82
83 __table_args__ = (UniqueConstraint("username"),)
84
85
86class Config(Base):
87 __tablename__ = "config"
88 id = Column(Integer, primary_key=True, autoincrement=True)
89 name = Column(Text, nullable=False)
90 value = Column(Text)
91 __table_args__ = (
92 UniqueConstraint("name"),
93 Index("config_lookup", "name"),
94 )
95
96
97#
98# Old table versions
99#
100DeprecatedBase = declarative_base()
101
102
103class UnihashesV2(DeprecatedBase):
104 __tablename__ = "unihashes_v2"
105 id = Column(Integer, primary_key=True, autoincrement=True)
106 method = Column(Text, nullable=False)
107 taskhash = Column(Text, nullable=False)
108 unihash = Column(Text, nullable=False)
109
110 __table_args__ = (
111 UniqueConstraint("method", "taskhash"),
112 Index("taskhash_lookup_v3", "method", "taskhash"),
113 )
114
115
116class DatabaseEngine(object):
117 def __init__(self, url, username=None, password=None):
118 self.logger = logging.getLogger("hashserv.sqlalchemy")
119 self.url = sqlalchemy.engine.make_url(url)
120
121 if username is not None:
122 self.url = self.url.set(username=username)
123
124 if password is not None:
125 self.url = self.url.set(password=password)
126
127 async def create(self):
128 def check_table_exists(conn, name):
129 return inspect(conn).has_table(name)
130
131 self.logger.info("Using database %s", self.url)
132 if self.url.drivername == 'postgresql+psycopg':
133             # Psycopg 3 (psycopg) driver can handle async connection pooling
134 self.engine = create_async_engine(self.url, max_overflow=-1)
135 else:
136 self.engine = create_async_engine(self.url, poolclass=NullPool)
137
138 async with self.engine.begin() as conn:
139 # Create tables
140 self.logger.info("Creating tables...")
141 await conn.run_sync(Base.metadata.create_all)
142
143 if await conn.run_sync(check_table_exists, UnihashesV2.__tablename__):
144 self.logger.info("Upgrading Unihashes V2 -> V3...")
145 statement = insert(UnihashesV3).from_select(
146 ["id", "method", "unihash", "taskhash", "gc_mark"],
147 select(
148 UnihashesV2.id,
149 UnihashesV2.method,
150 UnihashesV2.unihash,
151 UnihashesV2.taskhash,
152 literal("").label("gc_mark"),
153 ),
154 )
155 self.logger.debug("%s", statement)
156 await conn.execute(statement)
157
158 await conn.run_sync(Base.metadata.drop_all, [UnihashesV2.__table__])
159 self.logger.info("Upgrade complete")
160
161 def connect(self, logger):
162 return Database(self.engine, logger)
163
164
165def map_row(row):
166 if row is None:
167 return None
168 return dict(**row._mapping)
169
170
171def map_user(row):
172 if row is None:
173 return None
174 return User(
175 username=row.username,
176 permissions=set(row.permissions.split()),
177 )
178
179
180def _make_condition_statement(table, condition):
181 where = {}
182 for c in table.__table__.columns:
183 if c.key in condition and condition[c.key] is not None:
184 where[c] = condition[c.key]
185
186 return [(k == v) for k, v in where.items()]
187
188
189class Database(object):
190 def __init__(self, engine, logger):
191 self.engine = engine
192 self.db = None
193 self.logger = logger
194
195 async def __aenter__(self):
196 self.db = await self.engine.connect()
197 return self
198
199 async def __aexit__(self, exc_type, exc_value, traceback):
200 await self.close()
201
202 async def close(self):
203 await self.db.close()
204 self.db = None
205
206 async def _execute(self, statement):
207 self.logger.debug("%s", statement)
208 return await self.db.execute(statement)
209
210 async def _set_config(self, name, value):
211 while True:
212 result = await self._execute(
213 update(Config).where(Config.name == name).values(value=value)
214 )
215
216 if result.rowcount == 0:
217 self.logger.debug("Config '%s' not found. Adding it", name)
218 try:
219 await self._execute(insert(Config).values(name=name, value=value))
220 except IntegrityError:
221 # Race. Try again
222 continue
223
224 break
225
226 def _get_config_subquery(self, name, default=None):
227 if default is not None:
228 return func.coalesce(
229 select(Config.value).where(Config.name == name).scalar_subquery(),
230 default,
231 )
232 return select(Config.value).where(Config.name == name).scalar_subquery()
233
234 async def _get_config(self, name):
235 result = await self._execute(select(Config.value).where(Config.name == name))
236 row = result.first()
237 if row is None:
238 return None
239 return row.value
240
241 async def get_unihash_by_taskhash_full(self, method, taskhash):
242 async with self.db.begin():
243 result = await self._execute(
244 select(
245 OuthashesV2,
246 UnihashesV3.unihash.label("unihash"),
247 )
248 .join(
249 UnihashesV3,
250 and_(
251 UnihashesV3.method == OuthashesV2.method,
252 UnihashesV3.taskhash == OuthashesV2.taskhash,
253 ),
254 )
255 .where(
256 OuthashesV2.method == method,
257 OuthashesV2.taskhash == taskhash,
258 )
259 .order_by(
260 OuthashesV2.created.asc(),
261 )
262 .limit(1)
263 )
264 return map_row(result.first())
265
266 async def get_unihash_by_outhash(self, method, outhash):
267 async with self.db.begin():
268 result = await self._execute(
269 select(OuthashesV2, UnihashesV3.unihash.label("unihash"))
270 .join(
271 UnihashesV3,
272 and_(
273 UnihashesV3.method == OuthashesV2.method,
274 UnihashesV3.taskhash == OuthashesV2.taskhash,
275 ),
276 )
277 .where(
278 OuthashesV2.method == method,
279 OuthashesV2.outhash == outhash,
280 )
281 .order_by(
282 OuthashesV2.created.asc(),
283 )
284 .limit(1)
285 )
286 return map_row(result.first())
287
288 async def unihash_exists(self, unihash):
289 async with self.db.begin():
290 result = await self._execute(
291 select(UnihashesV3).where(UnihashesV3.unihash == unihash).limit(1)
292 )
293
294 return result.first() is not None
295
296 async def get_outhash(self, method, outhash):
297 async with self.db.begin():
298 result = await self._execute(
299 select(OuthashesV2)
300 .where(
301 OuthashesV2.method == method,
302 OuthashesV2.outhash == outhash,
303 )
304 .order_by(
305 OuthashesV2.created.asc(),
306 )
307 .limit(1)
308 )
309 return map_row(result.first())
310
311 async def get_equivalent_for_outhash(self, method, outhash, taskhash):
312 async with self.db.begin():
313 result = await self._execute(
314 select(
315 OuthashesV2.taskhash.label("taskhash"),
316 UnihashesV3.unihash.label("unihash"),
317 )
318 .join(
319 UnihashesV3,
320 and_(
321 UnihashesV3.method == OuthashesV2.method,
322 UnihashesV3.taskhash == OuthashesV2.taskhash,
323 ),
324 )
325 .where(
326 OuthashesV2.method == method,
327 OuthashesV2.outhash == outhash,
328 OuthashesV2.taskhash != taskhash,
329 )
330 .order_by(
331 OuthashesV2.created.asc(),
332 )
333 .limit(1)
334 )
335 return map_row(result.first())
336
337 async def get_equivalent(self, method, taskhash):
338 async with self.db.begin():
339 result = await self._execute(
340 select(
341 UnihashesV3.unihash,
342 UnihashesV3.method,
343 UnihashesV3.taskhash,
344 ).where(
345 UnihashesV3.method == method,
346 UnihashesV3.taskhash == taskhash,
347 )
348 )
349 return map_row(result.first())
350
351 async def remove(self, condition):
352 async def do_remove(table):
353 where = _make_condition_statement(table, condition)
354 if where:
355 async with self.db.begin():
356 result = await self._execute(delete(table).where(*where))
357 return result.rowcount
358
359 return 0
360
361 count = 0
362 count += await do_remove(UnihashesV3)
363 count += await do_remove(OuthashesV2)
364
365 return count
366
367 async def get_current_gc_mark(self):
368 async with self.db.begin():
369 return await self._get_config("gc-mark")
370
371 async def gc_status(self):
372 async with self.db.begin():
373 gc_mark_subquery = self._get_config_subquery("gc-mark", "")
374
375 result = await self._execute(
376 select(func.count())
377 .select_from(UnihashesV3)
378 .where(UnihashesV3.gc_mark == gc_mark_subquery)
379 )
380 keep_rows = result.scalar()
381
382 result = await self._execute(
383 select(func.count())
384 .select_from(UnihashesV3)
385 .where(UnihashesV3.gc_mark != gc_mark_subquery)
386 )
387 remove_rows = result.scalar()
388
389 return (keep_rows, remove_rows, await self._get_config("gc-mark"))
390
391 async def gc_mark(self, mark, condition):
392 async with self.db.begin():
393 await self._set_config("gc-mark", mark)
394
395 where = _make_condition_statement(UnihashesV3, condition)
396 if not where:
397 return 0
398
399 result = await self._execute(
400 update(UnihashesV3)
401 .values(gc_mark=self._get_config_subquery("gc-mark", ""))
402 .where(*where)
403 )
404 return result.rowcount
405
406 async def gc_sweep(self):
407 async with self.db.begin():
408 result = await self._execute(
409 delete(UnihashesV3).where(
410                     # A sneaky conditional that provides some protection
411                     # against errant use: if the config mark is NULL, this
412                     # will not match any rows because no default is
413                     # specified in the select statement
414 UnihashesV3.gc_mark
415 != self._get_config_subquery("gc-mark")
416 )
417 )
418 await self._set_config("gc-mark", None)
419
420 return result.rowcount
421
422 async def clean_unused(self, oldest):
423 async with self.db.begin():
424 result = await self._execute(
425 delete(OuthashesV2).where(
426 OuthashesV2.created < oldest,
427 ~(
428 select(UnihashesV3.id)
429 .where(
430 UnihashesV3.method == OuthashesV2.method,
431 UnihashesV3.taskhash == OuthashesV2.taskhash,
432 )
433 .limit(1)
434 .exists()
435 ),
436 )
437 )
438 return result.rowcount
439
440 async def insert_unihash(self, method, taskhash, unihash):
441 # Postgres specific ignore on insert duplicate
442 if self.engine.name == "postgresql":
443 statement = (
444 postgres_insert(UnihashesV3)
445 .values(
446 method=method,
447 taskhash=taskhash,
448 unihash=unihash,
449 gc_mark=self._get_config_subquery("gc-mark", ""),
450 )
451 .on_conflict_do_nothing(index_elements=("method", "taskhash"))
452 )
453 else:
454 statement = insert(UnihashesV3).values(
455 method=method,
456 taskhash=taskhash,
457 unihash=unihash,
458 gc_mark=self._get_config_subquery("gc-mark", ""),
459 )
460
461 try:
462 async with self.db.begin():
463 result = await self._execute(statement)
464 return result.rowcount != 0
465 except IntegrityError:
466 self.logger.debug(
467 "%s, %s, %s already in unihash database", method, taskhash, unihash
468 )
469 return False
470
471 async def insert_outhash(self, data):
472 outhash_columns = set(c.key for c in OuthashesV2.__table__.columns)
473
474 data = {k: v for k, v in data.items() if k in outhash_columns}
475
476 if "created" in data and not isinstance(data["created"], datetime):
477 data["created"] = datetime.fromisoformat(data["created"])
478
479 # Postgres specific ignore on insert duplicate
480 if self.engine.name == "postgresql":
481 statement = (
482 postgres_insert(OuthashesV2)
483 .values(**data)
484 .on_conflict_do_nothing(
485 index_elements=("method", "taskhash", "outhash")
486 )
487 )
488 else:
489 statement = insert(OuthashesV2).values(**data)
490
491 try:
492 async with self.db.begin():
493 result = await self._execute(statement)
494 return result.rowcount != 0
495 except IntegrityError:
496 self.logger.debug(
497 "%s, %s already in outhash database", data["method"], data["outhash"]
498 )
499 return False
500
501 async def _get_user(self, username):
502 async with self.db.begin():
503 result = await self._execute(
504 select(
505 Users.username,
506 Users.permissions,
507 Users.token,
508 ).where(
509 Users.username == username,
510 )
511 )
512 return result.first()
513
514 async def lookup_user_token(self, username):
515 row = await self._get_user(username)
516 if not row:
517 return None, None
518 return map_user(row), row.token
519
520 async def lookup_user(self, username):
521 return map_user(await self._get_user(username))
522
523 async def set_user_token(self, username, token):
524 async with self.db.begin():
525 result = await self._execute(
526 update(Users)
527 .where(
528 Users.username == username,
529 )
530 .values(
531 token=token,
532 )
533 )
534 return result.rowcount != 0
535
536 async def set_user_perms(self, username, permissions):
537 async with self.db.begin():
538 result = await self._execute(
539 update(Users)
540 .where(Users.username == username)
541 .values(permissions=" ".join(permissions))
542 )
543 return result.rowcount != 0
544
545 async def get_all_users(self):
546 async with self.db.begin():
547 result = await self._execute(
548 select(
549 Users.username,
550 Users.permissions,
551 )
552 )
553 return [map_user(row) for row in result]
554
555 async def new_user(self, username, permissions, token):
556 try:
557 async with self.db.begin():
558 await self._execute(
559 insert(Users).values(
560 username=username,
561 permissions=" ".join(permissions),
562 token=token,
563 )
564 )
565 return True
566 except IntegrityError as e:
567 self.logger.debug("Cannot create new user %s: %s", username, e)
568 return False
569
570 async def delete_user(self, username):
571 async with self.db.begin():
572 result = await self._execute(
573 delete(Users).where(Users.username == username)
574 )
575 return result.rowcount != 0
576
577 async def get_usage(self):
578 usage = {}
579 async with self.db.begin() as session:
580 for name, table in Base.metadata.tables.items():
581 result = await self._execute(
582 statement=select(func.count()).select_from(table)
583 )
584 usage[name] = {
585 "rows": result.scalar(),
586 }
587
588 return usage
589
590 async def get_query_columns(self):
591 columns = set()
592 for table in (UnihashesV3, OuthashesV2):
593 for c in table.__table__.columns:
594 if not isinstance(c.type, Text):
595 continue
596 columns.add(c.key)
597
598 return list(columns)
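
Taken together, DatabaseEngine and Database form the interface the server consumes: create() builds the schema (and runs the V2 -> V3 upgrade) once at startup, and connect() hands out Database wrappers used as async context managers. Below is a minimal usage sketch, assuming a local SQLite database reached through SQLAlchemy's async API; the sqlite+aiosqlite URL requires the third-party aiosqlite driver, and the file name and hash values are made up for illustration.

import asyncio
import logging

from hashserv.sqlalchemy import DatabaseEngine

async def main():
    logger = logging.getLogger("hashserv.example")
    engine = DatabaseEngine("sqlite+aiosqlite:///hashes.db")
    await engine.create()  # creates tables and runs any pending upgrade

    async with engine.connect(logger) as db:
        # insert_unihash() returns False when the (method, taskhash)
        # pair already exists, mirroring the INSERT ... do-nothing path
        inserted = await db.insert_unihash("TestMethod", "abc123", "def456")
        row = await db.get_equivalent("TestMethod", "abc123")
        print(inserted, row["unihash"])

asyncio.run(main())
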
diff --git a/bitbake/lib/hashserv/sqlite.py b/bitbake/lib/hashserv/sqlite.py
new file mode 100644
index 0000000000..976504d7f4
--- /dev/null
+++ b/bitbake/lib/hashserv/sqlite.py
@@ -0,0 +1,579 @@
1#! /usr/bin/env python3
2#
3# Copyright (C) 2023 Garmin Ltd.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7from datetime import datetime, timezone
8import sqlite3
9import logging
10from contextlib import closing
11from . import User
12
13logger = logging.getLogger("hashserv.sqlite")
14
15UNIHASH_TABLE_DEFINITION = (
16 ("method", "TEXT NOT NULL", "UNIQUE"),
17 ("taskhash", "TEXT NOT NULL", "UNIQUE"),
18 ("unihash", "TEXT NOT NULL", ""),
19 ("gc_mark", "TEXT NOT NULL", ""),
20)
21
22UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
23
24OUTHASH_TABLE_DEFINITION = (
25 ("method", "TEXT NOT NULL", "UNIQUE"),
26 ("taskhash", "TEXT NOT NULL", "UNIQUE"),
27 ("outhash", "TEXT NOT NULL", "UNIQUE"),
28 ("created", "DATETIME", ""),
29 # Optional fields
30 ("owner", "TEXT", ""),
31 ("PN", "TEXT", ""),
32 ("PV", "TEXT", ""),
33 ("PR", "TEXT", ""),
34 ("task", "TEXT", ""),
35 ("outhash_siginfo", "TEXT", ""),
36)
37
38OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)
39
40USERS_TABLE_DEFINITION = (
41 ("username", "TEXT NOT NULL", "UNIQUE"),
42 ("token", "TEXT NOT NULL", ""),
43 ("permissions", "TEXT NOT NULL", ""),
44)
45
46USERS_TABLE_COLUMNS = tuple(name for name, _, _ in USERS_TABLE_DEFINITION)
47
48
49CONFIG_TABLE_DEFINITION = (
50 ("name", "TEXT NOT NULL", "UNIQUE"),
51 ("value", "TEXT", ""),
52)
53
54CONFIG_TABLE_COLUMNS = tuple(name for name, _, _ in CONFIG_TABLE_DEFINITION)
55
56
57def adapt_datetime_iso(val):
58 """Adapt datetime.datetime to UTC ISO 8601 date."""
59 return val.astimezone(timezone.utc).isoformat()
60
61
62sqlite3.register_adapter(datetime, adapt_datetime_iso)
63
64
65def convert_datetime(val):
66 """Convert ISO 8601 datetime to datetime.datetime object."""
67 return datetime.fromisoformat(val.decode())
68
69
70sqlite3.register_converter("DATETIME", convert_datetime)
71
72
73def _make_table(cursor, name, definition):
74 cursor.execute(
75 """
76 CREATE TABLE IF NOT EXISTS {name} (
77 id INTEGER PRIMARY KEY AUTOINCREMENT,
78 {fields}
79 UNIQUE({unique})
80 )
81 """.format(
82 name=name,
83 fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
84 unique=", ".join(
85 name for name, _, flags in definition if "UNIQUE" in flags
86 ),
87 )
88 )
89
90
91def map_user(row):
92 if row is None:
93 return None
94 return User(
95 username=row["username"],
96 permissions=set(row["permissions"].split()),
97 )
98
99
100def _make_condition_statement(columns, condition):
101 where = {}
102 for c in columns:
103 if c in condition and condition[c] is not None:
104 where[c] = condition[c]
105
106 return where, " AND ".join("%s=:%s" % (k, k) for k in where.keys())
107
108
109def _get_sqlite_version(cursor):
110 cursor.execute("SELECT sqlite_version()")
111
112 version = []
113 for v in cursor.fetchone()[0].split("."):
114 try:
115 version.append(int(v))
116 except ValueError:
117 version.append(v)
118
119 return tuple(version)
120
121
122def _schema_table_name(version):
123 if version >= (3, 33):
124 return "sqlite_schema"
125
126 return "sqlite_master"
127
128
129class DatabaseEngine(object):
130 def __init__(self, dbname, sync):
131 self.dbname = dbname
132 self.logger = logger
133 self.sync = sync
134
135 async def create(self):
136 db = sqlite3.connect(self.dbname)
137 db.row_factory = sqlite3.Row
138
139 with closing(db.cursor()) as cursor:
140 _make_table(cursor, "unihashes_v3", UNIHASH_TABLE_DEFINITION)
141 _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
142 _make_table(cursor, "users", USERS_TABLE_DEFINITION)
143 _make_table(cursor, "config", CONFIG_TABLE_DEFINITION)
144
145 cursor.execute("PRAGMA journal_mode = WAL")
146 cursor.execute(
147 "PRAGMA synchronous = %s" % ("NORMAL" if self.sync else "OFF")
148 )
149
150 # Drop old indexes
151 cursor.execute("DROP INDEX IF EXISTS taskhash_lookup")
152 cursor.execute("DROP INDEX IF EXISTS outhash_lookup")
153 cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v2")
154 cursor.execute("DROP INDEX IF EXISTS outhash_lookup_v2")
155 cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v3")
156
157 # TODO: Upgrade from tasks_v2?
158 cursor.execute("DROP TABLE IF EXISTS tasks_v2")
159
160 # Create new indexes
161 cursor.execute(
162 "CREATE INDEX IF NOT EXISTS taskhash_lookup_v4 ON unihashes_v3 (method, taskhash)"
163 )
164 cursor.execute(
165 "CREATE INDEX IF NOT EXISTS unihash_lookup_v1 ON unihashes_v3 (unihash)"
166 )
167 cursor.execute(
168 "CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)"
169 )
170 cursor.execute("CREATE INDEX IF NOT EXISTS config_lookup ON config (name)")
171
172 sqlite_version = _get_sqlite_version(cursor)
173
174 cursor.execute(
175 f"""
176 SELECT name FROM {_schema_table_name(sqlite_version)} WHERE type = 'table' AND name = 'unihashes_v2'
177 """
178 )
179 if cursor.fetchone():
180 self.logger.info("Upgrading Unihashes V2 -> V3...")
181 cursor.execute(
182 """
183 INSERT INTO unihashes_v3 (id, method, unihash, taskhash, gc_mark)
184 SELECT id, method, unihash, taskhash, '' FROM unihashes_v2
185 """
186 )
187 cursor.execute("DROP TABLE unihashes_v2")
188 db.commit()
189 self.logger.info("Upgrade complete")
190
191 def connect(self, logger):
192 return Database(logger, self.dbname, self.sync)
193
194
195class Database(object):
196 def __init__(self, logger, dbname, sync):
197 self.dbname = dbname
198 self.logger = logger
199
200 self.db = sqlite3.connect(self.dbname)
201 self.db.row_factory = sqlite3.Row
202
203 with closing(self.db.cursor()) as cursor:
204 cursor.execute("PRAGMA journal_mode = WAL")
205 cursor.execute(
206 "PRAGMA synchronous = %s" % ("NORMAL" if sync else "OFF")
207 )
208
209 self.sqlite_version = _get_sqlite_version(cursor)
210
211 async def __aenter__(self):
212 return self
213
214 async def __aexit__(self, exc_type, exc_value, traceback):
215 await self.close()
216
217 async def _set_config(self, cursor, name, value):
218 cursor.execute(
219 """
220 INSERT OR REPLACE INTO config (id, name, value) VALUES
221 ((SELECT id FROM config WHERE name=:name), :name, :value)
222 """,
223 {
224 "name": name,
225 "value": value,
226 },
227 )
228
229 async def _get_config(self, cursor, name):
230 cursor.execute(
231 "SELECT value FROM config WHERE name=:name",
232 {
233 "name": name,
234 },
235 )
236 row = cursor.fetchone()
237 if row is None:
238 return None
239 return row["value"]
240
241 async def close(self):
242 self.db.close()
243
244 async def get_unihash_by_taskhash_full(self, method, taskhash):
245 with closing(self.db.cursor()) as cursor:
246 cursor.execute(
247 """
248 SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
249 INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
250 WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
251 ORDER BY outhashes_v2.created ASC
252 LIMIT 1
253 """,
254 {
255 "method": method,
256 "taskhash": taskhash,
257 },
258 )
259 return cursor.fetchone()
260
261 async def get_unihash_by_outhash(self, method, outhash):
262 with closing(self.db.cursor()) as cursor:
263 cursor.execute(
264 """
265 SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
266 INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
267 WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
268 ORDER BY outhashes_v2.created ASC
269 LIMIT 1
270 """,
271 {
272 "method": method,
273 "outhash": outhash,
274 },
275 )
276 return cursor.fetchone()
277
278 async def unihash_exists(self, unihash):
279 with closing(self.db.cursor()) as cursor:
280 cursor.execute(
281 """
282 SELECT * FROM unihashes_v3 WHERE unihash=:unihash
283 LIMIT 1
284 """,
285 {
286 "unihash": unihash,
287 },
288 )
289 return cursor.fetchone() is not None
290
291 async def get_outhash(self, method, outhash):
292 with closing(self.db.cursor()) as cursor:
293 cursor.execute(
294 """
295 SELECT * FROM outhashes_v2
296 WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
297 ORDER BY outhashes_v2.created ASC
298 LIMIT 1
299 """,
300 {
301 "method": method,
302 "outhash": outhash,
303 },
304 )
305 return cursor.fetchone()
306
307 async def get_equivalent_for_outhash(self, method, outhash, taskhash):
308 with closing(self.db.cursor()) as cursor:
309 cursor.execute(
310 """
311 SELECT outhashes_v2.taskhash AS taskhash, unihashes_v3.unihash AS unihash FROM outhashes_v2
312 INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
313 -- Select any matching output hash except the one we just inserted
314 WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
315 -- Pick the oldest hash
316 ORDER BY outhashes_v2.created ASC
317 LIMIT 1
318 """,
319 {
320 "method": method,
321 "outhash": outhash,
322 "taskhash": taskhash,
323 },
324 )
325 return cursor.fetchone()
326
327 async def get_equivalent(self, method, taskhash):
328 with closing(self.db.cursor()) as cursor:
329 cursor.execute(
330 "SELECT taskhash, method, unihash FROM unihashes_v3 WHERE method=:method AND taskhash=:taskhash",
331 {
332 "method": method,
333 "taskhash": taskhash,
334 },
335 )
336 return cursor.fetchone()
337
338 async def remove(self, condition):
339 def do_remove(columns, table_name, cursor):
340 where, clause = _make_condition_statement(columns, condition)
341 if where:
342 query = f"DELETE FROM {table_name} WHERE {clause}"
343 cursor.execute(query, where)
344 return cursor.rowcount
345
346 return 0
347
348 count = 0
349 with closing(self.db.cursor()) as cursor:
350 count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
351 count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v3", cursor)
352 self.db.commit()
353
354 return count
355
356 async def get_current_gc_mark(self):
357 with closing(self.db.cursor()) as cursor:
358 return await self._get_config(cursor, "gc-mark")
359
360 async def gc_status(self):
361 with closing(self.db.cursor()) as cursor:
362 cursor.execute(
363 """
364 SELECT COUNT() FROM unihashes_v3 WHERE
365 gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
366 """
367 )
368 keep_rows = cursor.fetchone()[0]
369
370 cursor.execute(
371 """
372 SELECT COUNT() FROM unihashes_v3 WHERE
373 gc_mark!=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
374 """
375 )
376 remove_rows = cursor.fetchone()[0]
377
378 current_mark = await self._get_config(cursor, "gc-mark")
379
380 return (keep_rows, remove_rows, current_mark)
381
382 async def gc_mark(self, mark, condition):
383 with closing(self.db.cursor()) as cursor:
384 await self._set_config(cursor, "gc-mark", mark)
385
386 where, clause = _make_condition_statement(UNIHASH_TABLE_COLUMNS, condition)
387
388 new_rows = 0
389 if where:
390 cursor.execute(
391 f"""
392 UPDATE unihashes_v3 SET
393 gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
394 WHERE {clause}
395 """,
396 where,
397 )
398 new_rows = cursor.rowcount
399
400 self.db.commit()
401 return new_rows
402
403 async def gc_sweep(self):
404 with closing(self.db.cursor()) as cursor:
405 # NOTE: COALESCE is not used in this query so that if the current
406 # mark is NULL, nothing will happen
407 cursor.execute(
408 """
409 DELETE FROM unihashes_v3 WHERE
410 gc_mark!=(SELECT value FROM config WHERE name='gc-mark')
411 """
412 )
413 count = cursor.rowcount
414 await self._set_config(cursor, "gc-mark", None)
415
416 self.db.commit()
417 return count
418
419 async def clean_unused(self, oldest):
420 with closing(self.db.cursor()) as cursor:
421 cursor.execute(
422 """
423 DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS (
424 SELECT unihashes_v3.id FROM unihashes_v3 WHERE unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash LIMIT 1
425 )
426 """,
427 {
428 "oldest": oldest,
429 },
430 )
431 self.db.commit()
432 return cursor.rowcount
433
434 async def insert_unihash(self, method, taskhash, unihash):
435 with closing(self.db.cursor()) as cursor:
436 prevrowid = cursor.lastrowid
437 cursor.execute(
438 """
439 INSERT OR IGNORE INTO unihashes_v3 (method, taskhash, unihash, gc_mark) VALUES
440 (
441 :method,
442 :taskhash,
443 :unihash,
444 COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
445 )
446 """,
447 {
448 "method": method,
449 "taskhash": taskhash,
450 "unihash": unihash,
451 },
452 )
453 self.db.commit()
454 return cursor.lastrowid != prevrowid
455
456 async def insert_outhash(self, data):
457 data = {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS}
458 keys = sorted(data.keys())
459 query = "INSERT OR IGNORE INTO outhashes_v2 ({fields}) VALUES({values})".format(
460 fields=", ".join(keys),
461 values=", ".join(":" + k for k in keys),
462 )
463 with closing(self.db.cursor()) as cursor:
464 prevrowid = cursor.lastrowid
465 cursor.execute(query, data)
466 self.db.commit()
467 return cursor.lastrowid != prevrowid
468
469 def _get_user(self, username):
470 with closing(self.db.cursor()) as cursor:
471 cursor.execute(
472 """
473 SELECT username, permissions, token FROM users WHERE username=:username
474 """,
475 {
476 "username": username,
477 },
478 )
479 return cursor.fetchone()
480
481 async def lookup_user_token(self, username):
482 row = self._get_user(username)
483 if row is None:
484 return None, None
485 return map_user(row), row["token"]
486
487 async def lookup_user(self, username):
488 return map_user(self._get_user(username))
489
490 async def set_user_token(self, username, token):
491 with closing(self.db.cursor()) as cursor:
492 cursor.execute(
493 """
494 UPDATE users SET token=:token WHERE username=:username
495 """,
496 {
497 "username": username,
498 "token": token,
499 },
500 )
501 self.db.commit()
502 return cursor.rowcount != 0
503
504 async def set_user_perms(self, username, permissions):
505 with closing(self.db.cursor()) as cursor:
506 cursor.execute(
507 """
508 UPDATE users SET permissions=:permissions WHERE username=:username
509 """,
510 {
511 "username": username,
512 "permissions": " ".join(permissions),
513 },
514 )
515 self.db.commit()
516 return cursor.rowcount != 0
517
518 async def get_all_users(self):
519 with closing(self.db.cursor()) as cursor:
520 cursor.execute("SELECT username, permissions FROM users")
521 return [map_user(r) for r in cursor.fetchall()]
522
523 async def new_user(self, username, permissions, token):
524 with closing(self.db.cursor()) as cursor:
525 try:
526 cursor.execute(
527 """
528 INSERT INTO users (username, token, permissions) VALUES (:username, :token, :permissions)
529 """,
530 {
531 "username": username,
532 "token": token,
533 "permissions": " ".join(permissions),
534 },
535 )
536 self.db.commit()
537 return True
538 except sqlite3.IntegrityError:
539 return False
540
541 async def delete_user(self, username):
542 with closing(self.db.cursor()) as cursor:
543 cursor.execute(
544 """
545 DELETE FROM users WHERE username=:username
546 """,
547 {
548 "username": username,
549 },
550 )
551 self.db.commit()
552 return cursor.rowcount != 0
553
554 async def get_usage(self):
555 usage = {}
556 with closing(self.db.cursor()) as cursor:
557 cursor.execute(
558 f"""
559 SELECT name FROM {_schema_table_name(self.sqlite_version)} WHERE type = 'table' AND name NOT LIKE 'sqlite_%'
560 """
561 )
562 for row in cursor.fetchall():
563 cursor.execute(
564 """
565 SELECT COUNT() FROM %s
566 """
567 % row["name"],
568 )
569 usage[row["name"]] = {
570 "rows": cursor.fetchone()[0],
571 }
572 return usage
573
574 async def get_query_columns(self):
575 columns = set()
576 for name, typ, _ in UNIHASH_TABLE_DEFINITION + OUTHASH_TABLE_DEFINITION:
577 if typ.startswith("TEXT"):
578 columns.add(name)
579 return list(columns)
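
The sqlite backend exposes the same engine interface as the SQLAlchemy one, so Server.start() and the client handlers can drive either interchangeably. A minimal sketch of direct use follows; the database file and hash values are again illustrative only, and datetime values round-trip through the ISO 8601 adapter registered above.

import asyncio
import logging
from datetime import datetime

from hashserv.sqlite import DatabaseEngine

async def main():
    logger = logging.getLogger("hashserv.example")
    engine = DatabaseEngine("hashes.db", sync=True)
    await engine.create()  # builds tables, indexes and runs the upgrade

    async with engine.connect(logger) as db:
        await db.insert_outhash({
            "method": "TestMethod",
            "taskhash": "abc123",
            "outhash": "fedcba",
            "created": datetime.now(),  # stored as an ISO 8601 string
        })
        await db.insert_unihash("TestMethod", "abc123", "def456")
        # Joins outhashes_v2 against unihashes_v3 on (method, taskhash)
        row = await db.get_unihash_by_outhash("TestMethod", "fedcba")
        print(row["unihash"])

asyncio.run(main())
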
diff --git a/bitbake/lib/hashserv/tests.py b/bitbake/lib/hashserv/tests.py
index 1a696481e3..da3f8e0884 100644
--- a/bitbake/lib/hashserv/tests.py
+++ b/bitbake/lib/hashserv/tests.py
@@ -6,7 +6,8 @@
6# 6#
7 7
8from . import create_server, create_client 8from . import create_server, create_client
9from .client import HashConnectionError 9from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS
10from bb.asyncrpc import InvokeError
10import hashlib 11import hashlib
11import logging 12import logging
12import multiprocessing 13import multiprocessing
@@ -16,72 +17,161 @@ import tempfile
16import threading 17import threading
17import unittest 18import unittest
18import socket 19import socket
19 20import time
20def _run_server(server, idx): 21import signal
21 # logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w', 22import subprocess
22 # format='%(levelname)s %(filename)s:%(lineno)d %(message)s') 23import json
23 sys.stdout = open('bbhashserv-%d.log' % idx, 'w') 24import re
25from pathlib import Path
26
27
28THIS_DIR = Path(__file__).parent
29BIN_DIR = THIS_DIR.parent.parent / "bin"
30
31def server_prefunc(server, idx):
32 logging.basicConfig(level=logging.DEBUG, filename='bbhashserv-%d.log' % idx, filemode='w',
33 format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
34 server.logger.debug("Running server %d" % idx)
35 sys.stdout = open('bbhashserv-stdout-%d.log' % idx, 'w')
24 sys.stderr = sys.stdout 36 sys.stderr = sys.stdout
25 server.serve_forever()
26
27 37
28class HashEquivalenceTestSetup(object): 38class HashEquivalenceTestSetup(object):
29 METHOD = 'TestMethod' 39 METHOD = 'TestMethod'
30 40
31 server_index = 0 41 server_index = 0
42 client_index = 0
32 43
33 def start_server(self, dbpath=None, upstream=None, read_only=False): 44 def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc, anon_perms=DEFAULT_ANON_PERMS, admin_username=None, admin_password=None):
34 self.server_index += 1 45 self.server_index += 1
35 if dbpath is None: 46 if dbpath is None:
36 dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index) 47 dbpath = self.make_dbpath()
48
49 def cleanup_server(server):
50 if server.process.exitcode is not None:
51 return
37 52
38 def cleanup_thread(thread): 53 server.process.terminate()
39 thread.terminate() 54 server.process.join()
40 thread.join()
41 55
42 server = create_server(self.get_server_addr(self.server_index), 56 server = create_server(self.get_server_addr(self.server_index),
43 dbpath, 57 dbpath,
44 upstream=upstream, 58 upstream=upstream,
45 read_only=read_only) 59 read_only=read_only,
60 anon_perms=anon_perms,
61 admin_username=admin_username,
62 admin_password=admin_password)
46 server.dbpath = dbpath 63 server.dbpath = dbpath
47 64
48 server.thread = multiprocessing.Process(target=_run_server, args=(server, self.server_index)) 65 server.serve_as_process(prefunc=prefunc, args=(self.server_index,))
49 server.thread.start() 66 self.addCleanup(cleanup_server, server)
50 self.addCleanup(cleanup_thread, server.thread) 67
68 return server
69
70 def make_dbpath(self):
71 return os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
51 72
73 def start_client(self, server_address, username=None, password=None):
52 def cleanup_client(client): 74 def cleanup_client(client):
53 client.close() 75 client.close()
54 76
55 client = create_client(server.address) 77 client = create_client(server_address, username=username, password=password)
56 self.addCleanup(cleanup_client, client) 78 self.addCleanup(cleanup_client, client)
57 79
58 return (client, server) 80 return client
59 81
60 def setUp(self): 82 def start_test_server(self):
61 if sys.version_info < (3, 5, 0): 83 self.server = self.start_server()
62 self.skipTest('Python 3.5 or later required') 84 return self.server.address
85
86 def start_auth_server(self):
87 auth_server = self.start_server(self.server.dbpath, anon_perms=[], admin_username="admin", admin_password="password")
88 self.auth_server_address = auth_server.address
89 self.admin_client = self.start_client(auth_server.address, username="admin", password="password")
90 return self.admin_client
91
92 def auth_client(self, user):
93 return self.start_client(self.auth_server_address, user["username"], user["token"])
63 94
95 def setUp(self):
64 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv') 96 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv')
65 self.addCleanup(self.temp_dir.cleanup) 97 self.addCleanup(self.temp_dir.cleanup)
66 98
67 (self.client, self.server) = self.start_server() 99 self.server_address = self.start_test_server()
100
101 self.client = self.start_client(self.server_address)
68 102
69 def assertClientGetHash(self, client, taskhash, unihash): 103 def assertClientGetHash(self, client, taskhash, unihash):
70 result = client.get_unihash(self.METHOD, taskhash) 104 result = client.get_unihash(self.METHOD, taskhash)
71 self.assertEqual(result, unihash) 105 self.assertEqual(result, unihash)
72 106
107 def assertUserPerms(self, user, permissions):
108 with self.auth_client(user) as client:
109 info = client.get_user()
110 self.assertEqual(info, {
111 "username": user["username"],
112 "permissions": permissions,
113 })
73 114
74class HashEquivalenceCommonTests(object): 115 def assertUserCanAuth(self, user):
75 def test_create_hash(self): 116 with self.start_client(self.auth_server_address) as client:
117 client.auth(user["username"], user["token"])
118
119 def assertUserCannotAuth(self, user):
120 with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
121 client.auth(user["username"], user["token"])
122
123 def create_test_hash(self, client):
76 # Simple test that hashes can be created 124 # Simple test that hashes can be created
77 taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9' 125 taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
78 outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f' 126 outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
79 unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd' 127 unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
80 128
81 self.assertClientGetHash(self.client, taskhash, None) 129 self.assertClientGetHash(client, taskhash, None)
82 130
83 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) 131 result = client.report_unihash(taskhash, self.METHOD, outhash, unihash)
84 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') 132 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
133 return taskhash, outhash, unihash
134
135 def run_hashclient(self, args, **kwargs):
136 try:
137 p = subprocess.run(
138 [BIN_DIR / "bitbake-hashclient"] + args,
139 stdout=subprocess.PIPE,
140 stderr=subprocess.STDOUT,
141 encoding="utf-8",
142 **kwargs
143 )
144 except subprocess.CalledProcessError as e:
145 print(e.output)
146 raise e
147
148 print(p.stdout)
149 return p
150
151
152class HashEquivalenceCommonTests(object):
153 def auth_perms(self, *permissions):
154 self.client_index += 1
155 user = self.create_user(f"user-{self.client_index}", permissions)
156 return self.auth_client(user)
157
158 def create_user(self, username, permissions, *, client=None):
159 def remove_user(username):
160 try:
161 self.admin_client.delete_user(username)
162 except bb.asyncrpc.InvokeError:
163 pass
164
165 if client is None:
166 client = self.admin_client
167
168 user = client.new_user(username, permissions)
169 self.addCleanup(remove_user, username)
170
171 return user
172
173 def test_create_hash(self):
174 return self.create_test_hash(self.client)
85 175
86 def test_create_equivalent(self): 176 def test_create_equivalent(self):
87 # Tests that a second reported task with the same outhash will be 177 # Tests that a second reported task with the same outhash will be
@@ -123,6 +213,57 @@ class HashEquivalenceCommonTests(object):
123 213
124 self.assertClientGetHash(self.client, taskhash, unihash) 214 self.assertClientGetHash(self.client, taskhash, unihash)
125 215
216 def test_remove_taskhash(self):
217 taskhash, outhash, unihash = self.create_test_hash(self.client)
218 result = self.client.remove({"taskhash": taskhash})
219 self.assertGreater(result["count"], 0)
220 self.assertClientGetHash(self.client, taskhash, None)
221
222 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
223 self.assertIsNone(result_outhash)
224
225 def test_remove_unihash(self):
226 taskhash, outhash, unihash = self.create_test_hash(self.client)
227 result = self.client.remove({"unihash": unihash})
228 self.assertGreater(result["count"], 0)
229 self.assertClientGetHash(self.client, taskhash, None)
230
231 def test_remove_outhash(self):
232 taskhash, outhash, unihash = self.create_test_hash(self.client)
233 result = self.client.remove({"outhash": outhash})
234 self.assertGreater(result["count"], 0)
235
236 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
237 self.assertIsNone(result_outhash)
238
239 def test_remove_method(self):
240 taskhash, outhash, unihash = self.create_test_hash(self.client)
241 result = self.client.remove({"method": self.METHOD})
242 self.assertGreater(result["count"], 0)
243 self.assertClientGetHash(self.client, taskhash, None)
244
245 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
246 self.assertIsNone(result_outhash)
247
248 def test_clean_unused(self):
249 taskhash, outhash, unihash = self.create_test_hash(self.client)
250
251 # Clean the database, which should not remove anything because all hashes are in use
252 result = self.client.clean_unused(0)
253 self.assertEqual(result["count"], 0)
254 self.assertClientGetHash(self.client, taskhash, unihash)
255
256 # Remove the unihash. The row in the outhash table should still be present
257 self.client.remove({"unihash": unihash})
258 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
259 self.assertIsNotNone(result_outhash)
260
261 # Now clean with no minimum age which will remove the outhash
262 result = self.client.clean_unused(0)
263 self.assertEqual(result["count"], 1)
264 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
265 self.assertIsNone(result_outhash)
266
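The two-step behaviour checked here: remove() deletes rows matching a column filter, while clean_unused(max_age) reaps outhash rows that no surviving unihash references and that are older than max_age seconds. A condensed sketch reusing the client and hashes from above:

# Drop the unihash mapping; the outhash row initially survives.
client.remove({"unihash": unihash})
assert client.get_outhash(METHOD, outhash, taskhash, False) is not None

# A zero minimum age prunes the now-unreferenced outhash immediately.
client.clean_unused(0)
assert client.get_outhash(METHOD, outhash, taskhash, False) is None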
126 def test_huge_message(self): 267 def test_huge_message(self):
127 # Simple test that hashes can be created 268 # Simple test that hashes can be created
128 taskhash = 'c665584ee6817aa99edfc77a44dd853828279370' 269 taskhash = 'c665584ee6817aa99edfc77a44dd853828279370'
@@ -138,16 +279,21 @@ class HashEquivalenceCommonTests(object):
138 }) 279 })
139 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') 280 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
140 281
141 result = self.client.get_taskhash(self.METHOD, taskhash, True) 282 result_unihash = self.client.get_taskhash(self.METHOD, taskhash, True)
142 self.assertEqual(result['taskhash'], taskhash) 283 self.assertEqual(result_unihash['taskhash'], taskhash)
143 self.assertEqual(result['unihash'], unihash) 284 self.assertEqual(result_unihash['unihash'], unihash)
144 self.assertEqual(result['method'], self.METHOD) 285 self.assertEqual(result_unihash['method'], self.METHOD)
145 self.assertEqual(result['outhash'], outhash) 286
146 self.assertEqual(result['outhash_siginfo'], siginfo) 287 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
288 self.assertEqual(result_outhash['taskhash'], taskhash)
289 self.assertEqual(result_outhash['method'], self.METHOD)
290 self.assertEqual(result_outhash['unihash'], unihash)
291 self.assertEqual(result_outhash['outhash'], outhash)
292 self.assertEqual(result_outhash['outhash_siginfo'], siginfo)
147 293
148 def test_stress(self): 294 def test_stress(self):
149 def query_server(failures): 295 def query_server(failures):
150 client = Client(self.server.address) 296 client = Client(self.server_address)
151 try: 297 try:
152 for i in range(1000): 298 for i in range(1000):
153 taskhash = hashlib.sha256() 299 taskhash = hashlib.sha256()
@@ -186,8 +332,10 @@ class HashEquivalenceCommonTests(object):
186 # the side client. It also verifies that the results are pulled into 332 # the side client. It also verifies that the results are pulled into
187 # the downstream database by checking that the downstream and side servers 333 # the downstream database by checking that the downstream and side servers
188 # match after the downstream is done waiting for all backfill tasks 334 # match after the downstream is done waiting for all backfill tasks
189 (down_client, down_server) = self.start_server(upstream=self.server.address) 335 down_server = self.start_server(upstream=self.server_address)
190 (side_client, side_server) = self.start_server(dbpath=down_server.dbpath) 336 down_client = self.start_client(down_server.address)
337 side_server = self.start_server(dbpath=down_server.dbpath)
338 side_client = self.start_client(side_server.address)
191 339
192 def check_hash(taskhash, unihash, old_sidehash): 340 def check_hash(taskhash, unihash, old_sidehash):
193 nonlocal down_client 341 nonlocal down_client
@@ -258,15 +406,57 @@ class HashEquivalenceCommonTests(object):
258 result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6) 406 result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6)
259 self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream') 407 self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream')
260 408
409 # Tests read-through from the upstream server
410 taskhash7 = '9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74'
411 outhash7 = '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69'
412 unihash7 = '05d2a63c81e32f0a36542ca677e8ad852365c538'
413 self.client.report_unihash(taskhash7, self.METHOD, outhash7, unihash7)
414
415 result = down_client.get_taskhash(self.METHOD, taskhash7, True)
416 self.assertEqual(result['unihash'], unihash7, 'Server failed to copy unihash from upstream')
417 self.assertEqual(result['outhash'], outhash7, 'Server failed to copy outhash from upstream')
418 self.assertEqual(result['taskhash'], taskhash7, 'Server failed to copy taskhash from upstream')
419 self.assertEqual(result['method'], self.METHOD)
420
421 taskhash8 = '86978a4c8c71b9b487330b0152aade10c1ee58aa'
422 outhash8 = 'ca8c128e9d9e4a28ef24d0508aa20b5cf880604eacd8f65c0e366f7e0cc5fbcf'
423 unihash8 = 'd8bcf25369d40590ad7d08c84d538982f2023e01'
424 self.client.report_unihash(taskhash8, self.METHOD, outhash8, unihash8)
425
426 result = down_client.get_outhash(self.METHOD, outhash8, taskhash8)
427 self.assertEqual(result['unihash'], unihash8, 'Server failed to copy unihash from upstream')
428 self.assertEqual(result['outhash'], outhash8, 'Server failed to copy outhash from upstream')
429 self.assertEqual(result['taskhash'], taskhash8, 'Server failed to copy taskhash from upstream')
430 self.assertEqual(result['method'], self.METHOD)
431
432 taskhash9 = 'ae6339531895ddf5b67e663e6a374ad8ec71d81c'
433 outhash9 = 'afc78172c81880ae10a1fec994b5b4ee33d196a001a1b66212a15ebe573e00b5'
434 unihash9 = '6662e699d6e3d894b24408ff9a4031ef9b038ee8'
435 self.client.report_unihash(taskhash9, self.METHOD, outhash9, unihash9)
436
437 result = down_client.get_taskhash(self.METHOD, taskhash9, False)
438 self.assertEqual(result['unihash'], unihash9, 'Server failed to copy unihash from upstream')
439 self.assertEqual(result['taskhash'], taskhash9, 'Server failed to copy taskhash from upstream')
440 self.assertEqual(result['method'], self.METHOD)
441
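These read-through assertions presume a downstream server pointed at an upstream, so misses are filled remotely and cached locally. A sketch of such a chain, assuming the hashserv.create_server factory (keyword names are assumptions based on the calls in these tests; addresses are illustrative):

import hashserv

# Downstream server backed by a local database, consulting an upstream
# on cache misses.
server = hashserv.create_server(
    "unix://./downstream.sock",
    "./downstream.sqlite",
    upstream="ws://hashserv.example.com:8687",
)
server.serve_as_process()   # process-based serving, as these tests use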
442 def test_unihash_exists(self):
443 taskhash, outhash, unihash = self.create_test_hash(self.client)
444 self.assertTrue(self.client.unihash_exists(unihash))
445 self.assertFalse(self.client.unihash_exists('6662e699d6e3d894b24408ff9a4031ef9b038ee8'))
446
261 def test_ro_server(self): 447 def test_ro_server(self):
262 (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True) 448 rw_server = self.start_server()
449 rw_client = self.start_client(rw_server.address)
450
451 ro_server = self.start_server(dbpath=rw_server.dbpath, read_only=True)
452 ro_client = self.start_client(ro_server.address)
263 453
264 # Report a hash via the read-write server 454 # Report a hash via the read-write server
265 taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9' 455 taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
266 outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f' 456 outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
267 unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd' 457 unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
268 458
269 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) 459 result = rw_client.report_unihash(taskhash, self.METHOD, outhash, unihash)
270 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') 460 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
271 461
272 # Check the hash via the read-only server 462 # Check the hash via the read-only server
@@ -277,11 +467,976 @@ class HashEquivalenceCommonTests(object):
277 outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44' 467 outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
278 unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824' 468 unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824'
279 469
280 with self.assertRaises(HashConnectionError): 470 result = ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
281 ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2) 471 self.assertEqual(result['unihash'], unihash2)
282 472
283 # Ensure that the database was not modified 473 # Ensure that the database was not modified
474 self.assertClientGetHash(rw_client, taskhash2, None)
475
476
477 def test_slow_server_start(self):
478 # Ensures that the server will exit correctly even if it gets a SIGTERM
479 # before entering the main loop
480
481 event = multiprocessing.Event()
482
483 def prefunc(server, idx):
484 nonlocal event
485 server_prefunc(server, idx)
486 event.wait()
487
488 def do_nothing(signum, frame):
489 pass
490
491 old_signal = signal.signal(signal.SIGTERM, do_nothing)
492 self.addCleanup(signal.signal, signal.SIGTERM, old_signal)
493
494 server = self.start_server(prefunc=prefunc)
495 server.process.terminate()
496 time.sleep(30)
497 event.set()
498 server.process.join(300)
499 self.assertIsNotNone(server.process.exitcode, "Server did not exit in a timely manner!")
500
501 def test_diverging_report_race(self):
502 # Tests that a reported task will correctly pick up an updated unihash
503
504 # This is a baseline report added to the database to ensure that there
505 # is something to match against as equivalent
506 outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa'
507 taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
508 unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
509 result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1)
510
511 # Add a report that is equivalent to Task 1. It should ignore the
512 # provided unihash and report the unihash from task 1
513 taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273'
514 unihash2 = taskhash2
515 result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2)
516 self.assertEqual(result['unihash'], unihash1)
517
518 # Add another report for Task 2, but with a different outhash (e.g. the
519 # task is non-deterministic). It should still be marked with the Task 1
520 # unihash because it has the Task 2 taskhash, which is equivalent to
521 # Task 1
522 outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c'
523 result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2)
524 self.assertEqual(result['unihash'], unihash1)
525
526
527 def test_diverging_report_reverse_race(self):
528 # Same idea as the previous test, but Tasks 2 and 3 are reported in
529 # the opposite order
530
531 outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa'
532 taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
533 unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
534 result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1)
535
536 taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273'
537 unihash2 = taskhash2
538
539 # Report Task 3 first. Since there is nothing else in the database it
540 # will use the client provided unihash
541 outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c'
542 result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2)
543 self.assertEqual(result['unihash'], unihash2)
544
545 # Report Task 2. This is equivalent to Task 1 but there is already a mapping for
546 # taskhash2 so it will report unihash2
547 result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2)
548 self.assertEqual(result['unihash'], unihash2)
549
550 # The originally reported unihash for Task 3 should be unchanged even if it
551 # shares a taskhash with Task 2
552 self.assertClientGetHash(self.client, taskhash2, unihash2)
553
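The invariant both race tests pin down: the first unihash recorded for an outhash wins, and a taskhash that already has a mapping keeps it regardless of later outhashes. A toy in-memory model of that resolution rule (a sketch, not the server implementation):

taskhash_map = {}   # taskhash -> unihash; first mapping is sticky
outhash_map = {}    # outhash  -> unihash; first report wins

def resolve_report(taskhash, outhash, unihash):
    if taskhash in taskhash_map:
        return taskhash_map[taskhash]
    resolved = outhash_map.setdefault(outhash, unihash)
    taskhash_map[taskhash] = resolved
    return resolved

# Order from test_diverging_report_race:
assert resolve_report('t1', 'o1', 'u1') == 'u1'
assert resolve_report('t2', 'o1', 'u2') == 'u1'   # equivalent outhash
assert resolve_report('t2', 'o3', 'u2') == 'u1'   # taskhash already mapped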
554 def test_get_unihash_batch(self):
555 TEST_INPUT = (
556 # taskhash outhash unihash
557 ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
558 # Duplicated taskhash with multiple output hashes and unihashes.
559 ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
560 # Equivalent hash
561 ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
562 ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
563 ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
564 ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
565 ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
566 )
567 EXTRA_QUERIES = (
568 "6b6be7a84ab179b4240c4302518dc3f6",
569 )
570
571 for taskhash, outhash, unihash in TEST_INPUT:
572 self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
573
574
575 result = self.client.get_unihash_batch(
576 [(self.METHOD, data[0]) for data in TEST_INPUT] +
577 [(self.METHOD, e) for e in EXTRA_QUERIES]
578 )
579
580 self.assertListEqual(result, [
581 "218e57509998197d570e2c98512d0105985dffc9",
582 "218e57509998197d570e2c98512d0105985dffc9",
583 "218e57509998197d570e2c98512d0105985dffc9",
584 "3b5d3d83f07f259e9086fcb422c855286e18a57d",
585 "f46d3fbb439bd9b921095da657a4de906510d2cd",
586 "f46d3fbb439bd9b921095da657a4de906510d2cd",
587 "05d2a63c81e32f0a36542ca677e8ad852365c538",
588 None,
589 ])
590
591 def test_unihash_exists_batch(self):
592 TEST_INPUT = (
593 # taskhash outhash unihash
594 ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
595 # Duplicated taskhash with multiple output hashes and unihashes.
596 ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
597 # Equivalent hash
598 ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
599 ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
600 ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
601 ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
602 ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
603 )
604 EXTRA_QUERIES = (
605 "6b6be7a84ab179b4240c4302518dc3f6",
606 )
607
608 result_unihashes = set()
609
610
611 for taskhash, outhash, unihash in TEST_INPUT:
612 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
613 result_unihashes.add(result["unihash"])
614
615 query = []
616 expected = []
617
618 for _, _, unihash in TEST_INPUT:
619 query.append(unihash)
620 expected.append(unihash in result_unihashes)
621
622
623 for unihash in EXTRA_QUERIES:
624 query.append(unihash)
625 expected.append(False)
626
627 result = self.client.unihash_exists_batch(query)
628 self.assertListEqual(result, expected)
629
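Both batch calls return one entry per query, in input order, with None (or False) marking misses, so results can be zipped straight back onto the queries. A sketch reusing the client from earlier:

queries = [
    (METHOD, '8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a'),
    (METHOD, '6b6be7a84ab179b4240c4302518dc3f6'),   # unknown taskhash
]
for (method, taskhash), unihash in zip(queries, client.get_unihash_batch(queries)):
    print(taskhash, '->', unihash)    # second line prints None

flags = client.unihash_exists_batch(['218e57509998197d570e2c98512d0105985dffc9'])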
630 def test_auth_read_perms(self):
631 admin_client = self.start_auth_server()
632
633 # Create hashes with non-authenticated server
634 taskhash, outhash, unihash = self.create_test_hash(self.client)
635
636 # Validate hash can be retrieved using authenticated client
637 with self.auth_perms("@read") as client:
638 self.assertClientGetHash(client, taskhash, unihash)
639
640 with self.auth_perms() as client, self.assertRaises(InvokeError):
641 self.assertClientGetHash(client, taskhash, unihash)
642
643 def test_auth_report_perms(self):
644 admin_client = self.start_auth_server()
645
646 # Without read permission, the user is completely denied
647 with self.auth_perms() as client, self.assertRaises(InvokeError):
648 self.create_test_hash(client)
649
650 # Read permission allows the call to succeed, but it doesn't record
651 # anything in the database
652 with self.auth_perms("@read") as client:
653 taskhash, outhash, unihash = self.create_test_hash(client)
654 self.assertClientGetHash(client, taskhash, None)
655
656 # Report permission alone is insufficient
657 with self.auth_perms("@report") as client, self.assertRaises(InvokeError):
658 self.create_test_hash(client)
659
660 # Read and report permission actually modify the database
661 with self.auth_perms("@read", "@report") as client:
662 taskhash, outhash, unihash = self.create_test_hash(client)
663 self.assertClientGetHash(client, taskhash, unihash)
664
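The permission model these tests establish: @read gates all queries, and reporting additionally needs @report before anything is persisted. A sketch of the full authenticated flow (the client factory arguments are assumptions; the RPCs are the ones used above):

admin = hashserv.create_client(auth_address, 'admin', admin_token)  # names illustrative
user = admin.new_user('builder', ['@read', '@report'])

client = hashserv.create_client(auth_address)
client.auth(user['username'], user['token'])
client.report_unihash(taskhash, METHOD, outhash, unihash)   # permitted and persisted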
665 def test_auth_no_token_refresh_from_anon_user(self):
666 self.start_auth_server()
667
668 with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
669 client.refresh_token()
670
671 def test_auth_self_token_refresh(self):
672 admin_client = self.start_auth_server()
673
674 # Create a new user with no permissions
675 user = self.create_user("test-user", [])
676
677 with self.auth_client(user) as client:
678 new_user = client.refresh_token()
679
680 self.assertEqual(user["username"], new_user["username"])
681 self.assertNotEqual(user["token"], new_user["token"])
682 self.assertUserCanAuth(new_user)
683 self.assertUserCannotAuth(user)
684
685 # Explicitly specifying with your own username is fine also
686 with self.auth_client(new_user) as client:
687 new_user2 = client.refresh_token(user["username"])
688
689 self.assertEqual(user["username"], new_user2["username"])
690 self.assertNotEqual(user["token"], new_user2["token"])
691 self.assertUserCanAuth(new_user2)
692 self.assertUserCannotAuth(new_user)
693 self.assertUserCannotAuth(user)
694
695 def test_auth_token_refresh(self):
696 admin_client = self.start_auth_server()
697
698 user = self.create_user("test-user", [])
699
700 with self.auth_perms() as client, self.assertRaises(InvokeError):
701 client.refresh_token(user["username"])
702
703 with self.auth_perms("@user-admin") as client:
704 new_user = client.refresh_token(user["username"])
705
706 self.assertEqual(user["username"], new_user["username"])
707 self.assertNotEqual(user["token"], new_user["token"])
708 self.assertUserCanAuth(new_user)
709 self.assertUserCannotAuth(user)
710
711 def test_auth_self_get_user(self):
712 admin_client = self.start_auth_server()
713
714 user = self.create_user("test-user", [])
715 user_info = user.copy()
716 del user_info["token"]
717
718 with self.auth_client(user) as client:
719 info = client.get_user()
720 self.assertEqual(info, user_info)
721
722 # Explicitly asking for your own username is fine also
723 info = client.get_user(user["username"])
724 self.assertEqual(info, user_info)
725
726 def test_auth_get_user(self):
727 admin_client = self.start_auth_server()
728
729 user = self.create_user("test-user", [])
730 user_info = user.copy()
731 del user_info["token"]
732
733 with self.auth_perms() as client, self.assertRaises(InvokeError):
734 client.get_user(user["username"])
735
736 with self.auth_perms("@user-admin") as client:
737 info = client.get_user(user["username"])
738 self.assertEqual(info, user_info)
739
740 info = client.get_user("nonexist-user")
741 self.assertIsNone(info)
742
743 def test_auth_reconnect(self):
744 admin_client = self.start_auth_server()
745
746 user = self.create_user("test-user", [])
747 user_info = user.copy()
748 del user_info["token"]
749
750 with self.auth_client(user) as client:
751 info = client.get_user()
752 self.assertEqual(info, user_info)
753
754 client.disconnect()
755
756 info = client.get_user()
757 self.assertEqual(info, user_info)
758
759 def test_auth_delete_user(self):
760 admin_client = self.start_auth_server()
761
762 user = self.create_user("test-user", [])
763
764 # self service
765 with self.auth_client(user) as client:
766 client.delete_user(user["username"])
767
768 self.assertIsNone(admin_client.get_user(user["username"]))
769 user = self.create_user("test-user", [])
770
771 with self.auth_perms() as client, self.assertRaises(InvokeError):
772 client.delete_user(user["username"])
773
774 with self.auth_perms("@user-admin") as client:
775 client.delete_user(user["username"])
776
777 # User doesn't exist, so even though the permission is correct, it's an
778 # error
779 with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
780 client.delete_user(user["username"])
781
782 def test_auth_set_user_perms(self):
783 admin_client = self.start_auth_server()
784
785 user = self.create_user("test-user", [])
786
787 self.assertUserPerms(user, [])
788
789 # No self service to change permissions
790 with self.auth_client(user) as client, self.assertRaises(InvokeError):
791 client.set_user_perms(user["username"], ["@all"])
792 self.assertUserPerms(user, [])
793
794 with self.auth_perms() as client, self.assertRaises(InvokeError):
795 client.set_user_perms(user["username"], ["@all"])
796 self.assertUserPerms(user, [])
797
798 with self.auth_perms("@user-admin") as client:
799 client.set_user_perms(user["username"], ["@all"])
800 self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
801
802 # Bad permissions
803 with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
804 client.set_user_perms(user["username"], ["@this-is-not-a-permission"])
805 self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
806
807 def test_auth_get_all_users(self):
808 admin_client = self.start_auth_server()
809
810 user = self.create_user("test-user", [])
811
812 with self.auth_client(user) as client, self.assertRaises(InvokeError):
813 client.get_all_users()
814
815 # Give the test user the correct permission
816 admin_client.set_user_perms(user["username"], ["@user-admin"])
817
818 with self.auth_client(user) as client:
819 all_users = client.get_all_users()
820
821 # Convert to a dictionary for easier comparison
822 all_users = {u["username"]: u for u in all_users}
823
824 self.assertEqual(all_users,
825 {
826 "admin": {
827 "username": "admin",
828 "permissions": sorted(list(ALL_PERMISSIONS)),
829 },
830 "test-user": {
831 "username": "test-user",
832 "permissions": ["@user-admin"],
833 }
834 }
835 )
836
837 def test_auth_new_user(self):
838 self.start_auth_server()
839
840 permissions = ["@read", "@report", "@db-admin", "@user-admin"]
841 permissions.sort()
842
843 with self.auth_perms() as client, self.assertRaises(InvokeError):
844 self.create_user("test-user", permissions, client=client)
845
846 with self.auth_perms("@user-admin") as client:
847 user = self.create_user("test-user", permissions, client=client)
848 self.assertIn("token", user)
849 self.assertEqual(user["username"], "test-user")
850 self.assertEqual(user["permissions"], permissions)
851
852 def test_auth_become_user(self):
853 admin_client = self.start_auth_server()
854
855 user = self.create_user("test-user", ["@read", "@report"])
856 user_info = user.copy()
857 del user_info["token"]
858
859 with self.auth_perms() as client, self.assertRaises(InvokeError):
860 client.become_user(user["username"])
861
862 with self.auth_perms("@user-admin") as client:
863 become = client.become_user(user["username"])
864 self.assertEqual(become, user_info)
865
866 info = client.get_user()
867 self.assertEqual(info, user_info)
868
869 # Verify become user is preserved across disconnect
870 client.disconnect()
871
872 info = client.get_user()
873 self.assertEqual(info, user_info)
874
875 # test-user doesn't have become_user permissions, so this should
876 # not work
877 with self.assertRaises(InvokeError):
878 client.become_user(user["username"])
879
880 # No self-service of become
881 with self.auth_client(user) as client, self.assertRaises(InvokeError):
882 client.become_user(user["username"])
883
884 # Give test user permissions to become
885 admin_client.set_user_perms(user["username"], ["@user-admin"])
886
887 # It's possible to become yourself (effectively a noop)
888 with self.auth_perms("@user-admin") as client:
889 become = client.become_user(client.username)
890
891 def test_auth_gc(self):
892 admin_client = self.start_auth_server()
893
894 with self.auth_perms() as client, self.assertRaises(InvokeError):
895 client.gc_mark("ABC", {"unihash": "123"})
896
897 with self.auth_perms() as client, self.assertRaises(InvokeError):
898 client.gc_status()
899
900 with self.auth_perms() as client, self.assertRaises(InvokeError):
901 client.gc_sweep("ABC")
902
903 with self.auth_perms("@db-admin") as client:
904 client.gc_mark("ABC", {"unihash": "123"})
905
906 with self.auth_perms("@db-admin") as client:
907 client.gc_status()
908
909 with self.auth_perms("@db-admin") as client:
910 client.gc_sweep("ABC")
911
912 def test_get_db_usage(self):
913 usage = self.client.get_db_usage()
914
915 self.assertTrue(isinstance(usage, dict))
916 for name in usage.keys():
917 self.assertTrue(isinstance(usage[name], dict))
918 self.assertIn("rows", usage[name])
919 self.assertTrue(isinstance(usage[name]["rows"], int))
920
921 def test_get_db_query_columns(self):
922 columns = self.client.get_db_query_columns()
923
924 self.assertTrue(isinstance(columns, list))
925 self.assertTrue(len(columns) > 0)
926
927 for col in columns:
928 self.client.remove({col: ""})
929
930 def test_auth_is_owner(self):
931 admin_client = self.start_auth_server()
932
933 user = self.create_user("test-user", ["@read", "@report"])
934 with self.auth_client(user) as client:
935 taskhash, outhash, unihash = self.create_test_hash(client)
936 data = client.get_taskhash(self.METHOD, taskhash, True)
937 self.assertEqual(data["owner"], user["username"])
938
939 def test_gc(self):
940 taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
941 outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
942 unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
943
944 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
945 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
946
947 taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
948 outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
949 unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
950
951 result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
952 self.assertClientGetHash(self.client, taskhash2, unihash2)
953
954 # Mark the first unihash to be kept
955 ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
956 self.assertEqual(ret, {"count": 1})
957
958 ret = self.client.gc_status()
959 self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
960
961 # Second hash is still there; mark doesn't delete hashes
962 self.assertClientGetHash(self.client, taskhash2, unihash2)
963
964 ret = self.client.gc_sweep("ABC")
965 self.assertEqual(ret, {"count": 1})
966
967 # Second hash is gone; no unihash is returned for its taskhash
968 self.assertClientGetHash(self.client, taskhash2, None)
969 # First hash is still present
970 self.assertClientGetHash(self.client, taskhash, unihash)
971
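The garbage-collection protocol walked through here and in the following tests is plain mark and sweep: gc_mark (or gc_mark_stream) flags rows to keep under a named mark, gc_status reports the keep/remove split, and gc_sweep deletes whatever the current mark missed. A condensed sketch:

client.gc_mark("2024-01", {"unihash": unihash, "method": METHOD})   # keep this row
status = client.gc_status()     # e.g. {"mark": "2024-01", "keep": 1, "remove": 1}
client.gc_sweep("2024-01")      # delete everything left unmarked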
972 def test_gc_stream(self):
973 taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
974 outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
975 unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
976
977 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
978 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
979
980 taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
981 outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
982 unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
983
984 result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
985 self.assertClientGetHash(self.client, taskhash2, unihash2)
986
987 taskhash3 = 'a1117c1f5a7c9ab2f5a39cc6fe5e6152169d09c0'
988 outhash3 = '7289c414905303700a1117c1f5a7c9ab2f5a39cc6fe5e6152169d09c04f9a53c'
989 unihash3 = '905303700a1117c1f5a7c9ab2f5a39cc6fe5e615'
990
991 result = self.client.report_unihash(taskhash3, self.METHOD, outhash3, unihash3)
992 self.assertClientGetHash(self.client, taskhash3, unihash3)
993
994 # Mark the first two unihashes to be kept
995 ret = self.client.gc_mark_stream("ABC", (f"unihash {h}" for h in [unihash, unihash2]))
996 self.assertEqual(ret, {"count": 2})
997
998 ret = self.client.gc_status()
999 self.assertEqual(ret, {"mark": "ABC", "keep": 2, "remove": 1})
1000
1001 # Third hash is still there; mark doesn't delete hashes
1002 self.assertClientGetHash(self.client, taskhash3, unihash3)
1003
1004 ret = self.client.gc_sweep("ABC")
1005 self.assertEqual(ret, {"count": 1})
1006
1007 # Third hash is gone; no unihash is returned for its taskhash
1008 self.assertClientGetHash(self.client, taskhash3, None)
1009 # First hash is still present
1010 self.assertClientGetHash(self.client, taskhash, unihash)
1011 # Second hash is still present
1012 self.assertClientGetHash(self.client, taskhash2, unihash2)
1013
1014 def test_gc_switch_mark(self):
1015 taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
1016 outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
1017 unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
1018
1019 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
1020 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
1021
1022 taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
1023 outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
1024 unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
1025
1026 result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
1027 self.assertClientGetHash(self.client, taskhash2, unihash2)
1028
1029 # Mark the first unihash to be kept
1030 ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
1031 self.assertEqual(ret, {"count": 1})
1032
1033 ret = self.client.gc_status()
1034 self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
1035
1036 # Second hash is still there; mark doesn't delete hashes
1037 self.assertClientGetHash(self.client, taskhash2, unihash2)
1038
1039 # Switch to a different mark and mark the second hash. This will start
1040 # a new collection cycle
1041 ret = self.client.gc_mark("DEF", {"unihash": unihash2, "method": self.METHOD})
1042 self.assertEqual(ret, {"count": 1})
1043
1044 ret = self.client.gc_status()
1045 self.assertEqual(ret, {"mark": "DEF", "keep": 1, "remove": 1})
1046
1047 # Both hashes are still present
1048 self.assertClientGetHash(self.client, taskhash2, unihash2)
1049 self.assertClientGetHash(self.client, taskhash, unihash)
1050
1051 # Sweep with the new mark
1052 ret = self.client.gc_sweep("DEF")
1053 self.assertEqual(ret, {"count": 1})
1054
1055 # First hash is gone, second is kept
1056 self.assertClientGetHash(self.client, taskhash2, unihash2)
1057 self.assertClientGetHash(self.client, taskhash, None)
1058
1059 def test_gc_switch_sweep_mark(self):
1060 taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
1061 outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
1062 unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
1063
1064 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
1065 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
1066
1067 taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
1068 outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
1069 unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
1070
1071 result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
1072 self.assertClientGetHash(self.client, taskhash2, unihash2)
1073
1074 # Mark the first unihash to be kept
1075 ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
1076 self.assertEqual(ret, {"count": 1})
1077
1078 ret = self.client.gc_status()
1079 self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
1080
1081 # Sweeping with a different mark raises an error
1082 with self.assertRaises(InvokeError):
1083 self.client.gc_sweep("DEF")
1084
1085 # Both hashes are present
1086 self.assertClientGetHash(self.client, taskhash2, unihash2)
1087 self.assertClientGetHash(self.client, taskhash, unihash)
1088
1089 def test_gc_new_hashes(self):
1090 taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
1091 outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
1092 unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
1093
1094 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
1095 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
1096
1097 # Start a new garbage collection
1098 ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
1099 self.assertEqual(ret, {"count": 1})
1100
1101 ret = self.client.gc_status()
1102 self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 0})
1103
1104 # Add second hash. It should inherit the mark from the current garbage
1105 # collection operation
1106
1107 taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
1108 outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
1109 unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
1110
1111 result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
1112 self.assertClientGetHash(self.client, taskhash2, unihash2)
1113
1114 # Sweep should remove nothing
1115 ret = self.client.gc_sweep("ABC")
1116 self.assertEqual(ret, {"count": 0})
1117
1118 # Both hashes are present
1119 self.assertClientGetHash(self.client, taskhash2, unihash2)
1120 self.assertClientGetHash(self.client, taskhash, unihash)
1121
1122
1123 class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase):
1124 def get_server_addr(self, server_idx):
1125 return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
1126
1127 def test_get(self):
1128 taskhash, outhash, unihash = self.create_test_hash(self.client)
1129
1130 p = self.run_hashclient(["--address", self.server_address, "get", self.METHOD, taskhash])
1131 data = json.loads(p.stdout)
1132 self.assertEqual(data["unihash"], unihash)
1133 self.assertEqual(data["outhash"], outhash)
1134 self.assertEqual(data["taskhash"], taskhash)
1135 self.assertEqual(data["method"], self.METHOD)
1136
1137 def test_get_outhash(self):
1138 taskhash, outhash, unihash = self.create_test_hash(self.client)
1139
1140 p = self.run_hashclient(["--address", self.server_address, "get-outhash", self.METHOD, outhash, taskhash])
1141 data = json.loads(p.stdout)
1142 self.assertEqual(data["unihash"], unihash)
1143 self.assertEqual(data["outhash"], outhash)
1144 self.assertEqual(data["taskhash"], taskhash)
1145 self.assertEqual(data["method"], self.METHOD)
1146
1147 def test_stats(self):
1148 p = self.run_hashclient(["--address", self.server_address, "stats"], check=True)
1149 json.loads(p.stdout)
1150
1151 def test_stress(self):
1152 self.run_hashclient(["--address", self.server_address, "stress"], check=True)
1153
1154 def test_unihash_exists(self):
1155 taskhash, outhash, unihash = self.create_test_hash(self.client)
1156
1157 p = self.run_hashclient([
1158 "--address", self.server_address,
1159 "unihash-exists", unihash,
1160 ], check=True)
1161 self.assertEqual(p.stdout.strip(), "true")
1162
1163 p = self.run_hashclient([
1164 "--address", self.server_address,
1165 "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
1166 ], check=True)
1167 self.assertEqual(p.stdout.strip(), "false")
1168
1169 def test_unihash_exists_quiet(self):
1170 taskhash, outhash, unihash = self.create_test_hash(self.client)
1171
1172 p = self.run_hashclient([
1173 "--address", self.server_address,
1174 "unihash-exists", unihash,
1175 "--quiet",
1176 ])
1177 self.assertEqual(p.returncode, 0)
1178 self.assertEqual(p.stdout.strip(), "")
1179
1180 p = self.run_hashclient([
1181 "--address", self.server_address,
1182 "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
1183 "--quiet",
1184 ])
1185 self.assertEqual(p.returncode, 1)
1186 self.assertEqual(p.stdout.strip(), "")
1187
1188 def test_remove_taskhash(self):
1189 taskhash, outhash, unihash = self.create_test_hash(self.client)
1190 self.run_hashclient([
1191 "--address", self.server_address,
1192 "remove",
1193 "--where", "taskhash", taskhash,
1194 ], check=True)
1195 self.assertClientGetHash(self.client, taskhash, None)
1196
1197 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
1198 self.assertIsNone(result_outhash)
1199
1200 def test_remove_unihash(self):
1201 taskhash, outhash, unihash = self.create_test_hash(self.client)
1202 self.run_hashclient([
1203 "--address", self.server_address,
1204 "remove",
1205 "--where", "unihash", unihash,
1206 ], check=True)
1207 self.assertClientGetHash(self.client, taskhash, None)
1208
1209 def test_remove_outhash(self):
1210 taskhash, outhash, unihash = self.create_test_hash(self.client)
1211 self.run_hashclient([
1212 "--address", self.server_address,
1213 "remove",
1214 "--where", "outhash", outhash,
1215 ], check=True)
1216
1217 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
1218 self.assertIsNone(result_outhash)
1219
1220 def test_remove_method(self):
1221 taskhash, outhash, unihash = self.create_test_hash(self.client)
1222 self.run_hashclient([
1223 "--address", self.server_address,
1224 "remove",
1225 "--where", "method", self.METHOD,
1226 ], check=True)
1227 self.assertClientGetHash(self.client, taskhash, None)
1228
1229 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
1230 self.assertIsNone(result_outhash)
1231
1232 def test_clean_unused(self):
1233 taskhash, outhash, unihash = self.create_test_hash(self.client)
1234
1235 # Clean the database, which should not remove anything because all hashes are in use
1236 self.run_hashclient([
1237 "--address", self.server_address,
1238 "clean-unused", "0",
1239 ], check=True)
1240 self.assertClientGetHash(self.client, taskhash, unihash)
1241
1242 # Remove the unihash. The row in the outhash table should still be present
1243 self.run_hashclient([
1244 "--address", self.server_address,
1245 "remove",
1246 "--where", "unihash", unihash,
1247 ], check=True)
1248 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
1249 self.assertIsNotNone(result_outhash)
1250
1251 # Now clean with no minimum age which will remove the outhash
1252 self.run_hashclient([
1253 "--address", self.server_address,
1254 "clean-unused", "0",
1255 ], check=True)
1256 result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
1257 self.assertIsNone(result_outhash)
1258
1259 def test_refresh_token(self):
1260 admin_client = self.start_auth_server()
1261
1262 user = admin_client.new_user("test-user", ["@read", "@report"])
1263
1264 p = self.run_hashclient([
1265 "--address", self.auth_server_address,
1266 "--login", user["username"],
1267 "--password", user["token"],
1268 "refresh-token"
1269 ], check=True)
1270
1271 new_token = None
1272 for l in p.stdout.splitlines():
1273 l = l.rstrip()
1274 m = re.match(r'Token: +(.*)$', l)
1275 if m is not None:
1276 new_token = m.group(1)
1277
1278 self.assertTrue(new_token)
1279
1280 print("New token is %r" % new_token)
1281
1282 self.run_hashclient([
1283 "--address", self.auth_server_address,
1284 "--login", user["username"],
1285 "--password", new_token,
1286 "get-user"
1287 ], check=True)
1288
1289 def test_set_user_perms(self):
1290 admin_client = self.start_auth_server()
1291
1292 user = admin_client.new_user("test-user", ["@read"])
1293
1294 self.run_hashclient([
1295 "--address", self.auth_server_address,
1296 "--login", admin_client.username,
1297 "--password", admin_client.password,
1298 "set-user-perms",
1299 "-u", user["username"],
1300 "@read", "@report",
1301 ], check=True)
1302
1303 new_user = admin_client.get_user(user["username"])
1304
1305 self.assertEqual(set(new_user["permissions"]), {"@read", "@report"})
1306
1307 def test_get_user(self):
1308 admin_client = self.start_auth_server()
1309
1310 user = admin_client.new_user("test-user", ["@read"])
1311
1312 p = self.run_hashclient([
1313 "--address", self.auth_server_address,
1314 "--login", admin_client.username,
1315 "--password", admin_client.password,
1316 "get-user",
1317 "-u", user["username"],
1318 ], check=True)
1319
1320 self.assertIn("Username:", p.stdout)
1321 self.assertIn("Permissions:", p.stdout)
1322
1323 p = self.run_hashclient([
1324 "--address", self.auth_server_address,
1325 "--login", user["username"],
1326 "--password", user["token"],
1327 "get-user",
1328 ], check=True)
1329
1330 self.assertIn("Username:", p.stdout)
1331 self.assertIn("Permissions:", p.stdout)
1332
1333 def test_get_all_users(self):
1334 admin_client = self.start_auth_server()
1335
1336 admin_client.new_user("test-user1", ["@read"])
1337 admin_client.new_user("test-user2", ["@read"])
1338
1339 p = self.run_hashclient([
1340 "--address", self.auth_server_address,
1341 "--login", admin_client.username,
1342 "--password", admin_client.password,
1343 "get-all-users",
1344 ], check=True)
1345
1346 self.assertIn("admin", p.stdout)
1347 self.assertIn("test-user1", p.stdout)
1348 self.assertIn("test-user2", p.stdout)
1349
1350 def test_new_user(self):
1351 admin_client = self.start_auth_server()
1352
1353 p = self.run_hashclient([
1354 "--address", self.auth_server_address,
1355 "--login", admin_client.username,
1356 "--password", admin_client.password,
1357 "new-user",
1358 "-u", "test-user",
1359 "@read", "@report",
1360 ], check=True)
1361
1362 new_token = None
1363 for l in p.stdout.splitlines():
1364 l = l.rstrip()
1365 m = re.match(r'Token: +(.*)$', l)
1366 if m is not None:
1367 new_token = m.group(1)
1368
1369 self.assertTrue(new_token)
1370
1371 user = {
1372 "username": "test-user",
1373 "token": new_token,
1374 }
1375
1376 self.assertUserPerms(user, ["@read", "@report"])
1377
1378 def test_delete_user(self):
1379 admin_client = self.start_auth_server()
1380
1381 user = admin_client.new_user("test-user", ["@read"])
1382
1383 p = self.run_hashclient([
1384 "--address", self.auth_server_address,
1385 "--login", admin_client.username,
1386 "--password", admin_client.password,
1387 "delete-user",
1388 "-u", user["username"],
1389 ], check=True)
1390
1391 self.assertIsNone(admin_client.get_user(user["username"]))
1392
1393 def test_get_db_usage(self):
1394 p = self.run_hashclient([
1395 "--address", self.server_address,
1396 "get-db-usage",
1397 ], check=True)
1398
1399 def test_get_db_query_columns(self):
1400 p = self.run_hashclient([
1401 "--address", self.server_address,
1402 "get-db-query-columns",
1403 ], check=True)
1404
1405 def test_gc(self):
1406 taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
1407 outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
1408 unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
1409
1410 result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
1411 self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
1412
1413 taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
1414 outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
1415 unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
1416
1417 result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
1418 self.assertClientGetHash(self.client, taskhash2, unihash2)
1419
1420 # Mark the first unihash to be kept
1421 self.run_hashclient([
1422 "--address", self.server_address,
1423 "gc-mark", "ABC",
1424 "--where", "unihash", unihash,
1425 "--where", "method", self.METHOD
1426 ], check=True)
1427
1428 # Second hash is still there; mark doesn't delete hashes
1429 self.assertClientGetHash(self.client, taskhash2, unihash2)
1430
1431 self.run_hashclient([
1432 "--address", self.server_address,
1433 "gc-sweep", "ABC",
1434 ], check=True)
1435
1436 # Second hash is gone; no unihash is returned for its taskhash
284 self.assertClientGetHash(self.client, taskhash2, None) 1437 self.assertClientGetHash(self.client, taskhash2, None)
1438 # First hash is still present
1439 self.assertClientGetHash(self.client, taskhash, unihash)
285 1440
286 1441
287 class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase): 1442 class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
@@ -314,3 +1469,77 @@ class TestHashEquivalenceTCPServer(HashEquivalenceTestSetup, HashEquivalenceComm
314 # If IPv6 is enabled, it should be safe to use localhost directly, in general 1469 # If IPv6 is enabled, it should be safe to use localhost directly, in general
315 # case it is more reliable to resolve the IP address explicitly. 1470 # case it is more reliable to resolve the IP address explicitly.
316 return socket.gethostbyname("localhost") + ":0" 1471 return socket.gethostbyname("localhost") + ":0"
1472
1473
1474 class TestHashEquivalenceWebsocketServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
1475 def setUp(self):
1476 try:
1477 import websockets
1478 except ImportError as e:
1479 self.skipTest(str(e))
1480
1481 super().setUp()
1482
1483 def get_server_addr(self, server_idx):
1484 # Some hosts cause asyncio module to misbehave, when IPv6 is not enabled.
1485 # If IPv6 is enabled, it should be safe to use localhost directly, in general
1486 # case it is more reliable to resolve the IP address explicitly.
1487 host = socket.gethostbyname("localhost")
1488 return "ws://%s:0" % host
1489
1490
1491 class TestHashEquivalenceWebsocketsSQLAlchemyServer(TestHashEquivalenceWebsocketServer):
1492 def setUp(self):
1493 try:
1494 import sqlalchemy
1495 import aiosqlite
1496 except ImportError as e:
1497 self.skipTest(str(e))
1498
1499 super().setUp()
1500
1501 def make_dbpath(self):
1502 return "sqlite+aiosqlite:///%s" % os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
1503
1504
1505 class TestHashEquivalenceExternalServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
1506 def get_env(self, name):
1507 v = os.environ.get(name)
1508 if not v:
1509 self.skipTest(f'{name} not defined to test an external server')
1510 return v
1511
1512 def start_test_server(self):
1513 return self.get_env('BB_TEST_HASHSERV')
1514
1515 def start_server(self, *args, **kwargs):
1516 self.skipTest('Cannot start local server when testing external servers')
1517
1518 def start_auth_server(self):
1519
1520 self.auth_server_address = self.server_address
1521 self.admin_client = self.start_client(
1522 self.server_address,
1523 username=self.get_env('BB_TEST_HASHSERV_USERNAME'),
1524 password=self.get_env('BB_TEST_HASHSERV_PASSWORD'),
1525 )
1526 return self.admin_client
1527
1528 def setUp(self):
1529 super().setUp()
1530 if "BB_TEST_HASHSERV_USERNAME" in os.environ:
1531 self.client = self.start_client(
1532 self.server_address,
1533 username=os.environ["BB_TEST_HASHSERV_USERNAME"],
1534 password=os.environ["BB_TEST_HASHSERV_PASSWORD"],
1535 )
1536 self.client.remove({"method": self.METHOD})
1537
1538 def tearDown(self):
1539 self.client.remove({"method": self.METHOD})
1540 super().tearDown()
1541
1542
1543 def test_auth_get_all_users(self):
1544 self.skipTest("Cannot test all users with external server")
1545
diff --git a/bitbake/lib/layerindexlib/__init__.py b/bitbake/lib/layerindexlib/__init__.py
index 9ca127b9df..c3265ddaa1 100644
--- a/bitbake/lib/layerindexlib/__init__.py
+++ b/bitbake/lib/layerindexlib/__init__.py
@@ -6,7 +6,6 @@
6 import datetime 6 import datetime
7 7
8 import logging 8 import logging
9 import imp
10 import os 9 import os
11 10
12 from collections import OrderedDict 11 from collections import OrderedDict
@@ -179,9 +178,9 @@ class LayerIndex():
179 '''Load the layerindex. 178 '''Load the layerindex.
180 179
181 indexURI - An index to load. (Use multiple calls to load multiple indexes) 180 indexURI - An index to load. (Use multiple calls to load multiple indexes)
182 181
183 reload - If reload is True, then any previously loaded indexes will be forgotten. 182 reload - If reload is True, then any previously loaded indexes will be forgotten.
184 183
185 load - List of elements to load. Default loads all items. 184 load - List of elements to load. Default loads all items.
186 Note: plugins may ignore this. 185 Note: plugins may ignore this.
187 186
@@ -199,7 +198,7 @@ The format of the indexURI:
199 198
200 For example: 199 For example:
201 200
202 http://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index 201 https://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index
203 cooker:// 202 cooker://
204 ''' 203 '''
205 if reload: 204 if reload:
@@ -384,7 +383,14 @@ layerBranches set. If not, they are effectively blank.'''
384 383
385 # Get a list of dependencies and then recursively process them 384 # Get a list of dependencies and then recursively process them
386 for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]: 385 for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]:
387 deplayerbranch = layerdependency.dependency_layerBranch 386 try:
387 deplayerbranch = layerdependency.dependency_layerBranch
388 except AttributeError as e:
389 logger.error('LayerBranch does not exist for dependent layer {}:{}\n' \
390 ' Cannot continue successfully.\n' \
391 ' You might be able to resolve this by checking out the layer locally.\n' \
392 ' Consider reaching out to the layer maintainers or the layerindex admins' \
393 .format(layerdependency.dependency.name, layerbranch.branch.name))
388 394
389 if ignores and deplayerbranch.layer.name in ignores: 395 if ignores and deplayerbranch.layer.name in ignores:
390 continue 396 continue
@@ -577,7 +583,7 @@ This function is used to implement debugging and provide the user info.
577# index['config'] - configuration data for this index 583# index['config'] - configuration data for this index
578# index['branches'] - dictionary of Branch objects, by id number 584# index['branches'] - dictionary of Branch objects, by id number
579# index['layerItems'] - dictionary of layerItem objects, by id number 585# index['layerItems'] - dictionary of layerItem objects, by id number
580# ...etc... (See: http://layers.openembedded.org/layerindex/api/) 586# ...etc... (See: https://layers.openembedded.org/layerindex/api/)
581# 587#
582# The class needs to manage the 'index' entries and allow easily adding 588# The class needs to manage the 'index' entries and allow easily adding
583# of new items, as well as simply loading of the items. 589# of new items, as well as simply loading of the items.
@@ -847,7 +853,7 @@ class LayerIndexObj():
847 continue 853 continue
848 854
849 for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]: 855 for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]:
850 deplayerbranch = layerdependency.dependency_layerBranch 856 deplayerbranch = layerdependency.dependency_layerBranch or None
851 857
852 if ignores and deplayerbranch.layer.name in ignores: 858 if ignores and deplayerbranch.layer.name in ignores:
853 continue 859 continue
@@ -1279,7 +1285,7 @@ class Recipe(LayerIndexItemObj_LayerBranch):
1279 filename, filepath, pn, pv, layerbranch, 1285 filename, filepath, pn, pv, layerbranch,
1280 summary="", description="", section="", license="", 1286 summary="", description="", section="", license="",
1281 homepage="", bugtracker="", provides="", bbclassextend="", 1287 homepage="", bugtracker="", provides="", bbclassextend="",
1282 inherits="", blacklisted="", updated=None): 1288 inherits="", disallowed="", updated=None):
1283 self.id = id 1289 self.id = id
1284 self.filename = filename 1290 self.filename = filename
1285 self.filepath = filepath 1291 self.filepath = filepath
@@ -1295,7 +1301,7 @@ class Recipe(LayerIndexItemObj_LayerBranch):
1295 self.bbclassextend = bbclassextend 1301 self.bbclassextend = bbclassextend
1296 self.inherits = inherits 1302 self.inherits = inherits
1297 self.updated = updated or datetime.datetime.today().isoformat() 1303 self.updated = updated or datetime.datetime.today().isoformat()
1298 self.blacklisted = blacklisted 1304 self.disallowed = disallowed
1299 if isinstance(layerbranch, LayerBranch): 1305 if isinstance(layerbranch, LayerBranch):
1300 self.layerbranch = layerbranch 1306 self.layerbranch = layerbranch
1301 else: 1307 else:
diff --git a/bitbake/lib/layerindexlib/cooker.py b/bitbake/lib/layerindexlib/cooker.py
index 2de6e5faa0..ced3e06360 100644
--- a/bitbake/lib/layerindexlib/cooker.py
+++ b/bitbake/lib/layerindexlib/cooker.py
@@ -279,7 +279,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin):
279 summary=pn, description=pn, section='?', 279 summary=pn, description=pn, section='?',
280 license='?', homepage='?', bugtracker='?', 280 license='?', homepage='?', bugtracker='?',
281 provides='?', bbclassextend='?', inherits='?', 281 provides='?', bbclassextend='?', inherits='?',
282 blacklisted='?', layerbranch=depBranchId) 282 disallowed='?', layerbranch=depBranchId)
283 283
284 index = addElement("recipes", [recipe], index) 284 index = addElement("recipes", [recipe], index)
285 285
diff --git a/bitbake/lib/layerindexlib/restapi.py b/bitbake/lib/layerindexlib/restapi.py
index 26a1c9674e..81d99b02ea 100644
--- a/bitbake/lib/layerindexlib/restapi.py
+++ b/bitbake/lib/layerindexlib/restapi.py
@@ -31,7 +31,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
31 The return value is a LayerIndexObj. 31 The return value is a LayerIndexObj.
32 32
33 url is the url to the rest api of the layer index, such as: 33 url is the url to the rest api of the layer index, such as:
34 http://layers.openembedded.org/layerindex/api/ 34 https://layers.openembedded.org/layerindex/api/
35 35
36 Or a local file... 36 Or a local file...
37 """ 37 """
@@ -138,7 +138,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
138 The return value is a LayerIndexObj. 138 The return value is a LayerIndexObj.
139 139
140 ud is the parsed url to the rest api of the layer index, such as: 140 ud is the parsed url to the rest api of the layer index, such as:
141 http://layers.openembedded.org/layerindex/api/ 141 https://layers.openembedded.org/layerindex/api/
142 """ 142 """
143 143
144 def _get_json_response(apiurl=None, username=None, password=None, retry=True): 144 def _get_json_response(apiurl=None, username=None, password=None, retry=True):
diff --git a/bitbake/lib/layerindexlib/tests/restapi.py b/bitbake/lib/layerindexlib/tests/restapi.py
index 33b5c1c4c8..71f0ae8a9d 100644
--- a/bitbake/lib/layerindexlib/tests/restapi.py
+++ b/bitbake/lib/layerindexlib/tests/restapi.py
@@ -22,7 +22,7 @@ class LayerIndexWebRestApiTest(LayersTest):
22 self.assertFalse(os.environ.get("BB_SKIP_NETTESTS") == "yes", msg="BB_SKIP_NETTESTS set, but we tried to test anyway") 22 self.assertFalse(os.environ.get("BB_SKIP_NETTESTS") == "yes", msg="BB_SKIP_NETTESTS set, but we tried to test anyway")
23 LayersTest.setUp(self) 23 LayersTest.setUp(self)
24 self.layerindex = layerindexlib.LayerIndex(self.d) 24 self.layerindex = layerindexlib.LayerIndex(self.d)
25 self.layerindex.load_layerindex('http://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies']) 25 self.layerindex.load_layerindex('https://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies'])
26 26
27 @skipIfNoNetwork() 27 @skipIfNoNetwork()
28 def test_layerindex_is_empty(self): 28 def test_layerindex_is_empty(self):
diff --git a/bitbake/lib/ply/yacc.py b/bitbake/lib/ply/yacc.py
index 46e7dc96f6..529f85b081 100644
--- a/bitbake/lib/ply/yacc.py
+++ b/bitbake/lib/ply/yacc.py
@@ -1122,7 +1122,6 @@ class LRParser:
1122# manipulate the rules that make up a grammar. 1122# manipulate the rules that make up a grammar.
1123# ----------------------------------------------------------------------------- 1123# -----------------------------------------------------------------------------
1124 1124
1125import re
1126 1125
1127# regex matching identifiers 1126# regex matching identifiers
1128_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$') 1127_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
@@ -2797,11 +2796,15 @@ class ParserReflect(object):
2797 # Compute a signature over the grammar 2796 # Compute a signature over the grammar
2798 def signature(self): 2797 def signature(self):
2799 try: 2798 try:
2800 from hashlib import md5 2799 import hashlib
2801 except ImportError: 2800 except ImportError:
2802 from md5 import md5 2801 raise RuntimeError("Unable to import hashlib")
2802 try:
2803 sig = hashlib.new('MD5', usedforsecurity=False)
2804 except TypeError:
2805 # Some configurations don't appear to support two arguments
2806 sig = hashlib.new('MD5')
2803 try: 2807 try:
2804 sig = md5()
2805 if self.start: 2808 if self.start:
2806 sig.update(self.start.encode('latin-1')) 2809 sig.update(self.start.encode('latin-1'))
2807 if self.prec: 2810 if self.prec:
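The rewritten signature() keeps MD5 as a cheap grammar fingerprint while coping with hardened interpreters: it first asks hashlib for an MD5 object with usedforsecurity=False and falls back to the plain call where the keyword raises TypeError. A minimal sketch of the same probe, with an illustrative helper name:

    import hashlib

    def grammar_fingerprint(start):
        # usedforsecurity=False tells FIPS-constrained builds this MD5 use is
        # not cryptographic; builds that reject the keyword raise TypeError.
        try:
            sig = hashlib.new("md5", usedforsecurity=False)
        except TypeError:
            sig = hashlib.new("md5")
        if start:
            sig.update(start.encode("latin-1"))
        return sig.hexdigest()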
diff --git a/bitbake/lib/progressbar/progressbar.py b/bitbake/lib/progressbar/progressbar.py
index e2b6ba1083..d4da10ab75 100644
--- a/bitbake/lib/progressbar/progressbar.py
+++ b/bitbake/lib/progressbar/progressbar.py
@@ -253,7 +253,7 @@ class ProgressBar(object):
253 if (self.maxval is not UnknownLength 253 if (self.maxval is not UnknownLength
254 and not 0 <= value <= self.maxval): 254 and not 0 <= value <= self.maxval):
255 255
256 raise ValueError('Value out of range') 256 self.maxval = value
257 257
258 self.currval = value 258 self.currval = value
259 259
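With this change an out-of-range update() no longer aborts with ValueError; the bar instead stretches maxval to the observed value, which tolerates totals that were only estimates. A sketch of the new bounds handling (the UnknownLength guard is elided for brevity):

    class Bar:
        # Illustrative stand-in for the patched ProgressBar.update() logic.
        def __init__(self, maxval):
            self.maxval = maxval
            self.currval = 0

        def update(self, value):
            if not 0 <= value <= self.maxval:
                self.maxval = value  # previously: raise ValueError('Value out of range')
            self.currval = value

    bar = Bar(10)
    bar.update(15)           # no exception; maxval grows to 15
    assert bar.maxval == 15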
diff --git a/bitbake/lib/prserv/__init__.py b/bitbake/lib/prserv/__init__.py
index 9961040b58..ffc5a40a28 100644
--- a/bitbake/lib/prserv/__init__.py
+++ b/bitbake/lib/prserv/__init__.py
@@ -1,18 +1,95 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5__version__ = "1.0.0"
6 7
7import os, time 8__version__ = "2.0.0"
8import sys,logging 9
10import logging
11logger = logging.getLogger("BitBake.PRserv")
12
13from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS
14
15def create_server(addr, dbpath, upstream=None, read_only=False):
16 from . import serv
17
18 s = serv.PRServer(dbpath, upstream=upstream, read_only=read_only)
19 host, port = addr.split(":")
20 s.start_tcp_server(host, int(port))
21
22 return s
23
24def increase_revision(ver):
25 """Take a revision string such as "1" or "1.2.3" or even a number and increase its last number
26 This fails if the last number is not an integer"""
27
28 fields=str(ver).split('.')
29 last = fields[-1]
30
31 try:
32 val = int(last)
33 except Exception as e:
34 logger.critical("Unable to increase revision value %s: %s" % (ver, e))
35 raise e
36
37 return ".".join(fields[0:-1] + [ str(val + 1) ])
38
39def _revision_greater_or_equal(rev1, rev2):
40 """Compares x.y.z revision numbers, using integer comparison
41 Returns True if rev1 is greater or equal to rev2"""
42
43 fields1 = rev1.split(".")
44 fields2 = rev2.split(".")
45 l1 = len(fields1)
46 l2 = len(fields2)
47
48 for i in range(l1):
49 val1 = int(fields1[i])
50 if i < l2:
51 val2 = int(fields2[i])
52 if val2 < val1:
53 return True
54 elif val2 > val1:
55 return False
56 else:
57 return True
58 return True
59
60def revision_smaller(rev1, rev2):
61 """Compares x.y.z revision numbers, using integer comparison
62 Returns True if rev1 is strictly smaller than rev2"""
63 return not(_revision_greater_or_equal(rev1, rev2))
64
65def revision_greater(rev1, rev2):
66 """Compares x.y.z revision numbers, using integer comparison
67 Returns True if rev1 is strictly greater than rev2"""
68 return _revision_greater_or_equal(rev1, rev2) and (rev1 != rev2)
69
70def create_client(addr):
71 from . import client
72
73 c = client.PRClient()
74
75 try:
76 (typ, a) = parse_address(addr)
77 c.connect_tcp(*a)
78 return c
79 except Exception as e:
80 c.close()
81 raise e
82
83async def create_async_client(addr):
84 from . import client
85
86 c = client.PRAsyncClient()
9 87
10def init_logger(logfile, loglevel): 88 try:
11 numeric_level = getattr(logging, loglevel.upper(), None) 89 (typ, a) = parse_address(addr)
12 if not isinstance(numeric_level, int): 90 await c.connect_tcp(*a)
13 raise ValueError('Invalid log level: %s' % loglevel) 91 return c
14 FORMAT = '%(asctime)-15s %(message)s'
15 logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
16 92
17class NotFoundError(Exception): 93 except Exception as e:
18 pass 94 await c.close()
95 raise e
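The module now carries the dotted-revision arithmetic the 2.0.0 PR server builds on: increase_revision() bumps the last integer field, and the comparison helpers order revisions field-by-field as integers rather than as strings. A usage sketch, assuming bitbake/lib is on sys.path so the prserv package imports:

    from prserv import increase_revision, revision_greater, revision_smaller

    assert increase_revision("5") == "6"
    assert increase_revision("1.2.3") == "1.2.4"
    assert revision_greater("1.10", "1.2")   # integer compare: 10 > 2
    assert revision_smaller("1.3", "1.20")   # string comparison would say otherwise
    # increase_revision("1.x") raises ValueError: the last field must be an integer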
diff --git a/bitbake/lib/prserv/client.py b/bitbake/lib/prserv/client.py
new file mode 100644
index 0000000000..9f5794c433
--- /dev/null
+++ b/bitbake/lib/prserv/client.py
@@ -0,0 +1,72 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import logging
8import bb.asyncrpc
9from . import create_async_client
10
11logger = logging.getLogger("BitBake.PRserv")
12
13class PRAsyncClient(bb.asyncrpc.AsyncClient):
14 def __init__(self):
15 super().__init__("PRSERVICE", "1.0", logger)
16
17 async def getPR(self, version, pkgarch, checksum, history=False):
18 response = await self.invoke(
19 {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}}
20 )
21 if response:
22 return response["value"]
23
24 async def test_pr(self, version, pkgarch, checksum, history=False):
25 response = await self.invoke(
26 {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}}
27 )
28 if response:
29 return response["value"]
30
31 async def test_package(self, version, pkgarch):
32 response = await self.invoke(
33 {"test-package": {"version": version, "pkgarch": pkgarch}}
34 )
35 if response:
36 return response["value"]
37
38 async def max_package_pr(self, version, pkgarch):
39 response = await self.invoke(
40 {"max-package-pr": {"version": version, "pkgarch": pkgarch}}
41 )
42 if response:
43 return response["value"]
44
45 async def importone(self, version, pkgarch, checksum, value):
46 response = await self.invoke(
47 {"import-one": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "value": value}}
48 )
49 if response:
50 return response["value"]
51
52 async def export(self, version, pkgarch, checksum, colinfo, history=False):
53 response = await self.invoke(
54 {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo, "history": history}}
55 )
56 if response:
57 return (response["metainfo"], response["datainfo"])
58
59 async def is_readonly(self):
60 response = await self.invoke(
61 {"is-readonly": {}}
62 )
63 if response:
64 return response["readonly"]
65
66class PRClient(bb.asyncrpc.Client):
67 def __init__(self):
68 super().__init__()
69 self._add_methods("getPR", "test_pr", "test_package", "max_package_pr", "importone", "export", "is_readonly")
70
71 def _get_async_client(self):
72 return PRAsyncClient()
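Each client method is a thin wrapper that sends one {"command": {params}} message through bb.asyncrpc and unwraps the reply; the synchronous PRClient simply proxies those coroutines via _add_methods(). A connection sketch, assuming a PR server is already listening (the host, port and checksum below are illustrative):

    from prserv.client import PRClient

    conn = PRClient()
    conn.connect_tcp("localhost", 8585)
    try:
        # Sends {"get-pr": {...}} and returns the "value" field of the reply.
        value = conn.getPR("dummy-1.0-r0", "core2-64", "0" * 64)
        print("assigned PR:", value)
    finally:
        conn.close()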
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py
index cb2a2461e0..2da493ddf5 100644
--- a/bitbake/lib/prserv/db.py
+++ b/bitbake/lib/prserv/db.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -6,19 +8,13 @@ import logging
6import os.path 8import os.path
7import errno 9import errno
8import prserv 10import prserv
9import time 11import sqlite3
10 12
11try: 13from contextlib import closing
12 import sqlite3 14from . import increase_revision, revision_greater, revision_smaller
13except ImportError:
14 from pysqlite2 import dbapi2 as sqlite3
15 15
16logger = logging.getLogger("BitBake.PRserv") 16logger = logging.getLogger("BitBake.PRserv")
17 17
18sqlversion = sqlite3.sqlite_version_info
19if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
20 raise Exception("sqlite3 version 3.3.0 or later is required.")
21
22# 18#
23# "No History" mode - for a given query tuple (version, pkgarch, checksum), 19# "No History" mode - for a given query tuple (version, pkgarch, checksum),
24# the returned value will be the largest among all the values of the same 20# the returned value will be the largest among all the values of the same
@@ -27,212 +23,232 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
27# "History" mode - Return a new higher value for previously unseen query 23# "History" mode - Return a new higher value for previously unseen query
28# tuple (version, pkgarch, checksum), otherwise return historical value. 24# tuple (version, pkgarch, checksum), otherwise return historical value.
29# Value can decrement if returning to a previous build. 25# Value can decrement if returning to a previous build.
30#
31 26
32class PRTable(object): 27class PRTable(object):
33 def __init__(self, conn, table, nohist): 28 def __init__(self, conn, table, read_only):
34 self.conn = conn 29 self.conn = conn
35 self.nohist = nohist 30 self.read_only = read_only
36 self.dirty = False 31 self.table = table
37 if nohist: 32
38 self.table = "%s_nohist" % table 33 # Creating the table even if the server is read-only.
39 else: 34 # This avoids a race condition if a shared database
40 self.table = "%s_hist" % table 35 # is accessed by a read-only server first.
41 36
42 self._execute("CREATE TABLE IF NOT EXISTS %s \ 37 with closing(self.conn.cursor()) as cursor:
43 (version TEXT NOT NULL, \ 38 cursor.execute("CREATE TABLE IF NOT EXISTS %s \
44 pkgarch TEXT NOT NULL, \ 39 (version TEXT NOT NULL, \
45 checksum TEXT NOT NULL, \ 40 pkgarch TEXT NOT NULL, \
46 value INTEGER, \ 41 checksum TEXT NOT NULL, \
47 PRIMARY KEY (version, pkgarch, checksum));" % self.table) 42 value TEXT, \
48 43 PRIMARY KEY (version, pkgarch, checksum, value));" % self.table)
49 def _execute(self, *query): 44 self.conn.commit()
50 """Execute a query, waiting to acquire a lock if necessary""" 45
51 start = time.time() 46 def _extremum_value(self, rows, is_max):
52 end = start + 20 47 value = None
53 while True: 48
54 try: 49 for row in rows:
55 return self.conn.execute(*query) 50 current_value = row[0]
56 except sqlite3.OperationalError as exc: 51 if value is None:
57 if 'is locked' in str(exc) and end > time.time(): 52 value = current_value
58 continue 53 else:
59 raise exc 54 if is_max:
60 55 is_new_extremum = revision_greater(current_value, value)
61 def sync(self): 56 else:
62 self.conn.commit() 57 is_new_extremum = revision_smaller(current_value, value)
63 self._execute("BEGIN EXCLUSIVE TRANSACTION") 58 if is_new_extremum:
64 59 value = current_value
65 def sync_if_dirty(self): 60 return value
66 if self.dirty: 61
67 self.sync() 62 def _max_value(self, rows):
68 self.dirty = False 63 return self._extremum_value(rows, True)
69 64
70 def _getValueHist(self, version, pkgarch, checksum): 65 def _min_value(self, rows):
71 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, 66 return self._extremum_value(rows, False)
72 (version, pkgarch, checksum)) 67
73 row=data.fetchone() 68 def test_package(self, version, pkgarch):
74 if row is not None: 69 """Returns whether the specified package version is found in the database for the specified architecture"""
75 return row[0] 70
76 else: 71 # Just returns the value if found or None otherwise
77 #no value found, try to insert 72 with closing(self.conn.cursor()) as cursor:
78 try: 73 data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table,
79 self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));" 74 (version, pkgarch))
80 % (self.table,self.table),
81 (version,pkgarch, checksum,version, pkgarch))
82 except sqlite3.IntegrityError as exc:
83 logger.error(str(exc))
84
85 self.dirty = True
86
87 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
88 (version, pkgarch, checksum))
89 row=data.fetchone() 75 row=data.fetchone()
90 if row is not None: 76 if row is not None:
91 return row[0] 77 return True
92 else: 78 else:
93 raise prserv.NotFoundError 79 return False
94 80
95 def _getValueNohist(self, version, pkgarch, checksum): 81 def test_checksum_value(self, version, pkgarch, checksum, value):
96 data=self._execute("SELECT value FROM %s \ 82 """Returns whether the specified value is found in the database for the specified package, architecture and checksum"""
97 WHERE version=? AND pkgarch=? AND checksum=? AND \ 83
98 value >= (select max(value) from %s where version=? AND pkgarch=?);" 84 with closing(self.conn.cursor()) as cursor:
99 % (self.table, self.table), 85 data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and checksum=? and value=?;" % self.table,
100 (version, pkgarch, checksum, version, pkgarch)) 86 (version, pkgarch, checksum, value))
101 row=data.fetchone()
102 if row is not None:
103 return row[0]
104 else:
105 #no value found, try to insert
106 try:
107 self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
108 % (self.table,self.table),
109 (version, pkgarch, checksum, version, pkgarch))
110 except sqlite3.IntegrityError as exc:
111 logger.error(str(exc))
112 self.conn.rollback()
113
114 self.dirty = True
115
116 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
117 (version, pkgarch, checksum))
118 row=data.fetchone() 87 row=data.fetchone()
119 if row is not None: 88 if row is not None:
120 return row[0] 89 return True
121 else: 90 else:
122 raise prserv.NotFoundError 91 return False
123 92
124 def getValue(self, version, pkgarch, checksum): 93 def test_value(self, version, pkgarch, value):
125 if self.nohist: 94 """Returns whether the specified value is found in the database for the specified package and architecture"""
126 return self._getValueNohist(version, pkgarch, checksum) 95
127 else: 96 # Just returns the value if found or None otherwise
128 return self._getValueHist(version, pkgarch, checksum) 97 with closing(self.conn.cursor()) as cursor:
129 98 data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table,
130 def _importHist(self, version, pkgarch, checksum, value): 99 (version, pkgarch, value))
131 val = None 100 row=data.fetchone()
132 data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, 101 if row is not None:
133 (version, pkgarch, checksum)) 102 return True
134 row = data.fetchone() 103 else:
135 if row is not None: 104 return False
136 val=row[0] 105
106
107 def find_package_max_value(self, version, pkgarch):
108 """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value"""
109
110 with closing(self.conn.cursor()) as cursor:
111 data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=?;" % (self.table),
112 (version, pkgarch))
113 rows = data.fetchall()
114 value = self._max_value(rows)
115 return value
116
117 def find_value(self, version, pkgarch, checksum, history=False):
118 """Returns the value for the specified checksum if found or None otherwise."""
119
120 if history:
121 return self.find_min_value(version, pkgarch, checksum)
137 else: 122 else:
138 #no value found, try to insert 123 return self.find_max_value(version, pkgarch, checksum)
139 try: 124
140 self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), 125
126 def _find_extremum_value(self, version, pkgarch, checksum, is_max):
127 """Returns the maximum (if is_max is True) or minimum (if is_max is False) value
128 for (version, pkgarch, checksum), or None if not found. Doesn't create a new value"""
129
130 with closing(self.conn.cursor()) as cursor:
131 data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND checksum=?;" % (self.table),
132 (version, pkgarch, checksum))
133 rows = data.fetchall()
134 return self._extremum_value(rows, is_max)
135
136 def find_max_value(self, version, pkgarch, checksum):
137 return self._find_extremum_value(version, pkgarch, checksum, True)
138
139 def find_min_value(self, version, pkgarch, checksum):
140 return self._find_extremum_value(version, pkgarch, checksum, False)
141
142 def find_new_subvalue(self, version, pkgarch, base):
143 """Take and increase the greatest "<base>.y" value for (version, pkgarch), or return "<base>.0" if not found.
144 This doesn't store a new value."""
145
146 with closing(self.conn.cursor()) as cursor:
147 data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND value LIKE '%s.%%';" % (self.table, base),
148 (version, pkgarch))
149 rows = data.fetchall()
150 value = self._max_value(rows)
151
152 if value is not None:
153 return increase_revision(value)
154 else:
155 return base + ".0"
156
157 def store_value(self, version, pkgarch, checksum, value):
158 """Store value in the database"""
159
160 if not self.read_only and not self.test_checksum_value(version, pkgarch, checksum, value):
161 with closing(self.conn.cursor()) as cursor:
162 cursor.execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table),
141 (version, pkgarch, checksum, value)) 163 (version, pkgarch, checksum, value))
142 except sqlite3.IntegrityError as exc: 164 self.conn.commit()
143 logger.error(str(exc))
144 165
145 self.dirty = True 166 def _get_value(self, version, pkgarch, checksum, history):
146 167
147 data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, 168 max_value = self.find_package_max_value(version, pkgarch)
148 (version, pkgarch, checksum))
149 row = data.fetchone()
150 if row is not None:
151 val = row[0]
152 return val
153 169
154 def _importNohist(self, version, pkgarch, checksum, value): 170 if max_value is None:
155 try: 171 # version, pkgarch completely unknown. Return initial value.
156 #try to insert 172 return "0"
157 self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), 173
158 (version, pkgarch, checksum,value)) 174 value = self.find_value(version, pkgarch, checksum, history)
159 except sqlite3.IntegrityError as exc: 175
160 #already have the record, try to update 176 if value is None:
161 try: 177 # version, pkgarch found but not checksum. Create a new value from the maximum one
162 self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?" 178 return increase_revision(max_value)
163 % (self.table), 179
164 (value,version,pkgarch,checksum,value)) 180 if history:
165 except sqlite3.IntegrityError as exc: 181 return value
166 logger.error(str(exc)) 182
167 183 # "no history" mode - If the value is not the maximum value for the package, need to increase it.
168 self.dirty = True 184 if max_value > value:
169 185 return increase_revision(max_value)
170 data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
171 (version,pkgarch,checksum,value))
172 row=data.fetchone()
173 if row is not None:
174 return row[0]
175 else: 186 else:
176 return None 187 return value
188
189 def get_value(self, version, pkgarch, checksum, history):
190 value = self._get_value(version, pkgarch, checksum, history)
191 if not self.read_only:
192 self.store_value(version, pkgarch, checksum, value)
193 return value
177 194
178 def importone(self, version, pkgarch, checksum, value): 195 def importone(self, version, pkgarch, checksum, value):
179 if self.nohist: 196 self.store_value(version, pkgarch, checksum, value)
180 return self._importNohist(version, pkgarch, checksum, value) 197 return value
181 else:
182 return self._importHist(version, pkgarch, checksum, value)
183 198
184 def export(self, version, pkgarch, checksum, colinfo): 199 def export(self, version, pkgarch, checksum, colinfo, history=False):
185 metainfo = {} 200 metainfo = {}
186 #column info 201 with closing(self.conn.cursor()) as cursor:
187 if colinfo: 202 #column info
188 metainfo['tbl_name'] = self.table 203 if colinfo:
189 metainfo['core_ver'] = prserv.__version__ 204 metainfo["tbl_name"] = self.table
190 metainfo['col_info'] = [] 205 metainfo["core_ver"] = prserv.__version__
191 data = self._execute("PRAGMA table_info(%s);" % self.table) 206 metainfo["col_info"] = []
207 data = cursor.execute("PRAGMA table_info(%s);" % self.table)
208 for row in data:
209 col = {}
210 col["name"] = row["name"]
211 col["type"] = row["type"]
212 col["notnull"] = row["notnull"]
213 col["dflt_value"] = row["dflt_value"]
214 col["pk"] = row["pk"]
215 metainfo["col_info"].append(col)
216
217 #data info
218 datainfo = []
219
220 if history:
221 sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table
222 else:
223 sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \
224 (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \
225 WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table)
226 sqlarg = []
227 where = ""
228 if version:
229 where += "AND T1.version=? "
230 sqlarg.append(str(version))
231 if pkgarch:
232 where += "AND T1.pkgarch=? "
233 sqlarg.append(str(pkgarch))
234 if checksum:
235 where += "AND T1.checksum=? "
236 sqlarg.append(str(checksum))
237
238 sqlstmt += where + ";"
239
240 if len(sqlarg):
241 data = cursor.execute(sqlstmt, tuple(sqlarg))
242 else:
243 data = cursor.execute(sqlstmt)
192 for row in data: 244 for row in data:
193 col = {} 245 if row["version"]:
194 col['name'] = row['name'] 246 col = {}
195 col['type'] = row['type'] 247 col["version"] = row["version"]
196 col['notnull'] = row['notnull'] 248 col["pkgarch"] = row["pkgarch"]
197 col['dflt_value'] = row['dflt_value'] 249 col["checksum"] = row["checksum"]
198 col['pk'] = row['pk'] 250 col["value"] = row["value"]
199 metainfo['col_info'].append(col) 251 datainfo.append(col)
200
201 #data info
202 datainfo = []
203
204 if self.nohist:
205 sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \
206 (SELECT version,pkgarch,max(value) as maxvalue FROM %s GROUP BY version,pkgarch) as T2 \
207 WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table)
208 else:
209 sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table
210 sqlarg = []
211 where = ""
212 if version:
213 where += "AND T1.version=? "
214 sqlarg.append(str(version))
215 if pkgarch:
216 where += "AND T1.pkgarch=? "
217 sqlarg.append(str(pkgarch))
218 if checksum:
219 where += "AND T1.checksum=? "
220 sqlarg.append(str(checksum))
221
222 sqlstmt += where + ";"
223
224 if len(sqlarg):
225 data = self._execute(sqlstmt, tuple(sqlarg))
226 else:
227 data = self._execute(sqlstmt)
228 for row in data:
229 if row['version']:
230 col = {}
231 col['version'] = row['version']
232 col['pkgarch'] = row['pkgarch']
233 col['checksum'] = row['checksum']
234 col['value'] = row['value']
235 datainfo.append(col)
236 return (metainfo, datainfo) 252 return (metainfo, datainfo)
237 253
238 def dump_db(self, fd): 254 def dump_db(self, fd):
@@ -240,41 +256,46 @@ class PRTable(object):
240 for line in self.conn.iterdump(): 256 for line in self.conn.iterdump():
241 writeCount = writeCount + len(line) + 1 257 writeCount = writeCount + len(line) + 1
242 fd.write(line) 258 fd.write(line)
243 fd.write('\n') 259 fd.write("\n")
244 return writeCount 260 return writeCount
245 261
246class PRData(object): 262class PRData(object):
247 """Object representing the PR database""" 263 """Object representing the PR database"""
248 def __init__(self, filename, nohist=True): 264 def __init__(self, filename, read_only=False):
249 self.filename=os.path.abspath(filename) 265 self.filename=os.path.abspath(filename)
250 self.nohist=nohist 266 self.read_only = read_only
251 #build directory hierarchy 267 #build directory hierarchy
252 try: 268 try:
253 os.makedirs(os.path.dirname(self.filename)) 269 os.makedirs(os.path.dirname(self.filename))
254 except OSError as e: 270 except OSError as e:
255 if e.errno != errno.EEXIST: 271 if e.errno != errno.EEXIST:
256 raise e 272 raise e
257 self.connection=sqlite3.connect(self.filename, isolation_level="EXCLUSIVE", check_same_thread = False) 273 uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "")
274 logger.debug("Opening PRServ database '%s'" % (uri))
275 self.connection=sqlite3.connect(uri, uri=True)
258 self.connection.row_factory=sqlite3.Row 276 self.connection.row_factory=sqlite3.Row
259 self.connection.execute("pragma synchronous = off;") 277 self.connection.execute("PRAGMA synchronous = OFF;")
260 self.connection.execute("PRAGMA journal_mode = MEMORY;") 278 self.connection.execute("PRAGMA journal_mode = WAL;")
279 self.connection.commit()
261 self._tables={} 280 self._tables={}
262 281
263 def disconnect(self): 282 def disconnect(self):
283 self.connection.commit()
264 self.connection.close() 284 self.connection.close()
265 285
266 def __getitem__(self,tblname): 286 def __getitem__(self, tblname):
267 if not isinstance(tblname, str): 287 if not isinstance(tblname, str):
268 raise TypeError("tblname argument must be a string, not '%s'" % 288 raise TypeError("tblname argument must be a string, not '%s'" %
269 type(tblname)) 289 type(tblname))
270 if tblname in self._tables: 290 if tblname in self._tables:
271 return self._tables[tblname] 291 return self._tables[tblname]
272 else: 292 else:
273 tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist) 293 tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.read_only)
274 return tableobj 294 return tableobj
275 295
276 def __delitem__(self, tblname): 296 def __delitem__(self, tblname):
277 if tblname in self._tables: 297 if tblname in self._tables:
278 del self._tables[tblname] 298 del self._tables[tblname]
279 logger.info("drop table %s" % (tblname)) 299 logger.info("drop table %s" % (tblname))
280 self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) 300 self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname)
301 self.connection.commit()
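PRTable now keys rows on the full (version, pkgarch, checksum, value) tuple and stores values as dotted strings, so one checksum can accumulate several revisions; history mode returns the minimum value recorded for a checksum while no-history mode returns the current maximum, and get_value() persists whatever it hands out on a writable server. A sketch mirroring prserv/tests.py (the database path is illustrative):

    import prserv.db as db

    data = db.PRData("/tmp/pr-demo.sqlite3")
    table = data["PRMAIN"]

    table.store_value("dummy-1.0-r0", "core2-64", "a" * 64, "0")
    table.store_value("dummy-1.0-r0", "core2-64", "a" * 64, "2")  # same checksum, later PR

    assert table.find_value("dummy-1.0-r0", "core2-64", "a" * 64, True) == "0"   # history
    assert table.find_value("dummy-1.0-r0", "core2-64", "a" * 64, False) == "2"  # no history
    assert table.get_value("other-1.0-r0", "core2-64", "b" * 64, False) == "0"   # new package

    data.disconnect()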
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py
index 25dcf8a0ee..e175886308 100644
--- a/bitbake/lib/prserv/serv.py
+++ b/bitbake/lib/prserv/serv.py
@@ -1,354 +1,326 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import os,sys,logging 7import os,sys,logging
6import signal, time 8import signal, time
7from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
8import threading
9import queue
10import socket 9import socket
11import io 10import io
12import sqlite3 11import sqlite3
13import bb.server.xmlrpcclient
14import prserv 12import prserv
15import prserv.db 13import prserv.db
16import errno 14import errno
17import select 15from . import create_async_client, revision_smaller, increase_revision
16import bb.asyncrpc
18 17
19logger = logging.getLogger("BitBake.PRserv") 18logger = logging.getLogger("BitBake.PRserv")
20 19
21if sys.hexversion < 0x020600F0: 20PIDPREFIX = "/tmp/PRServer_%s_%s.pid"
22 print("Sorry, python 2.6 or later is required.") 21singleton = None
23 sys.exit(1)
24 22
25class Handler(SimpleXMLRPCRequestHandler): 23class PRServerClient(bb.asyncrpc.AsyncServerConnection):
26 def _dispatch(self,method,params): 24 def __init__(self, socket, server):
25 super().__init__(socket, "PRSERVICE", server.logger)
26 self.server = server
27
28 self.handlers.update({
29 "get-pr": self.handle_get_pr,
30 "test-pr": self.handle_test_pr,
31 "test-package": self.handle_test_package,
32 "max-package-pr": self.handle_max_package_pr,
33 "import-one": self.handle_import_one,
34 "export": self.handle_export,
35 "is-readonly": self.handle_is_readonly,
36 })
37
38 def validate_proto_version(self):
39 return (self.proto_version == (1, 0))
40
41 async def dispatch_message(self, msg):
27 try: 42 try:
28 value=self.server.funcs[method](*params) 43 return await super().dispatch_message(msg)
29 except: 44 except:
30 import traceback
31 traceback.print_exc()
32 raise 45 raise
33 return value
34 46
35PIDPREFIX = "/tmp/PRServer_%s_%s.pid" 47 async def handle_test_pr(self, request):
36singleton = None 48 '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value'''
49 version = request["version"]
50 pkgarch = request["pkgarch"]
51 checksum = request["checksum"]
52 history = request["history"]
37 53
54 value = self.server.table.find_value(version, pkgarch, checksum, history)
55 return {"value": value}
38 56
39class PRServer(SimpleXMLRPCServer): 57 async def handle_test_package(self, request):
40 def __init__(self, dbfile, logfile, interface, daemon=True): 58 '''Tells whether there are entries for (version, pkgarch) in the db. Returns True or False'''
41 ''' constructor ''' 59 version = request["version"]
42 try: 60 pkgarch = request["pkgarch"]
43 SimpleXMLRPCServer.__init__(self, interface,
44 logRequests=False, allow_none=True)
45 except socket.error:
46 ip=socket.gethostbyname(interface[0])
47 port=interface[1]
48 msg="PR Server unable to bind to %s:%s\n" % (ip, port)
49 sys.stderr.write(msg)
50 raise PRServiceConfigError
51 61
52 self.dbfile=dbfile 62 value = self.server.table.test_package(version, pkgarch)
53 self.daemon=daemon 63 return {"value": value}
54 self.logfile=logfile
55 self.working_thread=None
56 self.host, self.port = self.socket.getsockname()
57 self.pidfile=PIDPREFIX % (self.host, self.port)
58
59 self.register_function(self.getPR, "getPR")
60 self.register_function(self.quit, "quit")
61 self.register_function(self.ping, "ping")
62 self.register_function(self.export, "export")
63 self.register_function(self.dump_db, "dump_db")
64 self.register_function(self.importone, "importone")
65 self.register_introspection_functions()
66
67 self.quitpipein, self.quitpipeout = os.pipe()
68
69 self.requestqueue = queue.Queue()
70 self.handlerthread = threading.Thread(target = self.process_request_thread)
71 self.handlerthread.daemon = False
72
73 def process_request_thread(self):
74 """Same as in BaseServer but as a thread.
75
76 In addition, exception handling is done here.
77
78 """
79 iter_count = 1
80 # 60 iterations between syncs or sync if dirty every ~30 seconds
81 iterations_between_sync = 60
82
83 bb.utils.set_process_name("PRServ Handler")
84
85 while not self.quitflag:
86 try:
87 (request, client_address) = self.requestqueue.get(True, 30)
88 except queue.Empty:
89 self.table.sync_if_dirty()
90 continue
91 if request is None:
92 continue
93 try:
94 self.finish_request(request, client_address)
95 self.shutdown_request(request)
96 iter_count = (iter_count + 1) % iterations_between_sync
97 if iter_count == 0:
98 self.table.sync_if_dirty()
99 except:
100 self.handle_error(request, client_address)
101 self.shutdown_request(request)
102 self.table.sync()
103 self.table.sync_if_dirty()
104
105 def sigint_handler(self, signum, stack):
106 if self.table:
107 self.table.sync()
108
109 def sigterm_handler(self, signum, stack):
110 if self.table:
111 self.table.sync()
112 self.quit()
113 self.requestqueue.put((None, None))
114
115 def process_request(self, request, client_address):
116 self.requestqueue.put((request, client_address))
117
118 def export(self, version=None, pkgarch=None, checksum=None, colinfo=True):
119 try:
120 return self.table.export(version, pkgarch, checksum, colinfo)
121 except sqlite3.Error as exc:
122 logger.error(str(exc))
123 return None
124
125 def dump_db(self):
126 """
127 Returns a script (string) that reconstructs the state of the
128 entire database at the time this function is called. The script
129 language is defined by the backing database engine, which is a
130 function of server configuration.
131 Returns None if the database engine does not support dumping to
132 script or if some other error is encountered in processing.
133 """
134 buff = io.StringIO()
135 try:
136 self.table.sync()
137 self.table.dump_db(buff)
138 return buff.getvalue()
139 except Exception as exc:
140 logger.error(str(exc))
141 return None
142 finally:
143 buff.close()
144 64
145 def importone(self, version, pkgarch, checksum, value): 65 async def handle_max_package_pr(self, request):
146 return self.table.importone(version, pkgarch, checksum, value) 66 '''Finds the greatest PR value for (version, pkgarch) in the db. Returns None if no entry was found'''
67 version = request["version"]
68 pkgarch = request["pkgarch"]
147 69
148 def ping(self): 70 value = self.server.table.find_package_max_value(version, pkgarch)
149 return not self.quitflag 71 return {"value": value}
150 72
151 def getinfo(self): 73 async def handle_get_pr(self, request):
152 return (self.host, self.port) 74 version = request["version"]
75 pkgarch = request["pkgarch"]
76 checksum = request["checksum"]
77 history = request["history"]
153 78
154 def getPR(self, version, pkgarch, checksum): 79 if self.upstream_client is None:
155 try: 80 value = self.server.table.get_value(version, pkgarch, checksum, history)
156 return self.table.getValue(version, pkgarch, checksum) 81 return {"value": value}
157 except prserv.NotFoundError:
158 logger.error("can not find value for (%s, %s)",version, checksum)
159 return None
160 except sqlite3.Error as exc:
161 logger.error(str(exc))
162 return None
163
164 def quit(self):
165 self.quitflag=True
166 os.write(self.quitpipeout, b"q")
167 os.close(self.quitpipeout)
168 return
169
170 def work_forever(self,):
171 self.quitflag = False
172 # This timeout applies to the poll in TCPServer, we need the select
173 # below to wake on our quit pipe closing. We only ever call into handle_request
174 # if there is data there.
175 self.timeout = 0.01
176
177 bb.utils.set_process_name("PRServ")
178
179 # DB connection must be created after all forks
180 self.db = prserv.db.PRData(self.dbfile)
181 self.table = self.db["PRMAIN"]
182 82
183 logger.info("Started PRServer with DBfile: %s, IP: %s, PORT: %s, PID: %s" % 83 # We have an upstream server.
184 (self.dbfile, self.host, self.port, str(os.getpid()))) 84 # Check whether the local server already knows the requested configuration.
185 85 # If the configuration is a new one, the generated value we will add will
186 self.handlerthread.start() 86 # depend on what's on the upstream server. That's why we're calling find_value()
187 while not self.quitflag: 87 # instead of get_value() directly.
188 ready = select.select([self.fileno(), self.quitpipein], [], [], 30)
189 if self.quitflag:
190 break
191 if self.fileno() in ready[0]:
192 self.handle_request()
193 self.handlerthread.join()
194 self.db.disconnect()
195 logger.info("PRServer: stopping...")
196 self.server_close()
197 os.close(self.quitpipein)
198 return
199 88
200 def start(self): 89 value = self.server.table.find_value(version, pkgarch, checksum, history)
201 if self.daemon: 90 upstream_max = await self.upstream_client.max_package_pr(version, pkgarch)
202 pid = self.daemonize()
203 else:
204 pid = self.fork()
205 self.pid = pid
206 91
207 # Ensure both the parent sees this and the child from the work_forever log entry above 92 if value is not None:
208 logger.info("Started PRServer with DBfile: %s, IP: %s, PORT: %s, PID: %s" %
209 (self.dbfile, self.host, self.port, str(pid)))
210 93
211 def delpid(self): 94 # The configuration is already known locally.
212 os.remove(self.pidfile)
213 95
214 def daemonize(self): 96 if history:
215 """ 97 value = self.server.table.get_value(version, pkgarch, checksum, history)
216 See Advanced Programming in the UNIX, Sec 13.3 98 else:
217 """ 99 existing_value = value
218 try: 100 # In "no history", we need to make sure the value doesn't decrease
219 pid = os.fork() 101 # and is at least greater than the maximum upstream value
220 if pid > 0: 102 # and the maximum local value
221 os.waitpid(pid, 0)
222 #parent return instead of exit to give control
223 return pid
224 except OSError as e:
225 raise Exception("%s [%d]" % (e.strerror, e.errno))
226
227 os.setsid()
228 """
229 fork again to make sure the daemon is not session leader,
230 which prevents it from acquiring controlling terminal
231 """
232 try:
233 pid = os.fork()
234 if pid > 0: #parent
235 os._exit(0)
236 except OSError as e:
237 raise Exception("%s [%d]" % (e.strerror, e.errno))
238 103
239 self.cleanup_handles() 104 local_max = self.server.table.find_package_max_value(version, pkgarch)
240 os._exit(0) 105 if revision_smaller(value, local_max):
106 value = increase_revision(local_max)
107
108 if revision_smaller(value, upstream_max):
109 # Ask upstream whether it knows the checksum
110 upstream_value = await self.upstream_client.test_pr(version, pkgarch, checksum)
111 if upstream_value is None:
 112 # Upstream doesn't have our checksum, let's create a new one
113 value = upstream_max + ".0"
114 else:
115 # Fine to take the same value as upstream
116 value = upstream_max
117
118 if not value == existing_value and not self.server.read_only:
119 self.server.table.store_value(version, pkgarch, checksum, value)
120
121 return {"value": value}
122
123 # The configuration is a new one for the local server
124 # Let's ask the upstream server whether it knows it
125
126 known_upstream = await self.upstream_client.test_package(version, pkgarch)
127
128 if not known_upstream:
129
130 # The package is not known upstream, must be a local-only package
131 # Let's compute the PR number using the local-only method
132
133 value = self.server.table.get_value(version, pkgarch, checksum, history)
134 return {"value": value}
135
136 # The package is known upstream, let's ask the upstream server
137 # whether it knows our new output hash
138
139 value = await self.upstream_client.test_pr(version, pkgarch, checksum)
140
141 if value is not None:
142
143 # Upstream knows this output hash, let's store it and use it too.
144
145 if not self.server.read_only:
146 self.server.table.store_value(version, pkgarch, checksum, value)
 147 # If the local server is read only, it won't be able to store the new
148 # value in the database and will have to keep asking the upstream server
149 return {"value": value}
150
151 # The output hash doesn't exist upstream, get the most recent number from upstream (x)
152 # Then, we want to have a new PR value for the local server: x.y
153
154 upstream_max = await self.upstream_client.max_package_pr(version, pkgarch)
155 # Here we know that the package is known upstream, so upstream_max can't be None
156 subvalue = self.server.table.find_new_subvalue(version, pkgarch, upstream_max)
157
158 if not self.server.read_only:
159 self.server.table.store_value(version, pkgarch, checksum, subvalue)
160
161 return {"value": subvalue}
162
163 async def process_requests(self):
164 if self.server.upstream is not None:
165 self.upstream_client = await create_async_client(self.server.upstream)
166 else:
167 self.upstream_client = None
241 168
242 def fork(self):
243 try:
244 pid = os.fork()
245 if pid > 0:
246 self.socket.close() # avoid ResourceWarning in parent
247 return pid
248 except OSError as e:
249 raise Exception("%s [%d]" % (e.strerror, e.errno))
250
251 bb.utils.signal_on_parent_exit("SIGTERM")
252 self.cleanup_handles()
253 os._exit(0)
254
255 def cleanup_handles(self):
256 signal.signal(signal.SIGINT, self.sigint_handler)
257 signal.signal(signal.SIGTERM, self.sigterm_handler)
258 os.chdir("/")
259
260 sys.stdout.flush()
261 sys.stderr.flush()
262
263 # We could be called from a python thread with io.StringIO as
264 # stdout/stderr or it could be 'real' unix fd forking where we need
265 # to physically close the fds to prevent the program launching us from
266 # potentially hanging on a pipe. Handle both cases.
267 si = open('/dev/null', 'r')
268 try:
269 os.dup2(si.fileno(),sys.stdin.fileno())
270 except (AttributeError, io.UnsupportedOperation):
271 sys.stdin = si
272 so = open(self.logfile, 'a+')
273 try: 169 try:
274 os.dup2(so.fileno(),sys.stdout.fileno()) 170 await super().process_requests()
275 except (AttributeError, io.UnsupportedOperation): 171 finally:
276 sys.stdout = so 172 if self.upstream_client is not None:
173 await self.upstream_client.close()
174
175 async def handle_import_one(self, request):
176 response = None
177 if not self.server.read_only:
178 version = request["version"]
179 pkgarch = request["pkgarch"]
180 checksum = request["checksum"]
181 value = request["value"]
182
183 value = self.server.table.importone(version, pkgarch, checksum, value)
184 if value is not None:
185 response = {"value": value}
186
187 return response
188
189 async def handle_export(self, request):
190 version = request["version"]
191 pkgarch = request["pkgarch"]
192 checksum = request["checksum"]
193 colinfo = request["colinfo"]
194 history = request["history"]
195
277 try: 196 try:
278 os.dup2(so.fileno(),sys.stderr.fileno()) 197 (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo, history)
279 except (AttributeError, io.UnsupportedOperation): 198 except sqlite3.Error as exc:
280 sys.stderr = so 199 self.logger.error(str(exc))
281 200 metainfo = datainfo = None
282 # Clear out all log handlers prior to the fork() to avoid calling
283 # event handlers not part of the PRserver
284 for logger_iter in logging.Logger.manager.loggerDict.keys():
285 logging.getLogger(logger_iter).handlers = []
286
287 # Ensure logging makes it to the logfile
288 streamhandler = logging.StreamHandler()
289 streamhandler.setLevel(logging.DEBUG)
290 formatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
291 streamhandler.setFormatter(formatter)
292 logger.addHandler(streamhandler)
293
294 # write pidfile
295 pid = str(os.getpid())
296 with open(self.pidfile, 'w') as pf:
297 pf.write("%s\n" % pid)
298
299 self.work_forever()
300 self.delpid()
301 201
302class PRServSingleton(object): 202 return {"metainfo": metainfo, "datainfo": datainfo}
303 def __init__(self, dbfile, logfile, interface): 203
204 async def handle_is_readonly(self, request):
205 return {"readonly": self.server.read_only}
206
207class PRServer(bb.asyncrpc.AsyncServer):
208 def __init__(self, dbfile, read_only=False, upstream=None):
209 super().__init__(logger)
304 self.dbfile = dbfile 210 self.dbfile = dbfile
305 self.logfile = logfile 211 self.table = None
306 self.interface = interface 212 self.read_only = read_only
307 self.host = None 213 self.upstream = upstream
308 self.port = None 214
215 def accept_client(self, socket):
216 return PRServerClient(socket, self)
309 217
310 def start(self): 218 def start(self):
311 self.prserv = PRServer(self.dbfile, self.logfile, self.interface, daemon=False) 219 tasks = super().start()
312 self.prserv.start() 220 self.db = prserv.db.PRData(self.dbfile, read_only=self.read_only)
313 self.host, self.port = self.prserv.getinfo() 221 self.table = self.db["PRMAIN"]
222
223 self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" %
224 (self.dbfile, self.address, str(os.getpid())))
314 225
315 def getinfo(self): 226 if self.upstream is not None:
316 return (self.host, self.port) 227 self.logger.info("And upstream PRServer: %s " % (self.upstream))
317 228
318class PRServerConnection(object): 229 return tasks
319 def __init__(self, host, port): 230
320 if is_local_special(host, port): 231 async def stop(self):
321 host, port = singleton.getinfo() 232 self.db.disconnect()
233 await super().stop()
234
235class PRServSingleton(object):
236 def __init__(self, dbfile, logfile, host, port, upstream):
237 self.dbfile = dbfile
238 self.logfile = logfile
322 self.host = host 239 self.host = host
323 self.port = port 240 self.port = port
324 self.connection, self.transport = bb.server.xmlrpcclient._create_server(self.host, self.port) 241 self.upstream = upstream
325
326 def terminate(self):
327 try:
328 logger.info("Terminating PRServer...")
329 self.connection.quit()
330 except Exception as exc:
331 sys.stderr.write("%s\n" % str(exc))
332 242
333 def getPR(self, version, pkgarch, checksum): 243 def start(self):
334 return self.connection.getPR(version, pkgarch, checksum) 244 self.prserv = PRServer(self.dbfile, upstream=self.upstream)
245 self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port)
246 self.process = self.prserv.serve_as_process(log_level=logging.WARNING)
335 247
336 def ping(self): 248 if not self.prserv.address:
337 return self.connection.ping() 249 raise PRServiceConfigError
250 if not self.port:
251 self.port = int(self.prserv.address.rsplit(":", 1)[1])
338 252
339 def export(self,version=None, pkgarch=None, checksum=None, colinfo=True): 253def run_as_daemon(func, pidfile, logfile):
340 return self.connection.export(version, pkgarch, checksum, colinfo) 254 """
255 See Advanced Programming in the UNIX, Sec 13.3
256 """
257 try:
258 pid = os.fork()
259 if pid > 0:
260 os.waitpid(pid, 0)
261 #parent return instead of exit to give control
262 return pid
263 except OSError as e:
264 raise Exception("%s [%d]" % (e.strerror, e.errno))
341 265
342 def dump_db(self): 266 os.setsid()
343 return self.connection.dump_db() 267 """
268 fork again to make sure the daemon is not session leader,
269 which prevents it from acquiring controlling terminal
270 """
271 try:
272 pid = os.fork()
273 if pid > 0: #parent
274 os._exit(0)
275 except OSError as e:
276 raise Exception("%s [%d]" % (e.strerror, e.errno))
344 277
345 def importone(self, version, pkgarch, checksum, value): 278 os.chdir("/")
346 return self.connection.importone(version, pkgarch, checksum, value)
347 279
348 def getinfo(self): 280 sys.stdout.flush()
349 return self.host, self.port 281 sys.stderr.flush()
350 282
351def start_daemon(dbfile, host, port, logfile): 283 # We could be called from a python thread with io.StringIO as
284 # stdout/stderr or it could be 'real' unix fd forking where we need
285 # to physically close the fds to prevent the program launching us from
286 # potentially hanging on a pipe. Handle both cases.
287 si = open("/dev/null", "r")
288 try:
289 os.dup2(si.fileno(), sys.stdin.fileno())
290 except (AttributeError, io.UnsupportedOperation):
291 sys.stdin = si
292 so = open(logfile, "a+")
293 try:
294 os.dup2(so.fileno(), sys.stdout.fileno())
295 except (AttributeError, io.UnsupportedOperation):
296 sys.stdout = so
297 try:
298 os.dup2(so.fileno(), sys.stderr.fileno())
299 except (AttributeError, io.UnsupportedOperation):
300 sys.stderr = so
301
302 # Clear out all log handlers prior to the fork() to avoid calling
303 # event handlers not part of the PRserver
304 for logger_iter in logging.Logger.manager.loggerDict.keys():
305 logging.getLogger(logger_iter).handlers = []
306
307 # Ensure logging makes it to the logfile
308 streamhandler = logging.StreamHandler()
309 streamhandler.setLevel(logging.DEBUG)
310 formatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
311 streamhandler.setFormatter(formatter)
312 logger.addHandler(streamhandler)
313
314 # write pidfile
315 pid = str(os.getpid())
316 with open(pidfile, "w") as pf:
317 pf.write("%s\n" % pid)
318
319 func()
320 os.remove(pidfile)
321 os._exit(0)
322
323def start_daemon(dbfile, host, port, logfile, read_only=False, upstream=None):
352 ip = socket.gethostbyname(host) 324 ip = socket.gethostbyname(host)
353 pidfile = PIDPREFIX % (ip, port) 325 pidfile = PIDPREFIX % (ip, port)
354 try: 326 try:
@@ -362,15 +334,13 @@ def start_daemon(dbfile, host, port, logfile):
362 % pidfile) 334 % pidfile)
363 return 1 335 return 1
364 336
365 server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (ip,port)) 337 dbfile = os.path.abspath(dbfile)
366 server.start() 338 def daemon_main():
339 server = PRServer(dbfile, read_only=read_only, upstream=upstream)
340 server.start_tcp_server(ip, port)
341 server.serve_forever()
367 342
368 # Sometimes, the port (i.e. localhost:0) indicated by the user does not match with 343 run_as_daemon(daemon_main, pidfile, os.path.abspath(logfile))
369 # the one the server actually is listening, so at least warn the user about it
370 _,rport = server.getinfo()
371 if port != rport:
372 sys.stdout.write("Server is listening at port %s instead of %s\n"
373 % (rport,port))
374 return 0 344 return 0
375 345
376def stop_daemon(host, port): 346def stop_daemon(host, port):
@@ -388,37 +358,28 @@ def stop_daemon(host, port):
388 # so at least advise the user which ports the corresponding server is listening 358 # so at least advise the user which ports the corresponding server is listening
389 ports = [] 359 ports = []
390 portstr = "" 360 portstr = ""
391 for pf in glob.glob(PIDPREFIX % (ip,'*')): 361 for pf in glob.glob(PIDPREFIX % (ip, "*")):
392 bn = os.path.basename(pf) 362 bn = os.path.basename(pf)
393 root, _ = os.path.splitext(bn) 363 root, _ = os.path.splitext(bn)
394 ports.append(root.split('_')[-1]) 364 ports.append(root.split("_")[-1])
395 if len(ports): 365 if len(ports):
396 portstr = "Wrong port? Other ports listening at %s: %s" % (host, ' '.join(ports)) 366 portstr = "Wrong port? Other ports listening at %s: %s" % (host, " ".join(ports))
397 367
398 sys.stderr.write("pidfile %s does not exist. Daemon not running? %s\n" 368 sys.stderr.write("pidfile %s does not exist. Daemon not running? %s\n"
399 % (pidfile,portstr)) 369 % (pidfile, portstr))
400 return 1 370 return 1
401 371
402 try: 372 try:
403 PRServerConnection(ip, port).terminate() 373 if is_running(pid):
404 except: 374 print("Sending SIGTERM to pr-server.")
405 logger.critical("Stop PRService %s:%d failed" % (host,port)) 375 os.kill(pid, signal.SIGTERM)
406 376 time.sleep(0.1)
407 try:
408 if pid:
409 wait_timeout = 0
410 print("Waiting for pr-server to exit.")
411 while is_running(pid) and wait_timeout < 50:
412 time.sleep(0.1)
413 wait_timeout += 1
414 377
415 if is_running(pid): 378 try:
416 print("Sending SIGTERM to pr-server.") 379 os.remove(pidfile)
417 os.kill(pid,signal.SIGTERM) 380 except FileNotFoundError:
418 time.sleep(0.1) 381 # The PID file might have been removed by the exiting process
419 382 pass
420 if os.path.exists(pidfile):
421 os.remove(pidfile)
422 383
423 except OSError as e: 384 except OSError as e:
424 err = str(e) 385 err = str(e)
@@ -436,7 +397,7 @@ def is_running(pid):
436 return True 397 return True
437 398
438def is_local_special(host, port): 399def is_local_special(host, port):
439 if host.strip().upper() == 'localhost'.upper() and (not port): 400 if (host == "localhost" or host == "127.0.0.1") and not port:
440 return True 401 return True
441 else: 402 else:
442 return False 403 return False
@@ -447,7 +408,7 @@ class PRServiceConfigError(Exception):
447def auto_start(d): 408def auto_start(d):
448 global singleton 409 global singleton
449 410
450 host_params = list(filter(None, (d.getVar('PRSERV_HOST') or '').split(':'))) 411 host_params = list(filter(None, (d.getVar("PRSERV_HOST") or "").split(":")))
451 if not host_params: 412 if not host_params:
452 # Shutdown any existing PR Server 413 # Shutdown any existing PR Server
453 auto_shutdown() 414 auto_shutdown()
@@ -456,11 +417,16 @@ def auto_start(d):
456 if len(host_params) != 2: 417 if len(host_params) != 2:
457 # Shutdown any existing PR Server 418 # Shutdown any existing PR Server
458 auto_shutdown() 419 auto_shutdown()
459 logger.critical('\n'.join(['PRSERV_HOST: incorrect format', 420 logger.critical("\n".join(["PRSERV_HOST: incorrect format",
460 'Usage: PRSERV_HOST = "<hostname>:<port>"'])) 421 'Usage: PRSERV_HOST = "<hostname>:<port>"']))
461 raise PRServiceConfigError 422 raise PRServiceConfigError
462 423
463 if is_local_special(host_params[0], int(host_params[1])): 424 host = host_params[0].strip().lower()
425 port = int(host_params[1])
426
427 upstream = d.getVar("PRSERV_UPSTREAM") or None
428
429 if is_local_special(host, port):
464 import bb.utils 430 import bb.utils
465 cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) 431 cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE"))
466 if not cachedir: 432 if not cachedir:
@@ -474,39 +440,43 @@ def auto_start(d):
474 auto_shutdown() 440 auto_shutdown()
475 if not singleton: 441 if not singleton:
476 bb.utils.mkdirhier(cachedir) 442 bb.utils.mkdirhier(cachedir)
477 singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), ("localhost",0)) 443 singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port, upstream)
478 singleton.start() 444 singleton.start()
479 if singleton: 445 if singleton:
480 host, port = singleton.getinfo() 446 host = singleton.host
481 else: 447 port = singleton.port
482 host = host_params[0]
483 port = int(host_params[1])
484 448
485 try: 449 try:
486 connection = PRServerConnection(host,port) 450 ping(host, port)
487 connection.ping() 451 return str(host) + ":" + str(port)
488 realhost, realport = connection.getinfo() 452
489 return str(realhost) + ":" + str(realport)
490
491 except Exception: 453 except Exception:
492 logger.critical("PRservice %s:%d not available" % (host, port)) 454 logger.critical("PRservice %s:%d not available" % (host, port))
493 raise PRServiceConfigError 455 raise PRServiceConfigError
494 456
495def auto_shutdown(): 457def auto_shutdown():
496 global singleton 458 global singleton
497 if singleton: 459 if singleton and singleton.process:
498 host, port = singleton.getinfo() 460 singleton.process.terminate()
499 try: 461 singleton.process.join()
500 PRServerConnection(host, port).terminate()
501 except:
502 logger.critical("Stop PRService %s:%d failed" % (host,port))
503
504 try:
505 os.waitpid(singleton.prserv.pid, 0)
506 except ChildProcessError:
507 pass
508 singleton = None 462 singleton = None
509 463
510def ping(host, port): 464def ping(host, port):
511 conn=PRServerConnection(host, port) 465 from . import client
512 return conn.ping() 466
467 with client.PRClient() as conn:
468 conn.connect_tcp(host, port)
469 return conn.ping()
470
471def connect(host, port):
472 from . import client
473
474 global singleton
475
476 if host.strip().lower() == "localhost" and not port:
477 host = "localhost"
478 port = singleton.port
479
480 conn = client.PRClient()
481 conn.connect_tcp(host, port)
482 return conn
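The server side is now a bb.asyncrpc.AsyncServer that can run in a child process, with handle_get_pr() consulting an optional upstream server before minting local "x.y" sub-revisions for output hashes the upstream has never seen. A standalone start/query sketch in the style of prserv/tests.py (the database path is illustrative; port 0 requests an ephemeral port):

    import socket
    from prserv import create_server, create_client

    server = create_server(socket.gethostbyname("localhost") + ":0",
                           "/tmp/pr-demo.sqlite3")
    process = server.serve_as_process()
    try:
        conn = create_client(server.address)
        print(conn.getPR("dummy-1.0-r0", "core2-64", "c" * 64))  # "0" on a fresh database
        conn.close()
    finally:
        process.terminate()
        process.join()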
diff --git a/bitbake/lib/prserv/tests.py b/bitbake/lib/prserv/tests.py
new file mode 100644
index 0000000000..df0c003003
--- /dev/null
+++ b/bitbake/lib/prserv/tests.py
@@ -0,0 +1,388 @@
1#! /usr/bin/env python3
2#
3# Copyright (C) 2024 BitBake Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8from . import create_server, create_client, increase_revision, revision_greater, revision_smaller, _revision_greater_or_equal
9import prserv.db as db
10from bb.asyncrpc import InvokeError
11import logging
12import os
13import sys
14import tempfile
15import unittest
16import socket
17import subprocess
18from pathlib import Path
19
20THIS_DIR = Path(__file__).parent
21BIN_DIR = THIS_DIR.parent.parent / "bin"
22
23version = "dummy-1.0-r0"
24pkgarch = "core2-64"
25other_arch = "aarch64"
26
27checksumX = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4f0"
28checksum0 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a0"
29checksum1 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a1"
30checksum2 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a2"
31checksum3 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a3"
32checksum4 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a4"
33checksum5 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a5"
34checksum6 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a6"
35checksum7 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a7"
36checksum8 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a8"
37checksum9 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a9"
38checksum10 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4aa"
39
40def server_prefunc(server, name):
41 logging.basicConfig(level=logging.DEBUG, filename='prserv-%s.log' % name, filemode='w',
42 format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
43 server.logger.debug("Running server %s" % name)
44 sys.stdout = open('prserv-stdout-%s.log' % name, 'w')
45 sys.stderr = sys.stdout
46
47class PRTestSetup(object):
48
49 def start_server(self, name, dbfile, upstream=None, read_only=False, prefunc=server_prefunc):
50
51 def cleanup_server(server):
52 if server.process.exitcode is not None:
53 return
54 server.process.terminate()
55 server.process.join()
56
57 server = create_server(socket.gethostbyname("localhost") + ":0",
58 dbfile,
59 upstream=upstream,
60 read_only=read_only)
61
62 server.serve_as_process(prefunc=prefunc, args=(name,))
63 self.addCleanup(cleanup_server, server)
64
65 return server
66
67 def start_client(self, server_address):
68 def cleanup_client(client):
69 client.close()
70
71 client = create_client(server_address)
72 self.addCleanup(cleanup_client, client)
73
74 return client
75
76class FunctionTests(unittest.TestCase):
77
78 def setUp(self):
79 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
80 self.addCleanup(self.temp_dir.cleanup)
81
82 def test_increase_revision(self):
83 self.assertEqual(increase_revision("1"), "2")
84 self.assertEqual(increase_revision("1.0"), "1.1")
85 self.assertEqual(increase_revision("1.1.1"), "1.1.2")
86 self.assertEqual(increase_revision("1.1.1.3"), "1.1.1.4")
87 self.assertEqual(increase_revision("9"), "10")
88 self.assertEqual(increase_revision("1.9"), "1.10")
89 self.assertRaises(ValueError, increase_revision, "1.a")
90 self.assertRaises(ValueError, increase_revision, "1.")
91 self.assertRaises(ValueError, increase_revision, "")
92
93 def test_revision_greater_or_equal(self):
94 self.assertTrue(_revision_greater_or_equal("2", "2"))
95 self.assertTrue(_revision_greater_or_equal("2", "1"))
96 self.assertTrue(_revision_greater_or_equal("10", "2"))
97 self.assertTrue(_revision_greater_or_equal("1.10", "1.2"))
98 self.assertFalse(_revision_greater_or_equal("1.2", "1.10"))
99 self.assertTrue(_revision_greater_or_equal("1.10", "1"))
100 self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10"))
101 self.assertFalse(_revision_greater_or_equal("1.10.1", "1.10.2"))
102 self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10.1"))
103 self.assertTrue(_revision_greater_or_equal("1.10.1", "1"))
104 self.assertTrue(revision_greater("1.20", "1.3"))
105 self.assertTrue(revision_smaller("1.3", "1.20"))
106
107 # DB tests
108
109 def test_db(self):
110 dbfile = os.path.join(self.temp_dir.name, "testtable.sqlite3")
111
112 self.db = db.PRData(dbfile)
113 self.table = self.db["PRMAIN"]
114
115 self.table.store_value(version, pkgarch, checksum0, "0")
116 self.table.store_value(version, pkgarch, checksum1, "1")
117 # "No history" mode supports multiple PRs for the same checksum
118 self.table.store_value(version, pkgarch, checksum0, "2")
119 self.table.store_value(version, pkgarch, checksum2, "1.0")
120
121 self.assertTrue(self.table.test_package(version, pkgarch))
122 self.assertFalse(self.table.test_package(version, other_arch))
123
124 self.assertTrue(self.table.test_value(version, pkgarch, "0"))
125 self.assertTrue(self.table.test_value(version, pkgarch, "1"))
126 self.assertTrue(self.table.test_value(version, pkgarch, "2"))
127
128 self.assertEqual(self.table.find_package_max_value(version, pkgarch), "2")
129
130 self.assertEqual(self.table.find_min_value(version, pkgarch, checksum0), "0")
131 self.assertEqual(self.table.find_max_value(version, pkgarch, checksum0), "2")
132
133 # Test history modes
134 self.assertEqual(self.table.find_value(version, pkgarch, checksum0, True), "0")
135 self.assertEqual(self.table.find_value(version, pkgarch, checksum0, False), "2")
136
137 self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "3"), "3.0")
138 self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "1"), "1.1")
139
140 # Revision comparison tests
141 self.table.store_value(version, pkgarch, checksum1, "1.3")
142 self.table.store_value(version, pkgarch, checksum1, "1.20")
143 self.assertEqual(self.table.find_min_value(version, pkgarch, checksum1), "1")
144 self.assertEqual(self.table.find_max_value(version, pkgarch, checksum1), "1.20")
145
146class PRBasicTests(PRTestSetup, unittest.TestCase):
147
148 def setUp(self):
149 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
150 self.addCleanup(self.temp_dir.cleanup)
151
152 dbfile = os.path.join(self.temp_dir.name, "prtest-basic.sqlite3")
153
154 self.server1 = self.start_server("basic", dbfile)
155 self.client1 = self.start_client(self.server1.address)
156
157 def test_basic(self):
158
 159        # Checks on a non-existent configuration
 160
 161        result = self.client1.test_pr(version, pkgarch, checksum0)
 162        self.assertIsNone(result, "test_pr should return 'None' for a non-existent PR")
 163
 164        result = self.client1.test_package(version, pkgarch)
 165        self.assertFalse(result, "test_package should return 'False' for a non-existent PR")
 166
 167        result = self.client1.max_package_pr(version, pkgarch)
 168        self.assertIsNone(result, "max_package_pr should return 'None' for a non-existent PR")
169
170 # Add a first configuration
171
172 result = self.client1.getPR(version, pkgarch, checksum0)
173 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'")
174
175 result = self.client1.test_pr(version, pkgarch, checksum0)
176 self.assertEqual(result, "0", "test_pr should return '0' here, matching the result of getPR")
177
178 result = self.client1.test_package(version, pkgarch)
179 self.assertTrue(result, "test_package should return 'True' for an existing PR")
180
181 result = self.client1.max_package_pr(version, pkgarch)
182 self.assertEqual(result, "0", "max_package_pr should return '0' in the current test series")
183
184 # Check that the same request gets the same value
185
186 result = self.client1.getPR(version, pkgarch, checksum0)
187 self.assertEqual(result, "0", "getPR: asking for the same PR a second time in a row should return the same value.")
188
189 # Add new configurations
190
191 result = self.client1.getPR(version, pkgarch, checksum1)
192 self.assertEqual(result, "1", "getPR: second PR of a package should be '1'")
193
194 result = self.client1.test_pr(version, pkgarch, checksum1)
195 self.assertEqual(result, "1", "test_pr should return '1' here, matching the result of getPR")
196
197 result = self.client1.max_package_pr(version, pkgarch)
198 self.assertEqual(result, "1", "max_package_pr should return '1' in the current test series")
199
200 result = self.client1.getPR(version, pkgarch, checksum2)
 201        self.assertEqual(result, "2", "getPR: third PR of a package should be '2'")
202
203 result = self.client1.test_pr(version, pkgarch, checksum2)
204 self.assertEqual(result, "2", "test_pr should return '2' here, matching the result of getPR")
205
206 result = self.client1.max_package_pr(version, pkgarch)
207 self.assertEqual(result, "2", "max_package_pr should return '2' in the current test series")
208
209 result = self.client1.getPR(version, pkgarch, checksum3)
 210        self.assertEqual(result, "3", "getPR: fourth PR of a package should be '3'")
211
212 result = self.client1.test_pr(version, pkgarch, checksum3)
213 self.assertEqual(result, "3", "test_pr should return '3' here, matching the result of getPR")
214
215 result = self.client1.max_package_pr(version, pkgarch)
216 self.assertEqual(result, "3", "max_package_pr should return '3' in the current test series")
217
218 # Ask again for the first configuration
219
220 result = self.client1.getPR(version, pkgarch, checksum0)
221 self.assertEqual(result, "4", "getPR: should return '4' in this configuration")
222
223 # Ask again with explicit "no history" mode
224
225 result = self.client1.getPR(version, pkgarch, checksum0, False)
226 self.assertEqual(result, "4", "getPR: should return '4' in this configuration")
227
228 # Ask again with explicit "history" mode. This should return the first recorded PR for checksum0
229
230 result = self.client1.getPR(version, pkgarch, checksum0, True)
231 self.assertEqual(result, "0", "getPR: should return '0' in this configuration")
232
 233        # Check again that another pkgarch resets the counters
 234
 235        result = self.client1.test_pr(version, other_arch, checksum0)
 236        self.assertIsNone(result, "test_pr should return 'None' for a non-existent PR")
 237
 238        result = self.client1.test_package(version, other_arch)
 239        self.assertFalse(result, "test_package should return 'False' for a non-existent PR")
 240
 241        result = self.client1.max_package_pr(version, other_arch)
 242        self.assertIsNone(result, "max_package_pr should return 'None' for a non-existent PR")
243
244 # Now add the configuration
245
246 result = self.client1.getPR(version, other_arch, checksum0)
247 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'")
248
249 result = self.client1.test_pr(version, other_arch, checksum0)
250 self.assertEqual(result, "0", "test_pr should return '0' here, matching the result of getPR")
251
252 result = self.client1.test_package(version, other_arch)
253 self.assertTrue(result, "test_package should return 'True' for an existing PR")
254
255 result = self.client1.max_package_pr(version, other_arch)
256 self.assertEqual(result, "0", "max_package_pr should return '0' in the current test series")
257
258 result = self.client1.is_readonly()
259 self.assertFalse(result, "Server should not be described as 'read-only'")
260
261class PRUpstreamTests(PRTestSetup, unittest.TestCase):
262
263 def setUp(self):
264
265 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
266 self.addCleanup(self.temp_dir.cleanup)
267
268 dbfile2 = os.path.join(self.temp_dir.name, "prtest-upstream2.sqlite3")
269 self.server2 = self.start_server("upstream2", dbfile2)
270 self.client2 = self.start_client(self.server2.address)
271
272 dbfile1 = os.path.join(self.temp_dir.name, "prtest-upstream1.sqlite3")
273 self.server1 = self.start_server("upstream1", dbfile1, upstream=self.server2.address)
274 self.client1 = self.start_client(self.server1.address)
275
276 dbfile0 = os.path.join(self.temp_dir.name, "prtest-local.sqlite3")
277 self.server0 = self.start_server("local", dbfile0, upstream=self.server1.address)
278 self.client0 = self.start_client(self.server0.address)
279 self.shared_db = dbfile0
280
281 def test_upstream_and_readonly(self):
282
283 # For identical checksums, all servers should return the same PR
284
285 result = self.client2.getPR(version, pkgarch, checksum0)
286 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'")
287
288 result = self.client1.getPR(version, pkgarch, checksum0)
289 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)")
290
291 result = self.client0.getPR(version, pkgarch, checksum0)
292 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)")
293
 294        # Now introduce new checksums on server1 for the same version
295
296 result = self.client1.getPR(version, pkgarch, checksum1)
297 self.assertEqual(result, "0.0", "getPR: first PR of a package which has a different checksum upstream should be '0.0'")
298
299 result = self.client1.getPR(version, pkgarch, checksum2)
300 self.assertEqual(result, "0.1", "getPR: second PR of a package that has a different checksum upstream should be '0.1'")
301
 302        # Now introduce checksums on server0 for the same version
303
304 result = self.client1.getPR(version, pkgarch, checksum1)
305 self.assertEqual(result, "0.2", "getPR: can't decrease for known PR")
306
307 result = self.client1.getPR(version, pkgarch, checksum2)
308 self.assertEqual(result, "0.3")
309
310 result = self.client1.max_package_pr(version, pkgarch)
311 self.assertEqual(result, "0.3")
312
313 result = self.client0.getPR(version, pkgarch, checksum3)
314 self.assertEqual(result, "0.3.0", "getPR: first PR of a package that doesn't exist upstream should be '0.3.0'")
315
316 result = self.client0.getPR(version, pkgarch, checksum4)
317 self.assertEqual(result, "0.3.1", "getPR: second PR of a package that doesn't exist upstream should be '0.3.1'")
318
319 result = self.client0.getPR(version, pkgarch, checksum3)
320 self.assertEqual(result, "0.3.2")
321
322 # More upstream updates
323 # Here, we assume no communication between server2 and server0. server2 only impacts server0
324 # after impacting server1
325
326 self.assertEqual(self.client2.getPR(version, pkgarch, checksum5), "1")
327 self.assertEqual(self.client1.getPR(version, pkgarch, checksum6), "1.0")
328 self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "1.1")
329 self.assertEqual(self.client0.getPR(version, pkgarch, checksum8), "1.1.0")
330 self.assertEqual(self.client0.getPR(version, pkgarch, checksum9), "1.1.1")
331
332 # "history" mode tests
333
334 self.assertEqual(self.client2.getPR(version, pkgarch, checksum0, True), "0")
335 self.assertEqual(self.client1.getPR(version, pkgarch, checksum2, True), "0.1")
336 self.assertEqual(self.client0.getPR(version, pkgarch, checksum3, True), "0.3.0")
337
338 # More "no history" mode tests
339
340 self.assertEqual(self.client2.getPR(version, pkgarch, checksum0), "2")
341 self.assertEqual(self.client1.getPR(version, pkgarch, checksum0), "2") # Same as upstream
342 self.assertEqual(self.client0.getPR(version, pkgarch, checksum0), "2") # Same as upstream
343 self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "3") # This could be surprising, but since the previous revision was "2", increasing it yields "3".
344 # We don't know how many upstream servers we have
345 # Start read-only server with server1 as upstream
346 self.server_ro = self.start_server("local-ro", self.shared_db, upstream=self.server1.address, read_only=True)
347 self.client_ro = self.start_client(self.server_ro.address)
348
349 self.assertTrue(self.client_ro.is_readonly(), "Database should be described as 'read-only'")
350
 351        # Checks on non-existent configurations
352 self.assertIsNone(self.client_ro.test_pr(version, pkgarch, checksumX))
353 self.assertFalse(self.client_ro.test_package("unknown", pkgarch))
354
355 # Look up existing configurations
356 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0), "3") # "no history" mode
357 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0, True), "0") # "history" mode
358 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3), "3")
359 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3, True), "0.3.0")
 360        self.assertEqual(self.client_ro.max_package_pr(version, pkgarch), "2") # expected, as "3" was never saved
361
 362        # Try to insert a new value. This one is known upstream.
363 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum7), "3")
364 # Try to insert a completely new value. As the max upstream value is already "3", it should be "3.0"
365 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum10), "3.0")
 366        # Same with another value which only exists in the upstream's upstream server.
 367        # This time, as the upstream server doesn't know it, it asks its own upstream server, so the value is a known one.
368 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum9), "3")
369
370class ScriptTests(unittest.TestCase):
371
372 def setUp(self):
373
374 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
375 self.addCleanup(self.temp_dir.cleanup)
376 self.dbfile = os.path.join(self.temp_dir.name, "prtest.sqlite3")
377
378 def test_1_start_bitbake_prserv(self):
379 try:
380 subprocess.check_call([BIN_DIR / "bitbake-prserv", "--start", "-f", self.dbfile])
381 except subprocess.CalledProcessError as e:
382 self.fail("Failed to start bitbake-prserv: %s" % e.returncode)
383
384 def test_2_stop_bitbake_prserv(self):
385 try:
386 subprocess.check_call([BIN_DIR / "bitbake-prserv", "--stop"])
387 except subprocess.CalledProcessError as e:
388 self.fail("Failed to stop bitbake-prserv: %s" % e.returncode)
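
The FunctionTests above pin down the dotted-revision arithmetic: increase_revision() bumps the last component, and the comparison helpers order revisions component-wise and numerically, so "1.10" sorts after "1.2". A self-contained sketch of those rules (illustrative only, not the prserv implementation itself):

    def increase_revision(rev):
        # Bump the last dotted component; int() raises ValueError for
        # "1.a", "1." and "", matching the tests above.
        parts = rev.split(".")
        parts[-1] = str(int(parts[-1]) + 1)
        return ".".join(parts)

    def revision_greater(a, b):
        # Component-wise numeric ordering: "1.10" > "1.2", "1.10.1" > "1.10"
        return [int(p) for p in a.split(".")] > [int(p) for p in b.split(".")]

    assert increase_revision("1.9") == "1.10"
    assert revision_greater("1.20", "1.3")
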
diff --git a/bitbake/lib/pyinotify.py b/bitbake/lib/pyinotify.py
index 6ae40a2d76..3c5dab0312 100644
--- a/bitbake/lib/pyinotify.py
+++ b/bitbake/lib/pyinotify.py
@@ -52,7 +52,6 @@ from collections import deque
52from datetime import datetime, timedelta 52from datetime import datetime, timedelta
53import time 53import time
54import re 54import re
55import asyncore
56import glob 55import glob
57import locale 56import locale
58import subprocess 57import subprocess
@@ -596,14 +595,24 @@ class _ProcessEvent:
596 @type event: Event object 595 @type event: Event object
597 @return: By convention when used from the ProcessEvent class: 596 @return: By convention when used from the ProcessEvent class:
598 - Returning False or None (default value) means keep on 597 - Returning False or None (default value) means keep on
599 executing next chained functors (see chain.py example). 598 executing next chained functors (see chain.py example).
600 - Returning True instead means do not execute next 599 - Returning True instead means do not execute next
601 processing functions. 600 processing functions.
602 @rtype: bool 601 @rtype: bool
603 @raise ProcessEventError: Event object undispatchable, 602 @raise ProcessEventError: Event object undispatchable,
604 unknown event. 603 unknown event.
605 """ 604 """
606 stripped_mask = event.mask - (event.mask & IN_ISDIR) 605 stripped_mask = event.mask & ~IN_ISDIR
 606        # Bitbake hack - we see event masks of 0x6, i.e., IN_MODIFY | IN_ATTRIB.
 607        # The kernel inotify code can set more than one of the bits in the mask;
 608        # fsnotify_change() in linux/fsnotify.h is quite clear that IN_ATTRIB,
 609        # IN_MODIFY and IN_ACCESS can arrive together.
 610        # This breaks the code below, which assumes only one mask bit is ever
 611        # set in an event. We don't care about attrib or access in bitbake, so
612 # drop those.
613 if stripped_mask & IN_MODIFY:
614 stripped_mask &= ~(IN_ATTRIB | IN_ACCESS)
615
607 maskname = EventsCodes.ALL_VALUES.get(stripped_mask) 616 maskname = EventsCodes.ALL_VALUES.get(stripped_mask)
608 if maskname is None: 617 if maskname is None:
609 raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask) 618 raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask)
@@ -1475,35 +1484,6 @@ class ThreadedNotifier(threading.Thread, Notifier):
1475 self.loop() 1484 self.loop()
1476 1485
1477 1486
1478class AsyncNotifier(asyncore.file_dispatcher, Notifier):
1479 """
1480 This notifier inherits from asyncore.file_dispatcher in order to be able to
1481 use pyinotify along with the asyncore framework.
1482
1483 """
1484 def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
1485 threshold=0, timeout=None, channel_map=None):
1486 """
1487 Initializes the async notifier. The only additional parameter is
1488 'channel_map' which is the optional asyncore private map. See
1489 Notifier class for the meaning of the others parameters.
1490
1491 """
1492 Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
1493 threshold, timeout)
1494 asyncore.file_dispatcher.__init__(self, self._fd, channel_map)
1495
1496 def handle_read(self):
1497 """
1498 When asyncore tells us we can read from the fd, we proceed processing
1499 events. This method can be overridden for handling a notification
1500 differently.
1501
1502 """
1503 self.read_events()
1504 self.process_events()
1505
1506
1507class TornadoAsyncNotifier(Notifier): 1487class TornadoAsyncNotifier(Notifier):
1508 """ 1488 """
1509 Tornado ioloop adapter. 1489 Tornado ioloop adapter.
diff --git a/bitbake/lib/toaster/bldcollector/urls.py b/bitbake/lib/toaster/bldcollector/urls.py
index efd67a81a5..3c34070351 100644
--- a/bitbake/lib/toaster/bldcollector/urls.py
+++ b/bitbake/lib/toaster/bldcollector/urls.py
@@ -6,7 +6,7 @@
6# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
7# 7#
8 8
9from django.conf.urls import url 9from django.urls import re_path as url
10 10
11import bldcollector.views 11import bldcollector.views
12 12
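
This one-line import change keeps the existing url(...) call sites working: django.conf.urls.url was deprecated in Django 3.1 and removed in 4.0, and re_path is its drop-in regex-based replacement. For illustration (the route regex below is hypothetical, as the actual patterns are not part of this hunk):

    from django.urls import re_path as url

    import bldcollector.views

    urlpatterns = [
        # Same call shape as before the migration; only the import changed.
        url(r'^eventfile$', bldcollector.views.eventfile),
    ]
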
diff --git a/bitbake/lib/toaster/bldcollector/views.py b/bitbake/lib/toaster/bldcollector/views.py
index 04cd8b3dd4..bdf38ae6e8 100644
--- a/bitbake/lib/toaster/bldcollector/views.py
+++ b/bitbake/lib/toaster/bldcollector/views.py
@@ -14,8 +14,11 @@ import subprocess
14import toastermain 14import toastermain
15from django.views.decorators.csrf import csrf_exempt 15from django.views.decorators.csrf import csrf_exempt
16 16
17from toastermain.logs import log_view_mixin
18
17 19
18@csrf_exempt 20@csrf_exempt
21@log_view_mixin
19def eventfile(request): 22def eventfile(request):
20 """ Receives a file by POST, and runs toaster-eventreply on this file """ 23 """ Receives a file by POST, and runs toaster-eventreply on this file """
21 if request.method != "POST": 24 if request.method != "POST":
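
log_view_mixin is applied here as an ordinary view decorator. Its implementation lives in toastermain.logs and is not part of this hunk; a hypothetical minimal equivalent, just to show the shape:

    import functools
    import logging

    def log_view_mixin(view):
        # Hypothetical sketch: log each request handled by the wrapped view.
        @functools.wraps(view)
        def wrapper(request, *args, **kwargs):
            logging.getLogger("toaster").info("%s %s", request.method, request.path)
            return view(request, *args, **kwargs)
        return wrapper
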
diff --git a/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py b/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
index 75674ccbf1..577e765f11 100644
--- a/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
+++ b/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
@@ -200,7 +200,7 @@ class LocalhostBEController(BuildEnvironmentController):
200 localdirpath = os.path.join(localdirname, dirpath) 200 localdirpath = os.path.join(localdirname, dirpath)
201 logger.debug("localhostbecontroller: localdirpath expects '%s'" % localdirpath) 201 logger.debug("localhostbecontroller: localdirpath expects '%s'" % localdirpath)
202 if not os.path.exists(localdirpath): 202 if not os.path.exists(localdirpath):
203 raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit)) 203 raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Exiting." % (localdirpath, giturl, commit))
204 204
205 if name != "bitbake": 205 if name != "bitbake":
206 layerlist.append("%03d:%s" % (index,localdirpath.rstrip("/"))) 206 layerlist.append("%03d:%s" % (index,localdirpath.rstrip("/")))
@@ -467,7 +467,7 @@ class LocalhostBEController(BuildEnvironmentController):
467 logger.debug("localhostbecontroller: waiting for bblock content to appear") 467 logger.debug("localhostbecontroller: waiting for bblock content to appear")
468 time.sleep(1) 468 time.sleep(1)
469 else: 469 else:
470 raise BuildSetupException("Cannot find bitbake server lock file '%s'. Aborting." % bblock) 470 raise BuildSetupException("Cannot find bitbake server lock file '%s'. Exiting." % bblock)
471 471
472 with open(bblock) as fplock: 472 with open(bblock) as fplock:
473 for line in fplock: 473 for line in fplock:
diff --git a/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py b/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
index 19f659ec41..834e32b36f 100644
--- a/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
+++ b/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
@@ -180,6 +180,77 @@ class Command(BaseCommand):
180 except Exception as e: 180 except Exception as e:
181 logger.warning("runbuilds: schedule exception %s" % str(e)) 181 logger.warning("runbuilds: schedule exception %s" % str(e))
182 182
 183    # Test to see if a build died prematurely due to a bitbake crash
184 def check_dead_builds(self):
185 do_cleanup = False
186 try:
187 for br in BuildRequest.objects.filter(state=BuildRequest.REQ_INPROGRESS):
188 # Get the build directory
189 if br.project.builddir:
190 builddir = br.project.builddir
191 else:
192 builddir = '%s-toaster-%d' % (br.environment.builddir,br.project.id)
193 # Check log to see if there is a recent traceback
194 toaster_ui_log = os.path.join(builddir, 'toaster_ui.log')
195 test_file = os.path.join(builddir, '._toaster_check.txt')
 196            os.system("tail -n 50 %s > %s" % (toaster_ui_log, test_file))
197 traceback_text = ''
198 is_traceback = False
199 with open(test_file,'r') as test_file_fd:
200 test_file_tail = test_file_fd.readlines()
201 for line in test_file_tail:
202 if line.startswith('Traceback (most recent call last):'):
203 traceback_text = line
204 is_traceback = True
205 elif line.startswith('NOTE: ToasterUI waiting for events'):
206 # Ignore any traceback before new build start
207 traceback_text = ''
208 is_traceback = False
209 elif line.startswith('Note: Toaster traceback auto-stop'):
 211                        # Ignore any traceback already caught by a previous check
211 traceback_text = ''
212 is_traceback = False
213 elif is_traceback:
214 traceback_text += line
215 # Test the results
216 is_stop = False
217 if is_traceback:
218 # Found a traceback
219 errtype = 'Bitbake crash'
220 errmsg = 'Bitbake crash\n' + traceback_text
221 state = BuildRequest.REQ_FAILED
222 # Clean up bitbake files
223 bitbake_lock = os.path.join(builddir, 'bitbake.lock')
224 if os.path.isfile(bitbake_lock):
225 os.remove(bitbake_lock)
226 bitbake_sock = os.path.join(builddir, 'bitbake.sock')
227 if os.path.isfile(bitbake_sock):
228 os.remove(bitbake_sock)
229 if os.path.isfile(test_file):
230 os.remove(test_file)
231 # Add note to ignore this traceback on next check
232 os.system('echo "Note: Toaster traceback auto-stop" >> %s' % toaster_ui_log)
233 is_stop = True
234 # Add more tests here
235 #elif ...
236 # Stop the build request?
237 if is_stop:
238 brerror = BRError(
239 req = br,
240 errtype = errtype,
241 errmsg = errmsg,
242 traceback = traceback_text,
243 )
244 brerror.save()
245 br.state = state
246 br.save()
247 do_cleanup = True
248 # Do cleanup
249 if do_cleanup:
250 self.cleanup()
251 except Exception as e:
252 logger.error("runbuilds: Error in check_dead_builds %s" % e)
253
183 def handle(self, **options): 254 def handle(self, **options):
184 pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), 255 pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."),
185 ".runbuilds.pid") 256 ".runbuilds.pid")
@@ -187,10 +258,18 @@ class Command(BaseCommand):
187 with open(pidfile_path, 'w') as pidfile: 258 with open(pidfile_path, 'w') as pidfile:
188 pidfile.write("%s" % os.getpid()) 259 pidfile.write("%s" % os.getpid())
189 260
261 # Clean up any stale/failed builds from previous Toaster run
190 self.runbuild() 262 self.runbuild()
191 263
192 signal.signal(signal.SIGUSR1, lambda sig, frame: None) 264 signal.signal(signal.SIGUSR1, lambda sig, frame: None)
193 265
194 while True: 266 while True:
195 signal.pause() 267 sigset = signal.sigtimedwait([signal.SIGUSR1], 5)
196 self.runbuild() 268 if sigset:
269 for sig in sigset:
270 # Consume each captured pending event
271 self.runbuild()
272 else:
273 # Check for build exceptions
274 self.check_dead_builds()
275
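
The loop change swaps a blocking signal.pause() for signal.sigtimedwait() with a five-second timeout, so runbuilds can run its dead-build check whenever no SIGUSR1 arrives. The pattern in isolation (the two work functions are hypothetical placeholders):

    import signal

    signal.signal(signal.SIGUSR1, lambda sig, frame: None)
    while True:
        # Returns a siginfo object when SIGUSR1 is pending, None on timeout.
        if signal.sigtimedwait([signal.SIGUSR1], 5):
            run_pending_builds()      # hypothetical: service the request queue
        else:
            check_for_dead_builds()   # hypothetical: periodic crash sweep
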
diff --git a/bitbake/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py b/bitbake/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py
new file mode 100644
index 0000000000..45b477d02c
--- /dev/null
+++ b/bitbake/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py
@@ -0,0 +1,48 @@
1# Generated by Django 3.2.12 on 2022-03-06 03:28
2
3from django.db import migrations, models
4
5
6class Migration(migrations.Migration):
7
8 dependencies = [
9 ('bldcontrol', '0007_brlayers_optional_gitinfo'),
10 ]
11
12 operations = [
13 migrations.AlterField(
14 model_name='brbitbake',
15 name='id',
16 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
17 ),
18 migrations.AlterField(
19 model_name='brerror',
20 name='id',
21 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
22 ),
23 migrations.AlterField(
24 model_name='brlayer',
25 name='id',
26 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
27 ),
28 migrations.AlterField(
29 model_name='brtarget',
30 name='id',
31 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
32 ),
33 migrations.AlterField(
34 model_name='brvariable',
35 name='id',
36 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
37 ),
38 migrations.AlterField(
39 model_name='buildenvironment',
40 name='id',
41 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
42 ),
43 migrations.AlterField(
44 model_name='buildrequest',
45 name='id',
46 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
47 ),
48 ]
diff --git a/bitbake/lib/toaster/bldcontrol/models.py b/bitbake/lib/toaster/bldcontrol/models.py
index c2f302da24..42750e7180 100644
--- a/bitbake/lib/toaster/bldcontrol/models.py
+++ b/bitbake/lib/toaster/bldcontrol/models.py
@@ -4,7 +4,7 @@
4 4
5from __future__ import unicode_literals 5from __future__ import unicode_literals
6from django.db import models 6from django.db import models
7from django.utils.encoding import force_text 7from django.utils.encoding import force_str
8from orm.models import Project, Build, Layer_Version 8from orm.models import Project, Build, Layer_Version
9 9
10import logging 10import logging
@@ -124,7 +124,7 @@ class BuildRequest(models.Model):
124 return self.brvariable_set.get(name="MACHINE").value 124 return self.brvariable_set.get(name="MACHINE").value
125 125
126 def __str__(self): 126 def __str__(self):
127 return force_text('%s %s' % (self.project, self.get_state_display())) 127 return force_str('%s %s' % (self.project, self.get_state_display()))
128 128
129# These tables specify the settings for running an actual build. 129# These tables specify the settings for running an actual build.
130# They MUST be kept in sync with the tables in orm.models.Project* 130# They MUST be kept in sync with the tables in orm.models.Project*
diff --git a/bitbake/lib/toaster/logs/.gitignore b/bitbake/lib/toaster/logs/.gitignore
new file mode 100644
index 0000000000..e5ebf25a49
--- /dev/null
+++ b/bitbake/lib/toaster/logs/.gitignore
@@ -0,0 +1 @@
*.log*
diff --git a/bitbake/lib/toaster/manage.py b/bitbake/lib/toaster/manage.py
index ae32619d12..f8de49c264 100755
--- a/bitbake/lib/toaster/manage.py
+++ b/bitbake/lib/toaster/manage.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright BitBake Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/bitbake/lib/toaster/orm/fixtures/README b/bitbake/lib/toaster/orm/fixtures/README
index 1b1c660aac..7cd745e26b 100644
--- a/bitbake/lib/toaster/orm/fixtures/README
+++ b/bitbake/lib/toaster/orm/fixtures/README
@@ -27,4 +27,4 @@ Data can be provided in XML, JSON and if installed YAML formats.
27 27
28Use the django management command manage.py loaddata <your fixture file> 28Use the django management command manage.py loaddata <your fixture file>
29For further information see the Django command documentation at: 29For further information see the Django command documentation at:
30https://docs.djangoproject.com/en/1.8/ref/django-admin/#django-admin-loaddata 30https://docs.djangoproject.com/en/3.2/ref/django-admin/#django-admin-loaddata
diff --git a/bitbake/lib/toaster/orm/fixtures/check_fixtures.py b/bitbake/lib/toaster/orm/fixtures/check_fixtures.py
new file mode 100755
index 0000000000..ae3722e0f6
--- /dev/null
+++ b/bitbake/lib/toaster/orm/fixtures/check_fixtures.py
@@ -0,0 +1,38 @@
1#!/usr/bin/env python3
2#
3# Copyright (C) 2025 Linux Foundation
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import json
8import urllib.request
9
10import gen_fixtures as fixtures
11
12RELEASE_URL = "https://dashboard.yoctoproject.org/releases.json"
13
14with urllib.request.urlopen(RELEASE_URL) as response:
15 if response.getcode() == 200:
16 data = response.read().decode("utf-8")
17 releases = json.loads(data)
18 else:
 19        print("Couldn't access %s: %s" % (RELEASE_URL, response.getcode()))
20 exit(1)
21
22
 23# grab the recent release branches and add master/head, so we can ignore old branches
24active_releases = [
25 e["release_codename"].lower() for e in releases if e["series"] == "current"
26]
27active_releases.append("master")
28active_releases.append("head")
29
30fixtures_releases = [x[0].lower() for x in fixtures.current_releases]
31
32if set(active_releases) != set(fixtures_releases):
33 print("WARNING: Active releases don't match toaster configured releases, the difference is: %s" % set(active_releases).difference(set(fixtures_releases)))
34 print("Active releases: %s" % sorted(active_releases))
35 print("Toaster configured releases: %s" % sorted(fixtures_releases))
36else:
37 print("Success, configuration matches")
38
diff --git a/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py
new file mode 100755
index 0000000000..6201f679b9
--- /dev/null
+++ b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py
@@ -0,0 +1,451 @@
1#!/usr/bin/env python3
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Generate Toaster Fixtures for 'poky.xml' and 'oe-core.xml'
6#
7# Copyright (C) 2022 Wind River Systems
8# SPDX-License-Identifier: GPL-2.0-only
9#
10# Edit the 'current_releases' table for each new release cycle
11#
 12# Usage: ./gen_fixtures.py --all
13#
14
15import os
16import sys
17import argparse
18
19verbose = False
20
21####################################
22# Releases
23#
24# https://wiki.yoctoproject.org/wiki/Releases
25#
26# NOTE: for the current releases table, it helps to keep continuing releases
27# in the same table positions since this minimizes the patch diff for review.
28# The order of the table does not matter since Toaster presents them sorted.
29#
30# Traditionally, the two most current releases are included in addition to the
31# 'master' branch and the local installation's 'HEAD'.
32# It is also policy to include all active LTS releases.
33#
34
35# [Codename, Yocto Project Version, Release Date, Current Version, Support Level, Poky Version, BitBake branch]
36current_releases = [
37 # Release slot #1
38 ['Scarthgap','5.0','April 2024','5.0.0 (April 2024)','Long Term Support (until April 2028)','','2.8'],
39 # Release slot #2 'local'
40 ['HEAD','HEAD','','Local Yocto Project','HEAD','','HEAD'],
41 # Release slot #3 'master'
42 ['Master','master','','Yocto Project master','master','','master'],
43 # Release slot #4
 44    ['Whinlatter','5.3','October 2025','5.3.0 (October 2025)','Support for 7 months (until May 2026)','','2.14'],
45 ['Walnascar','5.2','April 2025','5.2.0 (April 2025)','Support for 7 months (until October 2025)','','2.12'],
46 #['Styhead','5.1','November 2024','5.1.0 (November 2024)','Support for 7 months (until May 2025)','','2.10'],
47 #['Nanbield','4.3','November 2023','4.3.0 (November 2023)','Support for 7 months (until May 2024)','','2.6'],
48 #['Mickledore','4.2','April 2023','4.2.0 (April 2023)','Support for 7 months (until October 2023)','','2.4'],
49 #['Langdale','4.1','October 2022','4.1.2 (January 2023)','Support for 7 months (until May 2023)','','2.2'],
50 ['Kirkstone','4.0','April 2022','4.0.8 (March 2023)','Stable - Long Term Support (until Apr. 2024)','','2.0'],
51 #['Honister','3.4','October 2021','3.4.2 (February 2022)','Support for 7 months (until May 2022)','26.0','1.52'],
52 #['Hardknott','3.3','April 2021','3.3.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','25.0','1.50'],
53 #['Gatesgarth','3.2','Oct 2020','3.2.4 (May 2021)','EOL','24.0','1.48'],
54 #['Dunfell','3.1','April 2020','3.1.23 (February 2023)','Stable - Long Term Support (until Apr. 2024)','23.0','1.46'],
55]
56
57default_poky_layers = [
58 'openembedded-core',
59 'meta-poky',
60 'meta-yocto-bsp',
61]
62
63default_oe_core_layers = [
64 'openembedded-core',
65]
66
67####################################
68# Templates
69
70prolog_template = '''\
71<?xml version="1.0" encoding="utf-8"?>
72<django-objects version="1.0">
73 <!-- Set the project default value for DISTRO -->
74 <object model="orm.toastersetting" pk="1">
75 <field type="CharField" name="name">DEFCONF_DISTRO</field>
76 <field type="CharField" name="value">{{distro}}</field>
77 </object>
78'''
79
80#<!-- Bitbake versions which correspond to the metadata release -->')
81bitbakeversion_poky_template = '''\
82 <object model="orm.bitbakeversion" pk="{{bitbake_id}}">
83 <field type="CharField" name="name">{{name}}</field>
84 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
85 <field type="CharField" name="branch">{{branch}}</field>
86 <field type="CharField" name="dirpath">bitbake</field>
87 </object>
88'''
89bitbakeversion_oecore_template = '''\
90 <object model="orm.bitbakeversion" pk="{{bitbake_id}}">
91 <field type="CharField" name="name">{{name}}</field>
92 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
93 <field type="CharField" name="branch">{{bitbakeversion}}</field>
94 </object>
95'''
96
97# <!-- Releases available -->
98releases_available_template = '''\
99 <object model="orm.release" pk="{{ra_count}}">
100 <field type="CharField" name="name">{{name}}</field>
101 <field type="CharField" name="description">{{description}}</field>
102 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">{{ra_count}}</field>
103 <field type="CharField" name="branch_name">{{release}}</field>
104 <field type="TextField" name="helptext">Toaster will run your builds {{help_source}}.</field>
105 </object>
106'''
107
108# <!-- Default project layers for each release -->
109default_layers_template = '''\
110 <object model="orm.releasedefaultlayer" pk="{{rdl_count}}">
111 <field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
112 <field type="CharField" name="layer_name">{{layer}}</field>
113 </object>
114'''
115
116default_layers_preface = '''\
117 <!-- Default layers provided by poky
118 openembedded-core
119 meta-poky
120 meta-yocto-bsp
121 -->
122'''
123
124layer_poky_template = '''\
125 <object model="orm.layer" pk="{{layer_id}}">
126 <field type="CharField" name="name">{{layer}}</field>
127 <field type="CharField" name="layer_index_url"></field>
128 <field type="CharField" name="vcs_url">{{vcs_url}}</field>
129 <field type="CharField" name="vcs_web_url">{{vcs_web_url}}</field>
130 <field type="CharField" name="vcs_web_tree_base_url">{{vcs_web_tree_base_url}}</field>
131 <field type="CharField" name="vcs_web_file_base_url">{{vcs_web_file_base_url}}</field>
132 </object>
133'''
134
135layer_oe_core_template = '''\
136 <object model="orm.layer" pk="{{layer_id}}">
137 <field type="CharField" name="name">{{layer}}</field>
138 <field type="CharField" name="vcs_url">{{vcs_url}}</field>
139 <field type="CharField" name="vcs_web_url">{{vcs_web_url}}</field>
140 <field type="CharField" name="vcs_web_tree_base_url">{{vcs_web_tree_base_url}}</field>
141 <field type="CharField" name="vcs_web_file_base_url">{{vcs_web_file_base_url}}</field>
142 </object>
143'''
144
145layer_version_template = '''\
146 <object model="orm.layer_version" pk="{{lv_count}}">
147 <field rel="ManyToOneRel" to="orm.layer" name="layer">{{layer_id}}</field>
148 <field type="IntegerField" name="layer_source">0</field>
149 <field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
150 <field type="CharField" name="branch">{{branch}}</field>
151 <field type="CharField" name="dirpath">{{dirpath}}</field>
152 </object>
153'''
154
155layer_version_HEAD_template = '''\
156 <object model="orm.layer_version" pk="{{lv_count}}">
157 <field rel="ManyToOneRel" to="orm.layer" name="layer">{{layer_id}}</field>
158 <field type="IntegerField" name="layer_source">0</field>
159 <field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
160 <field type="CharField" name="branch">{{branch}}</field>
161 <field type="CharField" name="commit">{{commit}}</field>
162 <field type="CharField" name="dirpath">{{dirpath}}</field>
163 </object>
164'''
165
166layer_version_oe_core_template = '''\
167 <object model="orm.layer_version" pk="1">
168 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
169 <field rel="ManyToOneRel" to="orm.release" name="release">2</field>
170 <field type="CharField" name="local_path">OE-CORE-LAYER-DIR</field>
171 <field type="CharField" name="branch">HEAD</field>
172 <field type="CharField" name="dirpath">meta</field>
173 <field type="IntegerField" name="layer_source">0</field>
174 </object>
175'''
176
177epilog_template = '''\
178</django-objects>
179'''
180
181#################################
182# Helper Routines
183#
184
185def print_str(str,fd):
186 # Avoid extra newline at end
187 if str and (str[-1] == '\n'):
188 str = str[0:-1]
189 print(str,file=fd)
190
191def print_template(template,params,fd):
192 for line in template.split('\n'):
193 p = line.find('{{')
 194    while p >= 0:
195 q = line.find('}}')
196 key = line[p+2:q]
197 if key in params:
198 line = line[0:p] + params[key] + line[q+2:]
199 else:
200 line = line[0:p] + '?' + key + '?' + line[q+2:]
201 p = line.find('{{')
202 if line:
203 print(line,file=fd)
204
205#################################
206# Generate poky.xml
207#
208
209def generate_poky():
210 fd = open('poky.xml','w')
211
212 params = {}
213 params['distro'] = 'poky'
214 print_template(prolog_template,params,fd)
215 print_str('',fd)
216
217 print_str(' <!-- Bitbake versions which correspond to the metadata release -->',fd)
218 for i,release in enumerate(current_releases):
219 params = {}
220 params['release'] = release[0]
221 params['Release'] = release[0]
222 params['release_version'] = release[1]
 223        if not (params['release'] in ('HEAD',)): # 'master',
224 params['release'] = params['release'][0].lower() + params['release'][1:]
225 params['name'] = params['release']
226 params['bitbake_id'] = str(i+1)
227 params['branch'] = params['release']
228 print_template(bitbakeversion_poky_template,params,fd)
229 print_str('',fd)
230
231 print_str('',fd)
232 print_str(' <!-- Releases available -->',fd)
233 for i,release in enumerate(current_releases):
234 params = {}
235 params['release'] = release[0]
236 params['Release'] = release[0]
237 params['release_version'] = release[1]
 238        if not (params['release'] in ('HEAD',)): #'master',
239 params['release'] = params['release'][0].lower() + params['release'][1:]
240 params['h_release'] = '?h={{release}}'
241 params['name'] = params['release']
242 params['ra_count'] = str(i+1)
243 params['branch'] = params['release']
244
245 if 'HEAD' == params['release']:
246 params['help_source'] = 'with the version of the Yocto Project you have cloned or downloaded to your computer'
247 params['description'] = 'Local Yocto Project'
248 params['name'] = 'local'
249 else:
250 params['help_source'] = 'using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/{{h_release}}"&gt;Yocto Project {{Release}} branch&lt;/a&gt;'
251 params['description'] = 'Yocto Project {{release_version}} "{{Release}}"'
252 if 'master' == params['release']:
253 params['h_release'] = ''
254 params['description'] = 'Yocto Project master'
255
256 print_template(releases_available_template,params,fd)
257 print_str('',fd)
258
259 print_str(' <!-- Default project layers for each release -->',fd)
260 rdl_count = 1
261 for i,release in enumerate(current_releases):
262 for j,layer in enumerate(default_poky_layers):
263 params = {}
264 params['layer'] = layer
265 params['release'] = release[0]
266 params['Release'] = release[0]
267 params['release_version'] = release[1]
268 if not (params['release'] in ('master','HEAD')):
269 params['release'] = params['release'][0].lower() + params['release'][1:]
270 params['release_id'] = str(i+1)
271 params['rdl_count'] = str(rdl_count)
272 params['branch'] = params['release']
273 print_template(default_layers_template,params,fd)
274 rdl_count += 1
275 print_str('',fd)
276
277 print_str(default_layers_preface,fd)
278 lv_count = 1
279 for i,layer in enumerate(default_poky_layers):
280 params = {}
281 params['layer'] = layer
282 params['layer_id'] = str(i+1)
283 params['vcs_url'] = 'git://git.yoctoproject.org/poky'
284 params['vcs_web_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky'
285 params['vcs_web_tree_base_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%'
286 params['vcs_web_file_base_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%'
287
288 if i:
289 print_str('',fd)
290 print_template(layer_poky_template,params,fd)
291 for j,release in enumerate(current_releases):
292 params['release'] = release[0]
293 params['Release'] = release[0]
294 params['release_version'] = release[1]
295 if not (params['release'] in ('master','HEAD')):
296 params['release'] = params['release'][0].lower() + params['release'][1:]
297 params['release_id'] = str(j+1)
298 params['lv_count'] = str(lv_count)
299 params['branch'] = params['release']
300 params['commit'] = params['release']
301
302 params['dirpath'] = params['layer']
 303            if params['layer'] in ('openembedded-core',): #'openembedded-core',
304 params['dirpath'] = 'meta'
305
306 if 'HEAD' == params['release']:
307 print_template(layer_version_HEAD_template,params,fd)
308 else:
309 print_template(layer_version_template,params,fd)
310 lv_count += 1
311
312 print_str(epilog_template,fd)
313 fd.close()
314
315#################################
316# Generate oe-core.xml
317#
318
319def generate_oe_core():
320 fd = open('oe-core.xml','w')
321
322 params = {}
323 params['distro'] = 'nodistro'
324 print_template(prolog_template,params,fd)
325 print_str('',fd)
326
327 print_str(' <!-- Bitbake versions which correspond to the metadata release -->',fd)
328 for i,release in enumerate(current_releases):
329 params = {}
330 params['release'] = release[0]
331 params['Release'] = release[0]
332 params['bitbakeversion'] = release[6]
333 params['release_version'] = release[1]
 334        if not (params['release'] in ('HEAD',)): # 'master',
335 params['release'] = params['release'][0].lower() + params['release'][1:]
336 params['name'] = params['release']
337 params['bitbake_id'] = str(i+1)
338 params['branch'] = params['release']
339 print_template(bitbakeversion_oecore_template,params,fd)
340 print_str('',fd)
341
342 print_str(' <!-- Releases available -->',fd)
343 for i,release in enumerate(current_releases):
344 params = {}
345 params['release'] = release[0]
346 params['Release'] = release[0]
347 params['release_version'] = release[1]
 348        if not (params['release'] in ('HEAD',)): #'master',
349 params['release'] = params['release'][0].lower() + params['release'][1:]
350 params['h_release'] = '?h={{release}}'
351 params['name'] = params['release']
352 params['ra_count'] = str(i+1)
353 params['branch'] = params['release']
354
355 if 'HEAD' == params['release']:
356 params['help_source'] = 'with the version of OpenEmbedded that you have cloned or downloaded to your computer'
357 params['description'] = 'Local Openembedded'
358 params['name'] = 'local'
359 else:
360 params['help_source'] = 'using the tip of the &lt;a href=\\"https://cgit.openembedded.org/openembedded-core/log/{{h_release}}\\"&gt;OpenEmbedded {{Release}}&lt;/a&gt; branch'
361 params['description'] = 'Openembedded {{Release}}'
362 if 'master' == params['release']:
363 params['h_release'] = ''
364 params['description'] = 'OpenEmbedded core master'
365 params['Release'] = params['release']
366
367 print_template(releases_available_template,params,fd)
368 print_str('',fd)
369
370 print_str(' <!-- Default layers for each release -->',fd)
371 rdl_count = 1
372 for i,release in enumerate(current_releases):
373 for j,layer in enumerate(default_oe_core_layers):
374 params = {}
375 params['layer'] = layer
376 params['release'] = release[0]
377 params['Release'] = release[0]
378 params['release_version'] = release[1]
379 if not (params['release'] in ('master','HEAD')):
380 params['release'] = params['release'][0].lower() + params['release'][1:]
381 params['release_id'] = str(i+1)
382 params['rdl_count'] = str(rdl_count)
383 params['branch'] = params['release']
384 print_template(default_layers_template,params,fd)
385 rdl_count += 1
386 print_str('',fd)
387
388 print_str('',fd)
389 print_str(' <!-- Layer for the Local release -->',fd)
390 lv_count = 1
391 for i,layer in enumerate(default_oe_core_layers):
392 params = {}
393 params['layer'] = layer
394 params['layer_id'] = str(i+1)
395 params['vcs_url'] = 'git://git.openembedded.org/openembedded-core'
396 params['vcs_web_url'] = 'https://cgit.openembedded.org/openembedded-core'
397 params['vcs_web_tree_base_url'] = 'https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%'
398 params['vcs_web_file_base_url'] = 'https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%'
399 if i:
400 print_str('',fd)
401 print_template(layer_oe_core_template,params,fd)
402
403 print_template(layer_version_oe_core_template,params,fd)
404 print_str('',fd)
405
406 print_str(epilog_template,fd)
407 fd.close()
408
409#################################
410# Help
411#
412
413def list_releases():
414 print("Release ReleaseVer BitbakeVer Support Level")
415 print("========== =========== ========== ==============================================")
416 for release in current_releases:
417 print("%10s %10s %11s %s" % (release[0],release[1],release[6],release[4]))
418
419#################################
420# main
421#
422
423def main(argv):
424 global verbose
425
 426    parser = argparse.ArgumentParser(description='gen_fixtures.py: generate the Toaster fixture files from the release table')
427 parser.add_argument('--poky', '-p', action='store_const', const='poky', dest='command', help='Generate the poky.xml file')
428 parser.add_argument('--oe-core', '-o', action='store_const', const='oe_core', dest='command', help='Generate the oe-core.xml file')
429 parser.add_argument('--all', '-a', action='store_const', const='all', dest='command', help='Generate all fixture files')
430 parser.add_argument('--list', '-l', action='store_const', const='list', dest='command', help='List the release table')
431 parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Enable verbose debugging output')
432 args = parser.parse_args()
433
434 verbose = args.verbose
435 if 'poky' == args.command:
436 generate_poky()
437 elif 'oe_core' == args.command:
438 generate_oe_core()
 439    elif 'all' == args.command:
 440        generate_poky()
 441        generate_oe_core()
 444    elif 'list' == args.command:
 445        list_releases()
446
447 else:
448 print("No command for 'gen_fixtures.py' selected")
449
450if __name__ == '__main__':
451 main(sys.argv[1:])
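
print_template() above implements the whole templating scheme: {{key}} placeholders are filled from a params dict, and unknown keys are rendered as ?key? so mistakes stay visible in the generated XML. An equivalent self-contained sketch of that contract:

    import re

    def render(template, params):
        # Same contract as gen_fixtures.print_template(): substitute
        # {{key}} from params, turn unknown keys into ?key?.
        return re.sub(r"\{\{(\w+)\}\}",
                      lambda m: params.get(m.group(1), "?%s?" % m.group(1)),
                      template)

    print(render('<field name="branch">{{branch}}</field>',
                 {"branch": "scarthgap"}))
    # -> <field name="branch">scarthgap</field>
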
diff --git a/bitbake/lib/toaster/orm/fixtures/oe-core.xml b/bitbake/lib/toaster/orm/fixtures/oe-core.xml
index 026d94869a..264231d139 100644
--- a/bitbake/lib/toaster/orm/fixtures/oe-core.xml
+++ b/bitbake/lib/toaster/orm/fixtures/oe-core.xml
@@ -8,9 +8,9 @@
8 8
9 <!-- Bitbake versions which correspond to the metadata release --> 9 <!-- Bitbake versions which correspond to the metadata release -->
10 <object model="orm.bitbakeversion" pk="1"> 10 <object model="orm.bitbakeversion" pk="1">
11 <field type="CharField" name="name">dunfell</field> 11 <field type="CharField" name="name">scarthgap</field>
12 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> 12 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
13 <field type="CharField" name="branch">1.46</field> 13 <field type="CharField" name="branch">2.8</field>
14 </object> 14 </object>
15 <object model="orm.bitbakeversion" pk="2"> 15 <object model="orm.bitbakeversion" pk="2">
16 <field type="CharField" name="name">HEAD</field> 16 <field type="CharField" name="name">HEAD</field>
@@ -23,18 +23,33 @@
23 <field type="CharField" name="branch">master</field> 23 <field type="CharField" name="branch">master</field>
24 </object> 24 </object>
25 <object model="orm.bitbakeversion" pk="4"> 25 <object model="orm.bitbakeversion" pk="4">
26 <field type="CharField" name="name">gatesgarth</field> 26 <field type="CharField" name="name">whinlatter</field>
27 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> 27 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
28 <field type="CharField" name="branch">1.48</field> 28 <field type="CharField" name="branch">2.14</field>
29 </object>
30 <object model="orm.bitbakeversion" pk="5">
31 <field type="CharField" name="name">walnascar</field>
32 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
33 <field type="CharField" name="branch">2.12</field>
34 </object>
35 <object model="orm.bitbakeversion" pk="6">
36 <field type="CharField" name="name">styhead</field>
37 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
38 <field type="CharField" name="branch">2.10</field>
39 </object>
40 <object model="orm.bitbakeversion" pk="7">
41 <field type="CharField" name="name">kirkstone</field>
42 <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
43 <field type="CharField" name="branch">2.0</field>
29 </object> 44 </object>
30 45
31 <!-- Releases available --> 46 <!-- Releases available -->
32 <object model="orm.release" pk="1"> 47 <object model="orm.release" pk="1">
33 <field type="CharField" name="name">dunfell</field> 48 <field type="CharField" name="name">scarthgap</field>
34 <field type="CharField" name="description">Openembedded Dunfell</field> 49 <field type="CharField" name="description">Openembedded Scarthgap</field>
35 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> 50 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
36 <field type="CharField" name="branch_name">dunfell</field> 51 <field type="CharField" name="branch_name">scarthgap</field>
37 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=dunfell\"&gt;OpenEmbedded Dunfell&lt;/a&gt; branch.</field> 52 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=scarthgap\"&gt;OpenEmbedded Scarthgap&lt;/a&gt; branch.</field>
38 </object> 53 </object>
39 <object model="orm.release" pk="2"> 54 <object model="orm.release" pk="2">
40 <field type="CharField" name="name">local</field> 55 <field type="CharField" name="name">local</field>
@@ -48,14 +63,35 @@
48 <field type="CharField" name="description">OpenEmbedded core master</field> 63 <field type="CharField" name="description">OpenEmbedded core master</field>
49 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> 64 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field>
50 <field type="CharField" name="branch_name">master</field> 65 <field type="CharField" name="branch_name">master</field>
51 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/\"&gt;OpenEmbedded master&lt;/a&gt; branch.</field> 66 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/\"&gt;OpenEmbedded master&lt;/a&gt; branch.</field>
52 </object> 67 </object>
53 <object model="orm.release" pk="4"> 68 <object model="orm.release" pk="4">
54 <field type="CharField" name="name">gatesgarth</field> 69 <field type="CharField" name="name">whinlatter</field>
55 <field type="CharField" name="description">Openembedded Gatesgarth</field> 70 <field type="CharField" name="description">Openembedded Whinlatter</field>
56 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> 71 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
57 <field type="CharField" name="branch_name">gatesgarth</field> 72 <field type="CharField" name="branch_name">whinlatter</field>
58 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=gatesgarth\"&gt;OpenEmbedded Gatesgarth&lt;/a&gt; branch.</field> 73 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=whinlatter\"&gt;OpenEmbedded Whinlatter&lt;/a&gt; branch.</field>
74 </object>
75 <object model="orm.release" pk="5">
76 <field type="CharField" name="name">walnascar</field>
77 <field type="CharField" name="description">Openembedded Walnascar</field>
78 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field>
79 <field type="CharField" name="branch_name">walnascar</field>
80 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=walnascar\"&gt;OpenEmbedded Walnascar&lt;/a&gt; branch.</field>
81 </object>
82 <object model="orm.release" pk="6">
83 <field type="CharField" name="name">styhead</field>
84 <field type="CharField" name="description">Openembedded Styhead</field>
85 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">6</field>
86 <field type="CharField" name="branch_name">styhead</field>
87 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=styhead\"&gt;OpenEmbedded Styhead&lt;/a&gt; branch.</field>
88 </object>
89 <object model="orm.release" pk="7">
90 <field type="CharField" name="name">kirkstone</field>
91 <field type="CharField" name="description">Openembedded Kirkstone</field>
92 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">7</field>
93 <field type="CharField" name="branch_name">kirkstone</field>
94 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=kirkstone\"&gt;OpenEmbedded Kirkstone&lt;/a&gt; branch.</field>
59 </object> 95 </object>
60 96
61 <!-- Default layers for each release --> 97 <!-- Default layers for each release -->
@@ -75,15 +111,27 @@
75 <field rel="ManyToOneRel" to="orm.release" name="release">4</field> 111 <field rel="ManyToOneRel" to="orm.release" name="release">4</field>
76 <field type="CharField" name="layer_name">openembedded-core</field> 112 <field type="CharField" name="layer_name">openembedded-core</field>
77 </object> 113 </object>
114 <object model="orm.releasedefaultlayer" pk="5">
115 <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
116 <field type="CharField" name="layer_name">openembedded-core</field>
117 </object>
118 <object model="orm.releasedefaultlayer" pk="6">
119 <field rel="ManyToOneRel" to="orm.release" name="release">6</field>
120 <field type="CharField" name="layer_name">openembedded-core</field>
121 </object>
122 <object model="orm.releasedefaultlayer" pk="7">
123 <field rel="ManyToOneRel" to="orm.release" name="release">7</field>
124 <field type="CharField" name="layer_name">openembedded-core</field>
125 </object>
78 126
79 127
80 <!-- Layer for the Local release --> 128 <!-- Layer for the Local release -->
81 <object model="orm.layer" pk="1"> 129 <object model="orm.layer" pk="1">
82 <field type="CharField" name="name">openembedded-core</field> 130 <field type="CharField" name="name">openembedded-core</field>
83 <field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field> 131 <field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field>
84 <field type="CharField" name="vcs_web_url">http://cgit.openembedded.org/openembedded-core</field> 132 <field type="CharField" name="vcs_web_url">https://cgit.openembedded.org/openembedded-core</field>
85 <field type="CharField" name="vcs_web_tree_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> 133 <field type="CharField" name="vcs_web_tree_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
86 <field type="CharField" name="vcs_web_file_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> 134 <field type="CharField" name="vcs_web_file_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
87 </object> 135 </object>
88 <object model="orm.layer_version" pk="1"> 136 <object model="orm.layer_version" pk="1">
89 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> 137 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
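All foreign keys in these fixtures are wired by primary key: each orm.release row names an orm.bitbakeversion, and the orm.releasedefaultlayer rows map a release pk to a layer name that Toaster later resolves against orm.layer_version for that release. A hedged sketch of that lookup, mirroring the ProjectManager logic in the models.py hunk further down (run inside a configured Toaster/Django environment):

    from orm.models import Release, Layer_Version

    # Resolve the default layers for one of the releases defined above.
    release = Release.objects.get(name="scarthgap")
    for rdl in release.releasedefaultlayer_set.all():
        lv = Layer_Version.objects.filter(layer__name=rdl.layer_name,
                                          release=release).first()
        # Each layer name should resolve to a layer_version on the release branch.
        print(rdl.layer_name, lv.branch if lv else "MISSING")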
diff --git a/bitbake/lib/toaster/orm/fixtures/poky.xml b/bitbake/lib/toaster/orm/fixtures/poky.xml
index a468a54c49..6cf4f0687a 100644
--- a/bitbake/lib/toaster/orm/fixtures/poky.xml
+++ b/bitbake/lib/toaster/orm/fixtures/poky.xml
@@ -8,9 +8,9 @@
8 8
9 <!-- Bitbake versions which correspond to the metadata release --> 9 <!-- Bitbake versions which correspond to the metadata release -->
10 <object model="orm.bitbakeversion" pk="1"> 10 <object model="orm.bitbakeversion" pk="1">
11 <field type="CharField" name="name">dunfell</field> 11 <field type="CharField" name="name">scarthgap</field>
12 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> 12 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
13 <field type="CharField" name="branch">dunfell</field> 13 <field type="CharField" name="branch">scarthgap</field>
14 <field type="CharField" name="dirpath">bitbake</field> 14 <field type="CharField" name="dirpath">bitbake</field>
15 </object> 15 </object>
16 <object model="orm.bitbakeversion" pk="2"> 16 <object model="orm.bitbakeversion" pk="2">
@@ -26,20 +26,38 @@
26 <field type="CharField" name="dirpath">bitbake</field> 26 <field type="CharField" name="dirpath">bitbake</field>
27 </object> 27 </object>
28 <object model="orm.bitbakeversion" pk="4"> 28 <object model="orm.bitbakeversion" pk="4">
29 <field type="CharField" name="name">gatesgarth</field> 29 <field type="CharField" name="name">whinlatter</field>
30 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> 30 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
31 <field type="CharField" name="branch">gatesgarth</field> 31 <field type="CharField" name="branch">whinlatter</field>
32 <field type="CharField" name="dirpath">bitbake</field>
33 </object>
34 <object model="orm.bitbakeversion" pk="5">
35 <field type="CharField" name="name">walnascar</field>
36 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
37 <field type="CharField" name="branch">walnascar</field>
38 <field type="CharField" name="dirpath">bitbake</field>
39 </object>
40 <object model="orm.bitbakeversion" pk="6">
41 <field type="CharField" name="name">styhead</field>
42 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
43 <field type="CharField" name="branch">styhead</field>
44 <field type="CharField" name="dirpath">bitbake</field>
45 </object>
46 <object model="orm.bitbakeversion" pk="7">
47 <field type="CharField" name="name">kirkstone</field>
48 <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
49 <field type="CharField" name="branch">kirkstone</field>
32 <field type="CharField" name="dirpath">bitbake</field> 50 <field type="CharField" name="dirpath">bitbake</field>
33 </object> 51 </object>
34 52
35 53
36 <!-- Releases available --> 54 <!-- Releases available -->
37 <object model="orm.release" pk="1"> 55 <object model="orm.release" pk="1">
38 <field type="CharField" name="name">dunfell</field> 56 <field type="CharField" name="name">scarthgap</field>
39 <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field> 57 <field type="CharField" name="description">Yocto Project 5.0 "Scarthgap"</field>
40 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> 58 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
41 <field type="CharField" name="branch_name">dunfell</field> 59 <field type="CharField" name="branch_name">scarthgap</field>
42 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell"&gt;Yocto Project Dunfell branch&lt;/a&gt;.</field> 60 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=scarthgap"&gt;Yocto Project Scarthgap branch&lt;/a&gt;.</field>
43 </object> 61 </object>
44 <object model="orm.release" pk="2"> 62 <object model="orm.release" pk="2">
45 <field type="CharField" name="name">local</field> 63 <field type="CharField" name="name">local</field>
@@ -53,14 +71,35 @@
53 <field type="CharField" name="description">Yocto Project master</field> 71 <field type="CharField" name="description">Yocto Project master</field>
54 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> 72 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field>
55 <field type="CharField" name="branch_name">master</field> 73 <field type="CharField" name="branch_name">master</field>
56 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/"&gt;Yocto Project Master branch&lt;/a&gt;.</field> 74 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/"&gt;Yocto Project Master branch&lt;/a&gt;.</field>
57 </object> 75 </object>
58 <object model="orm.release" pk="4"> 76 <object model="orm.release" pk="4">
59 <field type="CharField" name="name">gatesgarth</field> 77 <field type="CharField" name="name">whinlatter</field>
60 <field type="CharField" name="description">Yocto Project 3.2 "Gatesgarth"</field> 78 <field type="CharField" name="description">Yocto Project 5.3 "Whinlatter"</field>
61 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> 79 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
62 <field type="CharField" name="branch_name">gatesgarth</field> 80 <field type="CharField" name="branch_name">whinlatter</field>
63 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=gatesgarth"&gt;Yocto Project Gatesgarth branch&lt;/a&gt;.</field> 81 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=whinlatter"&gt;Yocto Project Whinlatter branch&lt;/a&gt;.</field>
82 </object>
83 <object model="orm.release" pk="5">
84 <field type="CharField" name="name">walnascar</field>
85 <field type="CharField" name="description">Yocto Project 5.2 "Walnascar"</field>
86 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field>
87 <field type="CharField" name="branch_name">walnascar</field>
88 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=walnascar"&gt;Yocto Project Walnascar branch&lt;/a&gt;.</field>
89 </object>
90 <object model="orm.release" pk="6">
91 <field type="CharField" name="name">styhead</field>
92 <field type="CharField" name="description">Yocto Project 5.1 "Styhead"</field>
93 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">6</field>
94 <field type="CharField" name="branch_name">styhead</field>
95 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=styhead"&gt;Yocto Project Styhead branch&lt;/a&gt;.</field>
96 </object>
97 <object model="orm.release" pk="7">
98 <field type="CharField" name="name">kirkstone</field>
99 <field type="CharField" name="description">Yocto Project 4.0 "Kirkstone"</field>
100 <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">7</field>
101 <field type="CharField" name="branch_name">kirkstone</field>
102 <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=kirkstone"&gt;Yocto Project Kirkstone branch&lt;/a&gt;.</field>
64 </object> 103 </object>
65 104
66 <!-- Default project layers for each release --> 105 <!-- Default project layers for each release -->
@@ -112,6 +151,42 @@
112 <field rel="ManyToOneRel" to="orm.release" name="release">4</field> 151 <field rel="ManyToOneRel" to="orm.release" name="release">4</field>
113 <field type="CharField" name="layer_name">meta-yocto-bsp</field> 152 <field type="CharField" name="layer_name">meta-yocto-bsp</field>
114 </object> 153 </object>
154 <object model="orm.releasedefaultlayer" pk="13">
155 <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
156 <field type="CharField" name="layer_name">openembedded-core</field>
157 </object>
158 <object model="orm.releasedefaultlayer" pk="14">
159 <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
160 <field type="CharField" name="layer_name">meta-poky</field>
161 </object>
162 <object model="orm.releasedefaultlayer" pk="15">
163 <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
164 <field type="CharField" name="layer_name">meta-yocto-bsp</field>
165 </object>
166 <object model="orm.releasedefaultlayer" pk="16">
167 <field rel="ManyToOneRel" to="orm.release" name="release">6</field>
168 <field type="CharField" name="layer_name">openembedded-core</field>
169 </object>
170 <object model="orm.releasedefaultlayer" pk="17">
171 <field rel="ManyToOneRel" to="orm.release" name="release">6</field>
172 <field type="CharField" name="layer_name">meta-poky</field>
173 </object>
174 <object model="orm.releasedefaultlayer" pk="18">
175 <field rel="ManyToOneRel" to="orm.release" name="release">6</field>
176 <field type="CharField" name="layer_name">meta-yocto-bsp</field>
177 </object>
178 <object model="orm.releasedefaultlayer" pk="19">
179 <field rel="ManyToOneRel" to="orm.release" name="release">7</field>
180 <field type="CharField" name="layer_name">openembedded-core</field>
181 </object>
182 <object model="orm.releasedefaultlayer" pk="20">
183 <field rel="ManyToOneRel" to="orm.release" name="release">7</field>
184 <field type="CharField" name="layer_name">meta-poky</field>
185 </object>
186 <object model="orm.releasedefaultlayer" pk="21">
187 <field rel="ManyToOneRel" to="orm.release" name="release">7</field>
188 <field type="CharField" name="layer_name">meta-yocto-bsp</field>
189 </object>
115 190
116 <!-- Default layers provided by poky 191 <!-- Default layers provided by poky
117 openembedded-core 192 openembedded-core
@@ -122,15 +197,15 @@
122 <field type="CharField" name="name">openembedded-core</field> 197 <field type="CharField" name="name">openembedded-core</field>
123 <field type="CharField" name="layer_index_url"></field> 198 <field type="CharField" name="layer_index_url"></field>
124 <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> 199 <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
125 <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> 200 <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
126 <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> 201 <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
127 <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> 202 <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
128 </object> 203 </object>
129 <object model="orm.layer_version" pk="1"> 204 <object model="orm.layer_version" pk="1">
130 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> 205 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
131 <field type="IntegerField" name="layer_source">0</field> 206 <field type="IntegerField" name="layer_source">0</field>
132 <field rel="ManyToOneRel" to="orm.release" name="release">1</field> 207 <field rel="ManyToOneRel" to="orm.release" name="release">1</field>
133 <field type="CharField" name="branch">dunfell</field> 208 <field type="CharField" name="branch">scarthgap</field>
134 <field type="CharField" name="dirpath">meta</field> 209 <field type="CharField" name="dirpath">meta</field>
135 </object> 210 </object>
136 <object model="orm.layer_version" pk="2"> 211 <object model="orm.layer_version" pk="2">
@@ -152,7 +227,28 @@
152 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> 227 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
153 <field type="IntegerField" name="layer_source">0</field> 228 <field type="IntegerField" name="layer_source">0</field>
154 <field rel="ManyToOneRel" to="orm.release" name="release">4</field> 229 <field rel="ManyToOneRel" to="orm.release" name="release">4</field>
155 <field type="CharField" name="branch">gatesgarth</field> 230 <field type="CharField" name="branch">whinlatter</field>
231 <field type="CharField" name="dirpath">meta</field>
232 </object>
233 <object model="orm.layer_version" pk="5">
234 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
235 <field type="IntegerField" name="layer_source">0</field>
236 <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
237 <field type="CharField" name="branch">walnascar</field>
238 <field type="CharField" name="dirpath">meta</field>
239 </object>
240 <object model="orm.layer_version" pk="6">
241 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
242 <field type="IntegerField" name="layer_source">0</field>
243 <field rel="ManyToOneRel" to="orm.release" name="release">6</field>
244 <field type="CharField" name="branch">styhead</field>
245 <field type="CharField" name="dirpath">meta</field>
246 </object>
247 <object model="orm.layer_version" pk="7">
248 <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
249 <field type="IntegerField" name="layer_source">0</field>
250 <field rel="ManyToOneRel" to="orm.release" name="release">7</field>
251 <field type="CharField" name="branch">kirkstone</field>
156 <field type="CharField" name="dirpath">meta</field> 252 <field type="CharField" name="dirpath">meta</field>
157 </object> 253 </object>
158 254
@@ -160,18 +256,18 @@
160 <field type="CharField" name="name">meta-poky</field> 256 <field type="CharField" name="name">meta-poky</field>
161 <field type="CharField" name="layer_index_url"></field> 257 <field type="CharField" name="layer_index_url"></field>
162 <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> 258 <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
163 <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> 259 <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
164 <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> 260 <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
165 <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> 261 <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
166 </object> 262 </object>
167 <object model="orm.layer_version" pk="5"> 263 <object model="orm.layer_version" pk="8">
168 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> 264 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
169 <field type="IntegerField" name="layer_source">0</field> 265 <field type="IntegerField" name="layer_source">0</field>
170 <field rel="ManyToOneRel" to="orm.release" name="release">1</field> 266 <field rel="ManyToOneRel" to="orm.release" name="release">1</field>
171 <field type="CharField" name="branch">dunfell</field> 267 <field type="CharField" name="branch">scarthgap</field>
172 <field type="CharField" name="dirpath">meta-poky</field> 268 <field type="CharField" name="dirpath">meta-poky</field>
173 </object> 269 </object>
174 <object model="orm.layer_version" pk="6"> 270 <object model="orm.layer_version" pk="9">
175 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> 271 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
176 <field type="IntegerField" name="layer_source">0</field> 272 <field type="IntegerField" name="layer_source">0</field>
177 <field rel="ManyToOneRel" to="orm.release" name="release">2</field> 273 <field rel="ManyToOneRel" to="orm.release" name="release">2</field>
@@ -179,18 +275,39 @@
179 <field type="CharField" name="commit">HEAD</field> 275 <field type="CharField" name="commit">HEAD</field>
180 <field type="CharField" name="dirpath">meta-poky</field> 276 <field type="CharField" name="dirpath">meta-poky</field>
181 </object> 277 </object>
182 <object model="orm.layer_version" pk="7"> 278 <object model="orm.layer_version" pk="10">
183 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> 279 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
184 <field type="IntegerField" name="layer_source">0</field> 280 <field type="IntegerField" name="layer_source">0</field>
185 <field rel="ManyToOneRel" to="orm.release" name="release">3</field> 281 <field rel="ManyToOneRel" to="orm.release" name="release">3</field>
186 <field type="CharField" name="branch">master</field> 282 <field type="CharField" name="branch">master</field>
187 <field type="CharField" name="dirpath">meta-poky</field> 283 <field type="CharField" name="dirpath">meta-poky</field>
188 </object> 284 </object>
189 <object model="orm.layer_version" pk="8"> 285 <object model="orm.layer_version" pk="11">
190 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> 286 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
191 <field type="IntegerField" name="layer_source">0</field> 287 <field type="IntegerField" name="layer_source">0</field>
192 <field rel="ManyToOneRel" to="orm.release" name="release">4</field> 288 <field rel="ManyToOneRel" to="orm.release" name="release">4</field>
193 <field type="CharField" name="branch">gatesgarth</field> 289 <field type="CharField" name="branch">whinlatter</field>
290 <field type="CharField" name="dirpath">meta-poky</field>
291 </object>
292 <object model="orm.layer_version" pk="12">
293 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
294 <field type="IntegerField" name="layer_source">0</field>
295 <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
296 <field type="CharField" name="branch">walnascar</field>
297 <field type="CharField" name="dirpath">meta-poky</field>
298 </object>
299 <object model="orm.layer_version" pk="13">
300 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
301 <field type="IntegerField" name="layer_source">0</field>
302 <field rel="ManyToOneRel" to="orm.release" name="release">6</field>
303 <field type="CharField" name="branch">styhead</field>
304 <field type="CharField" name="dirpath">meta-poky</field>
305 </object>
306 <object model="orm.layer_version" pk="14">
307 <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
308 <field type="IntegerField" name="layer_source">0</field>
309 <field rel="ManyToOneRel" to="orm.release" name="release">7</field>
310 <field type="CharField" name="branch">kirkstone</field>
194 <field type="CharField" name="dirpath">meta-poky</field> 311 <field type="CharField" name="dirpath">meta-poky</field>
195 </object> 312 </object>
196 313
@@ -198,18 +315,18 @@
198 <field type="CharField" name="name">meta-yocto-bsp</field> 315 <field type="CharField" name="name">meta-yocto-bsp</field>
199 <field type="CharField" name="layer_index_url"></field> 316 <field type="CharField" name="layer_index_url"></field>
200 <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> 317 <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
201 <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> 318 <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
202 <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> 319 <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
203 <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> 320 <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
204 </object> 321 </object>
205 <object model="orm.layer_version" pk="9"> 322 <object model="orm.layer_version" pk="15">
206 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> 323 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
207 <field type="IntegerField" name="layer_source">0</field> 324 <field type="IntegerField" name="layer_source">0</field>
208 <field rel="ManyToOneRel" to="orm.release" name="release">1</field> 325 <field rel="ManyToOneRel" to="orm.release" name="release">1</field>
209 <field type="CharField" name="branch">dunfell</field> 326 <field type="CharField" name="branch">scarthgap</field>
210 <field type="CharField" name="dirpath">meta-yocto-bsp</field> 327 <field type="CharField" name="dirpath">meta-yocto-bsp</field>
211 </object> 328 </object>
212 <object model="orm.layer_version" pk="10"> 329 <object model="orm.layer_version" pk="16">
213 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> 330 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
214 <field type="IntegerField" name="layer_source">0</field> 331 <field type="IntegerField" name="layer_source">0</field>
215 <field rel="ManyToOneRel" to="orm.release" name="release">2</field> 332 <field rel="ManyToOneRel" to="orm.release" name="release">2</field>
@@ -217,18 +334,39 @@
217 <field type="CharField" name="commit">HEAD</field> 334 <field type="CharField" name="commit">HEAD</field>
218 <field type="CharField" name="dirpath">meta-yocto-bsp</field> 335 <field type="CharField" name="dirpath">meta-yocto-bsp</field>
219 </object> 336 </object>
220 <object model="orm.layer_version" pk="11"> 337 <object model="orm.layer_version" pk="17">
221 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> 338 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
222 <field type="IntegerField" name="layer_source">0</field> 339 <field type="IntegerField" name="layer_source">0</field>
223 <field rel="ManyToOneRel" to="orm.release" name="release">3</field> 340 <field rel="ManyToOneRel" to="orm.release" name="release">3</field>
224 <field type="CharField" name="branch">master</field> 341 <field type="CharField" name="branch">master</field>
225 <field type="CharField" name="dirpath">meta-yocto-bsp</field> 342 <field type="CharField" name="dirpath">meta-yocto-bsp</field>
226 </object> 343 </object>
227 <object model="orm.layer_version" pk="12"> 344 <object model="orm.layer_version" pk="18">
228 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> 345 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
229 <field type="IntegerField" name="layer_source">0</field> 346 <field type="IntegerField" name="layer_source">0</field>
230 <field rel="ManyToOneRel" to="orm.release" name="release">4</field> 347 <field rel="ManyToOneRel" to="orm.release" name="release">4</field>
231 <field type="CharField" name="branch">gatesgarth</field> 348 <field type="CharField" name="branch">whinlatter</field>
349 <field type="CharField" name="dirpath">meta-yocto-bsp</field>
350 </object>
351 <object model="orm.layer_version" pk="19">
352 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
353 <field type="IntegerField" name="layer_source">0</field>
354 <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
355 <field type="CharField" name="branch">walnascar</field>
356 <field type="CharField" name="dirpath">meta-yocto-bsp</field>
357 </object>
358 <object model="orm.layer_version" pk="20">
359 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
360 <field type="IntegerField" name="layer_source">0</field>
361 <field rel="ManyToOneRel" to="orm.release" name="release">6</field>
362 <field type="CharField" name="branch">styhead</field>
363 <field type="CharField" name="dirpath">meta-yocto-bsp</field>
364 </object>
365 <object model="orm.layer_version" pk="21">
366 <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
367 <field type="IntegerField" name="layer_source">0</field>
368 <field rel="ManyToOneRel" to="orm.release" name="release">7</field>
369 <field type="CharField" name="branch">kirkstone</field>
232 <field type="CharField" name="dirpath">meta-yocto-bsp</field> 370 <field type="CharField" name="dirpath">meta-yocto-bsp</field>
233 </object> 371 </object>
234</django-objects> 372</django-objects>
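Both files are ordinary Django serialized fixtures, which is why the later layer_version pks had to be renumbered (5-12 becoming 8-21) once new rows were spliced in: pks only need to be unique within a fixture, but collisions would clobber rows on load. A minimal loading sketch (Toaster normally loads the appropriate fixture set itself on first start; the settings module name here is an assumption):

    import os
    import django
    from django.core.management import call_command

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toastermain.settings")  # assumed module
    django.setup()

    # Fixture label matches the file name; load "poky" or "oe-core", not both,
    # since the two files reuse the same primary keys.
    call_command("loaddata", "poky")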
diff --git a/bitbake/lib/toaster/orm/fixtures/settings.xml b/bitbake/lib/toaster/orm/fixtures/settings.xml
index 78c0fdca7f..02c26a6974 100644
--- a/bitbake/lib/toaster/orm/fixtures/settings.xml
+++ b/bitbake/lib/toaster/orm/fixtures/settings.xml
@@ -12,14 +12,14 @@
12 </object> 12 </object>
13 <object model="orm.toastersetting" pk="4"> 13 <object model="orm.toastersetting" pk="4">
14 <field type="CharField" name="name">DEFCONF_MACHINE</field> 14 <field type="CharField" name="name">DEFCONF_MACHINE</field>
15 <field type="CharField" name="value">qemux86</field> 15 <field type="CharField" name="value">qemux86-64</field>
16 </object> 16 </object>
17 <object model="orm.toastersetting" pk="5"> 17 <object model="orm.toastersetting" pk="5">
18 <field type="CharField" name="name">DEFCONF_SSTATE_DIR</field> 18 <field type="CharField" name="name">DEFCONF_SSTATE_DIR</field>
19 <field type="CharField" name="value">${TOPDIR}/../sstate-cache</field> 19 <field type="CharField" name="value">${TOPDIR}/../sstate-cache</field>
20 </object> 20 </object>
21 <object model="orm.toastersetting" pk="6"> 21 <object model="orm.toastersetting" pk="6">
22 <field type="CharField" name="name">DEFCONF_IMAGE_INSTALL_append</field> 22 <field type="CharField" name="name">DEFCONF_IMAGE_INSTALL:append</field>
23 <field type="CharField" name="value"></field> 23 <field type="CharField" name="value"></field>
24 </object> 24 </object>
25 <object model="orm.toastersetting" pk="7"> 25 <object model="orm.toastersetting" pk="7">
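The DEFCONF_IMAGE_INSTALL rename tracks BitBake's overrides-syntax change: operators such as append, prepend and remove moved from an underscore suffix to a colon separator, so IMAGE_INSTALL_append becomes IMAGE_INSTALL:append. A small illustrative helper for the mechanical rewrite (not part of this change, and not the conversion script OE-Core ships):

    def modernize_override(name: str) -> str:
        """Rewrite legacy _append/_prepend/_remove suffixes to the
        colon-based override syntax used by current BitBake."""
        for op in ("append", "prepend", "remove"):
            suffix = "_" + op
            if name.endswith(suffix):
                return name[:-len(suffix)] + ":" + op
        return name

    assert modernize_override("DEFCONF_IMAGE_INSTALL_append") == "DEFCONF_IMAGE_INSTALL:append"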
diff --git a/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
index 2fbd7be3d2..6d64830ebd 100644
--- a/bitbake/lib/toaster/orm/management/commands/lsupdates.py
+++ b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
@@ -21,7 +21,7 @@ import threading
21import time 21import time
22logger = logging.getLogger("toaster") 22logger = logging.getLogger("toaster")
23 23
24DEFAULT_LAYERINDEX_SERVER = "http://layers.openembedded.org/layerindex/api/" 24DEFAULT_LAYERINDEX_SERVER = "https://layers.openembedded.org/layerindex/api/"
25 25
26# Add path to bitbake modules for layerindexlib 26# Add path to bitbake modules for layerindexlib
27# lib/toaster/orm/management/commands/lsupdates.py (abspath) 27# lib/toaster/orm/management/commands/lsupdates.py (abspath)
@@ -40,7 +40,7 @@ class Spinner(threading.Thread):
40 """ A simple progress spinner to indicate download/parsing is happening""" 40 """ A simple progress spinner to indicate download/parsing is happening"""
41 def __init__(self, *args, **kwargs): 41 def __init__(self, *args, **kwargs):
42 super(Spinner, self).__init__(*args, **kwargs) 42 super(Spinner, self).__init__(*args, **kwargs)
43 self.setDaemon(True) 43 self.daemon = True
44 self.signal = True 44 self.signal = True
45 45
46 def run(self): 46 def run(self):
@@ -87,13 +87,13 @@ class Command(BaseCommand):
87 87
88 # update branches; only those that we already have names listed in the 88 # update branches; only those that we already have names listed in the
89 # Releases table 89 # Releases table
90 whitelist_branch_names = [rel.branch_name 90 allowed_branch_names = [rel.branch_name
91 for rel in Release.objects.all()] 91 for rel in Release.objects.all()]
92 if len(whitelist_branch_names) == 0: 92 if len(allowed_branch_names) == 0:
93 raise Exception("Failed to make list of branches to fetch") 93 raise Exception("Failed to make list of branches to fetch")
94 94
95 logger.info("Fetching metadata for %s", 95 logger.info("Fetching metadata for %s",
96 " ".join(whitelist_branch_names)) 96 " ".join(allowed_branch_names))
97 97
98 # We require a non-empty bb.data, but we can fake it with a dictionary 98 # We require a non-empty bb.data, but we can fake it with a dictionary
99 layerindex = layerindexlib.LayerIndex({"DUMMY" : "VALUE"}) 99 layerindex = layerindexlib.LayerIndex({"DUMMY" : "VALUE"})
@@ -101,8 +101,8 @@ class Command(BaseCommand):
101 http_progress = Spinner() 101 http_progress = Spinner()
102 http_progress.start() 102 http_progress.start()
103 103
104 if whitelist_branch_names: 104 if allowed_branch_names:
105 url_branches = ";branch=%s" % ','.join(whitelist_branch_names) 105 url_branches = ";branch=%s" % ','.join(allowed_branch_names)
106 else: 106 else:
107 url_branches = "" 107 url_branches = ""
108 layerindex.load_layerindex("%s%s" % (self.apiurl, url_branches)) 108 layerindex.load_layerindex("%s%s" % (self.apiurl, url_branches))
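Apart from the whitelist-to-allowed rename the logic is unchanged: the branch names collected from the Release table are appended to the layer index API URL as a ;branch= matrix parameter. A standalone sketch of the URL load_layerindex ends up fetching (branch names are illustrative):

    allowed_branch_names = ["scarthgap", "master", "kirkstone"]
    apiurl = "https://layers.openembedded.org/layerindex/api/"

    url_branches = ";branch=%s" % ",".join(allowed_branch_names) if allowed_branch_names else ""
    print("%s%s" % (apiurl, url_branches))
    # -> https://layers.openembedded.org/layerindex/api/;branch=scarthgap,master,kirkstone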
diff --git a/bitbake/lib/toaster/orm/migrations/0020_models_bigautofield.py b/bitbake/lib/toaster/orm/migrations/0020_models_bigautofield.py
new file mode 100644
index 0000000000..f19b5dddbc
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0020_models_bigautofield.py
@@ -0,0 +1,173 @@
1# Generated by Django 3.2.12 on 2022-03-06 03:28
2
3from django.db import migrations, models
4
5
6class Migration(migrations.Migration):
7
8 dependencies = [
9 ('orm', '0019_django_2_2'),
10 ]
11
12 operations = [
13 migrations.AlterField(
14 model_name='bitbakeversion',
15 name='id',
16 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
17 ),
18 migrations.AlterField(
19 model_name='build',
20 name='id',
21 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
22 ),
23 migrations.AlterField(
24 model_name='distro',
25 name='id',
26 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
27 ),
28 migrations.AlterField(
29 model_name='helptext',
30 name='id',
31 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
32 ),
33 migrations.AlterField(
34 model_name='layer',
35 name='id',
36 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
37 ),
38 migrations.AlterField(
39 model_name='layer_version',
40 name='id',
41 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
42 ),
43 migrations.AlterField(
44 model_name='layerversiondependency',
45 name='id',
46 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
47 ),
48 migrations.AlterField(
49 model_name='logmessage',
50 name='id',
51 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
52 ),
53 migrations.AlterField(
54 model_name='machine',
55 name='id',
56 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
57 ),
58 migrations.AlterField(
59 model_name='package',
60 name='id',
61 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
62 ),
63 migrations.AlterField(
64 model_name='package_dependency',
65 name='id',
66 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
67 ),
68 migrations.AlterField(
69 model_name='package_file',
70 name='id',
71 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
72 ),
73 migrations.AlterField(
74 model_name='project',
75 name='id',
76 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
77 ),
78 migrations.AlterField(
79 model_name='projectlayer',
80 name='id',
81 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
82 ),
83 migrations.AlterField(
84 model_name='projecttarget',
85 name='id',
86 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
87 ),
88 migrations.AlterField(
89 model_name='projectvariable',
90 name='id',
91 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
92 ),
93 migrations.AlterField(
94 model_name='provides',
95 name='id',
96 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
97 ),
98 migrations.AlterField(
99 model_name='recipe',
100 name='id',
101 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
102 ),
103 migrations.AlterField(
104 model_name='recipe_dependency',
105 name='id',
106 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
107 ),
108 migrations.AlterField(
109 model_name='release',
110 name='id',
111 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
112 ),
113 migrations.AlterField(
114 model_name='releasedefaultlayer',
115 name='id',
116 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
117 ),
118 migrations.AlterField(
119 model_name='target',
120 name='id',
121 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
122 ),
123 migrations.AlterField(
124 model_name='target_file',
125 name='id',
126 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
127 ),
128 migrations.AlterField(
129 model_name='target_image_file',
130 name='id',
131 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
132 ),
133 migrations.AlterField(
134 model_name='target_installed_package',
135 name='id',
136 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
137 ),
138 migrations.AlterField(
139 model_name='targetkernelfile',
140 name='id',
141 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
142 ),
143 migrations.AlterField(
144 model_name='targetsdkfile',
145 name='id',
146 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
147 ),
148 migrations.AlterField(
149 model_name='task',
150 name='id',
151 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
152 ),
153 migrations.AlterField(
154 model_name='task_dependency',
155 name='id',
156 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
157 ),
158 migrations.AlterField(
159 model_name='toastersetting',
160 name='id',
161 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
162 ),
163 migrations.AlterField(
164 model_name='variable',
165 name='id',
166 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
167 ),
168 migrations.AlterField(
169 model_name='variablehistory',
170 name='id',
171 field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
172 ),
173 ]
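This bulk AlterField set is what Django 3.2 generates when a project moves its implicit primary keys to 64-bit integers. The companion setting that keeps newly added models consistent with this migration is a one-liner; shown as a sketch, since the settings module itself is not part of this hunk:

    # toastermain/settings.py (sketch)
    DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"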
diff --git a/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py b/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py
new file mode 100644
index 0000000000..328eb5753c
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py
@@ -0,0 +1,22 @@
1# Generated by Django 4.2.5 on 2023-11-23 18:44
2
3from django.db import migrations, models
4
5
6class Migration(migrations.Migration):
7
8 dependencies = [
9 ('orm', '0020_models_bigautofield'),
10 ]
11
12 operations = [
13 migrations.CreateModel(
14 name='EventLogsImports',
15 fields=[
16 ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
17 ('name', models.CharField(max_length=255)),
18 ('imported', models.BooleanField(default=False)),
19 ('build_id', models.IntegerField(blank=True, null=True)),
20 ],
21 ),
22 ]
diff --git a/bitbake/lib/toaster/orm/models.py b/bitbake/lib/toaster/orm/models.py
index 7f7e922ade..e2f488ed89 100644
--- a/bitbake/lib/toaster/orm/models.py
+++ b/bitbake/lib/toaster/orm/models.py
@@ -58,7 +58,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
58 return _base_insert(self, *args, **kwargs) 58 return _base_insert(self, *args, **kwargs)
59 QuerySet._insert = _insert 59 QuerySet._insert = _insert
60 60
61 from django.utils import six
62 def _create_object_from_params(self, lookup, params): 61 def _create_object_from_params(self, lookup, params):
63 """ 62 """
64 Tries to create an object using passed params. 63 Tries to create an object using passed params.
@@ -80,7 +79,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
80 # end of HACK 79 # end of HACK
81 80
82class GitURLValidator(validators.URLValidator): 81class GitURLValidator(validators.URLValidator):
83 import re
84 regex = re.compile( 82 regex = re.compile(
85 r'^(?:ssh|git|http|ftp)s?://' # http:// or https:// 83 r'^(?:ssh|git|http|ftp)s?://' # http:// or https://
86 r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... 84 r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
@@ -108,7 +106,7 @@ class ToasterSetting(models.Model):
108 106
109 107
110class ProjectManager(models.Manager): 108class ProjectManager(models.Manager):
111 def create_project(self, name, release, existing_project=None): 109 def create_project(self, name, release, existing_project=None, imported=False):
112 if existing_project and (release is not None): 110 if existing_project and (release is not None):
113 prj = existing_project 111 prj = existing_project
114 prj.bitbake_version = release.bitbake_version 112 prj.bitbake_version = release.bitbake_version
@@ -135,19 +133,19 @@ class ProjectManager(models.Manager):
135 133
136 if release is None: 134 if release is None:
137 return prj 135 return prj
138 136 if not imported:
139 for rdl in release.releasedefaultlayer_set.all(): 137 for rdl in release.releasedefaultlayer_set.all():
140 lv = Layer_Version.objects.filter( 138 lv = Layer_Version.objects.filter(
141 layer__name=rdl.layer_name, 139 layer__name=rdl.layer_name,
142 release=release).first() 140 release=release).first()
143 141
144 if lv: 142 if lv:
145 ProjectLayer.objects.create(project=prj, 143 ProjectLayer.objects.create(project=prj,
146 layercommit=lv, 144 layercommit=lv,
147 optional=False) 145 optional=False)
148 else: 146 else:
149 logger.warning("Default project layer %s not found" % 147 logger.warning("Default project layer %s not found" %
150 rdl.layer_name) 148 rdl.layer_name)
151 149
152 return prj 150 return prj
153 151
@@ -1390,9 +1388,6 @@ class Machine(models.Model):
1390 return "Machine " + self.name + "(" + self.description + ")" 1388 return "Machine " + self.name + "(" + self.description + ")"
1391 1389
1392 1390
1393
1394
1395
1396class BitbakeVersion(models.Model): 1391class BitbakeVersion(models.Model):
1397 1392
1398 name = models.CharField(max_length=32, unique = True) 1393 name = models.CharField(max_length=32, unique = True)
@@ -1504,7 +1499,7 @@ class Layer_Version(models.Model):
1504 # code lifted, with adaptations, from the layerindex-web application 1499 # code lifted, with adaptations, from the layerindex-web application
1505 # https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/ 1500 # https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/
1506 def _handle_url_path(self, base_url, path): 1501 def _handle_url_path(self, base_url, path):
1507 import re, posixpath 1502 import posixpath
1508 if base_url: 1503 if base_url:
1509 if self.dirpath: 1504 if self.dirpath:
1510 if path: 1505 if path:
@@ -1717,9 +1712,9 @@ class CustomImageRecipe(Recipe):
1717 1712
1718 def generate_recipe_file_contents(self): 1713 def generate_recipe_file_contents(self):
1719 """Generate the contents for the recipe file.""" 1714 """Generate the contents for the recipe file."""
1720 # If we have no excluded packages we only need to _append 1715 # If we have no excluded packages we only need to :append
1721 if self.excludes_set.count() == 0: 1716 if self.excludes_set.count() == 0:
1722 packages_conf = "IMAGE_INSTALL_append = \" " 1717 packages_conf = "IMAGE_INSTALL:append = \" "
1723 1718
1724 for pkg in self.appends_set.all(): 1719 for pkg in self.appends_set.all():
1725 packages_conf += pkg.name+' ' 1720 packages_conf += pkg.name+' '
@@ -1734,7 +1729,7 @@ class CustomImageRecipe(Recipe):
1734 packages_conf += "\"" 1729 packages_conf += "\""
1735 1730
1736 base_recipe_path = self.get_base_recipe_file() 1731 base_recipe_path = self.get_base_recipe_file()
1737 if base_recipe_path: 1732 if base_recipe_path and os.path.isfile(base_recipe_path):
1738 base_recipe = open(base_recipe_path, 'r').read() 1733 base_recipe = open(base_recipe_path, 'r').read()
1739 else: 1734 else:
1740 # Pass back None to trigger error message to user 1735 # Pass back None to trigger error message to user
@@ -1854,6 +1849,8 @@ def signal_runbuilds():
1854 os.kill(int(pidf.read()), SIGUSR1) 1849 os.kill(int(pidf.read()), SIGUSR1)
1855 except FileNotFoundError: 1850 except FileNotFoundError:
1856 logger.info("Stopping existing runbuilds: no current process found") 1851 logger.info("Stopping existing runbuilds: no current process found")
1852 except ProcessLookupError:
1853 logger.warning("Stopping existing runbuilds: process not found")
1857 1854
1858class Distro(models.Model): 1855class Distro(models.Model):
1859 search_allowed_fields = ["name", "description", "layer_version__layer__name"] 1856 search_allowed_fields = ["name", "description", "layer_version__layer__name"]
@@ -1870,6 +1867,15 @@ class Distro(models.Model):
1870 def __unicode__(self): 1867 def __unicode__(self):
1871 return "Distro " + self.name + "(" + self.description + ")" 1868 return "Distro " + self.name + "(" + self.description + ")"
1872 1869
1870class EventLogsImports(models.Model):
1871 name = models.CharField(max_length=255)
1872 imported = models.BooleanField(default=False)
1873 build_id = models.IntegerField(blank=True, null=True)
1874
1875 def __str__(self):
1876 return self.name
1877
1878
1873django.db.models.signals.post_save.connect(invalidate_cache) 1879django.db.models.signals.post_save.connect(invalidate_cache)
1874django.db.models.signals.post_delete.connect(invalidate_cache) 1880django.db.models.signals.post_delete.connect(invalidate_cache)
1875django.db.models.signals.m2m_changed.connect(invalidate_cache) 1881django.db.models.signals.m2m_changed.connect(invalidate_cache)
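The new EventLogsImports model (matching migration 0021 above) gives event-log imports a durable record so the same file is not replayed twice. A hedged usage sketch; the guard logic is an assumption about the intended use, not code from this commit:

    from orm.models import EventLogsImports

    def import_once(logfile_name, build_id=None):
        entry, _ = EventLogsImports.objects.get_or_create(name=logfile_name)
        if entry.imported:
            return False                  # this log was already replayed
        entry.imported = True
        entry.build_id = build_id
        entry.save()
        return True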
diff --git a/bitbake/lib/toaster/pytest.ini b/bitbake/lib/toaster/pytest.ini
new file mode 100644
index 0000000000..071c65fcd5
--- /dev/null
+++ b/bitbake/lib/toaster/pytest.ini
@@ -0,0 +1,16 @@
1# -- FILE: pytest.ini (or tox.ini)
2[pytest]
3# --create-db - force re-creation of the test database
4# https://pytest-django.readthedocs.io/en/latest/database.html#create-db-force-re-creation-of-the-test-database
5
6# --html=report.html --self-contained-html
7# https://docs.pytest.org/en/latest/usage.html#creating-html-reports
8# https://pytest-html.readthedocs.io/en/latest/user_guide.html#creating-a-self-contained-report
9addopts = --create-db --html="Toaster Tests Report.html" --self-contained-html
10
11# Define environment variables using pytest-env
12# A pytest plugin that enables you to set environment variables in the pytest.ini file.
13# https://pypi.org/project/pytest-env/
14env =
15 TOASTER_BUILDSERVER=1
16 DJANGO_SETTINGS_MODULE=toastermain.settings_test
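With this file in place the suite is driven through pytest-django: addopts forces a fresh test database and a self-contained HTML report, while the env section selects the test settings module. A sketch of an equivalent programmatic run (the test path is illustrative):

    import pytest

    # Same effect as invoking "pytest" from lib/toaster; pytest.ini contributes
    # --create-db, the HTML report flags and the environment variables.
    raise SystemExit(pytest.main(["tests/browser/test_all_builds_page.py"]))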
diff --git a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
index 644d45fe58..6953541ab5 100644
--- a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
+++ b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
@@ -19,11 +19,15 @@ import os
19import time 19import time
20import unittest 20import unittest
21 21
22import pytest
22from selenium import webdriver 23from selenium import webdriver
24from selenium.webdriver.support import expected_conditions as EC
23from selenium.webdriver.support.ui import WebDriverWait 25from selenium.webdriver.support.ui import WebDriverWait
26from selenium.webdriver.common.by import By
24from selenium.webdriver.common.desired_capabilities import DesiredCapabilities 27from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
25from selenium.common.exceptions import NoSuchElementException, \ 28from selenium.common.exceptions import NoSuchElementException, \
26 StaleElementReferenceException, TimeoutException 29 StaleElementReferenceException, TimeoutException, \
30 SessionNotCreatedException, WebDriverException
27 31
28def create_selenium_driver(cls,browser='chrome'): 32def create_selenium_driver(cls,browser='chrome'):
29 # set default browser string based on env (if available) 33 # set default browser string based on env (if available)
@@ -32,9 +36,32 @@ def create_selenium_driver(cls,browser='chrome'):
32 browser = env_browser 36 browser = env_browser
33 37
34 if browser == 'chrome': 38 if browser == 'chrome':
35 return webdriver.Chrome( 39 options = webdriver.ChromeOptions()
36 service_args=["--verbose", "--log-path=selenium.log"] 40 options.add_argument('--headless')
37 ) 41 options.add_argument('--disable-infobars')
42 options.add_argument('--disable-dev-shm-usage')
43 options.add_argument('--no-sandbox')
44 options.add_argument('--remote-debugging-port=9222')
45 try:
46 return webdriver.Chrome(options=options)
47 except SessionNotCreatedException as e:
48 exit_message = "Halting tests prematurely to avoid cascading errors."
49 # check if chrome / chromedriver exists
50 chrome_path = os.popen("find ~/.cache/selenium/chrome/ -name 'chrome' -type f -print -quit").read().strip()
51 if not chrome_path:
52 pytest.exit(f"Failed to install/find chrome.\n{exit_message}")
53 chromedriver_path = os.popen("find ~/.cache/selenium/chromedriver/ -name 'chromedriver' -type f -print -quit").read().strip()
54 if not chromedriver_path:
55 pytest.exit(f"Failed to install/find chromedriver.\n{exit_message}")
56 # check whether the shared-library dependencies of each binary are satisfied
57 depends_chrome = os.popen(f"ldd {chrome_path} | grep 'not found'").read().strip()
58 if depends_chrome:
59 pytest.exit(f"Missing chrome dependencies.\n{depends_chrome}\n{exit_message}")
60 depends_chromedriver = os.popen(f"ldd {chromedriver_path} | grep 'not found'").read().strip()
61 if depends_chromedriver:
62 pytest.exit(f"Missing chromedriver dependencies.\n{depends_chromedriver}\n{exit_message}")
63 # print original error otherwise
64 pytest.exit(f"Failed to start chromedriver.\n{e}\n{exit_message}")
38 elif browser == 'firefox': 65 elif browser == 'firefox':
39 return webdriver.Firefox() 66 return webdriver.Firefox()
40 elif browser == 'marionette': 67 elif browser == 'marionette':
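The rewritten factory builds a headless Chrome with the sandbox and /dev/shm workarounds that containerised CI typically needs, and turns a SessionNotCreatedException into a diagnosis of missing binaries or unresolved shared libraries before aborting the run. A stripped-down smoke test using the same options, outside the test harness and purely illustrative:

    from selenium import webdriver

    options = webdriver.ChromeOptions()
    options.add_argument('--headless')
    options.add_argument('--disable-dev-shm-usage')
    options.add_argument('--no-sandbox')

    driver = webdriver.Chrome(options=options)  # same flags as the helper above
    try:
        driver.get("about:blank")
        print(driver.title)
    finally:
        driver.quit()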
@@ -63,10 +90,12 @@ class Wait(WebDriverWait):
63 Subclass of WebDriverWait with predetermined timeout and poll 90 Subclass of WebDriverWait with predetermined timeout and poll
64 frequency. Also deals with a wider variety of exceptions. 91 frequency. Also deals with a wider variety of exceptions.
65 """ 92 """
66 _TIMEOUT = 10 93 _TIMEOUT = 20
67 _POLL_FREQUENCY = 0.5 94 _POLL_FREQUENCY = 0.5
68 95
69 def __init__(self, driver): 96 def __init__(self, driver, timeout=_TIMEOUT, poll=_POLL_FREQUENCY):
97 self._TIMEOUT = timeout
98 self._POLL_FREQUENCY = poll
70 super(Wait, self).__init__(driver, self._TIMEOUT, self._POLL_FREQUENCY) 99 super(Wait, self).__init__(driver, self._TIMEOUT, self._POLL_FREQUENCY)
71 100
72 def until(self, method, message=''): 101 def until(self, method, message=''):
@@ -85,6 +114,9 @@ class Wait(WebDriverWait):
85 pass 114 pass
86 except StaleElementReferenceException: 115 except StaleElementReferenceException:
87 pass 116 pass
117 except WebDriverException:
118 # selenium.common.exceptions.WebDriverException: Message: unknown error: unhandled inspector error: {"code":-32000,"message":"Node with given id does not belong to the document"}
119 pass
88 120
89 time.sleep(self._poll) 121 time.sleep(self._poll)
90 if time.time() > end_time: 122 if time.time() > end_time:
@@ -138,6 +170,8 @@ class SeleniumTestCaseBase(unittest.TestCase):
138 """ Clean up webdriver driver """ 170 """ Clean up webdriver driver """
139 171
140 cls.driver.quit() 172 cls.driver.quit()
173 # Allow driver resources to be properly freed before proceeding with further tests
174 time.sleep(5)
141 super(SeleniumTestCaseBase, cls).tearDownClass() 175 super(SeleniumTestCaseBase, cls).tearDownClass()
142 176
143 def get(self, url): 177 def get(self, url):
@@ -151,13 +185,20 @@ class SeleniumTestCaseBase(unittest.TestCase):
151 abs_url = '%s%s' % (self.live_server_url, url) 185 abs_url = '%s%s' % (self.live_server_url, url)
152 self.driver.get(abs_url) 186 self.driver.get(abs_url)
153 187
188 try: # Ensure page is loaded before proceeding
189 self.wait_until_visible("#global-nav")
190 except NoSuchElementException:
191 self.driver.implicitly_wait(3)
192 except TimeoutException:
193 self.driver.implicitly_wait(3)
194
154 def find(self, selector): 195 def find(self, selector):
155 """ Find single element by CSS selector """ 196 """ Find single element by CSS selector """
156 return self.driver.find_element_by_css_selector(selector) 197 return self.driver.find_element(By.CSS_SELECTOR, selector)
157 198
158 def find_all(self, selector): 199 def find_all(self, selector):
159 """ Find all elements matching CSS selector """ 200 """ Find all elements matching CSS selector """
160 return self.driver.find_elements_by_css_selector(selector) 201 return self.driver.find_elements(By.CSS_SELECTOR, selector)
161 202
162 def element_exists(self, selector): 203 def element_exists(self, selector):
163 """ 204 """
@@ -170,20 +211,43 @@ class SeleniumTestCaseBase(unittest.TestCase):
170 """ Return the element which currently has focus on the page """ 211 """ Return the element which currently has focus on the page """
171 return self.driver.switch_to.active_element 212 return self.driver.switch_to.active_element
172 213
173 def wait_until_present(self, selector): 214 def wait_until_present(self, selector, timeout=Wait._TIMEOUT):
174 """ Wait until element matching CSS selector is on the page """ 215 """ Wait until element matching CSS selector is on the page """
175 is_present = lambda driver: self.find(selector) 216 is_present = lambda driver: self.find(selector)
176 msg = 'An element matching "%s" should be on the page' % selector 217 msg = 'An element matching "%s" should be on the page' % selector
177 element = Wait(self.driver).until(is_present, msg) 218 element = Wait(self.driver, timeout=timeout).until(is_present, msg)
178 return element 219 return element
179 220
180 def wait_until_visible(self, selector): 221 def wait_until_visible(self, selector, timeout=Wait._TIMEOUT):
181 """ Wait until element matching CSS selector is visible on the page """ 222 """ Wait until element matching CSS selector is visible on the page """
182 is_visible = lambda driver: self.find(selector).is_displayed() 223 is_visible = lambda driver: self.find(selector).is_displayed()
183 msg = 'An element matching "%s" should be visible' % selector 224 msg = 'An element matching "%s" should be visible' % selector
184 Wait(self.driver).until(is_visible, msg) 225 Wait(self.driver, timeout=timeout).until(is_visible, msg)
226 return self.find(selector)
227
228 def wait_until_not_visible(self, selector, timeout=Wait._TIMEOUT):
229 """ Wait until element matching CSS selector is not visible on the page """
230 is_visible = lambda driver: self.find(selector).is_displayed()
231 msg = 'An element matching "%s" should not be visible' % selector
232 Wait(self.driver, timeout=timeout).until_not(is_visible, msg)
185 return self.find(selector) 233 return self.find(selector)
186 234
235 def wait_until_clickable(self, selector, timeout=Wait._TIMEOUT):
236 """ Wait until element matching CSS selector is visible on the page """
237 WebDriverWait(self.driver, timeout=timeout).until(lambda driver: self.driver.execute_script("return jQuery.active == 0"))
238 is_clickable = lambda driver: (self.find(selector).is_displayed() and self.find(selector).is_enabled())
239 msg = 'An element matching "%s" should be clickable' % selector
240 Wait(self.driver, timeout=timeout).until(is_clickable, msg)
241 return self.find(selector)
242
243 def wait_until_element_clickable(self, finder, timeout=Wait._TIMEOUT):
244 """ Wait until element is clickable """
245 WebDriverWait(self.driver, timeout=timeout).until(lambda driver: self.driver.execute_script("return jQuery.active == 0"))
246 is_clickable = lambda driver: (finder(driver).is_displayed() and finder(driver).is_enabled())
247 msg = 'A matching element never became clickable'
248 Wait(self.driver, timeout=timeout).until(is_clickable, msg)
249 return finder(self.driver)
250
187 def wait_until_focused(self, selector): 251 def wait_until_focused(self, selector):
188 """ Wait until element matching CSS selector has focus """ 252 """ Wait until element matching CSS selector has focus """
189 is_focused = \ 253 is_focused = \
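
Note, not part of the patch: the helper changes above follow the Selenium 4 locator API, where the deprecated find_element_by_* methods were removed (Selenium 4.3 dropped them entirely) in favour of find_element(By.<STRATEGY>, selector), and the wait helpers gain an optional timeout plus a jQuery-idle check before testing clickability. A minimal usage sketch, assuming the SeleniumTestCase base above; '#save-btn', '#spinner' and the 10-second timeout are illustrative:

    from django.urls import reverse
    from tests.browser.selenium_helpers import SeleniumTestCase

    class ExampleTest(SeleniumTestCase):
        def test_save_button(self):
            self.get(reverse('landing'))
            # waits for jQuery.active == 0, then for the element to be
            # displayed and enabled, failing after the explicit timeout
            btn = self.wait_until_clickable('#save-btn', timeout=10)
            btn.click()
            self.wait_until_not_visible('#spinner', timeout=10)
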
diff --git a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
index 8423d3dab2..9ab81fb11b 100644
--- a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
@@ -7,13 +7,18 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import os
10import re 11import re
11 12
12from django.urls import reverse 13from django.urls import reverse
14from selenium.webdriver.support.select import Select
13from django.utils import timezone 15from django.utils import timezone
16from bldcontrol.models import BuildRequest
14from tests.browser.selenium_helpers import SeleniumTestCase 17from tests.browser.selenium_helpers import SeleniumTestCase
15 18
16from orm.models import BitbakeVersion, Release, Project, Build, Target 19from orm.models import BitbakeVersion, Layer, Layer_Version, Recipe, Release, Project, Build, Target, Task
20
21from selenium.webdriver.common.by import By
17 22
18 23
19class TestAllBuildsPage(SeleniumTestCase): 24class TestAllBuildsPage(SeleniumTestCase):
@@ -23,7 +28,8 @@ class TestAllBuildsPage(SeleniumTestCase):
23 CLI_BUILDS_PROJECT_NAME = 'command line builds' 28 CLI_BUILDS_PROJECT_NAME = 'command line builds'
24 29
25 def setUp(self): 30 def setUp(self):
26 bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', 31 builddir = os.environ.get('BUILDDIR', './')
32 bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
27 branch='master', dirpath='') 33 branch='master', dirpath='')
28 release = Release.objects.create(name='release1', 34 release = Release.objects.create(name='release1',
29 bitbake_version=bbv) 35 bitbake_version=bbv)
@@ -69,7 +75,7 @@ class TestAllBuildsPage(SeleniumTestCase):
69 '[data-role="data-recent-build-buildtime-field"]' % build.id 75 '[data-role="data-recent-build-buildtime-field"]' % build.id
70 76
71 # because this loads via Ajax, wait for it to be visible 77 # because this loads via Ajax, wait for it to be visible
72 self.wait_until_present(selector) 78 self.wait_until_visible(selector)
73 79
74 build_time_spans = self.find_all(selector) 80 build_time_spans = self.find_all(selector)
75 81
@@ -79,7 +85,7 @@ class TestAllBuildsPage(SeleniumTestCase):
79 85
80 def _get_row_for_build(self, build): 86 def _get_row_for_build(self, build):
81 """ Get the table row for the build from the all builds table """ 87 """ Get the table row for the build from the all builds table """
82 self.wait_until_present('#allbuildstable') 88 self.wait_until_visible('#allbuildstable')
83 89
84 rows = self.find_all('#allbuildstable tr') 90 rows = self.find_all('#allbuildstable tr')
85 91
@@ -91,7 +97,7 @@ class TestAllBuildsPage(SeleniumTestCase):
91 found_row = None 97 found_row = None
92 for row in rows: 98 for row in rows:
93 99
94 outcome_links = row.find_elements_by_css_selector(selector) 100 outcome_links = row.find_elements(By.CSS_SELECTOR, selector)
95 if len(outcome_links) == 1: 101 if len(outcome_links) == 1:
96 found_row = row 102 found_row = row
97 break 103 break
@@ -100,6 +106,66 @@ class TestAllBuildsPage(SeleniumTestCase):
100 106
101 return found_row 107 return found_row
102 108
109 def _get_create_builds(self, **kwargs):
110 """ Create a build and return the build object """
111 build1 = Build.objects.create(**self.project1_build_success)
112 build2 = Build.objects.create(**self.project1_build_failure)
113
114 # add some targets to these builds so they have recipe links
115 # (and so we can find the row in the ToasterTable corresponding to
116 # a particular build)
117 Target.objects.create(build=build1, target='foo')
118 Target.objects.create(build=build2, target='bar')
119
120 if kwargs:
121 # Create kwargs.get('success') successful builds and
122 # kwargs.get('failure') failed builds, each with its own target
123 for i in range(kwargs.get('success', 0)):
124 now = timezone.now()
125 self.project1_build_success['started_on'] = now
126 self.project1_build_success[
127 'completed_on'] = now - timezone.timedelta(days=i)
128 build = Build.objects.create(**self.project1_build_success)
129 Target.objects.create(build=build,
130 target=f'{i}_success_recipe',
131 task=f'{i}_success_task')
132
133 self._set_buildRequest_and_task_on_build(build)
134 for i in range(kwargs.get('failure', 0)):
135 now = timezone.now()
136 self.project1_build_failure['started_on'] = now
137 self.project1_build_failure[
138 'completed_on'] = now - timezone.timedelta(days=i)
139 build = Build.objects.create(**self.project1_build_failure)
140 Target.objects.create(build=build,
141 target=f'{i}_fail_recipe',
142 task=f'{i}_fail_task')
143 self._set_buildRequest_and_task_on_build(build)
144 return build1, build2
145
146 def _create_recipe(self):
147 """ Add a recipe to the database and return it """
148 layer = Layer.objects.create()
149 layer_version = Layer_Version.objects.create(layer=layer)
150 return Recipe.objects.create(name='recipe_foo', layer_version=layer_version)
151
152 def _set_buildRequest_and_task_on_build(self, build):
153 """ Set buildRequest and task on build """
154 build.recipes_parsed = 1
155 build.save()
156 buildRequest = BuildRequest.objects.create(
157 build=build,
158 project=self.project1,
159 state=BuildRequest.REQ_COMPLETED)
160 build.build_request = buildRequest
161 recipe = self._create_recipe()
162 task = Task.objects.create(build=build,
163 recipe=recipe,
164 task_name='task',
165 outcome=Task.OUTCOME_SUCCESS)
166 task.save()
167 build.save()
168
103 def test_show_tasks_with_suffix(self): 169 def test_show_tasks_with_suffix(self):
104 """ Task should be shown as suffix on build name """ 170 """ Task should be shown as suffix on build name """
105 build = Build.objects.create(**self.project1_build_success) 171 build = Build.objects.create(**self.project1_build_success)
@@ -109,7 +175,7 @@ class TestAllBuildsPage(SeleniumTestCase):
109 175
110 url = reverse('all-builds') 176 url = reverse('all-builds')
111 self.get(url) 177 self.get(url)
112 self.wait_until_present('td[class="target"]') 178 self.wait_until_visible('td[class="target"]')
113 179
114 cell = self.find('td[class="target"]') 180 cell = self.find('td[class="target"]')
115 content = cell.get_attribute('innerHTML') 181 content = cell.get_attribute('innerHTML')
@@ -126,23 +192,26 @@ class TestAllBuildsPage(SeleniumTestCase):
126 but should be shown for other builds 192 but should be shown for other builds
127 """ 193 """
128 build1 = Build.objects.create(**self.project1_build_success) 194 build1 = Build.objects.create(**self.project1_build_success)
129 default_build = Build.objects.create(**self.default_project_build_success) 195 default_build = Build.objects.create(
196 **self.default_project_build_success)
130 197
131 url = reverse('all-builds') 198 url = reverse('all-builds')
132 self.get(url) 199 self.get(url)
133 200
134 # shouldn't see a rebuild button for command-line builds
135 selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id
136 run_again_button = self.find_all(selector)
137 self.assertEqual(len(run_again_button), 0,
138 'should not see a rebuild button for cli builds')
139
140 # should see a rebuild button for non-command-line builds 201 # should see a rebuild button for non-command-line builds
202 self.wait_until_visible('#allbuildstable tbody tr')
203 self.wait_until_visible('.rebuild-btn')
141 selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id 204 selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id
142 run_again_button = self.find_all(selector) 205 run_again_button = self.find_all(selector)
143 self.assertEqual(len(run_again_button), 1, 206 self.assertEqual(len(run_again_button), 1,
144 'should see a rebuild button for non-cli builds') 207 'should see a rebuild button for non-cli builds')
145 208
209 # shouldn't see a rebuild button for command-line builds
210 selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id
211 run_again_button = self.find_all(selector)
212 self.assertEqual(len(run_again_button), 0,
213 'should not see a rebuild button for cli builds')
214
146 def test_tooltips_on_project_name(self): 215 def test_tooltips_on_project_name(self):
147 """ 216 """
148 Test tooltips shown next to project name in the main table 217 Test tooltips shown next to project name in the main table
@@ -156,6 +225,7 @@ class TestAllBuildsPage(SeleniumTestCase):
156 225
157 url = reverse('all-builds') 226 url = reverse('all-builds')
158 self.get(url) 227 self.get(url)
228 self.wait_until_visible('#allbuildstable')
159 229
160 # get the project name cells from the table 230 # get the project name cells from the table
161 cells = self.find_all('#allbuildstable td[class="project"]') 231 cells = self.find_all('#allbuildstable td[class="project"]')
@@ -164,7 +234,7 @@ class TestAllBuildsPage(SeleniumTestCase):
164 234
165 for cell in cells: 235 for cell in cells:
166 content = cell.get_attribute('innerHTML') 236 content = cell.get_attribute('innerHTML')
167 help_icons = cell.find_elements_by_css_selector(selector) 237 help_icons = cell.find_elements(By.CSS_SELECTOR, selector)
168 238
169 if re.search(self.PROJECT_NAME, content): 239 if re.search(self.PROJECT_NAME, content):
170 # no help icon next to non-cli project name 240 # no help icon next to non-cli project name
@@ -184,38 +254,224 @@ class TestAllBuildsPage(SeleniumTestCase):
184 recent builds area; failed builds should not have links on the time column, 254 recent builds area; failed builds should not have links on the time column,
185 or in the recent builds area 255 or in the recent builds area
186 """ 256 """
187 build1 = Build.objects.create(**self.project1_build_success) 257 build1, build2 = self._get_create_builds()
188 build2 = Build.objects.create(**self.project1_build_failure)
189
190 # add some targets to these builds so they have recipe links
191 # (and so we can find the row in the ToasterTable corresponding to
192 # a particular build)
193 Target.objects.create(build=build1, target='foo')
194 Target.objects.create(build=build2, target='bar')
195 258
196 url = reverse('all-builds') 259 url = reverse('all-builds')
197 self.get(url) 260 self.get(url)
261 self.wait_until_visible('#allbuildstable')
198 262
199 # test recent builds area for successful build 263 # test recent builds area for successful build
200 element = self._get_build_time_element(build1) 264 element = self._get_build_time_element(build1)
201 links = element.find_elements_by_css_selector('a') 265 links = element.find_elements(By.CSS_SELECTOR, 'a')
202 msg = 'should be a link on the build time for a successful recent build' 266 msg = 'should be a link on the build time for a successful recent build'
203 self.assertEquals(len(links), 1, msg) 267 self.assertEqual(len(links), 1, msg)
204 268
205 # test recent builds area for failed build 269 # test recent builds area for failed build
206 element = self._get_build_time_element(build2) 270 element = self._get_build_time_element(build2)
207 links = element.find_elements_by_css_selector('a') 271 links = element.find_elements(By.CSS_SELECTOR, 'a')
208 msg = 'should not be a link on the build time for a failed recent build' 272 msg = 'should not be a link on the build time for a failed recent build'
209 self.assertEquals(len(links), 0, msg) 273 self.assertEqual(len(links), 0, msg)
210 274
211 # test the time column for successful build 275 # test the time column for successful build
212 build1_row = self._get_row_for_build(build1) 276 build1_row = self._get_row_for_build(build1)
213 links = build1_row.find_elements_by_css_selector('td.time a') 277 links = build1_row.find_elements(By.CSS_SELECTOR, 'td.time a')
214 msg = 'should be a link on the build time for a successful build' 278 msg = 'should be a link on the build time for a successful build'
215 self.assertEquals(len(links), 1, msg) 279 self.assertEqual(len(links), 1, msg)
216 280
217 # test the time column for failed build 281 # test the time column for failed build
218 build2_row = self._get_row_for_build(build2) 282 build2_row = self._get_row_for_build(build2)
219 links = build2_row.find_elements_by_css_selector('td.time a') 283 links = build2_row.find_elements(By.CSS_SELECTOR, 'td.time a')
220 msg = 'should not be a link on the build time for a failed build' 284 msg = 'should not be a link on the build time for a failed build'
221 self.assertEquals(len(links), 0, msg) 285 self.assertEqual(len(links), 0, msg)
286
287 def test_builds_table_search_box(self):
288 """ Test the search box in the builds table on the all builds page """
289 self._get_create_builds()
290
291 url = reverse('all-builds')
292 self.get(url)
293
294 # Check search box is present and works
295 self.wait_until_visible('#allbuildstable tbody tr')
296 search_box = self.find('#search-input-allbuildstable')
297 self.assertTrue(search_box.is_displayed())
298
299 # Check that we can search for a build by recipe name
300 search_box.send_keys('foo')
301 search_btn = self.find('#search-submit-allbuildstable')
302 search_btn.click()
303 self.wait_until_visible('#allbuildstable tbody tr')
304 rows = self.find_all('#allbuildstable tbody tr')
305 self.assertTrue(len(rows) >= 1)
306
307 def test_filtering_on_failure_tasks_column(self):
308 """ Test the filtering on failure tasks column in the builds table on the all builds page """
309 def _check_if_filter_failed_tasks_column_is_visible():
310 # ensure the failed tasks column is shown; click its checkbox if it is not selected
311 # Check edit column
312 edit_column = self.find('#edit-columns-button')
313 self.assertTrue(edit_column.is_displayed())
314 edit_column.click()
315 # Check dropdown is visible
316 self.wait_until_visible('ul.dropdown-menu.editcol')
317 filter_fails_task_checkbox = self.find('#checkbox-failed_tasks')
318 if not filter_fails_task_checkbox.is_selected():
319 filter_fails_task_checkbox.click()
320 edit_column.click()
321
322 self._get_create_builds(success=10, failure=10)
323
324 url = reverse('all-builds')
325 self.get(url)
326
327 # Check filtering on failure tasks column
328 self.wait_until_visible('#allbuildstable tbody tr')
329 _check_if_filter_failed_tasks_column_is_visible()
330 failed_tasks_filter = self.find('#failed_tasks_filter')
331 failed_tasks_filter.click()
332 # Check popup is visible
333 self.wait_until_visible('#filter-modal-allbuildstable')
334 self.assertTrue(
335 self.find('#filter-modal-allbuildstable').is_displayed())
336 # Check that we can filter by failure tasks
337 build_without_failure_tasks = self.find(
338 '#failed_tasks_filter\\:without_failed_tasks')
339 build_without_failure_tasks.click()
340 # click on apply button
341 self.find('#filter-modal-allbuildstable .btn-primary').click()
342 self.wait_until_visible('#allbuildstable tbody tr')
343 # Check if filter is applied, by checking if failed_tasks_filter has btn-primary class
344 self.assertTrue(self.find('#failed_tasks_filter').get_attribute(
345 'class').find('btn-primary') != -1)
346
347 def test_filtering_on_completedOn_column(self):
348 """ Test the filtering on completed_on column in the builds table on the all builds page """
349 self._get_create_builds(success=10, failure=10)
350
351 url = reverse('all-builds')
352 self.get(url)
353
354 # Check filtering on the completed_on column
355 self.wait_until_visible('#allbuildstable tbody tr')
356 completed_on_filter = self.find('#completed_on_filter')
357 completed_on_filter.click()
358 # Check popup is visible
359 self.wait_until_visible('#filter-modal-allbuildstable')
360 self.assertTrue(
361 self.find('#filter-modal-allbuildstable').is_displayed())
362 # Check that we can filter by completion date range
363 date_range_option = self.find(
364 '#completed_on_filter\\:date_range')
365 date_range_option.click()
366 # click on apply button
367 self.find('#filter-modal-allbuildstable .btn-primary').click()
368 self.wait_until_visible('#allbuildstable tbody tr')
369 # Check if filter is applied, by checking if completed_on_filter has btn-primary class
370 self.assertTrue(self.find('#completed_on_filter').get_attribute(
371 'class').find('btn-primary') != -1)
372
373 # Filter by date range
374 self.find('#completed_on_filter').click()
375 self.wait_until_visible('#filter-modal-allbuildstable')
376 date_ranges = self.driver.find_elements(
377 By.XPATH, '//input[@class="form-control hasDatepicker"]')
378 today = timezone.now()
379 yesterday = today - timezone.timedelta(days=1)
380 date_ranges[0].send_keys(yesterday.strftime('%Y-%m-%d'))
381 date_ranges[1].send_keys(today.strftime('%Y-%m-%d'))
382 self.find('#filter-modal-allbuildstable .btn-primary').click()
383 self.wait_until_visible('#allbuildstable tbody tr')
384 self.assertTrue(self.find('#completed_on_filter').get_attribute(
385 'class').find('btn-primary') != -1)
386 # The two base builds plus the four in-range builds should match (6 rows); assert a conservative lower bound
387 self.assertTrue(len(self.find_all('#allbuildstable tbody tr')) >= 4)
388
389 def test_builds_table_editColumn(self):
390 """ Test the edit column feature in the builds table on the all builds page """
391 self._get_create_builds(success=10, failure=10)
392
393 def test_edit_column(check_box_id):
394 # Check that we can hide/show table column
395 check_box = self.find(f'#{check_box_id}')
396 th_class = str(check_box_id).replace('checkbox-', '')
397 if check_box.is_selected():
398 # check if column is visible in table
399 self.assertTrue(
400 self.find(
401 f'#allbuildstable thead th.{th_class}'
402 ).is_displayed(),
403 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
404 )
405 check_box.click()
406 # check if column is hidden in table
407 self.assertFalse(
408 self.find(
409 f'#allbuildstable thead th.{th_class}'
410 ).is_displayed(),
411 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
412 )
413 else:
414 # check if column is hidden in table
415 self.assertFalse(
416 self.find(
417 f'#allbuildstable thead th.{th_class}'
418 ).is_displayed(),
419 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
420 )
421 check_box.click()
422 # check if column is visible in table
423 self.assertTrue(
424 self.find(
425 f'#allbuildstable thead th.{th_class}'
426 ).is_displayed(),
427 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
428 )
429 url = reverse('all-builds')
430 self.get(url)
431 self.wait_until_visible('#allbuildstable tbody tr')
432
433 # Check edit column
434 edit_column = self.find('#edit-columns-button')
435 self.assertTrue(edit_column.is_displayed())
436 edit_column.click()
437 # Check dropdown is visible
438 self.wait_until_visible('ul.dropdown-menu.editcol')
439
440 # Check that we can hide/show each column via the edit columns menu
441 test_edit_column('checkbox-errors_no')
442 test_edit_column('checkbox-failed_tasks')
443 test_edit_column('checkbox-image_files')
444 test_edit_column('checkbox-project')
445 test_edit_column('checkbox-started_on')
446 test_edit_column('checkbox-time')
447 test_edit_column('checkbox-warnings_no')
448
449 def test_builds_table_show_rows(self):
450 """ Test the show rows feature in the builds table on the all builds page """
451 self._get_create_builds(success=100, failure=100)
452
453 def test_show_rows(row_to_show, show_row_link):
454 # Check that we can show rows == row_to_show
455 show_row_link.select_by_value(str(row_to_show))
456 self.wait_until_visible('#allbuildstable tbody tr')
457 # check at least some rows are visible
458 self.assertTrue(
459 len(self.find_all('#allbuildstable tbody tr')) > 0
460 )
461
462 url = reverse('all-builds')
463 self.get(url)
464 self.wait_until_visible('#allbuildstable tbody tr')
465
466 show_rows = self.driver.find_elements(
467 By.XPATH,
468 '//select[@class="form-control pagesize-allbuildstable"]'
469 )
470 # Check show rows
471 for show_row_link in show_rows:
472 show_row_link = Select(show_row_link)
473 test_show_rows(10, show_row_link)
474 test_show_rows(25, show_row_link)
475 test_show_rows(50, show_row_link)
476 test_show_rows(100, show_row_link)
477 test_show_rows(150, show_row_link)
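
Note, not part of the patch: a short sketch of why the date-range assertion above is a lower bound rather than an exact count. _get_create_builds(success=10, failure=10) completes each extra build i days in the past for i = 0..9, so a [yesterday, today] range catches i in {0, 1} per outcome, on top of the two base builds:

    from datetime import timedelta

    range_width = timedelta(days=1)
    in_range = [i for i in range(10) if timedelta(days=i) <= range_width]
    assert in_range == [0, 1]
    # 2 successful + 2 failed in-range builds + 2 base builds = 6 expected rows;
    # asserting >= 4 keeps the test robust to timing at the range edges
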
diff --git a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
index 15b03400f9..05e12892be 100644
--- a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
@@ -7,15 +7,20 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import os
10import re 11import re
11 12
12from django.urls import reverse 13from django.urls import reverse
13from django.utils import timezone 14from django.utils import timezone
15from selenium.webdriver.support.select import Select
14from tests.browser.selenium_helpers import SeleniumTestCase 16from tests.browser.selenium_helpers import SeleniumTestCase
15 17
16from orm.models import BitbakeVersion, Release, Project, Build 18from orm.models import BitbakeVersion, Release, Project, Build
17from orm.models import ProjectVariable 19from orm.models import ProjectVariable
18 20
21from selenium.webdriver.common.by import By
22
23
19class TestAllProjectsPage(SeleniumTestCase): 24class TestAllProjectsPage(SeleniumTestCase):
20 """ Browser tests for projects page /projects/ """ 25 """ Browser tests for projects page /projects/ """
21 26
@@ -25,7 +30,8 @@ class TestAllProjectsPage(SeleniumTestCase):
25 30
26 def setUp(self): 31 def setUp(self):
27 """ Add default project manually """ 32 """ Add default project manually """
28 project = Project.objects.create_project(self.CLI_BUILDS_PROJECT_NAME, None) 33 project = Project.objects.create_project(
34 self.CLI_BUILDS_PROJECT_NAME, None)
29 self.default_project = project 35 self.default_project = project
30 self.default_project.is_default = True 36 self.default_project.is_default = True
31 self.default_project.save() 37 self.default_project.save()
@@ -35,6 +41,17 @@ class TestAllProjectsPage(SeleniumTestCase):
35 41
36 self.release = None 42 self.release = None
37 43
44 def _create_projects(self, nb_project=10):
45 projects = []
46 for i in range(1, nb_project + 1):
47 projects.append(
48 Project(
49 name='test project {}'.format(i),
50 release=self.release,
51 )
52 )
53 Project.objects.bulk_create(projects)
54
38 def _add_build_to_default_project(self): 55 def _add_build_to_default_project(self):
39 """ Add a build to the default project (not used in all tests) """ 56 """ Add a build to the default project (not used in all tests) """
40 now = timezone.now() 57 now = timezone.now()
@@ -45,12 +62,14 @@ class TestAllProjectsPage(SeleniumTestCase):
45 62
46 def _add_non_default_project(self): 63 def _add_non_default_project(self):
47 """ Add another project """ 64 """ Add another project """
48 bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/', 65 builddir = os.environ.get('BUILDDIR', './')
66 bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builddir}/',
49 branch='master', dirpath='') 67 branch='master', dirpath='')
50 self.release = Release.objects.create(name='test release', 68 self.release = Release.objects.create(name='test release',
51 branch_name='master', 69 branch_name='master',
52 bitbake_version=bbv) 70 bitbake_version=bbv)
53 self.project = Project.objects.create_project(self.PROJECT_NAME, self.release) 71 self.project = Project.objects.create_project(
72 self.PROJECT_NAME, self.release)
54 self.project.is_default = False 73 self.project.is_default = False
55 self.project.save() 74 self.project.save()
56 75
@@ -62,7 +81,7 @@ class TestAllProjectsPage(SeleniumTestCase):
62 81
63 def _get_row_for_project(self, project_name): 82 def _get_row_for_project(self, project_name):
64 """ Get the HTML row for a project, or None if not found """ 83 """ Get the HTML row for a project, or None if not found """
65 self.wait_until_present('#projectstable tbody tr') 84 self.wait_until_visible('#projectstable tbody tr')
66 rows = self.find_all('#projectstable tbody tr') 85 rows = self.find_all('#projectstable tbody tr')
67 86
68 # find the row with a project name matching the one supplied 87 # find the row with a project name matching the one supplied
@@ -93,7 +112,8 @@ class TestAllProjectsPage(SeleniumTestCase):
93 url = reverse('all-projects') 112 url = reverse('all-projects')
94 self.get(url) 113 self.get(url)
95 114
96 default_project_row = self._get_row_for_project(self.default_project.name) 115 default_project_row = self._get_row_for_project(
116 self.default_project.name)
97 117
98 self.assertNotEqual(default_project_row, None, 118 self.assertNotEqual(default_project_row, None,
99 'default project "cli builds" should be in page') 119 'default project "cli builds" should be in page')
@@ -113,11 +133,12 @@ class TestAllProjectsPage(SeleniumTestCase):
113 self.wait_until_visible("#projectstable tr") 133 self.wait_until_visible("#projectstable tr")
114 134
115 # find the row for the default project 135 # find the row for the default project
116 default_project_row = self._get_row_for_project(self.default_project.name) 136 default_project_row = self._get_row_for_project(
137 self.default_project.name)
117 138
118 # check the release text for the default project 139 # check the release text for the default project
119 selector = 'span[data-project-field="release"] span.text-muted' 140 selector = 'span[data-project-field="release"] span.text-muted'
120 element = default_project_row.find_element_by_css_selector(selector) 141 element = default_project_row.find_element(By.CSS_SELECTOR, selector)
121 text = element.text.strip() 142 text = element.text.strip()
122 self.assertEqual(text, 'Not applicable', 143 self.assertEqual(text, 'Not applicable',
123 'release should be "not applicable" for default project') 144 'release should be "not applicable" for default project')
@@ -127,7 +148,7 @@ class TestAllProjectsPage(SeleniumTestCase):
127 148
128 # check the link in the release cell for the other project 149 # check the link in the release cell for the other project
129 selector = 'span[data-project-field="release"]' 150 selector = 'span[data-project-field="release"]'
130 element = other_project_row.find_element_by_css_selector(selector) 151 element = other_project_row.find_element(By.CSS_SELECTOR, selector)
131 text = element.text.strip() 152 text = element.text.strip()
132 self.assertEqual(text, self.release.name, 153 self.assertEqual(text, self.release.name,
133 'release name should be shown for non-default project') 154 'release name should be shown for non-default project')
@@ -148,11 +169,12 @@ class TestAllProjectsPage(SeleniumTestCase):
148 self.wait_until_visible("#projectstable tr") 169 self.wait_until_visible("#projectstable tr")
149 170
150 # find the row for the default project 171 # find the row for the default project
151 default_project_row = self._get_row_for_project(self.default_project.name) 172 default_project_row = self._get_row_for_project(
173 self.default_project.name)
152 174
153 # check the machine cell for the default project 175 # check the machine cell for the default project
154 selector = 'span[data-project-field="machine"] span.text-muted' 176 selector = 'span[data-project-field="machine"] span.text-muted'
155 element = default_project_row.find_element_by_css_selector(selector) 177 element = default_project_row.find_element(By.CSS_SELECTOR, selector)
156 text = element.text.strip() 178 text = element.text.strip()
157 self.assertEqual(text, 'Not applicable', 179 self.assertEqual(text, 'Not applicable',
158 'machine should be not applicable for default project') 180 'machine should be not applicable for default project')
@@ -162,7 +184,7 @@ class TestAllProjectsPage(SeleniumTestCase):
162 184
163 # check the link in the machine cell for the other project 185 # check the link in the machine cell for the other project
164 selector = 'span[data-project-field="machine"]' 186 selector = 'span[data-project-field="machine"]'
165 element = other_project_row.find_element_by_css_selector(selector) 187 element = other_project_row.find_element(By.CSS_SELECTOR, selector)
166 text = element.text.strip() 188 text = element.text.strip()
167 self.assertEqual(text, self.MACHINE_NAME, 189 self.assertEqual(text, self.MACHINE_NAME,
168 'machine name should be shown for non-default project') 190 'machine name should be shown for non-default project')
@@ -183,13 +205,15 @@ class TestAllProjectsPage(SeleniumTestCase):
183 self.get(reverse('all-projects')) 205 self.get(reverse('all-projects'))
184 206
185 # find the row for the default project 207 # find the row for the default project
186 default_project_row = self._get_row_for_project(self.default_project.name) 208 default_project_row = self._get_row_for_project(
209 self.default_project.name)
187 210
188 # check the link on the name field 211 # check the link on the name field
189 selector = 'span[data-project-field="name"] a' 212 selector = 'span[data-project-field="name"] a'
190 element = default_project_row.find_element_by_css_selector(selector) 213 element = default_project_row.find_element(By.CSS_SELECTOR, selector)
191 link_url = element.get_attribute('href').strip() 214 link_url = element.get_attribute('href').strip()
192 expected_url = reverse('projectbuilds', args=(self.default_project.id,)) 215 expected_url = reverse(
216 'projectbuilds', args=(self.default_project.id,))
193 msg = 'link on default project name should point to builds but was %s' % link_url 217 msg = 'link on default project name should point to builds but was %s' % link_url
194 self.assertTrue(link_url.endswith(expected_url), msg) 218 self.assertTrue(link_url.endswith(expected_url), msg)
195 219
@@ -198,8 +222,116 @@ class TestAllProjectsPage(SeleniumTestCase):
198 222
199 # check the link for the other project 223 # check the link for the other project
200 selector = 'span[data-project-field="name"] a' 224 selector = 'span[data-project-field="name"] a'
201 element = other_project_row.find_element_by_css_selector(selector) 225 element = other_project_row.find_element(By.CSS_SELECTOR, selector)
202 link_url = element.get_attribute('href').strip() 226 link_url = element.get_attribute('href').strip()
203 expected_url = reverse('project', args=(self.project.id,)) 227 expected_url = reverse('project', args=(self.project.id,))
204 msg = 'link on project name should point to configuration but was %s' % link_url 228 msg = 'link on project name should point to configuration but was %s' % link_url
205 self.assertTrue(link_url.endswith(expected_url), msg) 229 self.assertTrue(link_url.endswith(expected_url), msg)
230
231 def test_allProject_table_search_box(self):
232 """ Test the search box in the all project table on the all projects page """
233 self._create_projects()
234
235 url = reverse('all-projects')
236 self.get(url)
237
238 # Check the search box is present and works
239 self.wait_until_visible('#projectstable tbody tr')
240 search_box = self.find('#search-input-projectstable')
241 self.assertTrue(search_box.is_displayed())
242
243 # Check that we can search for a project by project name
244 search_box.send_keys('test project 10')
245 search_btn = self.find('#search-submit-projectstable')
246 search_btn.click()
247 self.wait_until_visible('#projectstable tbody tr')
248 rows = self.find_all('#projectstable tbody tr')
249 self.assertTrue(len(rows) == 1)
250
251 def test_allProject_table_editColumn(self):
252 """ Test the edit column feature in the projects table on the all projects page """
253 self._create_projects()
254
255 def test_edit_column(check_box_id):
256 # Check that we can hide/show table column
257 check_box = self.find(f'#{check_box_id}')
258 th_class = str(check_box_id).replace('checkbox-', '')
259 if check_box.is_selected():
260 # check if column is visible in table
261 self.assertTrue(
262 self.find(
263 f'#projectstable thead th.{th_class}'
264 ).is_displayed(),
265 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
266 )
267 check_box.click()
268 # check if column is hidden in table
269 self.assertFalse(
270 self.find(
271 f'#projectstable thead th.{th_class}'
272 ).is_displayed(),
273 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
274 )
275 else:
276 # check if column is hidden in table
277 self.assertFalse(
278 self.find(
279 f'#projectstable thead th.{th_class}'
280 ).is_displayed(),
281 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
282 )
283 check_box.click()
284 # check if column is visible in table
285 self.assertTrue(
286 self.find(
287 f'#projectstable thead th.{th_class}'
288 ).is_displayed(),
289 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
290 )
291 url = reverse('all-projects')
292 self.get(url)
293 self.wait_until_visible('#projectstable tbody tr')
294
295 # Check edit column
296 edit_column = self.find('#edit-columns-button')
297 self.assertTrue(edit_column.is_displayed())
298 edit_column.click()
299 # Check dropdown is visible
300 self.wait_until_visible('ul.dropdown-menu.editcol')
301
302 # Check that we can hide/show each column via the edit columns menu
303 test_edit_column('checkbox-errors')
304 test_edit_column('checkbox-image_files')
305 test_edit_column('checkbox-last_build_outcome')
306 test_edit_column('checkbox-recipe_name')
307 test_edit_column('checkbox-warnings')
308
309 def test_allProject_table_show_rows(self):
310 """ Test the show rows feature in the projects table on the all projects page """
311 self._create_projects(nb_project=200)
312
313 def test_show_rows(row_to_show, show_row_link):
314 # Check that we can show rows == row_to_show
315 show_row_link.select_by_value(str(row_to_show))
316 self.wait_until_visible('#projectstable tbody tr')
317 # check at least some rows are visible
318 self.assertTrue(
319 len(self.find_all('#projectstable tbody tr')) > 0
320 )
321
322 url = reverse('all-projects')
323 self.get(url)
324 self.wait_until_visible('#projectstable tbody tr')
325
326 show_rows = self.driver.find_elements(
327 By.XPATH,
328 '//select[@class="form-control pagesize-projectstable"]'
329 )
330 # Check show rows
331 for show_row_link in show_rows:
332 show_row_link = Select(show_row_link)
333 test_show_rows(10, show_row_link)
334 test_show_rows(25, show_row_link)
335 test_show_rows(50, show_row_link)
336 test_show_rows(100, show_row_link)
337 test_show_rows(150, show_row_link)
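
Note, not part of the patch: _create_projects uses bulk_create, which issues a single INSERT and skips per-object save() and model signals, so the generated projects receive none of the setup that Project.objects.create_project() performs; that is enough for these table-rendering tests. Equivalent sketch:

    from orm.models import Project

    # one query for all rows; save() and pre/post-save signals are not run
    projects = [Project(name=f'test project {i}', release=None) for i in range(1, 11)]
    Project.objects.bulk_create(projects)
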
diff --git a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py
index efcd89b346..82367108e2 100644
--- a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py
@@ -7,6 +7,7 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import os
10from django.urls import reverse 11from django.urls import reverse
11from django.utils import timezone 12from django.utils import timezone
12 13
@@ -15,11 +16,14 @@ from tests.browser.selenium_helpers import SeleniumTestCase
15from orm.models import Project, Release, BitbakeVersion, Build, LogMessage 16from orm.models import Project, Release, BitbakeVersion, Build, LogMessage
16from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe, Variable 17from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe, Variable
17 18
19from selenium.webdriver.common.by import By
20
18class TestBuildDashboardPage(SeleniumTestCase): 21class TestBuildDashboardPage(SeleniumTestCase):
19 """ Tests for the build dashboard /build/X """ 22 """ Tests for the build dashboard /build/X """
20 23
21 def setUp(self): 24 def setUp(self):
22 bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', 25 builddir = os.environ.get('BUILDDIR', './')
26 bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
23 branch='master', dirpath="") 27 branch='master', dirpath="")
24 release = Release.objects.create(name='release1', 28 release = Release.objects.create(name='release1',
25 bitbake_version=bbv) 29 bitbake_version=bbv)
@@ -158,6 +162,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
158 """ 162 """
159 url = reverse('builddashboard', args=(build.id,)) 163 url = reverse('builddashboard', args=(build.id,))
160 self.get(url) 164 self.get(url)
165 self.wait_until_visible('#global-nav')
161 166
162 def _get_build_dashboard_errors(self, build): 167 def _get_build_dashboard_errors(self, build):
163 """ 168 """
@@ -183,7 +188,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
183 188
184 found = False 189 found = False
185 for element in message_elements: 190 for element in message_elements:
186 log_message_text = element.find_element_by_tag_name('pre').text.strip() 191 log_message_text = element.find_element(By.TAG_NAME, 'pre').text.strip()
187 text_matches = (log_message_text == expected_text) 192 text_matches = (log_message_text == expected_text)
188 193
189 log_message_pk = element.get_attribute('data-log-message-id') 194 log_message_pk = element.get_attribute('data-log-message-id')
@@ -213,7 +218,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
213 the WebElement modal match the list of text values in expected 218 the WebElement modal match the list of text values in expected
214 """ 219 """
215 # labels containing the radio buttons we're testing for 220 # labels containing the radio buttons we're testing for
216 labels = modal.find_elements_by_css_selector(".radio") 221 labels = modal.find_elements(By.CSS_SELECTOR, ".radio")
217 222
218 labels_text = [lab.text for lab in labels] 223 labels_text = [lab.text for lab in labels]
219 self.assertEqual(len(labels_text), len(expected)) 224 self.assertEqual(len(labels_text), len(expected))
@@ -248,7 +253,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
248 selector = '[data-role="edit-custom-image-trigger"]' 253 selector = '[data-role="edit-custom-image-trigger"]'
249 self.click(selector) 254 self.click(selector)
250 255
251 modal = self.driver.find_element_by_id('edit-custom-image-modal') 256 modal = self.driver.find_element(By.ID, 'edit-custom-image-modal')
252 self.wait_until_visible("#edit-custom-image-modal") 257 self.wait_until_visible("#edit-custom-image-modal")
253 258
254 # recipes we expect to see in the edit custom image modal 259 # recipes we expect to see in the edit custom image modal
@@ -270,7 +275,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
270 selector = '[data-role="new-custom-image-trigger"]' 275 selector = '[data-role="new-custom-image-trigger"]'
271 self.click(selector) 276 self.click(selector)
272 277
273 modal = self.driver.find_element_by_id('new-custom-image-modal') 278 modal = self.driver.find_element(By.ID, 'new-custom-image-modal')
274 self.wait_until_visible("#new-custom-image-modal") 279 self.wait_until_visible("#new-custom-image-modal")
275 280
276 # recipes we expect to see in the new custom image modal 281 # recipes we expect to see in the new custom image modal
diff --git a/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py b/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
index c6226d60eb..675825bd40 100644
--- a/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
+++ b/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
@@ -7,6 +7,7 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import os
10from django.urls import reverse 11from django.urls import reverse
11from django.utils import timezone 12from django.utils import timezone
12 13
@@ -20,7 +21,8 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase):
20 """ Tests for artifacts on the build dashboard /build/X """ 21 """ Tests for artifacts on the build dashboard /build/X """
21 22
22 def setUp(self): 23 def setUp(self):
23 bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', 24 builddir = os.environ.get('BUILDDIR', './')
25 bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
24 branch='master', dirpath="") 26 branch='master', dirpath="")
25 release = Release.objects.create(name='release1', 27 release = Release.objects.create(name='release1',
26 bitbake_version=bbv) 28 bitbake_version=bbv)
@@ -197,12 +199,12 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase):
197 # check package count and size, link on target name 199 # check package count and size, link on target name
198 selector = '[data-value="target-package-count"]' 200 selector = '[data-value="target-package-count"]'
199 element = self.find(selector) 201 element = self.find(selector)
200 self.assertEquals(element.text, '1', 202 self.assertEqual(element.text, '1',
201 'package count should be shown for image builds') 203 'package count should be shown for image builds')
202 204
203 selector = '[data-value="target-package-size"]' 205 selector = '[data-value="target-package-size"]'
204 element = self.find(selector) 206 element = self.find(selector)
205 self.assertEquals(element.text, '1.0 KB', 207 self.assertEqual(element.text, '1.0 KB',
206 'package size should be shown for image builds') 208 'package size should be shown for image builds')
207 209
208 selector = '[data-link="target-packages"]' 210 selector = '[data-link="target-packages"]'
diff --git a/bitbake/lib/toaster/tests/browser/test_delete_project.py b/bitbake/lib/toaster/tests/browser/test_delete_project.py
new file mode 100644
index 0000000000..1941777ccc
--- /dev/null
+++ b/bitbake/lib/toaster/tests/browser/test_delete_project.py
@@ -0,0 +1,103 @@
1#!/usr/bin/env python3
2# -*- coding: utf-8 -*-
3# BitBake Toaster UI tests implementation
4#
5# Copyright (C) 2023 Savoir-faire Linux Inc
6#
7# SPDX-License-Identifier: GPL-2.0-only
8
9import pytest
10from django.urls import reverse
11from selenium.webdriver.support.ui import Select
12from tests.browser.selenium_helpers import SeleniumTestCase
13from orm.models import BitbakeVersion, Project, Release
14from selenium.webdriver.common.by import By
15
16class TestDeleteProject(SeleniumTestCase):
17
18 def setUp(self):
19 bitbake, _ = BitbakeVersion.objects.get_or_create(
20 name="master",
21 giturl="git://master",
22 branch="master",
23 dirpath="master")
24
25 self.release, _ = Release.objects.get_or_create(
26 name="master",
27 description="Yocto Project master",
28 branch_name="master",
29 helptext="latest",
30 bitbake_version=bitbake)
31
32 Release.objects.get_or_create(
33 name="foo",
34 description="Yocto Project foo",
35 branch_name="foo",
36 helptext="latest",
37 bitbake_version=bitbake)
38
39 @pytest.mark.django_db
40 def test_delete_project(self):
41 """ Test delete a project
42 - Check delete modal is visible
43 - Check delete modal has right text
44 - Confirm delete
45 - Check project is deleted
46 """
47 project_name = "project_to_delete"
48 url = reverse('newproject')
49 self.get(url)
50 self.enter_text('#new-project-name', project_name)
51 select = Select(self.find('#projectversion'))
52 select.select_by_value(str(self.release.pk))
53 self.click("#create-project-button")
54 # We should get redirected to the new project's page with the
55 # notification at the top
56 element = self.wait_until_visible('#project-created-notification')
57 self.assertTrue(project_name in element.text,
58 "New project name not in new project notification")
59 self.assertTrue(Project.objects.filter(name=project_name).count(),
60 "New project not found in database")
61
62 # Delete project
63 delete_project_link = self.driver.find_element(
64 By.XPATH, '//a[@href="#delete-project-modal"]')
65 delete_project_link.click()
66
67 # Check delete modal is visible
68 self.wait_until_visible('#delete-project-modal')
69
70 # Check delete modal has right text
71 modal_header_text = self.find('#delete-project-modal .modal-header').text
72 self.assertTrue(
73 "Are you sure you want to delete this project?" in modal_header_text,
74 "Delete project modal header text is wrong")
75
76 modal_body_text = self.find('#delete-project-modal .modal-body').text
77 self.assertTrue(
78 "Cancel its builds currently in progress" in modal_body_text,
79 "Modal body doesn't contain: Cancel its builds currently in progress")
80 self.assertTrue(
81 "Remove its configuration information" in modal_body_text,
82 "Modal body doesn't contain: Remove its configuration information")
83 self.assertTrue(
84 "Remove its imported layers" in modal_body_text,
85 "Modal body doesn't contain: Remove its imported layers")
86 self.assertTrue(
87 "Remove its custom images" in modal_body_text,
88 "Modal body doesn't contain: Remove its custom images")
89 self.assertTrue(
90 "Remove all its build information" in modal_body_text,
91 "Modal body doesn't contain: Remove all its build information")
92
93 # Confirm delete
94 delete_btn = self.find('#delete-project-confirmed')
95 delete_btn.click()
96
97 # Check project is deleted
98 self.wait_until_visible('#change-notification')
99 delete_notification = self.find('#change-notification-msg')
100 self.assertTrue("You have deleted 1 project:" in delete_notification.text)
101 self.assertTrue(project_name in delete_notification.text)
102 self.assertFalse(Project.objects.filter(name=project_name).exists(),
103 "Project not deleted from database")
diff --git a/bitbake/lib/toaster/tests/browser/test_landing_page.py b/bitbake/lib/toaster/tests/browser/test_landing_page.py
index 8bb64b9f3e..210359d561 100644
--- a/bitbake/lib/toaster/tests/browser/test_landing_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_landing_page.py
@@ -10,8 +10,10 @@
10from django.urls import reverse 10from django.urls import reverse
11from django.utils import timezone 11from django.utils import timezone
12from tests.browser.selenium_helpers import SeleniumTestCase 12from tests.browser.selenium_helpers import SeleniumTestCase
13from selenium.webdriver.common.by import By
14
15from orm.models import Layer, Layer_Version, Project, Build
13 16
14from orm.models import Project, Build
15 17
16class TestLandingPage(SeleniumTestCase): 18class TestLandingPage(SeleniumTestCase):
17 """ Tests for redirects on the landing page """ 19 """ Tests for redirects on the landing page """
@@ -29,12 +31,147 @@ class TestLandingPage(SeleniumTestCase):
29 self.project.is_default = True 31 self.project.is_default = True
30 self.project.save() 32 self.project.save()
31 33
34 def test_icon_info_visible_and_clickable(self):
35 """ Test that the information icon is visible and clickable """
36 self.get(reverse('landing'))
37 self.wait_until_visible('#toaster-version-info-sign')
38 info_sign = self.find('#toaster-version-info-sign')
39
40 # check that the info sign is visible
41 self.assertTrue(info_sign.is_displayed())
42
43 # check that the info sign is clickable
44 # and the info modal appears when it is clicked
45 info_sign.click() # clicking exposes the 'aria-describedby' attribute
46 info_modal_id = info_sign.get_attribute('aria-describedby')
47 self.wait_until_visible(f'#{info_modal_id}')
48 info_modal = self.find(f'#{info_modal_id}')
49 self.assertTrue(info_modal.is_displayed())
50 self.assertTrue("Toaster version information" in info_modal.text)
51
52 def test_documentation_link_displayed(self):
53 """ Test that the documentation link is displayed """
54 self.get(reverse('landing'))
55 self.wait_until_visible('#navbar-docs')
56 documentation_link = self.find('#navbar-docs > a')
57
58 # check that the documentation link is visible
59 self.assertTrue(documentation_link.is_displayed())
60
61 # check the browser opens the Toaster manual in a new tab when the documentation link is clicked
62 self.assertEqual(documentation_link.get_attribute('target'), '_blank')
63 self.assertEqual(
64 documentation_link.get_attribute('href'),
65 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual')
66 self.assertTrue("Documentation" in documentation_link.text)
67
68 def test_openembedded_jumbotron_link_visible_and_clickable(self):
69 """ Test OpenEmbedded link jumbotron is visible and clickable: """
70 self.get(reverse('landing'))
71 self.wait_until_visible('.jumbotron')
72 jumbotron = self.find('.jumbotron')
73
74 # check OpenEmbedded
75 openembedded = jumbotron.find_element(By.LINK_TEXT, 'OpenEmbedded')
76 self.assertTrue(openembedded.is_displayed())
77 openembedded.click()
78 self.assertTrue("openembedded.org" in self.driver.current_url)
79
80 def test_bitbake_jumbotron_link_visible_and_clickable(self):
81 """ Test BitBake link jumbotron is visible and clickable: """
82 self.get(reverse('landing'))
83 self.wait_until_visible('.jumbotron')
84 jumbotron = self.find('.jumbotron')
85
86 # check BitBake
87 bitbake = jumbotron.find_element(By.LINK_TEXT, 'BitBake')
88 self.assertTrue(bitbake.is_displayed())
89 bitbake.click()
90 self.assertTrue(
91 "docs.yoctoproject.org/bitbake.html" in self.driver.current_url)
92
93 def test_yoctoproject_jumbotron_link_visible_and_clickable(self):
94 """ Test Yocto Project link jumbotron is visible and clickable: """
95 self.get(reverse('landing'))
96 self.wait_until_visible('.jumbotron')
97 jumbotron = self.find('.jumbotron')
98
99 # check Yocto Project
100 yoctoproject = jumbotron.find_element(By.LINK_TEXT, 'Yocto Project')
101 self.assertTrue(yoctoproject.is_displayed())
102 yoctoproject.click()
103 self.assertTrue("yoctoproject.org" in self.driver.current_url)
104
105 def test_link_setup_using_toaster_visible_and_clickable(self):
106 """ Test big magenta button setting up and using toaster link in jumbotron
107 if visible and clickable
108 """
109 self.get(reverse('landing'))
110 self.wait_until_visible('.jumbotron')
111 jumbotron = self.find('.jumbotron')
112
113 # check Big magenta button
114 big_magenta_button = jumbotron.find_element(By.LINK_TEXT,
115 'Toaster is ready to capture your command line builds'
116 )
117 self.assertTrue(big_magenta_button.is_displayed())
118 big_magenta_button.click()
119 self.assertTrue(
120 "docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" in self.driver.current_url)
121
122 def test_link_create_new_project_in_jumbotron_visible_and_clickable(self):
123 """ Test big blue button create new project jumbotron if visible and clickable """
124 # Create a layer and a layer version to make visible the big blue button
125 layer = Layer.objects.create(name='bar')
126 Layer_Version.objects.create(layer=layer)
127
128 self.get(reverse('landing'))
129 self.wait_until_visible('.jumbotron')
130 jumbotron = self.find('.jumbotron')
131
132 # check Big Blue button
133 big_blue_button = jumbotron.find_element(By.LINK_TEXT,
134 'Create your first Toaster project to run manage builds'
135 )
136 self.assertTrue(big_blue_button.is_displayed())
137 big_blue_button.click()
138 self.assertTrue("toastergui/newproject/" in self.driver.current_url)
139
140 def test_toaster_manual_link_visible_and_clickable(self):
141 """ Test Read the Toaster manual link jumbotron is visible and clickable: """
142 self.get(reverse('landing'))
143 self.wait_until_visible('.jumbotron')
144 jumbotron = self.find('.jumbotron')
145
146 # check Read the Toaster manual
147 toaster_manual = jumbotron.find_element(
148 By.LINK_TEXT, 'Read the Toaster manual')
149 self.assertTrue(toaster_manual.is_displayed())
150 toaster_manual.click()
151 self.assertTrue(
152 "https://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual" in self.driver.current_url)
153
154 def test_contrib_to_toaster_link_visible_and_clickable(self):
155 """ Test Contribute to Toaster link jumbotron is visible and clickable: """
156 self.get(reverse('landing'))
157 self.wait_until_visible('.jumbotron')
158 jumbotron = self.find('.jumbotron')
159
160 # check Contribute to Toaster
161 contribute_to_toaster = jumbotron.find_element(
162 By.LINK_TEXT, 'Contribute to Toaster')
163 self.assertTrue(contribute_to_toaster.is_displayed())
164 contribute_to_toaster.click()
165 self.assertTrue(
166 "wiki.yoctoproject.org/wiki/contribute_to_toaster" in str(self.driver.current_url).lower())
167
32 def test_only_default_project(self): 168 def test_only_default_project(self):
33 """ 169 """
34 No projects except default 170 No projects except default
35 => should see the landing page 171 => should see the landing page
36 """ 172 """
37 self.get(reverse('landing')) 173 self.get(reverse('landing'))
174 self.wait_until_visible('.jumbotron')
38 self.assertTrue(self.LANDING_PAGE_TITLE in self.get_page_source()) 175 self.assertTrue(self.LANDING_PAGE_TITLE in self.get_page_source())
39 176
40 def test_default_project_has_build(self): 177 def test_default_project_has_build(self):
@@ -67,6 +204,7 @@ class TestLandingPage(SeleniumTestCase):
67 user_project.save() 204 user_project.save()
68 205
69 self.get(reverse('landing')) 206 self.get(reverse('landing'))
207 self.wait_until_visible('#projectstable')
70 208
71 elements = self.find_all('#projectstable') 209 elements = self.find_all('#projectstable')
72 self.assertEqual(len(elements), 1, 'should redirect to projects') 210 self.assertEqual(len(elements), 1, 'should redirect to projects')
@@ -87,10 +225,9 @@ class TestLandingPage(SeleniumTestCase):
87 225
88 self.get(reverse('landing')) 226 self.get(reverse('landing'))
89 227
228 self.wait_until_visible("#latest-builds")
90 elements = self.find_all('#allbuildstable') 229 elements = self.find_all('#allbuildstable')
91 self.assertEqual(len(elements), 1, 'should redirect to builds') 230 self.assertEqual(len(elements), 1, 'should redirect to builds')
92 content = self.get_page_source() 231 content = self.get_page_source()
93 self.assertTrue(self.PROJECT_NAME in content, 232 self.assertTrue(self.PROJECT_NAME in content,
94 'should show builds for project %s' % self.PROJECT_NAME) 233 'should show builds for project %s' % self.PROJECT_NAME)
95 self.assertFalse(self.CLI_BUILDS_PROJECT_NAME in content,
96 'should not show builds for cli project')
diff --git a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
index 71bdd2aafd..6abfdef699 100644
--- a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
@@ -8,6 +8,7 @@
8# 8#
9 9
10from django.urls import reverse 10from django.urls import reverse
11from selenium.common.exceptions import ElementClickInterceptedException, TimeoutException
11from tests.browser.selenium_helpers import SeleniumTestCase 12from tests.browser.selenium_helpers import SeleniumTestCase
12 13
13from orm.models import Layer, Layer_Version, Project, LayerSource, Release 14from orm.models import Layer, Layer_Version, Project, LayerSource, Release
@@ -63,11 +64,12 @@ class TestLayerDetailsPage(SeleniumTestCase):
63 args=(self.project.pk, 64 args=(self.project.pk,
64 self.imported_layer_version.pk)) 65 self.imported_layer_version.pk))
65 66
66 def test_edit_layerdetails(self): 67 def test_edit_layerdetails_page(self):
67 """ Edit all the editable fields for the layer refresh the page and 68 """ Edit all the editable fields for the layer refresh the page and
68 check that the new values exist""" 69 check that the new values exist"""
69 70
70 self.get(self.url) 71 self.get(self.url)
72 self.wait_until_visible("#add-remove-layer-btn")
71 73
72 self.click("#add-remove-layer-btn") 74 self.click("#add-remove-layer-btn")
73 self.click("#edit-layer-source") 75 self.click("#edit-layer-source")
@@ -97,13 +99,21 @@ class TestLayerDetailsPage(SeleniumTestCase):
97 "Expecting any of \"%s\"but got \"%s\"" % 99 "Expecting any of \"%s\"but got \"%s\"" %
98 (self.initial_values, value)) 100 (self.initial_values, value))
99 101
102 # Make sure the input is visible before sending keys
103 self.wait_until_clickable("#layer-git input[type=text]")
100 inputs.send_keys("-edited") 104 inputs.send_keys("-edited")
101 105
102 # Save the new values 106 # Save the new values
103 for save_btn in self.find_all(".change-btn"): 107 for save_btn in self.find_all(".change-btn"):
104 save_btn.click() 108 save_btn.click()
105 109
106 self.click("#save-changes-for-switch") 110 self.wait_until_visible("#save-changes-for-switch")
111 # Scroll to the top so the button is not hidden behind other elements
112 self.driver.execute_script('window.scrollTo({behavior: "instant", top: 0, left: 0})')
113 btn_save_chg_for_switch = self.wait_until_clickable(
114 "#save-changes-for-switch")
115 btn_save_chg_for_switch.click()
116
107 self.wait_until_visible("#edit-layer-source") 117 self.wait_until_visible("#edit-layer-source")
108 118
109 # Refresh the page to see if the new values are returned 119 # Refresh the page to see if the new values are returned
@@ -132,7 +142,11 @@ class TestLayerDetailsPage(SeleniumTestCase):
132 new_dir = "/home/test/my-meta-dir" 142 new_dir = "/home/test/my-meta-dir"
133 dir_input.send_keys(new_dir) 143 dir_input.send_keys(new_dir)
134 144
135 self.click("#save-changes-for-switch") 145 self.wait_until_visible("#save-changes-for-switch")
146 btn_save_chg_for_switch = self.wait_until_clickable(
147 "#save-changes-for-switch")
148 btn_save_chg_for_switch.click()
149
136 self.wait_until_visible("#edit-layer-source") 150 self.wait_until_visible("#edit-layer-source")
137 151
138 # Refresh the page to see if the new values are returned 152 # Refresh the page to see if the new values are returned
@@ -142,6 +156,7 @@ class TestLayerDetailsPage(SeleniumTestCase):
142 "Expected %s in the dir value for layer directory" % 156 "Expected %s in the dir value for layer directory" %
143 new_dir) 157 new_dir)
144 158
159
145 def test_delete_layer(self): 160 def test_delete_layer(self):
146 """ Delete the layer """ 161 """ Delete the layer """
147 162
@@ -178,6 +193,7 @@ class TestLayerDetailsPage(SeleniumTestCase):
178 self.get(self.url) 193 self.get(self.url)
179 194
180 # Add the layer 195 # Add the layer
196 self.wait_until_clickable("#add-remove-layer-btn")
181 self.click("#add-remove-layer-btn") 197 self.click("#add-remove-layer-btn")
182 198
183 notification = self.wait_until_visible("#change-notification-msg") 199 notification = self.wait_until_visible("#change-notification-msg")
@@ -185,12 +201,17 @@ class TestLayerDetailsPage(SeleniumTestCase):
185 expected_text = "You have added 1 layer to your project: %s" % \ 201 expected_text = "You have added 1 layer to your project: %s" % \
186 self.imported_layer_version.layer.name 202 self.imported_layer_version.layer.name
187 203
188 self.assertTrue(expected_text in notification.text, 204 self.assertIn(expected_text, notification.text,
189 "Expected notification text %s not found was " 205 "Expected notification text %s not found was "
190 " \"%s\" instead" % 206 " \"%s\" instead" %
191 (expected_text, notification.text)) 207 (expected_text, notification.text))
192 208
209 hide_button = self.find('#hide-alert')
210 hide_button.click()
211 self.wait_until_not_visible('#change-notification')
212
193 # Remove the layer 213 # Remove the layer
214 self.wait_until_clickable("#add-remove-layer-btn")
194 self.click("#add-remove-layer-btn") 215 self.click("#add-remove-layer-btn")
195 216
196 notification = self.wait_until_visible("#change-notification-msg") 217 notification = self.wait_until_visible("#change-notification-msg")
@@ -198,7 +219,7 @@ class TestLayerDetailsPage(SeleniumTestCase):
198 expected_text = "You have removed 1 layer from your project: %s" % \ 219 expected_text = "You have removed 1 layer from your project: %s" % \
199 self.imported_layer_version.layer.name 220 self.imported_layer_version.layer.name
200 221
201 self.assertTrue(expected_text in notification.text, 222 self.assertIn(expected_text, notification.text,
202 "Expected notification text %s not found was " 223 "Expected notification text %s not found was "
203 " \"%s\" instead" % 224 " \"%s\" instead" %
204 (expected_text, notification.text)) 225 (expected_text, notification.text))
diff --git a/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py b/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
index 7844aaa395..d7a4c34532 100644
--- a/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
+++ b/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
@@ -6,7 +6,6 @@
6# 6#
7# Copyright (C) 2013-2016 Intel Corporation 7# Copyright (C) 2013-2016 Intel Corporation
8# 8#
9
10from django.urls import reverse 9from django.urls import reverse
11from django.utils import timezone 10from django.utils import timezone
12from tests.browser.selenium_helpers import SeleniumTestCase 11from tests.browser.selenium_helpers import SeleniumTestCase
@@ -14,6 +13,8 @@ from tests.browser.selenium_helpers_base import Wait
14from orm.models import Project, Build, Task, Recipe, Layer, Layer_Version 13from orm.models import Project, Build, Task, Recipe, Layer, Layer_Version
15from bldcontrol.models import BuildRequest 14from bldcontrol.models import BuildRequest
16 15
16from selenium.webdriver.common.by import By
17
17class TestMostRecentBuildsStates(SeleniumTestCase): 18class TestMostRecentBuildsStates(SeleniumTestCase):
18 """ Test states update correctly in most recent builds area """ 19 """ Test states update correctly in most recent builds area """
19 20
@@ -45,13 +46,14 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
45 # build queued; check shown as queued 46 # build queued; check shown as queued
46 selector = base_selector + '[data-build-state="Queued"]' 47 selector = base_selector + '[data-build-state="Queued"]'
47 element = self.wait_until_visible(selector) 48 element = self.wait_until_visible(selector)
48 self.assertRegexpMatches(element.get_attribute('innerHTML'), 49 self.assertRegex(element.get_attribute('innerHTML'),
49 'Build queued', 'build should show queued status') 50 'Build queued', 'build should show queued status')
50 51
51 # waiting for recipes to be parsed 52 # waiting for recipes to be parsed
52 build.outcome = Build.IN_PROGRESS 53 build.outcome = Build.IN_PROGRESS
53 build.recipes_to_parse = recipes_to_parse 54 build.recipes_to_parse = recipes_to_parse
54 build.recipes_parsed = 0 55 build.recipes_parsed = 0
56 build.save()
55 57
56 build_request.state = BuildRequest.REQ_INPROGRESS 58 build_request.state = BuildRequest.REQ_INPROGRESS
57 build_request.save() 59 build_request.save()
@@ -62,7 +64,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
62 element = self.wait_until_visible(selector) 64 element = self.wait_until_visible(selector)
63 65
64 bar_selector = '#recipes-parsed-percentage-bar-%s' % build.id 66 bar_selector = '#recipes-parsed-percentage-bar-%s' % build.id
65 bar_element = element.find_element_by_css_selector(bar_selector) 67 bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
66 self.assertEqual(bar_element.value_of_css_property('width'), '0px', 68 self.assertEqual(bar_element.value_of_css_property('width'), '0px',
67 'recipe parse progress should be at 0') 69 'recipe parse progress should be at 0')
68 70
@@ -73,7 +75,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
73 self.get(url) 75 self.get(url)
74 76
75 element = self.wait_until_visible(selector) 77 element = self.wait_until_visible(selector)
76 bar_element = element.find_element_by_css_selector(bar_selector) 78 bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
77 recipe_bar_updated = lambda driver: \ 79 recipe_bar_updated = lambda driver: \
78 bar_element.get_attribute('style') == 'width: 50%;' 80 bar_element.get_attribute('style') == 'width: 50%;'
79 msg = 'recipe parse progress bar should update to 50%' 81 msg = 'recipe parse progress bar should update to 50%'
@@ -94,11 +96,11 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
94 96
95 selector = base_selector + '[data-build-state="Starting"]' 97 selector = base_selector + '[data-build-state="Starting"]'
96 element = self.wait_until_visible(selector) 98 element = self.wait_until_visible(selector)
97 self.assertRegexpMatches(element.get_attribute('innerHTML'), 99 self.assertRegex(element.get_attribute('innerHTML'),
98 'Tasks starting', 'build should show "tasks starting" status') 100 'Tasks starting', 'build should show "tasks starting" status')
99 101
100 # first task finished; check tasks progress bar 102 # first task finished; check tasks progress bar
101 task1.order = 1 103 task1.outcome = Task.OUTCOME_SUCCESS
102 task1.save() 104 task1.save()
103 105
104 self.get(url) 106 self.get(url)
@@ -107,7 +109,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
107 element = self.wait_until_visible(selector) 109 element = self.wait_until_visible(selector)
108 110
109 bar_selector = '#build-pc-done-bar-%s' % build.id 111 bar_selector = '#build-pc-done-bar-%s' % build.id
110 bar_element = element.find_element_by_css_selector(bar_selector) 112 bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
111 113
112 task_bar_updated = lambda driver: \ 114 task_bar_updated = lambda driver: \
113 bar_element.get_attribute('style') == 'width: 50%;' 115 bar_element.get_attribute('style') == 'width: 50%;'
@@ -115,13 +117,13 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
115 element = Wait(self.driver).until(task_bar_updated, msg) 117 element = Wait(self.driver).until(task_bar_updated, msg)
116 118
117 # last task finished; check tasks progress bar updates 119 # last task finished; check tasks progress bar updates
118 task2.order = 2 120 task2.outcome = Task.OUTCOME_SUCCESS
119 task2.save() 121 task2.save()
120 122
121 self.get(url) 123 self.get(url)
122 124
123 element = self.wait_until_visible(selector) 125 element = self.wait_until_visible(selector)
124 bar_element = element.find_element_by_css_selector(bar_selector) 126 bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
125 task_bar_updated = lambda driver: \ 127 task_bar_updated = lambda driver: \
126 bar_element.get_attribute('style') == 'width: 100%;' 128 bar_element.get_attribute('style') == 'width: 100%;'
127 msg = 'tasks progress bar should update to 100%' 129 msg = 'tasks progress bar should update to 100%'
@@ -183,7 +185,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
183 selector = '[data-latest-build-result="%s"] ' \ 185 selector = '[data-latest-build-result="%s"] ' \
184 '[data-build-state="Cancelling"]' % build.id 186 '[data-build-state="Cancelling"]' % build.id
185 element = self.wait_until_visible(selector) 187 element = self.wait_until_visible(selector)
186 self.assertRegexpMatches(element.get_attribute('innerHTML'), 188 self.assertRegex(element.get_attribute('innerHTML'),
187 'Cancelling the build', 'build should show "cancelling" status') 189 'Cancelling the build', 'build should show "cancelling" status')
188 190
189 # check cancelled state 191 # check cancelled state
@@ -195,5 +197,5 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
195 selector = '[data-latest-build-result="%s"] ' \ 197 selector = '[data-latest-build-result="%s"] ' \
196 '[data-build-state="Cancelled"]' % build.id 198 '[data-build-state="Cancelled"]' % build.id
197 element = self.wait_until_visible(selector) 199 element = self.wait_until_visible(selector)
198 self.assertRegexpMatches(element.get_attribute('innerHTML'), 200 self.assertRegex(element.get_attribute('innerHTML'),
199 'Build cancelled', 'build should show "cancelled" status') 201 'Build cancelled', 'build should show "cancelled" status')
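The find_element_by_css_selector rewrites above are the Selenium 4 locator migration: the per-strategy helper methods were removed in Selenium 4 in favour of find_element(By.<STRATEGY>, selector). A hedged sketch of the new spelling (the selector and the WebElement argument are illustrative):

    from selenium.webdriver.common.by import By
    from selenium.webdriver.remote.webelement import WebElement

    def progress_bar_width(row: WebElement, build_id: int) -> str:
        # Selenium 3 spelling, removed in Selenium 4:
        #   bar = row.find_element_by_css_selector(selector)
        selector = '#build-pc-done-bar-%s' % build_id
        bar = row.find_element(By.CSS_SELECTOR, selector)
        return bar.value_of_css_property('width')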
diff --git a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
index 9906ae42a9..bf0304dbec 100644
--- a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
@@ -6,6 +6,7 @@
6# 6#
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9from bldcontrol.models import BuildEnvironment
9 10
10from django.urls import reverse 11from django.urls import reverse
11from tests.browser.selenium_helpers import SeleniumTestCase 12from tests.browser.selenium_helpers import SeleniumTestCase
@@ -18,6 +19,9 @@ class TestNewCustomImagePage(SeleniumTestCase):
18 CUSTOM_IMAGE_NAME = 'roopa-doopa' 19 CUSTOM_IMAGE_NAME = 'roopa-doopa'
19 20
20 def setUp(self): 21 def setUp(self):
22 BuildEnvironment.objects.get_or_create(
23 betype=BuildEnvironment.TYPE_LOCAL,
24 )
21 release = Release.objects.create( 25 release = Release.objects.create(
22 name='baz', 26 name='baz',
23 bitbake_version=BitbakeVersion.objects.create(name='v1') 27 bitbake_version=BitbakeVersion.objects.create(name='v1')
@@ -41,11 +45,16 @@ class TestNewCustomImagePage(SeleniumTestCase):
41 ) 45 )
42 46
43 # add a fake image recipe to the layer that can be customised 47 # add a fake image recipe to the layer that can be customised
48 builddir = os.environ.get('BUILDDIR', './')
44 self.recipe = Recipe.objects.create( 49 self.recipe = Recipe.objects.create(
45 name='core-image-minimal', 50 name='core-image-minimal',
46 layer_version=layer_version, 51 layer_version=layer_version,
52 file_path=f'{builddir}/core-image-minimal.bb',
47 is_image=True 53 is_image=True
48 ) 54 )
55 # create a tmp file for the recipe
56 with open(self.recipe.file_path, 'w') as f:
57 f.write('foo')
49 58
50 # another project with a custom image already in it 59 # another project with a custom image already in it
51 project2 = Project.objects.create(name='whoop', release=release) 60 project2 = Project.objects.create(name='whoop', release=release)
@@ -81,6 +90,7 @@ class TestNewCustomImagePage(SeleniumTestCase):
81 """ 90 """
82 url = reverse('newcustomimage', args=(self.project.id,)) 91 url = reverse('newcustomimage', args=(self.project.id,))
83 self.get(url) 92 self.get(url)
93 self.wait_until_visible('#global-nav')
84 94
85 self.click('button[data-recipe="%s"]' % self.recipe.id) 95 self.click('button[data-recipe="%s"]' % self.recipe.id)
86 96
@@ -128,7 +138,7 @@ class TestNewCustomImagePage(SeleniumTestCase):
128 """ 138 """
129 self._create_custom_image(self.recipe.name) 139 self._create_custom_image(self.recipe.name)
130 element = self.wait_until_visible('#invalid-name-help') 140 element = self.wait_until_visible('#invalid-name-help')
131 self.assertRegexpMatches(element.text.strip(), 141 self.assertRegex(element.text.strip(),
132 'image with this name already exists') 142 'image with this name already exists')
133 143
134 def test_new_duplicates_project_image(self): 144 def test_new_duplicates_project_image(self):
@@ -146,4 +156,4 @@ class TestNewCustomImagePage(SeleniumTestCase):
146 self._create_custom_image(custom_image_name) 156 self._create_custom_image(custom_image_name)
147 element = self.wait_until_visible('#invalid-name-help') 157 element = self.wait_until_visible('#invalid-name-help')
148 expected = 'An image with this name already exists in this project' 158 expected = 'An image with this name already exists in this project'
149 self.assertRegexpMatches(element.text.strip(), expected) 159 self.assertRegex(element.text.strip(), expected)
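assertRegexpMatches has been a deprecated alias since Python 3.2 and was removed in Python 3.12, which is what the assertRegex substitutions above address. A self-contained sketch of the replacement (the test name and strings are illustrative):

    import unittest

    class RegexAssertionExample(unittest.TestCase):
        def test_status_text(self):
            html = '<span>Build queued</span>'
            # Drop-in replacement for the removed assertRegexpMatches().
            self.assertRegex(html, 'Build queued',
                             'build should show queued status')

    if __name__ == '__main__':
        unittest.main()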
diff --git a/bitbake/lib/toaster/tests/browser/test_new_project_page.py b/bitbake/lib/toaster/tests/browser/test_new_project_page.py
index e20a1f686e..e50f236c32 100644
--- a/bitbake/lib/toaster/tests/browser/test_new_project_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_new_project_page.py
@@ -6,11 +6,11 @@
6# 6#
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9
10from django.urls import reverse 9from django.urls import reverse
11from tests.browser.selenium_helpers import SeleniumTestCase 10from tests.browser.selenium_helpers import SeleniumTestCase
12from selenium.webdriver.support.ui import Select 11from selenium.webdriver.support.ui import Select
13from selenium.common.exceptions import InvalidElementStateException 12from selenium.common.exceptions import InvalidElementStateException
13from selenium.webdriver.common.by import By
14 14
15from orm.models import Project, Release, BitbakeVersion 15from orm.models import Project, Release, BitbakeVersion
16 16
@@ -47,7 +47,7 @@ class TestNewProjectPage(SeleniumTestCase):
47 47
48 url = reverse('newproject') 48 url = reverse('newproject')
49 self.get(url) 49 self.get(url)
50 50 self.wait_until_visible('#new-project-name')
51 self.enter_text('#new-project-name', project_name) 51 self.enter_text('#new-project-name', project_name)
52 52
53 select = Select(self.find('#projectversion')) 53 select = Select(self.find('#projectversion'))
@@ -57,7 +57,8 @@ class TestNewProjectPage(SeleniumTestCase):
57 57
58 # We should get redirected to the new project's page with the 58 # We should get redirected to the new project's page with the
59 # notification at the top 59 # notification at the top
60 element = self.wait_until_visible('#project-created-notification') 60 element = self.wait_until_visible(
61 '#project-created-notification')
61 62
62 self.assertTrue(project_name in element.text, 63 self.assertTrue(project_name in element.text,
63 "New project name not in new project notification") 64 "New project name not in new project notification")
@@ -78,15 +79,20 @@ class TestNewProjectPage(SeleniumTestCase):
78 79
79 url = reverse('newproject') 80 url = reverse('newproject')
80 self.get(url) 81 self.get(url)
82 self.wait_until_visible('#new-project-name')
81 83
82 self.enter_text('#new-project-name', project_name) 84 self.enter_text('#new-project-name', project_name)
83 85
84 select = Select(self.find('#projectversion')) 86 select = Select(self.find('#projectversion'))
85 select.select_by_value(str(self.release.pk)) 87 select.select_by_value(str(self.release.pk))
86 88
87 element = self.wait_until_visible('#hint-error-project-name') 89 radio = self.driver.find_element(By.ID, 'type-new')
90 radio.click()
91
92 self.wait_until_visible('#hint-error-project-name')
93 element = self.find('#hint-error-project-name')
88 94
89 self.assertTrue(("Project names must be unique" in element.text), 95 self.assertIn("Project names must be unique", element.text,
90 "Did not find unique project name error message") 96 "Did not find unique project name error message")
91 97
92 # Try and click it anyway, if it submits we'll have a new project in 98 # Try and click it anyway, if it submits we'll have a new project in
diff --git a/bitbake/lib/toaster/tests/browser/test_project_builds_page.py b/bitbake/lib/toaster/tests/browser/test_project_builds_page.py
index 51717e72d4..0dba33b9c8 100644
--- a/bitbake/lib/toaster/tests/browser/test_project_builds_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_project_builds_page.py
@@ -7,6 +7,7 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import os
10import re 11import re
11 12
12from django.urls import reverse 13from django.urls import reverse
@@ -22,7 +23,8 @@ class TestProjectBuildsPage(SeleniumTestCase):
22 CLI_BUILDS_PROJECT_NAME = 'command line builds' 23 CLI_BUILDS_PROJECT_NAME = 'command line builds'
23 24
24 def setUp(self): 25 def setUp(self):
25 bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', 26 builddir = os.environ.get('BUILDDIR', './')
27 bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
26 branch='master', dirpath='') 28 branch='master', dirpath='')
27 release = Release.objects.create(name='release1', 29 release = Release.objects.create(name='release1',
28 bitbake_version=bbv) 30 bitbake_version=bbv)
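The fixture change above swaps the hard-coded '/tmp/' giturl for a path derived from the BUILDDIR environment variable that a sourced BitBake environment exports; './' is only a fallback for running outside such an environment. Roughly:

    import os

    builddir = os.environ.get('BUILDDIR', './')
    giturl = f'{builddir}/'  # used for the BitbakeVersion fixture
    print(giturl)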
diff --git a/bitbake/lib/toaster/tests/browser/test_project_config_page.py b/bitbake/lib/toaster/tests/browser/test_project_config_page.py
index 944bcb2631..b9de541efa 100644
--- a/bitbake/lib/toaster/tests/browser/test_project_config_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_project_config_page.py
@@ -7,10 +7,12 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import os
10from django.urls import reverse 11from django.urls import reverse
11from tests.browser.selenium_helpers import SeleniumTestCase 12from tests.browser.selenium_helpers import SeleniumTestCase
12 13
13from orm.models import BitbakeVersion, Release, Project, ProjectVariable 14from orm.models import BitbakeVersion, Release, Project, ProjectVariable
15from selenium.webdriver.common.by import By
14 16
15class TestProjectConfigsPage(SeleniumTestCase): 17class TestProjectConfigsPage(SeleniumTestCase):
16 """ Test data at /project/X/builds is displayed correctly """ 18 """ Test data at /project/X/builds is displayed correctly """
@@ -21,7 +23,8 @@ class TestProjectConfigsPage(SeleniumTestCase):
21 'any of these characters' 23 'any of these characters'
22 24
23 def setUp(self): 25 def setUp(self):
24 bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', 26 builddir = os.environ.get('BUILDDIR', './')
27 bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
25 branch='master', dirpath='') 28 branch='master', dirpath='')
26 release = Release.objects.create(name='release1', 29 release = Release.objects.create(name='release1',
27 bitbake_version=bbv) 30 bitbake_version=bbv)
@@ -66,7 +69,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
66 69
67 self.enter_text('#new-imagefs_types', imagefs_type) 70 self.enter_text('#new-imagefs_types', imagefs_type)
68 71
69 checkboxes = self.driver.find_elements_by_xpath("//input[@class='fs-checkbox-fstypes']") 72 checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']")
70 73
71 for checkbox in checkboxes: 74 for checkbox in checkboxes:
72 if checkbox.get_attribute("value") == "btrfs": 75 if checkbox.get_attribute("value") == "btrfs":
@@ -95,7 +98,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
95 for checkbox in checkboxes: 98 for checkbox in checkboxes:
96 if checkbox.get_attribute("value") == "cpio": 99 if checkbox.get_attribute("value") == "cpio":
97 checkbox.click() 100 checkbox.click()
98 element = self.driver.find_element_by_id('new-imagefs_types') 101 element = self.driver.find_element(By.ID, 'new-imagefs_types')
99 102
100 self.wait_until_visible('#new-imagefs_types') 103 self.wait_until_visible('#new-imagefs_types')
101 104
@@ -129,7 +132,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
129 self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg) 132 self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
130 133
131 # downloads dir path has a space 134 # downloads dir path has a space
132 self.driver.find_element_by_id('new-dl_dir').clear() 135 self.driver.find_element(By.ID, 'new-dl_dir').clear()
133 self.enter_text('#new-dl_dir', '/foo/bar a') 136 self.enter_text('#new-dl_dir', '/foo/bar a')
134 137
135 element = self.wait_until_visible('#hintError-dl_dir') 138 element = self.wait_until_visible('#hintError-dl_dir')
@@ -137,7 +140,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
137 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) 140 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
138 141
139 # downloads dir path starts with ${...} but has a space 142 # downloads dir path starts with ${...} but has a space
140 self.driver.find_element_by_id('new-dl_dir').clear() 143 self.driver.find_element(By.ID,'new-dl_dir').clear()
141 self.enter_text('#new-dl_dir', '${TOPDIR}/down foo') 144 self.enter_text('#new-dl_dir', '${TOPDIR}/down foo')
142 145
143 element = self.wait_until_visible('#hintError-dl_dir') 146 element = self.wait_until_visible('#hintError-dl_dir')
@@ -145,18 +148,18 @@ class TestProjectConfigsPage(SeleniumTestCase):
145 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) 148 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
146 149
147 # downloads dir path starts with / 150 # downloads dir path starts with /
148 self.driver.find_element_by_id('new-dl_dir').clear() 151 self.driver.find_element(By.ID,'new-dl_dir').clear()
149 self.enter_text('#new-dl_dir', '/bar/foo') 152 self.enter_text('#new-dl_dir', '/bar/foo')
150 153
151 hidden_element = self.driver.find_element_by_id('hintError-dl_dir') 154 hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
152 self.assertEqual(hidden_element.is_displayed(), False, 155 self.assertEqual(hidden_element.is_displayed(), False,
153 'downloads directory path valid but treated as invalid') 156 'downloads directory path valid but treated as invalid')
154 157
155 # downloads dir path starts with ${...} 158 # downloads dir path starts with ${...}
156 self.driver.find_element_by_id('new-dl_dir').clear() 159 self.driver.find_element(By.ID,'new-dl_dir').clear()
157 self.enter_text('#new-dl_dir', '${TOPDIR}/down') 160 self.enter_text('#new-dl_dir', '${TOPDIR}/down')
158 161
159 hidden_element = self.driver.find_element_by_id('hintError-dl_dir') 162 hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
160 self.assertEqual(hidden_element.is_displayed(), False, 163 self.assertEqual(hidden_element.is_displayed(), False,
161 'downloads directory path valid but treated as invalid') 164 'downloads directory path valid but treated as invalid')
162 165
@@ -184,7 +187,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
184 self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg) 187 self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
185 188
186 # path has a space 189 # path has a space
187 self.driver.find_element_by_id('new-sstate_dir').clear() 190 self.driver.find_element(By.ID, 'new-sstate_dir').clear()
188 self.enter_text('#new-sstate_dir', '/foo/bar a') 191 self.enter_text('#new-sstate_dir', '/foo/bar a')
189 192
190 element = self.wait_until_visible('#hintError-sstate_dir') 193 element = self.wait_until_visible('#hintError-sstate_dir')
@@ -192,7 +195,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
192 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) 195 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
193 196
194 # path starts with ${...} but has a space 197 # path starts with ${...} but has a space
195 self.driver.find_element_by_id('new-sstate_dir').clear() 198 self.driver.find_element(By.ID,'new-sstate_dir').clear()
196 self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo') 199 self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo')
197 200
198 element = self.wait_until_visible('#hintError-sstate_dir') 201 element = self.wait_until_visible('#hintError-sstate_dir')
@@ -200,18 +203,18 @@ class TestProjectConfigsPage(SeleniumTestCase):
200 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) 203 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
201 204
202 # path starts with / 205 # path starts with /
203 self.driver.find_element_by_id('new-sstate_dir').clear() 206 self.driver.find_element(By.ID,'new-sstate_dir').clear()
204 self.enter_text('#new-sstate_dir', '/bar/foo') 207 self.enter_text('#new-sstate_dir', '/bar/foo')
205 208
206 hidden_element = self.driver.find_element_by_id('hintError-sstate_dir') 209 hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
207 self.assertEqual(hidden_element.is_displayed(), False, 210 self.assertEqual(hidden_element.is_displayed(), False,
208 'sstate directory path valid but treated as invalid') 211 'sstate directory path valid but treated as invalid')
209 212
210 # paths starts with ${...} 213 # paths starts with ${...}
211 self.driver.find_element_by_id('new-sstate_dir').clear() 214 self.driver.find_element(By.ID, 'new-sstate_dir').clear()
212 self.enter_text('#new-sstate_dir', '${TOPDIR}/down') 215 self.enter_text('#new-sstate_dir', '${TOPDIR}/down')
213 216
214 hidden_element = self.driver.find_element_by_id('hintError-sstate_dir') 217 hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
215 self.assertEqual(hidden_element.is_displayed(), False, 218 self.assertEqual(hidden_element.is_displayed(), False,
216 'sstate directory path valid but treated as invalid') 219 'sstate directory path valid but treated as invalid')
217 220
diff --git a/bitbake/lib/toaster/tests/browser/test_sample.py b/bitbake/lib/toaster/tests/browser/test_sample.py
index b0067c21cd..f04f1d9a16 100644
--- a/bitbake/lib/toaster/tests/browser/test_sample.py
+++ b/bitbake/lib/toaster/tests/browser/test_sample.py
@@ -27,3 +27,13 @@ class TestSample(SeleniumTestCase):
27 self.get(url) 27 self.get(url)
28 brand_link = self.find('.toaster-navbar-brand a.brand') 28 brand_link = self.find('.toaster-navbar-brand a.brand')
29 self.assertEqual(brand_link.text.strip(), 'Toaster') 29 self.assertEqual(brand_link.text.strip(), 'Toaster')
30
31 def test_no_builds_message(self):
32 """ Test that a message is shown when there are no builds """
33 url = reverse('all-builds')
34 self.get(url)
35 self.wait_until_visible('#empty-state-allbuildstable') # wait for the empty state div to appear
36 div_msg = self.find('#empty-state-allbuildstable .alert-info')
37
38 msg = 'Sorry - no data found'
39 self.assertEqual(div_msg.text, msg)
diff --git a/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py b/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
index e82d5ec654..691aca1ef0 100644
--- a/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
+++ b/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
@@ -8,11 +8,13 @@
8# 8#
9 9
10from datetime import datetime 10from datetime import datetime
11import os
11 12
12from django.urls import reverse 13from django.urls import reverse
13from django.utils import timezone 14from django.utils import timezone
14from tests.browser.selenium_helpers import SeleniumTestCase 15from tests.browser.selenium_helpers import SeleniumTestCase
15from orm.models import BitbakeVersion, Release, Project, Build 16from orm.models import BitbakeVersion, Release, Project, Build
17from selenium.webdriver.common.by import By
16 18
17class TestToasterTableUI(SeleniumTestCase): 19class TestToasterTableUI(SeleniumTestCase):
18 """ 20 """
@@ -33,7 +35,7 @@ class TestToasterTableUI(SeleniumTestCase):
33 table: WebElement for a ToasterTable 35 table: WebElement for a ToasterTable
34 """ 36 """
35 selector = 'thead a.sorted' 37 selector = 'thead a.sorted'
36 heading = table.find_element_by_css_selector(selector) 38 heading = table.find_element(By.CSS_SELECTOR, selector)
37 return heading.get_attribute('innerHTML').strip() 39 return heading.get_attribute('innerHTML').strip()
38 40
39 def _get_datetime_from_cell(self, row, selector): 41 def _get_datetime_from_cell(self, row, selector):
@@ -45,7 +47,7 @@ class TestToasterTableUI(SeleniumTestCase):
45 selector: CSS selector to use to find the cell containing the date time 47 selector: CSS selector to use to find the cell containing the date time
46 string 48 string
47 """ 49 """
48 cell = row.find_element_by_css_selector(selector) 50 cell = row.find_element(By.CSS_SELECTOR, selector)
49 cell_text = cell.get_attribute('innerHTML').strip() 51 cell_text = cell.get_attribute('innerHTML').strip()
50 return datetime.strptime(cell_text, '%d/%m/%y %H:%M') 52 return datetime.strptime(cell_text, '%d/%m/%y %H:%M')
51 53
@@ -58,7 +60,8 @@ class TestToasterTableUI(SeleniumTestCase):
58 later = now + timezone.timedelta(hours=1) 60 later = now + timezone.timedelta(hours=1)
59 even_later = later + timezone.timedelta(hours=1) 61 even_later = later + timezone.timedelta(hours=1)
60 62
61 bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/', 63 builddir = os.environ.get('BUILDDIR', './')
64 bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builddir}/',
62 branch='master', dirpath='') 65 branch='master', dirpath='')
63 release = Release.objects.create(name='test release', 66 release = Release.objects.create(name='test release',
64 branch_name='master', 67 branch_name='master',
@@ -105,7 +108,7 @@ class TestToasterTableUI(SeleniumTestCase):
105 self.click('#checkbox-started_on') 108 self.click('#checkbox-started_on')
106 109
107 # sort by started_on column 110 # sort by started_on column
108 links = table.find_elements_by_css_selector('th.started_on a') 111 links = table.find_elements(By.CSS_SELECTOR, 'th.started_on a')
109 for link in links: 112 for link in links:
110 if link.get_attribute('innerHTML').strip() == 'Started on': 113 if link.get_attribute('innerHTML').strip() == 'Started on':
111 link.click() 114 link.click()
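The _get_datetime_from_cell() helper above round-trips a rendered cell back into a datetime so sort order can be compared numerically. A sketch of that parse, assuming the dd/mm/yy HH:MM format the helper expects:

    from datetime import datetime

    cell_text = '01/02/24 13:05'  # illustrative innerHTML of a table cell
    started_on = datetime.strptime(cell_text, '%d/%m/%y %H:%M')
    assert (started_on.day, started_on.month, started_on.year) == (1, 2, 2024)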
diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py b/bitbake/lib/toaster/tests/builds/buildtest.py
index 872bbd3775..e54d561334 100644
--- a/bitbake/lib/toaster/tests/builds/buildtest.py
+++ b/bitbake/lib/toaster/tests/builds/buildtest.py
@@ -88,7 +88,7 @@ def load_build_environment():
88class BuildTest(unittest.TestCase): 88class BuildTest(unittest.TestCase):
89 89
90 PROJECT_NAME = "Testbuild" 90 PROJECT_NAME = "Testbuild"
91 BUILDDIR = "/tmp/build/" 91 BUILDDIR = os.environ.get("BUILDDIR")
92 92
93 def build(self, target): 93 def build(self, target):
94 # So that the buildinfo helper uses the test database' 94 # So that the buildinfo helper uses the test database'
@@ -116,10 +116,19 @@ class BuildTest(unittest.TestCase):
116 project = Project.objects.create_project(name=BuildTest.PROJECT_NAME, 116 project = Project.objects.create_project(name=BuildTest.PROJECT_NAME,
117 release=release) 117 release=release)
118 118
119 passthrough_variable_names = ["SSTATE_DIR", "DL_DIR", "SSTATE_MIRRORS", "BB_HASHSERVE", "BB_HASHSERVE_UPSTREAM"]
120 for variable_name in passthrough_variable_names:
121 current_variable = os.environ.get(variable_name)
122 if current_variable:
123 ProjectVariable.objects.get_or_create(
124 name=variable_name,
125 value=current_variable,
126 project=project)
127
119 if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"): 128 if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
120 ProjectVariable.objects.get_or_create( 129 ProjectVariable.objects.get_or_create(
121 name="SSTATE_MIRRORS", 130 name="SSTATE_MIRRORS",
122 value="file://.* http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH", 131 value="file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH",
123 project=project) 132 project=project)
124 133
125 ProjectTarget.objects.create(project=project, 134 ProjectTarget.objects.create(project=project,
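The passthrough loop added to BuildTest copies selected host variables into the project so triggered builds reuse the caller's download and sstate caches. A standalone sketch of the same filtering, without the Django ORM calls:

    import os

    PASSTHROUGH = ["SSTATE_DIR", "DL_DIR", "SSTATE_MIRRORS",
                   "BB_HASHSERVE", "BB_HASHSERVE_UPSTREAM"]

    def collect_passthrough_variables(environ=os.environ):
        # Only variables actually set (and non-empty) are forwarded.
        return {name: environ[name] for name in PASSTHROUGH if environ.get(name)}

    print(collect_passthrough_variables())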
diff --git a/bitbake/lib/toaster/tests/builds/test_core_image_min.py b/bitbake/lib/toaster/tests/builds/test_core_image_min.py
index 44b6cbec7b..c5bfdbfbb5 100644
--- a/bitbake/lib/toaster/tests/builds/test_core_image_min.py
+++ b/bitbake/lib/toaster/tests/builds/test_core_image_min.py
@@ -10,6 +10,7 @@
10# Ionut Chisanovici, Paul Eggleton and Cristian Iorga 10# Ionut Chisanovici, Paul Eggleton and Cristian Iorga
11 11
12import os 12import os
13import pytest
13 14
14from django.db.models import Q 15from django.db.models import Q
15 16
@@ -20,12 +21,13 @@ from orm.models import CustomImagePackage
20 21
21from tests.builds.buildtest import BuildTest 22from tests.builds.buildtest import BuildTest
22 23
23 24@pytest.mark.order(4)
25@pytest.mark.django_db(True)
24class BuildCoreImageMinimal(BuildTest): 26class BuildCoreImageMinimal(BuildTest):
25 """Build core-image-minimal and test the results""" 27 """Build core-image-minimal and test the results"""
26 28
27 def setUp(self): 29 def setUp(self):
28 self.completed_build = self.build("core-image-minimal") 30 self.completed_build = self.target_already_built("core-image-minimal")
29 31
30 # Check if build name is unique - tc_id=795 32 # Check if build name is unique - tc_id=795
31 def test_Build_Unique_Name(self): 33 def test_Build_Unique_Name(self):
@@ -44,17 +46,6 @@ class BuildCoreImageMinimal(BuildTest):
44 total_builds, 46 total_builds,
45 msg='Build cooker log path is not unique') 47 msg='Build cooker log path is not unique')
46 48
47 # Check if task order is unique for one build - tc=824
48 def test_Task_Unique_Order(self):
49 total_task_order = Task.objects.filter(
50 build=self.built).values('order').count()
51 distinct_task_order = Task.objects.filter(
52 build=self.completed_build).values('order').distinct().count()
53
54 self.assertEqual(total_task_order,
55 distinct_task_order,
56 msg='Errors task order is not unique')
57
58 # Check task order sequence for one build - tc=825 49 # Check task order sequence for one build - tc=825
59 def test_Task_Order_Sequence(self): 50 def test_Task_Order_Sequence(self):
60 cnt_err = [] 51 cnt_err = []
@@ -98,7 +89,6 @@ class BuildCoreImageMinimal(BuildTest):
98 'task_name', 89 'task_name',
99 'sstate_result') 90 'sstate_result')
100 cnt_err = [] 91 cnt_err = []
101
102 for task in tasks: 92 for task in tasks:
103 if (task['sstate_result'] != Task.SSTATE_NA and 93 if (task['sstate_result'] != Task.SSTATE_NA and
104 task['sstate_result'] != Task.SSTATE_MISS): 94 task['sstate_result'] != Task.SSTATE_MISS):
@@ -221,6 +211,7 @@ class BuildCoreImageMinimal(BuildTest):
221 # orm_build.outcome=0 then if the file exists and its size matches 211 # orm_build.outcome=0 then if the file exists and its size matches
222 # the file_size value. Need to add the tc in the test run 212 # the file_size value. Need to add the tc in the test run
223 def test_Target_File_Name_Populated(self): 213 def test_Target_File_Name_Populated(self):
214 cnt_err = []
224 builds = Build.objects.filter(outcome=0).values('id') 215 builds = Build.objects.filter(outcome=0).values('id')
225 for build in builds: 216 for build in builds:
226 targets = Target.objects.filter( 217 targets = Target.objects.filter(
@@ -230,7 +221,6 @@ class BuildCoreImageMinimal(BuildTest):
230 target_id=target['id']).values('id', 221 target_id=target['id']).values('id',
231 'file_name', 222 'file_name',
232 'file_size') 223 'file_size')
233 cnt_err = []
234 for file_info in target_files: 224 for file_info in target_files:
235 target_id = file_info['id'] 225 target_id = file_info['id']
236 target_file_name = file_info['file_name'] 226 target_file_name = file_info['file_name']
diff --git a/bitbake/lib/toaster/tests/commands/test_loaddata.py b/bitbake/lib/toaster/tests/commands/test_loaddata.py
index 9e8d5553cf..7d04f030ee 100644
--- a/bitbake/lib/toaster/tests/commands/test_loaddata.py
+++ b/bitbake/lib/toaster/tests/commands/test_loaddata.py
@@ -6,13 +6,13 @@
6# 6#
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9import pytest
10from django.test import TestCase 10from django.test import TestCase
11from django.core import management 11from django.core import management
12 12
13from orm.models import Layer_Version, Layer, Release, ToasterSetting 13from orm.models import Layer_Version, Layer, Release, ToasterSetting
14 14
15 15@pytest.mark.order(2)
16class TestLoadDataFixtures(TestCase): 16class TestLoadDataFixtures(TestCase):
17 """ Test loading our 3 provided fixtures """ 17 """ Test loading our 3 provided fixtures """
18 def test_run_loaddata_poky_command(self): 18 def test_run_loaddata_poky_command(self):
diff --git a/bitbake/lib/toaster/tests/commands/test_lsupdates.py b/bitbake/lib/toaster/tests/commands/test_lsupdates.py
index 3c4fbe0550..30c6eeb4ac 100644
--- a/bitbake/lib/toaster/tests/commands/test_lsupdates.py
+++ b/bitbake/lib/toaster/tests/commands/test_lsupdates.py
@@ -7,12 +7,13 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import pytest
10from django.test import TestCase 11from django.test import TestCase
11from django.core import management 12from django.core import management
12 13
13from orm.models import Layer_Version, Machine, Recipe 14from orm.models import Layer_Version, Machine, Recipe
14 15
15 16@pytest.mark.order(3)
16class TestLayerIndexUpdater(TestCase): 17class TestLayerIndexUpdater(TestCase):
17 def test_run_lsupdates_command(self): 18 def test_run_lsupdates_command(self):
18 # Load some release information for us to fetch from the layer index 19 # Load some release information for us to fetch from the layer index
diff --git a/bitbake/lib/toaster/tests/commands/test_runbuilds.py b/bitbake/lib/toaster/tests/commands/test_runbuilds.py
index e223b95fcb..849c227edc 100644
--- a/bitbake/lib/toaster/tests/commands/test_runbuilds.py
+++ b/bitbake/lib/toaster/tests/commands/test_runbuilds.py
@@ -19,12 +19,14 @@ import time
19import subprocess 19import subprocess
20import signal 20import signal
21 21
22import logging
23
22 24
23class KillRunbuilds(threading.Thread): 25class KillRunbuilds(threading.Thread):
24 """ Kill the runbuilds process after an amount of time """ 26 """ Kill the runbuilds process after an amount of time """
25 def __init__(self, *args, **kwargs): 27 def __init__(self, *args, **kwargs):
26 super(KillRunbuilds, self).__init__(*args, **kwargs) 28 super(KillRunbuilds, self).__init__(*args, **kwargs)
27 self.setDaemon(True) 29 self.daemon = True
28 30
29 def run(self): 31 def run(self):
30 time.sleep(5) 32 time.sleep(5)
@@ -34,9 +36,12 @@ class KillRunbuilds(threading.Thread):
34 pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), 36 pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."),
35 ".runbuilds.pid") 37 ".runbuilds.pid")
36 38
37 with open(pidfile_path) as pidfile: 39 try:
38 pid = pidfile.read() 40 with open(pidfile_path) as pidfile:
39 os.kill(int(pid), signal.SIGTERM) 41 pid = pidfile.read()
42 os.kill(int(pid), signal.SIGTERM)
43 except ProcessLookupError:
44 logging.warning("Runbuilds not running or already killed")
40 45
41 46
42class TestCommands(TestCase): 47class TestCommands(TestCase):
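setDaemon() has been deprecated since Python 3.10 in favour of assigning the daemon attribute, which is the one-line change to KillRunbuilds above. A sketch of the same watchdog-thread pattern (the 5-second delay mirrors the test; the payload is illustrative):

    import threading
    import time

    class Watchdog(threading.Thread):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.daemon = True  # was self.setDaemon(True); deprecated in 3.10

        def run(self):
            time.sleep(5)
            print("watchdog fired")  # the real thread SIGTERMs runbuilds here

    Watchdog().start()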
diff --git a/bitbake/lib/toaster/tests/db/test_db.py b/bitbake/lib/toaster/tests/db/test_db.py
index 0410422276..072ab94363 100644
--- a/bitbake/lib/toaster/tests/db/test_db.py
+++ b/bitbake/lib/toaster/tests/db/test_db.py
@@ -23,6 +23,7 @@
23# SOFTWARE. 23# SOFTWARE.
24 24
25import sys 25import sys
26import pytest
26 27
27try: 28try:
28 from StringIO import StringIO 29 from StringIO import StringIO
@@ -47,7 +48,7 @@ def capture(command, *args, **kwargs):
47def makemigrations(): 48def makemigrations():
48 management.call_command('makemigrations') 49 management.call_command('makemigrations')
49 50
50 51@pytest.mark.order(1)
51class MigrationTest(TestCase): 52class MigrationTest(TestCase):
52 53
53 def testPendingMigration(self): 54 def testPendingMigration(self):
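The @pytest.mark.order() decorators spread across these modules establish a fixed sequence: migrations (1), fixture loading (2), the layer-index update (3), then the build itself (4). This relies on the pytest-order plugin being installed; a minimal sketch:

    import pytest

    @pytest.mark.order(1)  # runs before any higher-ordered test
    def test_schema_is_migrated():
        assert True  # illustrative body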
diff --git a/bitbake/lib/toaster/tests/functional/functional_helpers.py b/bitbake/lib/toaster/tests/functional/functional_helpers.py
index 5c4ea71794..e28f2024f5 100644
--- a/bitbake/lib/toaster/tests/functional/functional_helpers.py
+++ b/bitbake/lib/toaster/tests/functional/functional_helpers.py
@@ -11,35 +11,58 @@ import os
11import logging 11import logging
12import subprocess 12import subprocess
13import signal 13import signal
14import time
15import re 14import re
15import requests
16 16
17from django.urls import reverse
17from tests.browser.selenium_helpers_base import SeleniumTestCaseBase 18from tests.browser.selenium_helpers_base import SeleniumTestCaseBase
18from tests.builds.buildtest import load_build_environment 19from selenium.webdriver.common.by import By
20from selenium.webdriver.support.select import Select
21from selenium.common.exceptions import NoSuchElementException
19 22
20logger = logging.getLogger("toaster") 23logger = logging.getLogger("toaster")
24toaster_processes = []
21 25
22class SeleniumFunctionalTestCase(SeleniumTestCaseBase): 26class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
23 wait_toaster_time = 5 27 wait_toaster_time = 10
24 28
25 @classmethod 29 @classmethod
26 def setUpClass(cls): 30 def setUpClass(cls):
27 # So that the buildinfo helper uses the test database' 31 # So that the buildinfo helper uses the test database'
28 if os.environ.get('DJANGO_SETTINGS_MODULE', '') != \ 32 if os.environ.get('DJANGO_SETTINGS_MODULE', '') != \
29 'toastermain.settings_test': 33 'toastermain.settings_test':
30 raise RuntimeError("Please initialise django with the tests settings: " \ 34 raise RuntimeError("Please initialise django with the tests settings: "
31 "DJANGO_SETTINGS_MODULE='toastermain.settings_test'") 35 "DJANGO_SETTINGS_MODULE='toastermain.settings_test'")
32 36
33 load_build_environment() 37 # Wait for any known toaster processes to exit
38 global toaster_processes
39 for toaster_process in toaster_processes:
40 try:
41 os.waitpid(toaster_process, os.WNOHANG)
42 except ChildProcessError:
43 pass
34 44
35 # start toaster 45 # start toaster
36 cmd = "bash -c 'source toaster start'" 46 cmd = "bash -c 'source toaster start'"
37 p = subprocess.Popen( 47 start_process = subprocess.Popen(
38 cmd, 48 cmd,
39 cwd=os.environ.get("BUILDDIR"), 49 cwd=os.environ.get("BUILDDIR"),
40 shell=True) 50 shell=True)
41 if p.wait() != 0: 51 toaster_processes = [start_process.pid]
42 raise RuntimeError("Can't initialize toaster") 52 if start_process.wait() != 0:
53 port_use = os.popen("lsof -i -P -n | grep '8000 (LISTEN)'").read().strip()
54 message = ''
55 if port_use:
56 process_id = port_use.split()[1]
57 process = os.popen(f"ps -o cmd= -p {process_id}").read().strip()
58 message = f"Port 8000 occupied by {process}"
59 raise RuntimeError(f"Can't initialize toaster. {message}")
60
61 builddir = os.environ.get("BUILDDIR")
62 with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f:
63 toaster_processes.append(int(f.read()))
64 with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f:
65 toaster_processes.append(int(f.read()))
43 66
44 super(SeleniumFunctionalTestCase, cls).setUpClass() 67 super(SeleniumFunctionalTestCase, cls).setUpClass()
45 cls.live_server_url = 'http://localhost:8000/' 68 cls.live_server_url = 'http://localhost:8000/'
@@ -48,22 +71,30 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
48 def tearDownClass(cls): 71 def tearDownClass(cls):
49 super(SeleniumFunctionalTestCase, cls).tearDownClass() 72 super(SeleniumFunctionalTestCase, cls).tearDownClass()
50 73
51 # XXX: source toaster stop gets blocked, to review why? 74 global toaster_processes
52 # from now send SIGTERM by hand
53 time.sleep(cls.wait_toaster_time)
54 builddir = os.environ.get("BUILDDIR")
55 75
56 with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f: 76 cmd = "bash -c 'source toaster stop'"
57 toastermain_pid = int(f.read()) 77 stop_process = subprocess.Popen(
58 os.kill(toastermain_pid, signal.SIGTERM) 78 cmd,
59 with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f: 79 cwd=os.environ.get("BUILDDIR"),
60 runbuilds_pid = int(f.read()) 80 shell=True)
61 os.kill(runbuilds_pid, signal.SIGTERM) 81 # Toaster stop has been known to hang in these tests so force kill if it stalls
82 try:
83 if stop_process.wait(cls.wait_toaster_time) != 0:
84 raise Exception('Toaster stop process failed')
85 except Exception as e:
86 if isinstance(e, subprocess.TimeoutExpired):
87 print('Toaster stop process took too long. Force killing toaster...')
88 else:
89 print('Toaster stop process failed. Force killing toaster...')
90 stop_process.kill()
91 for toaster_process in toaster_processes:
92 os.kill(toaster_process, signal.SIGTERM)
62 93
63 94
64 def get_URL(self): 95 def get_URL(self):
65 rc=self.get_page_source() 96 rc=self.get_page_source()
66 project_url=re.search("(projectPageUrl\s:\s\")(.*)(\",)",rc) 97 project_url=re.search(r"(projectPageUrl\s:\s\")(.*)(\",)",rc)
67 return project_url.group(2) 98 return project_url.group(2)
68 99
69 100
@@ -74,8 +105,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
74 """ 105 """
75 try: 106 try:
76 table_element = self.get_table_element(table_id) 107 table_element = self.get_table_element(table_id)
77 element = table_element.find_element_by_link_text(link_text) 108 element = table_element.find_element(By.LINK_TEXT, link_text)
78 except self.NoSuchElementException: 109 except NoSuchElementException:
79 print('no element found') 110 print('no element found')
80 raise 111 raise
81 return element 112 return element
@@ -85,8 +116,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
85#return whole-table element 116#return whole-table element
86 element_xpath = "//*[@id='" + table_id + "']" 117 element_xpath = "//*[@id='" + table_id + "']"
87 try: 118 try:
88 element = self.driver.find_element_by_xpath(element_xpath) 119 element = self.driver.find_element(By.XPATH, element_xpath)
89 except self.NoSuchElementException: 120 except NoSuchElementException:
90 raise 121 raise
91 return element 122 return element
92 row = coordinate[0] 123 row = coordinate[0]
@@ -95,8 +126,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
95#return whole-row element 126#return whole-row element
96 element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]" 127 element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]"
97 try: 128 try:
98 element = self.driver.find_element_by_xpath(element_xpath) 129 element = self.driver.find_element(By.XPATH, element_xpath)
99 except self.NoSuchElementException: 130 except NoSuchElementException:
100 return False 131 return False
101 return element 132 return element
102#now we are looking for an element with specified X and Y 133#now we are looking for an element with specified X and Y
@@ -104,7 +135,90 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
104 135
105 element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]" 136 element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]"
106 try: 137 try:
107 element = self.driver.find_element_by_xpath(element_xpath) 138 element = self.driver.find_element(By.XPATH, element_xpath)
108 except self.NoSuchElementException: 139 except NoSuchElementException:
109 return False 140 return False
110 return element 141 return element
142
143 def create_new_project(
144 self,
145 project_name,
146 release,
147 release_title,
148 merge_toaster_settings,
149 ):
150 """ Create/Test new project using:
151 - Project Name: Any string
152 - Release: Any string
153 - Merge Toaster settings: True or False
154 """
155
156 # Obtain a CSRF token from a suitable URL
157 projs = requests.get(self.live_server_url + reverse('newproject'))
158 csrftoken = projs.cookies.get('csrftoken')
159
160 # Use the projects typeahead to find out if the project already exists
161 req = requests.get(self.live_server_url + reverse('xhr_projectstypeahead'), {'search': project_name, 'format' : 'json'})
162 data = req.json()
163 # Delete any existing projects
164 for result in data['results']:
165 del_url = reverse('xhr_project', args=(result['id'],))
166 del_response = requests.delete(self.live_server_url + del_url, cookies={'csrftoken': csrftoken}, headers={'X-CSRFToken': csrftoken})
167 self.assertEqual(del_response.status_code, 200)
168
169 self.get(reverse('newproject'))
170 self.wait_until_visible('#new-project-name')
171 self.driver.find_element(By.ID,
172 "new-project-name").send_keys(project_name)
173
174 select = Select(self.find('#projectversion'))
175 select.select_by_value(release)
176
177 # check merge toaster settings
178 checkbox = self.find('.checkbox-mergeattr')
179 if merge_toaster_settings:
180 if not checkbox.is_selected():
181 checkbox.click()
182 else:
183 if checkbox.is_selected():
184 checkbox.click()
185
186 self.wait_until_clickable('#create-project-button')
187
188 self.driver.find_element(By.ID, "create-project-button").click()
189
190 element = self.wait_until_visible('#project-created-notification')
191 self.assertTrue(
192 self.element_exists('#project-created-notification'),
193 f"Project:{project_name} creation notification not shown"
194 )
195 self.assertTrue(
196 project_name in element.text,
197 f"New project name:{project_name} not in new project notification"
198 )
199
200 # Use the projects typeahead again to check the project now exists
201 req = requests.get(self.live_server_url + reverse('xhr_projectstypeahead'), {'search': project_name, 'format' : 'json'})
202 data = req.json()
203 self.assertGreater(len(data['results']), 0, f"New project:{project_name} not found in database")
204
205 project_id = data['results'][0]['id']
206
207 self.wait_until_visible('#project-release-title')
208
209 # check release
210 if release_title is not None:
211 self.assertTrue(re.search(
212 release_title,
213 self.driver.find_element(By.XPATH,
214 "//span[@id='project-release-title']"
215 ).text),
216 'The project release is not defined')
217
218 return project_id
219
220 def load_projects_page_helper(self):
221 self.wait_until_present('#projectstable')
222 # Need to wait for some data in the table too
223 self.wait_until_present('td[class="updated"]')
224
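The new tearDownClass gives "toaster stop" a bounded wait and force-kills it on a stall. Catching subprocess.TimeoutExpired directly, as sketched below, is the crisp way to tell a stall apart from a nonzero exit (the command here is a stand-in, not the real stop script):

    import subprocess

    proc = subprocess.Popen("bash -c 'sleep 30'", shell=True)  # stand-in for 'toaster stop'
    try:
        if proc.wait(timeout=10) != 0:
            raise RuntimeError('stop command failed')
    except subprocess.TimeoutExpired:
        print('stop took too long, force killing')
        proc.kill()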
diff --git a/bitbake/lib/toaster/tests/functional/test_create_new_project.py b/bitbake/lib/toaster/tests/functional/test_create_new_project.py
new file mode 100644
index 0000000000..66213c736e
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_create_new_project.py
@@ -0,0 +1,124 @@
1#! /usr/bin/env python3
2# BitBake Toaster UI tests implementation
3#
4# Copyright (C) 2023 Savoir-faire Linux
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import re
10import pytest
11from django.urls import reverse
12from selenium.webdriver.support.select import Select
13from tests.functional.functional_helpers import SeleniumFunctionalTestCase
14from selenium.webdriver.common.by import By
15
16class TestCreateNewProject(SeleniumFunctionalTestCase):
17
18 def test_create_new_project_master(self):
19 """ Test create new project using:
20 - Project Name: Any string
21 - Release: Yocto Project master (option value: 3)
22 - Merge Toaster settings: False
23 """
24 release = '3'
25 release_title = 'Yocto Project master'
26 project_name = 'projectmaster'
27 self.create_new_project(
28 project_name,
29 release,
30 release_title,
31 False,
32 )
33
34 def test_create_new_project_scarthgap(self):
35 """ Test create new project using:
36 - Project Name: Any string
37 - Release: Yocto Project 5.0 "Scarthgap" (option value: 1)
38 - Merge Toaster settings: True
39 """
40 release = '1'
41 release_title = 'Yocto Project 5.0 "Scarthgap"'
42 project_name = 'projectscarthgap'
43 self.create_new_project(
44 project_name,
45 release,
46 release_title,
47 True,
48 )
49
50 def test_create_new_project_kirkstone(self):
51 """ Test create new project using:
52 - Project Name: Any string
53 - Release: Yocto Project 4.0 "Kirkstone" (option value: 7)
54 - Merge Toaster settings: True
55 """
56 release = '7'
57 release_title = 'Yocto Project 4.0 "Kirkstone"'
58 project_name = 'projectkirkstone'
59 self.create_new_project(
60 project_name,
61 release,
62 release_title,
63 True,
64 )
65
66 def test_create_new_project_local(self):
67 """ Test create new project using:
68 - Project Name: Any string
69 - Release: Local Yocto Project (option value: 2)
70 - Merge Toaster settings: True
71 """
72 release = '2'
73 release_title = 'Local Yocto Project'
74 project_name = 'projectlocal'
75 self.create_new_project(
76 project_name,
77 release,
78 release_title,
79 True,
80 )
81
82 def test_create_new_project_without_name(self):
83 """ Test create new project without project name """
84 self.get(reverse('newproject'))
85
86 select = Select(self.find('#projectversion'))
87 select.select_by_value(str(3))
88
89 # Check input name has required attribute
90 input_name = self.driver.find_element(By.ID, "new-project-name")
91 self.assertIsNotNone(input_name.get_attribute('required'),
92 'Input name does not have the required attribute')
93
94 # Check create button is disabled
95 create_btn = self.driver.find_element(By.ID, "create-project-button")
96 self.assertIsNotNone(create_btn.get_attribute('disabled'),
97 'Create button is not disabled')
98
99 def test_import_new_project(self):
100 """ Test import new project using:
101 - Project Name: Any string
102 - Project type: select (Import command line project)
103 - Import existing project directory: Wrong Path
104 """
105 project_name = 'projectimport'
106 self.get(reverse('newproject'))
107 self.driver.find_element(By.ID,
108 "new-project-name").send_keys(project_name)
109 # select import project
110 self.find('#type-import').click()
111
112 # set wrong path
113 wrong_path = '/wrongpath'
114 self.driver.find_element(By.ID,
115 "import-project-dir").send_keys(wrong_path)
116 self.driver.find_element(By.ID, "create-project-button").click()
117
118 self.wait_until_visible('.alert-danger')
119
120 # check error message
121 self.assertTrue(self.element_exists('.alert-danger'),
122 'Alert message not shown')
123 self.assertTrue(wrong_path in self.find('.alert-danger').text,
124 "Wrong path not in alert message")
diff --git a/bitbake/lib/toaster/tests/functional/test_functional_basic.py b/bitbake/lib/toaster/tests/functional/test_functional_basic.py
index 5683e3873e..d5c9708617 100644
--- a/bitbake/lib/toaster/tests/functional/test_functional_basic.py
+++ b/bitbake/lib/toaster/tests/functional/test_functional_basic.py
@@ -8,223 +8,250 @@
 #
 
 import re
+from django.urls import reverse
+import pytest
 from tests.functional.functional_helpers import SeleniumFunctionalTestCase
 from orm.models import Project
+from selenium.webdriver.common.by import By
+
+from tests.functional.utils import get_projectId_from_url
+
 
 class FuntionalTestBasic(SeleniumFunctionalTestCase):
+    """Basic functional tests for Toaster"""
+    project_id = None
+    project_url = None
 
-# testcase (1514)
-    def test_create_slenium_project(self):
-        project_name = 'selenium-project'
-        self.get('')
-        self.driver.find_element_by_link_text("To start building, create your first Toaster project").click()
-        self.driver.find_element_by_id("new-project-name").send_keys(project_name)
-        self.driver.find_element_by_id('projectversion').click()
-        self.driver.find_element_by_id("create-project-button").click()
-        element = self.wait_until_visible('#project-created-notification')
-        self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown')
-        self.assertTrue(project_name in element.text,
-                        "New project name not in new project notification")
-        self.assertTrue(Project.objects.filter(name=project_name).count(),
-                        "New project not found in database")
+    def setUp(self):
+        super(FuntionalTestBasic, self).setUp()
+        if not FuntionalTestBasic.project_id:
+            FuntionalTestBasic.project_id = self.create_new_project('selenium-project', '3', None, False)
 
 # testcase (1515)
     def test_verify_left_bar_menu(self):
-        self.get('')
-        self.wait_until_visible('#projectstable')
+        self.get(reverse('all-projects'))
+        self.load_projects_page_helper()
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+        self.wait_until_present('#config-nav')
         self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist')
         project_URL=self.get_URL()
-        self.driver.find_element_by_xpath('//a[@href="'+project_URL+'"]').click()
+        self.driver.find_element(By.XPATH, '//a[@href="'+project_URL+'"]').click()
 
         try:
-            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
-            self.assertTrue(re.search("Custom images",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'Custom images information is not loading properly')
+            self.wait_until_present('#config-nav')
+            self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
+            self.wait_until_present('#filter-modal-customimagestable')
         except:
             self.fail(msg='No Custom images tab available')
+        self.assertTrue(re.search("Custom images",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'Custom images information is not loading properly')
 
         try:
-            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click()
-            self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly')
+            self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click()
+            self.wait_until_present('#filter-modal-imagerecipestable')
         except:
             self.fail(msg='No Compatible image tab available')
+        self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly')
 
         try:
-            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click()
-            self.assertTrue(re.search("Compatible software recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly')
+            self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click()
+            self.wait_until_present('#filter-modal-softwarerecipestable')
         except:
             self.fail(msg='No Compatible software recipe tab available')
+        self.assertTrue(re.search("Compatible software recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly')
 
         try:
-            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click()
-            self.assertTrue(re.search("Compatible machines",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly')
+            self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click()
+            self.wait_until_present('#filter-modal-machinestable')
         except:
             self.fail(msg='No Compatible machines tab available')
+        self.assertTrue(re.search("Compatible machines",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly')
 
         try:
-            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click()
-            self.assertTrue(re.search("Compatible layers",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly')
+            self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click()
+            self.wait_until_present('#filter-modal-layerstable')
         except:
             self.fail(msg='No Compatible layers tab available')
+        self.assertTrue(re.search("Compatible layers",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly')
 
         try:
-            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click()
-            self.assertTrue(re.search("Bitbake variables",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly')
+            self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click()
+            self.wait_until_present('#configvar-list')
         except:
             self.fail(msg='No Bitbake variables tab available')
+        self.assertTrue(re.search("Bitbake variables",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly')
 
 # testcase (1516)
     def test_review_configuration_information(self):
-        self.get('')
-        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
-        self.wait_until_visible('#projectstable')
+        self.get(reverse('all-projects'))
+        self.load_projects_page_helper()
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
         project_URL=self.get_URL()
 
+        # Machine section of page
+        self.wait_until_visible('#machine-section')
+        self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
+        self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.XPATH, "//span[@id='project-machine-name']").text),'The machine type is not assigned')
         try:
-            self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
-            self.assertTrue(re.search("qemux86",self.driver.find_element_by_xpath("//span[@id='project-machine-name']").text),'The machine type is not assigned')
-            self.driver.find_element_by_xpath("//span[@id='change-machine-toggle']").click()
+            self.driver.find_element(By.XPATH, "//span[@id='change-machine-toggle']").click()
             self.wait_until_visible('#select-machine-form')
             self.wait_until_visible('#cancel-machine-change')
-            self.driver.find_element_by_xpath("//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
+            self.driver.find_element(By.XPATH, "//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
         except:
             self.fail(msg='The machine information is wrong in the configuration page')
 
+        # Most built recipes section
+        self.wait_until_visible('#no-most-built')
         try:
-            self.driver.find_element_by_id('no-most-built')
+            self.driver.find_element(By.ID, 'no-most-built')
         except:
             self.fail(msg='No Most built information in project detail page')
 
-        try:
-            self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_xpath("//span[@id='project-release-title']").text),'The project release is not defined')
-        except:
-            self.fail(msg='No project release title information in project detail page')
+        # Project Release title
+        self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.XPATH, "//span[@id='project-release-title']").text), 'The project release is not defined in the project detail page')
 
+        # List of layers in project
+        self.wait_until_visible('#layer-container')
+        self.driver.find_element(By.XPATH, "//div[@id='layer-container']")
+        self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count')
         try:
-            self.driver.find_element_by_xpath("//div[@id='layer-container']")
-            self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
-            layer_list = self.driver.find_element_by_id("layers-in-project-list")
-            layers = layer_list.find_elements_by_tag_name("li")
-            for layer in layers:
-                if re.match ("openembedded-core",layer.text):
-                    print ("openembedded-core layer is a default layer in the project configuration")
-                elif re.match ("meta-poky",layer.text):
-                    print ("meta-poky layer is a default layer in the project configuration")
-                elif re.match ("meta-yocto-bsp",layer.text):
-                    print ("meta-yocto-bsp is a default layer in the project configuration")
-                else:
-                    self.fail(msg='default layers are missing from the project configuration')
+            layer_list = self.driver.find_element(By.ID, "layers-in-project-list")
+            layers = layer_list.find_elements(By.TAG_NAME, "li")
         except:
             self.fail(msg='No Layer information in project detail page')
 
+        for layer in layers:
+            if re.match ("openembedded-core", layer.text):
+                print ("openembedded-core layer is a default layer in the project configuration")
+            elif re.match ("meta-poky", layer.text):
+                print ("meta-poky layer is a default layer in the project configuration")
+            elif re.match ("meta-yocto-bsp", layer.text):
+                print ("meta-yocto-bsp is a default layer in the project configuration")
+            else:
+                self.fail(msg='default layers are missing from the project configuration')
+
 # testcase (1517)
     def test_verify_machine_information(self):
-        self.get('')
-        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
-        self.wait_until_visible('#projectstable')
+        self.get(reverse('all-projects'))
+        self.load_projects_page_helper()
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
 
+        self.wait_until_visible('#machine-section')
+        self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
+        self.wait_until_visible('#project-machine-name')
+        self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.ID, "project-machine-name").text),'The machine type is not assigned')
         try:
-            self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
-            self.assertTrue(re.search("qemux86",self.driver.find_element_by_id("project-machine-name").text),'The machine type is not assigned')
-            self.driver.find_element_by_id("change-machine-toggle").click()
+            self.driver.find_element(By.ID, "change-machine-toggle").click()
            self.wait_until_visible('#select-machine-form')
             self.wait_until_visible('#cancel-machine-change')
-            self.driver.find_element_by_id("cancel-machine-change").click()
+            self.driver.find_element(By.ID, "cancel-machine-change").click()
         except:
             self.fail(msg='The machine information is wrong in the configuration page')
 
 # testcase (1518)
     def test_verify_most_built_recipes_information(self):
-        self.get('')
-        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
-        self.wait_until_visible('#projectstable')
+        self.get(reverse('all-projects'))
+        self.load_projects_page_helper()
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+        self.wait_until_present('#config-nav')
         project_URL=self.get_URL()
 
+        self.wait_until_visible('#no-most-built')
+        self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element(By.ID, "no-most-built").text),'Default message of no builds is not present')
         try:
-            self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element_by_id("no-most-built").text),'Default message of no builds is not present')
-            self.driver.find_element_by_xpath("//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
-            self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly')
+            self.driver.find_element(By.XPATH, "//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
         except:
             self.fail(msg='No Most built information in project detail page')
+        self.wait_until_visible('#config-nav')
+        self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly')
 
 # testcase (1519)
     def test_verify_project_release_information(self):
-        self.get('')
-        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
-        self.wait_until_visible('#projectstable')
+        self.get(reverse('all-projects'))
+        self.load_projects_page_helper()
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
-
-        try:
-            self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_id("project-release-title").text),'The project release is not defined')
-        except:
-            self.fail(msg='No project release title information in project detail page')
+        self.wait_until_visible('#project-release-title')
+        self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.ID, "project-release-title").text), 'No project release title information in project detail page')
 
 # testcase (1520)
     def test_verify_layer_information(self):
-        self.get('')
-        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
-        self.wait_until_visible('#projectstable')
+        self.get(reverse('all-projects'))
+        self.load_projects_page_helper()
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+        self.wait_until_present('#config-nav')
         project_URL=self.get_URL()
+        self.wait_until_visible('#layer-container')
+        self.driver.find_element(By.XPATH, "//div[@id='layer-container']")
+        self.wait_until_visible('#project-layers-count')
+        self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count')
 
         try:
-            self.driver.find_element_by_xpath("//div[@id='layer-container']")
-            self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
-            layer_list = self.driver.find_element_by_id("layers-in-project-list")
-            layers = layer_list.find_elements_by_tag_name("li")
-
-            for layer in layers:
-                if re.match ("openembedded-core",layer.text):
-                    print ("openembedded-core layer is a default layer in the project configuration")
-                elif re.match ("meta-poky",layer.text):
-                    print ("meta-poky layer is a default layer in the project configuration")
-                elif re.match ("meta-yocto-bsp",layer.text):
-                    print ("meta-yocto-bsp is a default layer in the project configuration")
-                else:
-                    self.fail(msg='default layers are missing from the project configuration')
-
-            self.driver.find_element_by_xpath("//input[@id='layer-add-input']")
-            self.driver.find_element_by_xpath("//button[@id='add-layer-btn']")
-            self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']")
-            self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]")
+            layer_list = self.driver.find_element(By.ID, "layers-in-project-list")
+            layers = layer_list.find_elements(By.TAG_NAME, "li")
         except:
             self.fail(msg='No Layer information in project detail page')
 
+        for layer in layers:
+            if re.match ("openembedded-core",layer.text):
+                print ("openembedded-core layer is a default layer in the project configuration")
+            elif re.match ("meta-poky",layer.text):
+                print ("meta-poky layer is a default layer in the project configuration")
+            elif re.match ("meta-yocto-bsp",layer.text):
+                print ("meta-yocto-bsp is a default layer in the project configuration")
+            else:
+                self.fail(msg='default layers are missing from the project configuration')
+
+        try:
+            self.driver.find_element(By.XPATH, "//input[@id='layer-add-input']")
+            self.driver.find_element(By.XPATH, "//button[@id='add-layer-btn']")
+            self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']")
+            self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]")
+        except:
+            self.fail(msg='Layer configuration controls missing')
+
 # testcase (1521)
     def test_verify_project_detail_links(self):
-        self.get('')
-        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
-        self.wait_until_visible('#projectstable')
+        self.get(reverse('all-projects'))
+        self.load_projects_page_helper()
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+        self.wait_until_present('#config-nav')
         project_URL=self.get_URL()
+        self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
+        self.wait_until_visible('#topbar-configuration-tab')
+        self.assertTrue(re.search("Configuration",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')
 
-        self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
-        self.assertTrue(re.search("Configuration",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')
+        try:
+            self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click()
+        except:
+            self.fail(msg='Builds tab information is not present')
 
+        self.wait_until_visible('#project-topbar')
+        self.assertTrue(re.search("Builds",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled')
         try:
-            self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click()
-            self.assertTrue(re.search("Builds",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled')
-            self.driver.find_element_by_xpath("//div[@id='empty-state-projectbuildstable']")
+            self.driver.find_element(By.XPATH, "//div[@id='empty-state-projectbuildstable']")
         except:
             self.fail(msg='Builds tab information is not present')
 
         try:
-            self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click()
-            self.assertTrue(re.search("Import layer",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled')
-            self.driver.find_element_by_xpath("//fieldset[@id='repo-select']")
-            self.driver.find_element_by_xpath("//fieldset[@id='git-repo']")
+            self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click()
         except:
             self.fail(msg='Import layer tab not loading properly')
 
+        self.wait_until_visible('#project-topbar')
+        self.assertTrue(re.search("Import layer",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled')
         try:
-            self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click()
-            self.assertTrue(re.search("New custom image",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled')
-            self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element_by_xpath("//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly')
+            self.driver.find_element(By.XPATH, "//fieldset[@id='repo-select']")
+            self.driver.find_element(By.XPATH, "//fieldset[@id='git-repo']")
+        except:
+            self.fail(msg='Import layer tab not loading properly')
+
+        try:
+            self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click()
         except:
             self.fail(msg='New custom image tab not loading properly')
 
+        self.wait_until_visible('#project-topbar')
+        self.assertTrue(re.search("New custom image",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled')
+        self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element(By.XPATH, "//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly')
+
 
 
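The rewritten setUp() above replaces the old per-test project creation with lazy, class-level caching: the project is created once for the whole test class and its id stored on a class attribute, so every later test can assume it exists without paying the creation cost again. A minimal standalone sketch of the pattern, assuming plain unittest (create_new_project is stubbed out here):

    import unittest

    class CachedResourceTest(unittest.TestCase):
        resource_id = None  # shared by every test in the class

        def setUp(self):
            # expensive setup runs only for the first test, not once per test
            if CachedResourceTest.resource_id is None:
                CachedResourceTest.resource_id = self._create_resource()

        def _create_resource(self):
            return 42  # stand-in for create_new_project(...)

        def test_resource_available(self):
            self.assertIsNotNone(CachedResourceTest.resource_id)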
diff --git a/bitbake/lib/toaster/tests/functional/test_project_config.py b/bitbake/lib/toaster/tests/functional/test_project_config.py
new file mode 100644
index 0000000000..fcb1bc3284
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_project_config.py
@@ -0,0 +1,294 @@
1#! /usr/bin/env python3 #
2# BitBake Toaster UI tests implementation
3#
4# Copyright (C) 2023 Savoir-faire Linux
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import string
10import pytest
11from django.urls import reverse
12from selenium.webdriver import Keys
13from selenium.webdriver.support.select import Select
14from selenium.common.exceptions import TimeoutException
15from tests.functional.functional_helpers import SeleniumFunctionalTestCase
16from selenium.webdriver.common.by import By
17
18from .utils import get_projectId_from_url
19
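get_projectId_from_url is imported from tests/functional/utils.py, which is not shown in this diff. Judging from its name and call sites, it extracts the numeric project id from a /toastergui/project/<id>/ style URL; a hypothetical reimplementation for readers without the tree at hand:

    import re

    def get_projectId_from_url(url):
        # assumed behaviour, not the actual helper
        match = re.search(r'/project/(\d+)', url)
        return int(match.group(1)) if match else None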
20class TestProjectConfig(SeleniumFunctionalTestCase):
21 project_id = None
22 PROJECT_NAME = 'TestProjectConfig'
23 INVALID_PATH_START_TEXT = 'The directory path should either start with a /'
24 INVALID_PATH_CHAR_TEXT = 'The directory path cannot include spaces or ' \
25 'any of these characters'
26
27 def _get_config_nav_item(self, index):
28 config_nav = self.find('#config-nav')
29 return config_nav.find_elements(By.TAG_NAME, 'li')[index]
30
31 def _navigate_bbv_page(self):
32 """ Navigate to project BitBake variables page """
33 # create the shared test project on first use
34 if TestProjectConfig.project_id is None:
35 TestProjectConfig.project_id = self.create_new_project(self.PROJECT_NAME, '3', None, True)
36
37 url = reverse('projectconf', args=(TestProjectConfig.project_id,))
38 self.get(url)
39 self.wait_until_visible('#config-nav')
40 bbv_page_link = self._get_config_nav_item(9)
41 bbv_page_link.click()
42 self.wait_until_visible('#config-nav')
43
44 def test_no_underscore_imagefs_type(self):
45 """
46 Should not accept an IMAGE_FSTYPES value containing an underscore
47 """
48 self._navigate_bbv_page()
49 imagefs_type = "foo_bar"
50
51 self.wait_until_visible('#change-image_fstypes-icon')
52
53 self.click('#change-image_fstypes-icon')
54
55 self.enter_text('#new-imagefs_types', imagefs_type)
56
57 element = self.wait_until_visible('#hintError-image-fs_type')
58
59 self.assertTrue(("A valid image type cannot include underscores" in element.text),
60 "Did not find underscore error message")
61
62 def test_checkbox_verification(self):
63 """
64 Should automatically check the matching checkbox when the user
65 enters that value in the text box.
66 """
67 self._navigate_bbv_page()
68
69 imagefs_type = "btrfs"
70
71 self.wait_until_visible('#change-image_fstypes-icon')
72
73 self.click('#change-image_fstypes-icon')
74
75 self.enter_text('#new-imagefs_types', imagefs_type)
76
77 checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']")
78
79 for checkbox in checkboxes:
80 if checkbox.get_attribute("value") == "btrfs":
81 self.assertEqual(checkbox.is_selected(), True)
82
83 def test_textbox_with_checkbox_verification(self):
84 """
85 Should automatically add or remove the value in the text box when
86 the user checks or unchecks a checkbox.
87 """
88 self._navigate_bbv_page()
89
90 self.wait_until_visible('#change-image_fstypes-icon')
91 self.click('#change-image_fstypes-icon')
92
93 checkboxes_selector = '.fs-checkbox-fstypes'
94
95 self.wait_until_visible(checkboxes_selector)
96 checkboxes = self.find_all(checkboxes_selector)
97
98 for checkbox in checkboxes:
99 if checkbox.get_attribute("value") == "cpio":
100 checkbox.click()
101 self.wait_until_visible('#new-imagefs_types')
102 element = self.driver.find_element(By.ID, 'new-imagefs_types')
103
104 self.assertTrue("cpio" in element.get_attribute('value'),
105 "Imagefs not added into the textbox")
106 checkbox.click()
107 self.assertTrue("cpio" not in element.get_attribute('value'),
108 "Image still present in the textbox")
109
110 def test_set_download_dir(self):
111 """
112 Validate the allowed and disallowed types in the directory field for
113 DL_DIR
114 """
115 self._navigate_bbv_page()
116
117 # activate the input to edit download dir
118 try:
119 change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon')
120 except TimeoutException:
121 # If download dir is not displayed, test is skipped
122 change_dl_dir_btn = None
123
124 if change_dl_dir_btn:
125 change_dl_dir_btn.click()
126
127 # downloads dir path doesn't start with / or ${...}
128 input_field = self.wait_until_visible('#new-dl_dir')
129 input_field.clear()
130 self.enter_text('#new-dl_dir', 'home/foo')
131 element = self.wait_until_visible('#hintError-initialChar-dl_dir')
132
133 msg = 'downloads directory path starts with invalid character but ' \
134 'treated as valid'
135 self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
136
137 # downloads dir path has a space
138 self.driver.find_element(By.ID, 'new-dl_dir').clear()
139 self.enter_text('#new-dl_dir', '/foo/bar a')
140
141 element = self.wait_until_visible('#hintError-dl_dir')
142 msg = 'downloads directory path characters invalid but treated as valid'
143 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
144
145 # downloads dir path starts with ${...} but has a space
146 self.driver.find_element(By.ID,'new-dl_dir').clear()
147 self.enter_text('#new-dl_dir', '${TOPDIR}/down foo')
148
149 element = self.wait_until_visible('#hintError-dl_dir')
150 msg = 'downloads directory path characters invalid but treated as valid'
151 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
152
153 # downloads dir path starts with /
154 self.driver.find_element(By.ID,'new-dl_dir').clear()
155 self.enter_text('#new-dl_dir', '/bar/foo')
156
157 hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
158 self.assertEqual(hidden_element.is_displayed(), False,
159 'downloads directory path valid but treated as invalid')
160
161 # downloads dir path starts with ${...}
162 self.driver.find_element(By.ID,'new-dl_dir').clear()
163 self.enter_text('#new-dl_dir', '${TOPDIR}/down')
164
165 hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
166 self.assertEqual(hidden_element.is_displayed(), False,
167 'downloads directory path valid but treated as invalid')
168
169 def test_set_sstate_dir(self):
170 """
171 Validate the allowed and disallowed types in the directory field for
172 SSTATE_DIR
173 """
174 self._navigate_bbv_page()
175
176 try:
177 btn_chg_sstate_dir = self.wait_until_visible('#change-sstate_dir-icon')
178 self.click('#change-sstate_dir-icon')
179 except TimeoutException:
180 # If sstate_dir is not displayed, test is skipped
181 btn_chg_sstate_dir = None
182
183 if btn_chg_sstate_dir: # Skip continuation if sstate_dir is not displayed
184 # path doesn't start with / or ${...}
185 input_field = self.wait_until_visible('#new-sstate_dir')
186 input_field.clear()
187 self.enter_text('#new-sstate_dir', 'home/foo')
188 element = self.wait_until_visible('#hintError-initialChar-sstate_dir')
189
190 msg = 'sstate directory path starts with invalid character but ' \
191 'treated as valid'
192 self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
193
194 # path has a space
195 self.driver.find_element(By.ID, 'new-sstate_dir').clear()
196 self.enter_text('#new-sstate_dir', '/foo/bar a')
197
198 element = self.wait_until_visible('#hintError-sstate_dir')
199 msg = 'sstate directory path characters invalid but treated as valid'
200 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
201
202 # path starts with ${...} but has a space
203 self.driver.find_element(By.ID,'new-sstate_dir').clear()
204 self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo')
205
206 element = self.wait_until_visible('#hintError-sstate_dir')
207 msg = 'sstate directory path characters invalid but treated as valid'
208 self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
209
210 # path starts with /
211 self.driver.find_element(By.ID,'new-sstate_dir').clear()
212 self.enter_text('#new-sstate_dir', '/bar/foo')
213
214 hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
215 self.assertEqual(hidden_element.is_displayed(), False,
216 'sstate directory path valid but treated as invalid')
217
218 # paths starts with ${...}
219 self.driver.find_element(By.ID, 'new-sstate_dir').clear()
220 self.enter_text('#new-sstate_dir', '${TOPDIR}/down')
221
222 hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
223 self.assertEqual(hidden_element.is_displayed(), False,
224 'sstate directory path valid but treated as invalid')
225
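test_set_download_dir and test_set_sstate_dir exercise the same three validation rules: the path must start with '/' or a '${...}' reference, and must not contain spaces or certain punctuation. A standalone sketch of a validator implementing those assumed rules (the exact character set Toaster rejects is not shown in this diff, so the regex below is an assumption):

    import re

    def validate_build_dir(path):
        """Return an error message, or None when the path looks acceptable."""
        if not (path.startswith('/') or path.startswith('${')):
            return 'The directory path should either start with a /'
        if re.search(r'[ <>\\&!%;:?"\*\']', path):  # assumed character set
            return 'The directory path cannot include spaces or any of these characters'
        return None

    assert validate_build_dir('home/foo') is not None    # bad first character
    assert validate_build_dir('/foo/bar a') is not None  # contains a space
    assert validate_build_dir('${TOPDIR}/down') is None  # accepted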
226 def _change_bbv_value(self, **kwargs):
227 """ Change bitbake variable value """
228 var_name, field, btn_id, input_id, value, save_btn, *_ = kwargs.values()
229 self._navigate_bbv_page()
230 self.wait_until_visible(f'#{btn_id}')
231 if kwargs.get('new_variable'):
232 self.find(f"#{btn_id}").clear()
233 self.enter_text(f"#{btn_id}", f"{var_name}")
234 else:
235 self.click(f'#{btn_id}')
236
237 self.wait_until_visible(f'#{input_id}')
238
239 if kwargs.get('is_select'):
240 select = Select(self.find(f'#{input_id}'))
241 select.select_by_visible_text(value)
242 else:
243 self.find(f"#{input_id}").clear()
244 self.enter_text(f'#{input_id}', f'{value}')
245 self.click(f'#{save_btn}')
246 value_displayed = str(self.wait_until_visible(f'#{field}').text).lower()
247 msg = f'{var_name} variable not changed'
248 self.assertTrue(str(value).lower() in value_displayed, msg)
249
250 def test_change_distro_var(self):
251 """ Test changing distro variable """
252 self._change_bbv_value(
253 var_name='DISTRO',
254 field='distro',
255 btn_id='change-distro-icon',
256 input_id='new-distro',
257 value='poky-changed',
258 save_btn="apply-change-distro",
259 )
260
261 def test_set_image_install_append_var(self):
262 """ Test setting IMAGE_INSTALL:append variable """
263 self._change_bbv_value(
264 var_name='IMAGE_INSTALL:append',
265 field='image_install',
266 btn_id='change-image_install-icon',
267 input_id='new-image_install',
268 value='bash, apt, busybox',
269 save_btn="apply-change-image_install",
270 )
271
272 def test_set_package_classes_var(self):
273 """ Test setting PACKAGE_CLASSES variable """
274 self._change_bbv_value(
275 var_name='PACKAGE_CLASSES',
276 field='package_classes',
277 btn_id='change-package_classes-icon',
278 input_id='package_classes-select',
279 value='package_deb',
280 save_btn="apply-change-package_classes",
281 is_select=True,
282 )
283
284 def test_create_new_bbv(self):
285 """ Test creating new bitbake variable """
286 self._change_bbv_value(
287 var_name='New_Custom_Variable',
288 field='configvar-list',
289 btn_id='variable',
290 input_id='value',
291 value='new variable value',
292 save_btn="add-configvar-button",
293 new_variable=True
294 )
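_change_bbv_value unpacks kwargs.values() positionally, which works because **kwargs preserves keyword order (Python dicts are insertion-ordered since 3.7), but it silently mis-assigns if a caller reorders its keywords. An explicit signature would be the more defensive shape; a sketch of the equivalent header (hypothetical, not in the patch):

    def _change_bbv_value(self, var_name, field, btn_id, input_id, value,
                          save_btn, is_select=False, new_variable=False):
        """ Change bitbake variable value """
        ...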
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page.py b/bitbake/lib/toaster/tests/functional/test_project_page.py
new file mode 100644
index 0000000000..429d86feba
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_project_page.py
@@ -0,0 +1,775 @@
1#! /usr/bin/env python3 #
2# BitBake Toaster UI tests implementation
3#
4# Copyright (C) 2023 Savoir-faire Linux
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import os
10import string
11import time
12from unittest import skip
13import pytest
14from django.urls import reverse
15from django.utils import timezone
16from selenium.webdriver.common.keys import Keys
17from selenium.webdriver.support.select import Select
18from selenium.common.exceptions import TimeoutException
19from tests.functional.functional_helpers import SeleniumFunctionalTestCase
20from orm.models import Build, Project, Target
21from selenium.webdriver.common.by import By
22
23from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled
24
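wait_until_build and wait_until_build_cancelled come from tests/functional/utils.py, outside this hunk. From the call sites below, wait_until_build polls the latest-builds area until the build reaches one of the given space-separated states and returns the state it saw, and wait_until_build_cancelled blocks until the build reports cancelled. A hypothetical sketch of the first, to make those call sites easier to read:

    import time

    def wait_until_build(test, states, timeout=120):
        """Poll #latest-builds until its text contains one of the given
        space-separated build states; return the state that matched."""
        end = time.time() + timeout
        while time.time() < end:
            text = test.find('#latest-builds').text.lower()
            for state in states.split():
                if state in text:
                    return state
            time.sleep(1)
        raise TimeoutError(f'no build reached any of: {states}')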
25class TestProjectPageBase(SeleniumFunctionalTestCase):
26 project_id = None
27 PROJECT_NAME = 'TestProjectPage'
28
29 def _navigate_to_project_page(self):
30 # Navigate to project page
31 if TestProjectPageBase.project_id is None:
32 TestProjectPageBase.project_id = self.create_new_project(self.PROJECT_NAME, '3', None, True)
33
34 url = reverse('project', args=(TestProjectPageBase.project_id,))
35 self.get(url)
36 self.wait_until_visible('#config-nav')
37
38 def _get_create_builds(self, **kwargs):
39 """ Create a build and return the build object """
40 # parameters for builds to associate with the projects
41 now = timezone.now()
42 self.project1_build_success = {
43 'project': Project.objects.get(id=TestProjectPageBase.project_id),
44 'started_on': now,
45 'completed_on': now,
46 'outcome': Build.SUCCEEDED
47 }
48
49 self.project1_build_failure = {
50 'project': Project.objects.get(id=TestProjectPageBase.project_id),
51 'started_on': now,
52 'completed_on': now,
53 'outcome': Build.FAILED
54 }
55 build1 = Build.objects.create(**self.project1_build_success)
56 build2 = Build.objects.create(**self.project1_build_failure)
57
58 # add some targets to these builds so they have recipe links
59 # (and so we can find the row in the ToasterTable corresponding to
60 # a particular build)
61 Target.objects.create(build=build1, target='foo')
62 Target.objects.create(build=build2, target='bar')
63
64 if kwargs:
65 # Create kwargs.get('success') builds with success status with target
66 # and kwargs.get('failure') builds with failure status with target
67 for i in range(kwargs.get('success', 0)):
68 now = timezone.now()
69 self.project1_build_success['started_on'] = now
70 self.project1_build_success[
71 'completed_on'] = now - timezone.timedelta(days=i)
72 build = Build.objects.create(**self.project1_build_success)
73 Target.objects.create(build=build,
74 target=f'{i}_success_recipe',
75 task=f'{i}_success_task')
76
77 for i in range(kwargs.get('failure', 0)):
78 now = timezone.now()
79 self.project1_build_failure['started_on'] = now
80 self.project1_build_failure[
81 'completed_on'] = now - timezone.timedelta(days=i)
82 build = Build.objects.create(**self.project1_build_failure)
83 Target.objects.create(build=build,
84 target=f'{i}_fail_recipe',
85 task=f'{i}_fail_task')
86 return build1, build2
87
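A usage sketch for the helper above (hypothetical test body, not in the patch): seed the two base builds plus extra rows, then drive the UI against them.

    def test_builds_table_shows_seeded_builds(self):
        # two named targets ('foo', 'bar') plus 3 successes and 1 failure
        build1, build2 = self._get_create_builds(success=3, failure=1)
        self._navigate_to_project_page()
        self.assertTrue(Build.objects.count() >= 6)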
88 def _mixin_test_table_edit_column(
89 self,
90 table_id,
91 edit_btn_id,
92 list_check_box_id: list
93 ):
94 # Check edit column
95 finder = lambda driver: self.find(f'#{edit_btn_id}')
96 edit_column = self.wait_until_element_clickable(finder)
97 self.assertTrue(edit_column.is_displayed())
98 edit_column.click()
99 # Check dropdown is visible
100 self.wait_until_visible('ul.dropdown-menu.editcol')
101 for check_box_id in list_check_box_id:
102 # Check that we can hide/show table column
103 check_box = self.find(f'#{check_box_id}')
104 th_class = str(check_box_id).replace('checkbox-', '')
105 if check_box.is_selected():
106 # check if column is visible in table
107 self.assertTrue(
108 self.find(
109 f'#{table_id} thead th.{th_class}'
110 ).is_displayed(),
111 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
112 )
113 check_box.click()
114 # check if column is hidden in table
115 self.assertFalse(
116 self.find(
117 f'#{table_id} thead th.{th_class}'
118 ).is_displayed(),
119 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
120 )
121 else:
122 # check if column is hidden in table
123 self.assertFalse(
124 self.find(
125 f'#{table_id} thead th.{th_class}'
126 ).is_displayed(),
127 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
128 )
129 check_box.click()
130 # check if column is visible in table
131 self.assertTrue(
132 self.find(
133 f'#{table_id} thead th.{th_class}'
134 ).is_displayed(),
135 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
136 )
137
138 def _get_config_nav_item(self, index):
139 config_nav = self.find('#config-nav')
140 return config_nav.find_elements(By.TAG_NAME, 'li')[index]
141
142 def _navigate_to_config_nav(self, nav_id, nav_index):
143 # navigate to the project page
144 self._navigate_to_project_page()
145 # click on "Software recipe" tab
146 soft_recipe = self._get_config_nav_item(nav_index)
147 soft_recipe.click()
148 self.wait_until_visible(f'#{nav_id}')
149
150 def _mixin_test_table_show_rows(self, table_selector, **kwargs):
151 """ Test the show rows feature in the builds table on the all builds page """
152 def test_show_rows(row_to_show, show_row_link):
153 # Check that we can show rows == row_to_show
154 show_row_link.select_by_value(str(row_to_show))
155 self.wait_until_visible(f'#{table_selector} tbody tr')
156 # check at least some rows are visible
157 self.assertTrue(
158 len(self.find_all(f'#{table_selector} tbody tr')) > 0
159 )
160 self.wait_until_present(f'#{table_selector} tbody tr')
161 show_rows = self.driver.find_elements(
162 By.XPATH,
163 f'//select[@class="form-control pagesize-{table_selector}"]'
164 )
165 rows_to_show = [10, 25, 50, 100, 150]
166 to_skip = kwargs.get('to_skip', [])
167 # Check show rows
168 for show_row_link in show_rows:
169 show_row_link = Select(show_row_link)
170 for row_to_show in rows_to_show:
171 if row_to_show not in to_skip:
172 test_show_rows(row_to_show, show_row_link)
173
174 def _mixin_test_table_search_input(self, **kwargs):
175 input_selector, input_text, searchBtn_selector, table_selector, *_ = kwargs.values()
176 # Test search input
177 self.wait_until_visible(f'#{input_selector}')
178 recipe_input = self.find(f'#{input_selector}')
179 recipe_input.send_keys(input_text)
180 self.find(f'#{searchBtn_selector}').click()
181 self.wait_until_visible(f'#{table_selector} tbody tr')
182 rows = self.find_all(f'#{table_selector} tbody tr')
183 self.assertTrue(len(rows) > 0)
184
185class TestProjectPage(TestProjectPageBase):
186
187 def test_page_header_on_project_page(self):
188 """ Check page header in project page:
189 - AT LEFT -> Logo of Yocto project, displayed, clickable
190 - "Toaster"+" Information icon", displayed, clickable
191 - "Server Icon" + "All builds", displayed, clickable
192 - "Directory Icon" + "All projects", displayed, clickable
193 - "Book Icon" + "Documentation", displayed, clickable
194 - AT RIGHT -> button "New project", displayed, clickable
195 """
196 # navigate to the project page
197 self._navigate_to_project_page()
198
199 # check page header
200 # AT LEFT -> Logo of Yocto project
201 logo = self.driver.find_element(
202 By.XPATH,
203 "//div[@class='toaster-navbar-brand']",
204 )
205 logo_img = logo.find_element(By.TAG_NAME, 'img')
206 self.assertTrue(logo_img.is_displayed(),
207 'Logo of Yocto project not found')
208 self.assertIn(
209 '/static/img/logo.png', str(logo_img.get_attribute('src')),
210 'Logo of Yocto project not found'
211 )
212 # "Toaster"+" Information icon", clickable
213 toaster = self.driver.find_element(
214 By.XPATH,
215 "//div[@class='toaster-navbar-brand']//a[@class='brand']",
216 )
217 self.assertTrue(toaster.is_displayed(), 'Toaster not found')
218 self.assertEqual(toaster.text, 'Toaster')
219 info_sign = self.find('.glyphicon-info-sign')
220 self.assertTrue(info_sign.is_displayed())
221
222 # "Server Icon" + "All builds"
223 all_builds = self.find('#navbar-all-builds')
224 all_builds_link = all_builds.find_element(By.TAG_NAME, 'a')
225 self.assertIn("All builds", all_builds_link.text)
226 self.assertIn(
227 '/toastergui/builds/', str(all_builds_link.get_attribute('href'))
228 )
229 server_icon = all_builds.find_element(By.TAG_NAME, 'i')
230 self.assertEqual(
231 server_icon.get_attribute('class'), 'glyphicon glyphicon-tasks'
232 )
233 self.assertTrue(server_icon.is_displayed())
234
235 # "Directory Icon" + "All projects"
236 all_projects = self.find('#navbar-all-projects')
237 all_projects_link = all_projects.find_element(By.TAG_NAME, 'a')
238 self.assertIn("All projects", all_projects_link.text)
239 self.assertIn(
240 '/toastergui/projects/', str(all_projects_link.get_attribute(
241 'href'))
242 )
243 dir_icon = all_projects.find_element(By.TAG_NAME, 'i')
244 self.assertEqual(
245 dir_icon.get_attribute('class'), 'icon-folder-open'
246 )
247 self.assertTrue(dir_icon.is_displayed())
248
249 # "Book Icon" + "Documentation"
250 toaster_docs_link = self.find('#navbar-docs')
251 toaster_docs_link_link = toaster_docs_link.find_element(By.TAG_NAME,
252 'a')
253 self.assertIn("Documentation", toaster_docs_link_link.text)
254 self.assertEqual(
255 toaster_docs_link_link.get_attribute('href'), 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual'
256 )
257 book_icon = toaster_docs_link.find_element(By.TAG_NAME, 'i')
258 self.assertEqual(
259 book_icon.get_attribute('class'), 'glyphicon glyphicon-book'
260 )
261 self.assertTrue(book_icon.is_displayed())
262
263 # AT RIGHT -> button "New project"
264 new_project_button = self.find('#new-project-button')
265 self.assertTrue(new_project_button.is_displayed())
266 self.assertEqual(new_project_button.text, 'New project')
267 new_project_button.click()
268 self.assertIn(
269 '/toastergui/newproject/', str(self.driver.current_url)
270 )
271
272 def test_edit_project_name(self):
273 """ Test edit project name:
274 - Click on "Edit" icon button
275 - Change project name
276 - Click on "Save" button
277 - Check project name is changed
278 """
279 # navigate to the project page
280 self._navigate_to_project_page()
281
282 # click on "Edit" icon button
283 self.wait_until_visible('#project-name-container')
284 finder = lambda driver: self.find('#project-change-form-toggle')
285 edit_button = self.wait_until_element_clickable(finder)
286 edit_button.click()
287 project_name_input = self.find('#project-name-change-input')
288 self.assertTrue(project_name_input.is_displayed())
289 project_name_input.clear()
290 project_name_input.send_keys('New Name')
291 self.find('#project-name-change-btn').click()
292
293 # check project name is changed
294 self.wait_until_visible('#project-name-container')
295 self.assertIn(
296 'New Name', str(self.find('#project-name-container').text)
297 )
298
299 def test_project_page_tabs(self):
300 """ Test project tabs:
301 - "configuration" tab
302 - "Builds" tab
303 - "Import layers" tab
304 - "New custom image" tab
305 Check search box used to build recipes
306 """
307 # navigate to the project page
308 self._navigate_to_project_page()
309
310 # check "configuration" tab
311 self.wait_until_visible('#topbar-configuration-tab')
312 config_tab = self.find('#topbar-configuration-tab')
313 self.assertEqual(config_tab.get_attribute('class'), 'active')
314 self.assertIn('Configuration', str(config_tab.text))
315 self.assertIn(
316 f"/toastergui/project/{TestProjectPageBase.project_id}", str(self.driver.current_url)
317 )
318
319 def get_tabs():
320 # tabs links list
321 return self.driver.find_elements(
322 By.XPATH,
323 '//div[@id="project-topbar"]//li'
324 )
325
326 def check_tab_link(tab_index, tab_name, url):
327 tab = get_tabs()[tab_index]
328 tab_link = tab.find_element(By.TAG_NAME, 'a')
329 self.assertIn(url, tab_link.get_attribute('href'))
330 self.assertIn(tab_name, tab_link.text)
331 self.assertEqual(tab.get_attribute('class'), 'active')
332
333 # check "Builds" tab
334 builds_tab = get_tabs()[1]
335 builds_tab.find_element(By.TAG_NAME, 'a').click()
336 check_tab_link(
337 1,
338 'Builds',
339 f"/toastergui/project/{TestProjectPageBase.project_id}/builds"
340 )
341
342 # check "Import layers" tab
343 import_layers_tab = get_tabs()[2]
344 import_layers_tab.find_element(By.TAG_NAME, 'a').click()
345 check_tab_link(
346 2,
347 'Import layer',
348 f"/toastergui/project/{TestProjectPageBase.project_id}/importlayer"
349 )
350
351 # check "New custom image" tab
352 new_custom_image_tab = get_tabs()[3]
353 new_custom_image_tab.find_element(By.TAG_NAME, 'a').click()
354 check_tab_link(
355 3,
356 'New custom image',
357 f"/toastergui/project/{TestProjectPageBase.project_id}/newcustomimage"
358 )
359
360 # check search box can be used to build recipes
361 search_box = self.find('#build-input')
362 search_box.send_keys('core-image-minimal')
363 self.find('#build-button').click()
364 self.wait_until_visible('#latest-builds')
365 buildtext = "Loading"
366 while "Loading" in buildtext:
367 time.sleep(1)
368 latest_builds = self.driver.find_elements(
369 By.XPATH,
370 '//div[@id="latest-builds"]',
371 )
372 last_build = latest_builds[0]
373 buildtext = last_build.text
374 self.assertIn(
375 'core-image-minimal', str(last_build.text)
376 )
377
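The Loading loop above polls once a second with no upper bound, so a hung build page would stall the suite indefinitely. Selenium's WebDriverWait expresses the same wait with a timeout; a sketch assuming the same #latest-builds markup:

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait

    def wait_for_latest_build_text(driver, timeout=120):
        """Wait until the newest #latest-builds entry stops saying Loading."""
        def loaded(drv):
            builds = drv.find_elements(By.XPATH, '//div[@id="latest-builds"]')
            return bool(builds) and 'Loading' not in builds[0].text
        WebDriverWait(driver, timeout).until(loaded)
        return driver.find_elements(By.XPATH, '//div[@id="latest-builds"]')[0].text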
378 def test_softwareRecipe_page(self):
379 """ Test software recipe page
380 - Check title "Compatible software recipes" is displayed
381 - Check search input
382 - Check "build recipe" button works
383 - Check software recipe table feature(show/hide column, pagination)
384 """
385 self._navigate_to_config_nav('softwarerecipestable', 4)
386 # check title "Compatible software recipes" is displayed
387 self.assertIn("Compatible software recipes", self.get_page_source())
388 # Test search input
389 self._mixin_test_table_search_input(
390 input_selector='search-input-softwarerecipestable',
391 input_text='busybox',
392 searchBtn_selector='search-submit-softwarerecipestable',
393 table_selector='softwarerecipestable'
394 )
395 # check "build recipe" button works
396 finder = lambda driver: self.find_all('#softwarerecipestable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]/a')
397 build_btn = self.wait_until_element_clickable(finder)
398 build_btn.click()
399 build_state = wait_until_build(self, 'queued cloning starting parsing failed')
400 latest_builds = self.driver.find_elements(
401 By.XPATH,
402 '//div[@id="latest-builds"]/div'
403 )
404 self.assertTrue(len(latest_builds) > 0)
405 # Find the latest builds, the last build and then the cancel button
406
407 finder = lambda driver: driver.find_elements(By.XPATH, '//div[@id="latest-builds"]/div')[0].find_element(By.XPATH, '//span[@class="cancel-build-btn pull-right alert-link"]')
408 cancel_button = self.wait_until_element_clickable(finder)
409 cancel_button.click()
410 if 'starting' not in build_state: # a build cancelled while still starting never transitions to a cancelled state
411 wait_until_build_cancelled(self)
412
413 # check software recipe table feature(show/hide column, pagination)
414 self._navigate_to_config_nav('softwarerecipestable', 4)
415 column_list = [
416 'get_description_or_summary',
417 'layer_version__get_vcs_reference',
418 'layer_version__layer__name',
419 'license',
420 'recipe-file',
421 'section',
422 'version',
423 ]
424 self._mixin_test_table_edit_column(
425 'softwarerecipestable',
426 'edit-columns-button',
427 [f'checkbox-{column}' for column in column_list]
428 )
429 self._navigate_to_config_nav('softwarerecipestable', 4)
430 # check show rows(pagination)
431 self._mixin_test_table_show_rows(
432 table_selector='softwarerecipestable',
433 to_skip=[150],
434 )
435
436 def test_machines_page(self):
437 """ Test Machine page
438 - Check if title "Compatible machines" is displayed
439 - Check search input
440 - Check "Select machine" button works
441 - Check "Add layer" button works
442 - Check Machine table feature(show/hide column, pagination)
443 """
444 self._navigate_to_config_nav('machinestable', 5)
445 # check title "Compatible machines" is displayed
446 self.assertIn("Compatible machines", self.get_page_source())
447 # Test search input
448 self._mixin_test_table_search_input(
449 input_selector='search-input-machinestable',
450 input_text='qemux86-64',
451 searchBtn_selector='search-submit-machinestable',
452 table_selector='machinestable'
453 )
454 # check "Select machine" button works
455 finder = lambda driver: self.find_all('#machinestable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]')
456 select_btn = self.wait_until_element_clickable(finder)
457 select_btn.click()
458 self.wait_until_visible('#project-machine-name')
459 project_machine_name = self.find('#project-machine-name')
460 self.assertIn(
461 'qemux86-64', project_machine_name.text
462 )
463 # check "Add layer" button works
464 self._navigate_to_config_nav('machinestable', 5)
465 # Search for a machine whose layer is not in the project
466 self._mixin_test_table_search_input(
467 input_selector='search-input-machinestable',
468 input_text='qemux86-64-tpm2',
469 searchBtn_selector='search-submit-machinestable',
470 table_selector='machinestable'
471 )
472
473 self.wait_until_visible('#machinestable tbody tr')
474 # Locate a machine to add button
475 finder = lambda driver: self.find_all('#machinestable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]')
476 add_btn = self.wait_until_element_clickable(finder)
477 add_btn.click()
478 self.wait_until_visible('#change-notification')
479 change_notification = self.find('#change-notification')
480 self.assertIn(
481 'You have added 1 layer to your project', str(change_notification.text)
482 )
483
484 finder = lambda driver: self.find('#hide-alert')
485 hide_button = self.wait_until_element_clickable(finder)
486 hide_button.click()
487 self.wait_until_not_visible('#change-notification')
488
489 # check Machine table feature(show/hide column, pagination)
490 self._navigate_to_config_nav('machinestable', 5)
491 column_list = [
492 'description',
493 'layer_version__get_vcs_reference',
494 'layer_version__layer__name',
495 'machinefile',
496 ]
497 self._mixin_test_table_edit_column(
498 'machinestable',
499 'edit-columns-button',
500 [f'checkbox-{column}' for column in column_list]
501 )
502 self._navigate_to_config_nav('machinestable', 5)
503 # check show rows(pagination)
504 self._mixin_test_table_show_rows(
505 table_selector='machinestable',
506 to_skip=[150],
507 )
508
509 def test_layers_page(self):
510 """ Test layers page
511 - Check if title "Compatible layers" is displayed
512 - Check search input
513 - Check "Add layer" button works
514 - Check "Remove layer" button works
515 - Check layers table feature(show/hide column, pagination)
516 """
517 self._navigate_to_config_nav('layerstable', 6)
518 # check title "Compatible layers" is displayed
519 self.assertIn("Compatible layers", self.get_page_source())
520 # Test search input
521 input_text='meta-tanowrt'
522 self._mixin_test_table_search_input(
523 input_selector='search-input-layerstable',
524 input_text=input_text,
525 searchBtn_selector='search-submit-layerstable',
526 table_selector='layerstable'
527 )
528 # check "Add layer" button works
529 self.wait_until_visible('#layerstable tbody tr')
530 finder = lambda driver: self.find_all('#layerstable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]/a[@data-directive="add"]')
531 add_btn = self.wait_until_element_clickable(finder)
532 add_btn.click()
533 # check modal is displayed
534 self.wait_until_visible('#dependencies-modal')
535 list_dependencies = self.find_all('#dependencies-list li')
536 # click on add-layers button
537 finder = lambda driver: self.driver.find_element(By.XPATH, '//form[@id="dependencies-modal-form"]//button[@class="btn btn-primary"]')
538 add_layers_btn = self.wait_until_element_clickable(finder)
539 add_layers_btn.click()
540 self.wait_until_visible('#change-notification')
541 change_notification = self.find('#change-notification')
542 self.assertIn(
543 f'You have added {len(list_dependencies)+1} layers to your project: {input_text} and its dependencies', str(change_notification.text)
544 )
545
546 finder = lambda driver: self.find('#hide-alert')
547 hide_button = self.wait_until_element_clickable(finder)
548 hide_button.click()
549 self.wait_until_not_visible('#change-notification')
550
551 # check "Remove layer" button works
552 self.wait_until_visible('#layerstable tbody tr')
553 finder = lambda driver: self.find_all('#layerstable tbody tr')[0].find_element(By.XPATH, './/td[@class="add-del-layers"]/a[@data-directive="remove"]')
554 remove_btn = self.wait_until_element_clickable(finder)
555 remove_btn.click()
556 self.wait_until_visible('#change-notification')
557 change_notification = self.find('#change-notification')
558 self.assertIn(
559 f'You have removed 1 layer from your project: {input_text}', str(change_notification.text)
560 )
561
562 finder = lambda driver: self.find('#hide-alert')
563 hide_button = self.wait_until_element_clickable(finder)
564 hide_button.click()
565 self.wait_until_not_visible('#change-notification')
566
567 # check layers table feature(show/hide column, pagination)
568 self._navigate_to_config_nav('layerstable', 6)
569 column_list = [
570 'dependencies',
571 'revision',
572 'layer__vcs_url',
573 'git_subdir',
574 'layer__summary',
575 ]
576 self._mixin_test_table_edit_column(
577 'layerstable',
578 'edit-columns-button',
579 [f'checkbox-{column}' for column in column_list]
580 )
581 self._navigate_to_config_nav('layerstable', 6)
582 # check show rows(pagination)
583 self._mixin_test_table_show_rows(
584 table_selector='layerstable',
585 to_skip=[150],
586 )
587
588 def test_distro_page(self):
589 """ Test distros page
590 - Check if title "Compatible distros" is displayed
591 - Check search input
592 - Check "Add distro" button works
593 - Check distro table feature(show/hide column, pagination)
594 """
595 self._navigate_to_config_nav('distrostable', 7)
596 # check title "Compatible distros" is displayed
597 self.assertIn("Compatible Distros", self.get_page_source())
598 # Test search input
599 input_text = 'poky-altcfg'
600 self._mixin_test_table_search_input(
601 input_selector='search-input-distrostable',
602 input_text=input_text,
603 searchBtn_selector='search-submit-distrostable',
604 table_selector='distrostable'
605 )
606 # check "Add distro" button works
607 self.wait_until_visible(".add-del-layers")
608 finder = lambda driver: self.find_all('#distrostable tbody tr')[0].find_element(By.XPATH, './/td[@class="add-del-layers"]')
609 add_btn = self.wait_until_element_clickable(finder)
610 add_btn.click()
611 self.wait_until_visible('#change-notification')
612 change_notification = self.find('#change-notification')
613 self.assertIn(
614 f'You have changed the distro to: {input_text}', str(change_notification.text)
615 )
616 # check distro table feature(show/hide column, pagination)
617 self._navigate_to_config_nav('distrostable', 7)
618 column_list = [
619 'description',
620 'templatefile',
621 'layer_version__get_vcs_reference',
622 'layer_version__layer__name',
623 ]
624 self._mixin_test_table_edit_column(
625 'distrostable',
626 'edit-columns-button',
627 [f'checkbox-{column}' for column in column_list]
628 )
629 self._navigate_to_config_nav('distrostable', 7)
630 # check show rows(pagination)
631 self._mixin_test_table_show_rows(
632 table_selector='distrostable',
633 to_skip=[150],
634 )
635
636 def test_single_layer_page(self):
637 """ Test layer details page using meta-poky as an example (assumes is added to start with)
638 - Check if title is displayed
639 - Check add/remove layer button works
640 - Check tabs(layers, recipes, machines) are displayed
641 - Check left section is displayed
642 - Check layer name
643 - Check layer summary
644 - Check layer description
645 """
646 self._navigate_to_config_nav('layerstable', 6)
647 layer_link = self.driver.find_element(By.XPATH, '//tr/td[@class="layer__name"]/a[contains(text(),"meta-poky")]')
648 layer_link.click()
649 self.wait_until_visible('.page-header')
650 # check title is displayed
651 self.assertTrue(self.find('.page-header h1').is_displayed())
652
653 # check remove layer button works
654 finder = lambda driver: self.find('#add-remove-layer-btn')
655 remove_layer_btn = self.wait_until_element_clickable(finder)
656 remove_layer_btn.click()
657 self.wait_until_visible('#change-notification')
658 change_notification = self.find('#change-notification')
659 self.assertIn(
660 'You have removed 1 layer from your project', str(change_notification.text)
661 )
662 finder = lambda driver: self.find('#hide-alert')
663 hide_button = self.wait_until_element_clickable(finder)
664 hide_button.click()
665 # check add layer button works
666 self.wait_until_not_visible('#change-notification')
667 finder = lambda driver: self.find('#add-remove-layer-btn')
668 add_layer_btn = self.wait_until_element_clickable(finder)
669 add_layer_btn.click()
670 self.wait_until_visible('#change-notification')
671 change_notification = self.find('#change-notification')
672 self.assertIn(
673 'You have added 1 layer to your project', str(change_notification.text)
674 )
675 finder = lambda driver: self.find('#hide-alert')
676 hide_button = self.wait_until_element_clickable(finder)
677 hide_button.click()
678 self.wait_until_not_visible('#change-notification')
679 # check tabs(layers, recipes, machines) are displayed
680 tabs = self.find_all('.nav-tabs li')
681 self.assertEqual(len(tabs), 3)
682 # Check first tab
683 tabs[0].click()
684 self.assertIn(
685 'active', str(self.find('#information').get_attribute('class'))
686 )
687 # Check second tab (recipes)
688 self.wait_until_visible('.nav-tabs')
689 # Ensure page is scrolled to the top
690 self.driver.execute_script('window.scrollTo({behavior: "instant", top: 0, left: 0})')
691 tabs[1].click()
692 self.assertIn(
693 'active', str(self.find('#recipes').get_attribute('class'))
694 )
695 # Check third tab (machines)
696 self.wait_until_visible('.nav-tabs')
697 # Ensure page is scrolled to the top
698 self.driver.execute_script('window.scrollTo({behavior: "instant", top: 0, left: 0})')
699 tabs[2].click()
700 self.assertIn(
701 'active', str(self.find('#machines').get_attribute('class'))
702 )
703 # Check left section is displayed
704 section = self.find('.well')
705 # Check layer name
706 self.assertTrue(
707 section.find_element(By.XPATH, '//h2[1]').is_displayed()
708 )
709 # Check layer summary
710 self.assertIn("Summary", section.text)
711 # Check layer description
712 self.assertIn("Description", section.text)
713
714@pytest.mark.django_db
715@pytest.mark.order("last")
716class TestProjectPageRecipes(TestProjectPageBase):
717
718 def test_single_recipe_page(self):
719 """ Test recipe page
720 - Check if title is displayed
721 - Check add recipe layer displayed
722 - Check left section is displayed
723 - Check recipe: name, summary, description, Version, Section,
724 License, Approx. packages included, Approx. package size, Recipe file
725 """
726 # Use a recipe which is likely to exist in the layer index but not enabled
727 # in poky out of the box - xen-image-minimal from meta-virtualization
728 self._navigate_to_project_page()
729 prj = Project.objects.get(pk=TestProjectPageBase.project_id)
730 recipe_id = prj.get_all_compatible_recipes().get(name="xen-image-minimal").pk
731 url = reverse("recipedetails", args=(TestProjectPageBase.project_id, recipe_id))
732 self.get(url)
733 self.wait_until_visible('.page-header')
734 # check title is displayed
735 self.assertTrue(self.find('.page-header h1').is_displayed())
736 # check add recipe layer displayed
737 add_recipe_layer_btn = self.find('#add-layer-btn')
738 self.assertTrue(add_recipe_layer_btn.is_displayed())
739 # check left section is displayed
740 section = self.find('.well')
741 # Check recipe name
742 self.assertTrue(
743 section.find_element(By.XPATH, '//h2[1]').is_displayed()
744 )
745 # Check recipe sections details info are displayed
746 self.assertIn("Summary", section.text)
747 self.assertIn("Description", section.text)
748 self.assertIn("Version", section.text)
749 self.assertIn("Section", section.text)
750 self.assertIn("License", section.text)
751 self.assertIn("Approx. packages included", section.text)
752 self.assertIn("Approx. package size", section.text)
753 self.assertIn("Recipe file", section.text)
754
755 def test_image_recipe_editColumn(self):
756 """ Test the edit column feature in image recipe table on project page """
757 self._get_create_builds(success=10, failure=10)
758
759 url = reverse('projectimagerecipes', args=(TestProjectPageBase.project_id,))
760 self.get(url)
761 self.wait_until_present('#imagerecipestable tbody tr')
762
763 column_list = [
764 'get_description_or_summary', 'layer_version__get_vcs_reference',
765 'layer_version__layer__name', 'license', 'recipe-file', 'section',
766 'version'
767 ]
768
769 # Check that we can hide the edit column
770 self._mixin_test_table_edit_column(
771 'imagerecipestable',
772 'edit-columns-button',
773 [f'checkbox-{column}' for column in column_list]
774 )
775
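
The _mixin_test_table_* helpers called throughout this file are defined elsewhere in the suite. As a rough sketch of the edit-column pattern they exercise (helper and method names inferred from the call sites above, not copied from the real mixin):

    def _mixin_test_table_edit_column(self, table_id, edit_btn_id, checkbox_ids):
        # Open the "Edit columns" dropdown for the given table
        self.find(f'#{edit_btn_id}').click()
        self.wait_until_visible('ul.dropdown-menu.editcol')
        for checkbox_id in checkbox_ids:
            column_class = checkbox_id.replace('checkbox-', '')
            header = self.find(f'#{table_id} thead th.{column_class}')
            was_visible = header.is_displayed()
            # Toggling the checkbox must flip the column's visibility ...
            self.find(f'#{checkbox_id}').click()
            self.assertNotEqual(was_visible, header.is_displayed())
            # ... and toggling again must restore the original state
            self.find(f'#{checkbox_id}').click()
            self.assertEqual(was_visible, header.is_displayed())
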
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py
new file mode 100644
index 0000000000..80c53e1544
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py
@@ -0,0 +1,507 @@
1#!/usr/bin/env python3
2# BitBake Toaster UI tests implementation
3#
4# Copyright (C) 2023 Savoir-faire Linux
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import string
10import time
11import pytest
12from django.urls import reverse
13from selenium.webdriver import Keys
14from selenium.webdriver.support.select import Select
15from selenium.common.exceptions import ElementClickInterceptedException, NoSuchElementException, TimeoutException
16from tests.functional.functional_helpers import SeleniumFunctionalTestCase
17from selenium.webdriver.common.by import By
18
19from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled
20
21class TestProjectConfigTabBase(SeleniumFunctionalTestCase):
22 PROJECT_NAME = 'TestProjectConfigTab'
23 project_id = None
24
25 def _navigate_to_project_page(self):
26 # Navigate to project page
27 if TestProjectConfigTabBase.project_id is None:
28 TestProjectConfigTabBase.project_id = self.create_new_project(self.PROJECT_NAME, '3', None, True)
29 url = reverse('project', args=(TestProjectConfigTabBase.project_id,))
30 self.get(url)
31 self.wait_until_visible('#config-nav')
32
33 def _create_builds(self):
34 # check the search box can be used to build recipes
35 search_box = self.find('#build-input')
36 search_box.send_keys('foo')
37 self.find('#build-button').click()
38 self.wait_until_present('#latest-builds')
39 # loop until the build reaches the parsing state
40 wait_until_build(self, 'queued cloning starting parsing failed')
41 latest_builds = self.driver.find_elements(
42 By.XPATH,
43 '//div[@id="latest-builds"]/div',
44 )
45 last_build = latest_builds[0]
46 self.assertIn(
47 'foo', str(last_build.text)
48 )
49 last_build = latest_builds[0]
50 try:
51 cancel_button = last_build.find_element(
52 By.XPATH,
53 './/span[@class="cancel-build-btn pull-right alert-link"]',
54 )
55 cancel_button.click()
56 except NoSuchElementException:
57 # Skip if the build is already cancelled
58 pass
59 wait_until_build_cancelled(self)
60
61 def _get_tabs(self):
62 # tabs links list
63 return self.driver.find_elements(
64 By.XPATH,
65 '//div[@id="project-topbar"]//li'
66 )
67
68 def _get_config_nav_item(self, index):
69 config_nav = self.find('#config-nav')
70 return config_nav.find_elements(By.TAG_NAME, 'li')[index]
71
72class TestProjectConfigTab(TestProjectConfigTabBase):
73
74 def test_project_config_nav(self):
75 """ Test project config tab navigation:
76 - Check if the menu is displayed and contains the right elements:
77 - Configuration
78 - COMPATIBLE METADATA
79 - Custom images
80 - Image recipes
81 - Software recipes
82 - Machines
83 - Layers
84 - Distro
85 - EXTRA CONFIGURATION
86 - Bitbake variables
87 - Actions
88 - Delete project
89 """
90 self._navigate_to_project_page()
91
92 def _get_config_nav_item(index):
93 config_nav = self.find('#config-nav')
94 return config_nav.find_elements(By.TAG_NAME, 'li')[index]
95
96 def check_config_nav_item(index, item_name, url):
97 item = _get_config_nav_item(index)
98 self.assertIn(item_name, item.text)
99 self.assertEqual(item.get_attribute('class'), 'active')
100 self.assertIn(url, self.driver.current_url)
101
102 # check if the menu contains the right elements
103 # COMPATIBLE METADATA
104 compatible_metadata = _get_config_nav_item(1)
105 self.assertIn(
106 "compatible metadata", compatible_metadata.text.lower()
107 )
108 # EXTRA CONFIGURATION
109 extra_configuration = _get_config_nav_item(8)
110 self.assertIn(
111 "extra configuration", extra_configuration.text.lower()
112 )
113 # Actions
114 actions = _get_config_nav_item(10)
115 self.assertIn("actions", str(actions.text).lower())
116
117 conf_nav_list = [
118 # config
119 [0, 'Configuration',
120 f"/toastergui/project/{TestProjectConfigTabBase.project_id}"],
121 # custom images
122 [2, 'Custom images',
123 f"/toastergui/project/{TestProjectConfigTabBase.project_id}/customimages"],
124 # image recipes
125 [3, 'Image recipes',
126 f"/toastergui/project/{TestProjectConfigTabBase.project_id}/images"],
127 # software recipes
128 [4, 'Software recipes',
129 f"/toastergui/project/{TestProjectConfigTabBase.project_id}/softwarerecipes"],
130 # machines
131 [5, 'Machines',
132 f"/toastergui/project/{TestProjectConfigTabBase.project_id}/machines"],
133 # layers
134 [6, 'Layers',
135 f"/toastergui/project/{TestProjectConfigTabBase.project_id}/layers"],
136 # distro
137 [7, 'Distros',
138 f"/toastergui/project/{TestProjectConfigTabBase.project_id}/distros"],
139 # [9, 'BitBake variables', f"/toastergui/project/{TestProjectConfigTabBase.project_id}/configuration"], # bitbake variables
140 ]
141 for index, item_name, url in conf_nav_list:
142 item = _get_config_nav_item(index)
143 if item.get_attribute('class') != 'active':
144 item.click()
145 check_config_nav_item(index, item_name, url)
146
147 def test_image_recipe_editColumn(self):
148 """ Test the edit column feature in image recipe table on project page """
149 def test_edit_column(check_box_id):
150 # Check that we can hide/show table column
151 check_box = self.find(f'#{check_box_id}')
152 th_class = str(check_box_id).replace('checkbox-', '')
153 if check_box.is_selected():
154 # check if column is visible in table
155 self.assertTrue(
156 self.find(
157 f'#imagerecipestable thead th.{th_class}'
158 ).is_displayed(),
159 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
160 )
161 check_box.click()
162 # check if column is hidden in table
163 self.assertFalse(
164 self.find(
165 f'#imagerecipestable thead th.{th_class}'
166 ).is_displayed(),
167 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
168 )
169 else:
170 # check if column is hidden in table
171 self.assertFalse(
172 self.find(
173 f'#imagerecipestable thead th.{th_class}'
174 ).is_displayed(),
175 f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
176 )
177 check_box.click()
178 # check if column is visible in table
179 self.assertTrue(
180 self.find(
181 f'#imagerecipestable thead th.{th_class}'
182 ).is_displayed(),
183 f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
184 )
185
186 self._navigate_to_project_page()
187 # navigate to project image recipe page
188 recipe_image_page_link = self._get_config_nav_item(3)
189 recipe_image_page_link.click()
190 self.wait_until_present('#imagerecipestable tbody tr')
191
192 # Check edit column
193 edit_column = self.find('#edit-columns-button')
194 self.assertTrue(edit_column.is_displayed())
195 edit_column.click()
196 # Check dropdown is visible
197 self.wait_until_visible('ul.dropdown-menu.editcol')
198
199 # Check that we can hide the edit column
200 test_edit_column('checkbox-get_description_or_summary')
201 test_edit_column('checkbox-layer_version__get_vcs_reference')
202 test_edit_column('checkbox-layer_version__layer__name')
203 test_edit_column('checkbox-license')
204 test_edit_column('checkbox-recipe-file')
205 test_edit_column('checkbox-section')
206 test_edit_column('checkbox-version')
207
208 def test_image_recipe_show_rows(self):
209 """ Test the show rows feature in image recipe table on project page """
210 def test_show_rows(row_to_show, show_row_link):
211 # Check that we can show rows == row_to_show
212 show_row_link.select_by_value(str(row_to_show))
213 self.wait_until_visible('#imagerecipestable tbody tr')
214 # check at least some rows are visible
215 self.assertTrue(
216 len(self.find_all('#imagerecipestable tbody tr')) > 0
217 )
218
219 self._navigate_to_project_page()
220 # navigate to project image recipe page
221 recipe_image_page_link = self._get_config_nav_item(3)
222 recipe_image_page_link.click()
223 self.wait_until_present('#imagerecipestable tbody tr')
224
225 show_rows = self.driver.find_elements(
226 By.XPATH,
227 '//select[@class="form-control pagesize-imagerecipestable"]'
228 )
229 # Check show rows
230 for show_row_link in show_rows:
231 show_row_link = Select(show_row_link)
232 test_show_rows(10, show_row_link)
233 test_show_rows(25, show_row_link)
234 test_show_rows(50, show_row_link)
235 test_show_rows(100, show_row_link)
236 test_show_rows(150, show_row_link)
237
238 def test_project_config_tab_right_section(self):
239 """ Test project config tab right section contains five blocks:
240 - Machine:
241 - check 'Machine' is displayed
242 - check can change Machine
243 - Distro:
244 - check 'Distro' is displayed
245 - check can change Distro
246 - Most built recipes:
247 - check 'Most built recipes' is displayed
248 - check can select a recipe and build it
249 - Project release:
250 - check 'Project release' is displayed
251 - check project has right release displayed
252 - Layers:
253 - check can add a layer if exists
254 - check at least three layers are displayed
255 - openembedded-core
256 - meta-poky
257 - meta-yocto-bsp
258 """
259 project_id = self.create_new_project(self.PROJECT_NAME + "-ST", '3', None, True)
260 url = reverse('project', args=(project_id,))
261 self.get(url)
262 self.wait_until_visible('#config-nav')
263
264 # check if the menu is displayed
265 self.wait_until_visible('#project-page')
266 block_l = self.driver.find_element(
267 By.XPATH, '//*[@id="project-page"]/div[2]')
268 project_release = self.driver.find_element(
269 By.XPATH, '//*[@id="project-page"]/div[1]/div[4]')
270 layers = block_l.find_element(By.ID, 'layer-container')
271
272 def check_machine_distro(self, item_name, new_item_name, block_id):
273 block = self.find(f'#{block_id}')
274 title = block.find_element(By.TAG_NAME, 'h3')
275 self.assertIn(item_name.capitalize(), title.text)
276 edit_btn = self.find(f'#change-{item_name}-toggle')
277 edit_btn.click()
278 self.wait_until_visible(f'#{item_name}-change-input')
279 name_input = self.find(f'#{item_name}-change-input')
280 name_input.clear()
281 name_input.send_keys(new_item_name)
282 change_btn = self.find(f'#{item_name}-change-btn')
283 change_btn.click()
284 self.wait_until_visible(f'#project-{item_name}-name')
285 project_name = self.find(f'#project-{item_name}-name')
286 self.assertIn(new_item_name, project_name.text)
287 # check change notification is displayed
288 change_notification = self.find('#change-notification')
289 self.assertIn(
290 f'You have changed the {item_name} to: {new_item_name}', change_notification.text
291 )
292 hide_button = self.find('#hide-alert')
293 hide_button.click()
294 self.wait_until_not_visible('#change-notification')
295
296 # Machine
297 check_machine_distro(self, 'machine', 'qemux86-64', 'machine-section')
298 # Distro
299 check_machine_distro(self, 'distro', 'poky-altcfg', 'distro-section')
300
301 # Project release
302 title = project_release.find_element(By.TAG_NAME, 'h3')
303 self.assertIn("Project release", title.text)
304 self.assertIn(
305 "Yocto Project master", self.find('#project-release-title').text
306 )
307 # Layers
308 title = layers.find_element(By.TAG_NAME, 'h3')
309 self.assertIn("Layers", title.text)
310 self.wait_until_clickable('#layer-add-input')
311 # check at least three layers are displayed
312 # openembedded-core
313 # meta-poky
314 # meta-yocto-bsp
315 layer_list_items = []
316 starttime = time.time()
317 while len(layer_list_items) < 3:
318 layers_list = self.driver.find_element(By.ID, 'layers-in-project-list')
319 layer_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
320 if time.time() > (starttime + 30):
321 self.fail("Layer list didn't contain at least 3 items within 30s (contained %d)" % len(layer_list_items))
322
323 # remove all layers except the first three layers
324 for i in range(3, len(layer_list_items)):
325 layer_list_items[i].find_element(By.TAG_NAME, 'span').click()
326
327 # check can add a layer if exists
328 add_layer_input = layers.find_element(By.ID, 'layer-add-input')
329 add_layer_input.send_keys('meta-oe')
330 self.wait_until_visible('#layer-container > form > div > span > div')
331 self.wait_until_visible('.dropdown-menu')
332 finder = lambda driver: driver.find_element(By.XPATH, '//*[@id="layer-container"]/form/div/span/div/div/div')
333 dropdown_item = self.wait_until_element_clickable(finder)
334 dropdown_item.click()
335 self.wait_until_clickable('#add-layer-btn')
336 add_layer_btn = layers.find_element(By.ID, 'add-layer-btn')
337 add_layer_btn.click()
338 self.wait_until_visible('#layers-in-project-list')
339
340 # check layer is added
341 layer_list_items = []
342 starttime = time.time()
343 while len(layer_list_items) < 4:
344 layers_list = self.driver.find_element(By.ID, 'layers-in-project-list')
345 layer_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
346 if time.time() > (starttime + 30):
347 self.fail("Layer list didn't contain at least 4 items within 30s (contained %d)" % len(layer_list_items))
348
349 def test_project_page_tab_importlayer(self):
350 """ Test project page tab import layer """
351 self._navigate_to_project_page()
352 # navigate to "Import layers" tab
353 import_layers_tab = self._get_tabs()[2]
354 import_layers_tab.find_element(By.TAG_NAME, 'a').click()
355 self.wait_until_visible('#layer-git-repo-url')
356
357 # Check git repo radio button
358 git_repo_radio = self.find('#git-repo-radio')
359 git_repo_radio.click()
360
361 # Set git repo url
362 input_repo_url = self.find('#layer-git-repo-url')
363 input_repo_url.send_keys('git://git.yoctoproject.org/meta-fake')
364 # Blur the input to trigger the validation
365 input_repo_url.send_keys(Keys.TAB)
366
367 # Check name is set
368 input_layer_name = self.find('#import-layer-name')
369 self.assertEqual(input_layer_name.get_attribute('value'), 'meta-fake')
370
371 # Set branch
372 input_branch = self.find('#layer-git-ref')
373 input_branch.send_keys('master')
374
375 # Import layer
376 self.find('#import-and-add-btn').click()
377
378 # Check layer is added
379 self.wait_until_visible('#layer-container')
380 block_l = self.driver.find_element(
381 By.XPATH, '//*[@id="project-page"]/div[2]')
382 layers = block_l.find_element(By.ID, 'layer-container')
383 layers_list = layers.find_element(By.ID, 'layers-in-project-list')
384 layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
385 self.assertIn(
386 'meta-fake', str(layers_list_items[-1].text)
387 )
388
389 def test_project_page_custom_image_no_image(self):
390 """ Test project page tab "New custom image" when no custom image """
391 project_id = self.create_new_project(self.PROJECT_NAME + "-CustomImage", '3', None, True)
392 url = reverse('project', args=(project_id,))
393 self.get(url)
394 self.wait_until_visible('#config-nav')
395
396 # navigate to "Custom image" tab
397 custom_image_section = self._get_config_nav_item(2)
398 custom_image_section.click()
399 self.wait_until_visible('#empty-state-customimagestable')
400
401 # Check message when no custom image
402 self.assertIn(
403 "You have not created any custom images yet.", str(
404 self.find('#empty-state-customimagestable').text
405 )
406 )
407 div_empty_msg = self.find('#empty-state-customimagestable')
408 link_create_custom_image = div_empty_msg.find_element(
409 By.TAG_NAME, 'a')
410 self.assertIsNotNone(TestProjectConfigTabBase.project_id)
411 self.assertIn(
412 f"/toastergui/project/{project_id}/newcustomimage", str(
413 link_create_custom_image.get_attribute('href')
414 )
415 )
416 self.assertIn(
417 "Create your first custom image", str(
418 link_create_custom_image.text
419 )
420 )
421
422 def test_project_page_image_recipe(self):
423 """ Test project page section images
424 - Check image recipes are displayed
425 - Check search input
426 - Check image recipe build button works
427 - Check image recipe table features(show/hide column, pagination)
428 """
429 self._navigate_to_project_page()
430 # navigate to "Images section"
431 images_section = self._get_config_nav_item(3)
432 images_section.click()
433 self.wait_until_visible('#imagerecipestable')
434 rows = self.find_all('#imagerecipestable tbody tr')
435 self.assertTrue(len(rows) > 0)
436
437 # Test search input
438 self.wait_until_visible('#search-input-imagerecipestable')
439 recipe_input = self.find('#search-input-imagerecipestable')
440 recipe_input.send_keys('core-image-minimal')
441 self.find('#search-submit-imagerecipestable').click()
442 self.wait_until_visible('#imagerecipestable tbody tr')
443 rows = self.find_all('#imagerecipestable tbody tr')
444 self.assertTrue(len(rows) > 0)
445
446@pytest.mark.django_db
447@pytest.mark.order("last")
448class TestProjectConfigTabDB(TestProjectConfigTabBase):
449
450 def test_most_build_recipes(self):
451 """ Test most build recipes block contains"""
452 def rebuild_from_most_built_recipes(recipe_list_items):
453 checkbox = recipe_list_items[0].find_element(By.TAG_NAME, 'input')
454 checkbox.click()
455 build_btn = self.find('#freq-build-btn')
456 build_btn.click()
457 self.wait_until_visible('#latest-builds')
458 wait_until_build(self, 'queued cloning starting parsing failed')
459 latest_builds = self.driver.find_elements(
460 By.XPATH,
461 '//div[@id="latest-builds"]/div'
462 )
463 self.assertTrue(len(latest_builds) >= 2)
464 last_build = latest_builds[0]
465 try:
466 cancel_button = last_build.find_element(
467 By.XPATH,
468 './/span[@class="cancel-build-btn pull-right alert-link"]',
469 )
470 cancel_button.click()
471 except NoSuchElementException:
472 # Skip if the build is already cancelled
473 pass
474 wait_until_build_cancelled(self)
475
476 # Create a new project for remaining asserts
477 project_id = self.create_new_project(self.PROJECT_NAME + "-MostBuilt", '2', None, True)
478 url = reverse('project', args=(project_id,))
479 self.get(url)
480 self.wait_until_visible('#config-nav')
481
482 current_url = self.driver.current_url
483 url = current_url.split('?')[0]
484
485 # Create new builds
486 self._create_builds()
487
488 # back to project page
489 self.driver.get(url)
490
491 self.wait_until_visible('#project-page')
492
493 # Most built recipes
494 most_built_recipes = self.driver.find_element(
495 By.XPATH, '//*[@id="project-page"]/div[1]/div[3]')
496 title = most_built_recipes.find_element(By.TAG_NAME, 'h3')
497 self.assertIn("Most built recipes", title.text)
498 # check can select a recipe and build it
499 self.wait_until_visible('#freq-build-list')
500 recipe_list = self.find('#freq-build-list')
501 recipe_list_items = recipe_list.find_elements(By.TAG_NAME, 'li')
502 self.assertTrue(
503 len(recipe_list_items) > 0,
504 msg="No recipes found in the most built recipes list",
505 )
506 rebuild_from_most_built_recipes(recipe_list_items)
507
diff --git a/bitbake/lib/toaster/tests/functional/utils.py b/bitbake/lib/toaster/tests/functional/utils.py
new file mode 100644
index 0000000000..72345aef9f
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/utils.py
@@ -0,0 +1,86 @@
1#!/usr/bin/env python3
2# -*- coding: utf-8 -*-
3# BitBake Toaster UI tests implementation
4#
5# Copyright (C) 2023 Savoir-faire Linux
6#
7# SPDX-License-Identifier: GPL-2.0-only
8
9
10from time import sleep
11from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, TimeoutException, WebDriverException
12from selenium.webdriver.common.by import By
13
14from orm.models import Build
15
16
17def wait_until_build(test_instance, state):
18 timeout = 60
19 elapsed = 0
20 build_state = ''
21 while True:
22 try:
23 if elapsed > timeout:
24 raise TimeoutException(
25 f'Build did not reach {state} state within {timeout} seconds'
26 )
27 last_build_state = test_instance.driver.find_element(
28 By.XPATH,
29 '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
30 )
31 build_state = last_build_state.get_attribute(
32 'data-build-state')
33 state_text = state.lower().split()
34 if any(x in str(build_state).lower() for x in state_text):
35 return str(build_state).lower()
36 if 'failed' in str(build_state).lower():
37 break
38 except NoSuchElementException:
39 pass
40 except TimeoutException:
41 break
42 elapsed += 1
43 sleep(1) # take a breath and try again
44
45def wait_until_build_cancelled(test_instance):
46 """ Cancel build take a while sometime, the method is to wait driver action
47 until build being cancelled
48 """
49 timeout = 30
50 elapsed = 0
51 while True:
52 try:
53 if elapsed > timeout:
54 raise TimeoutException(
55 f'Build did not reach cancelled state within {timeout} seconds'
56 )
57 last_build_state = test_instance.driver.find_element(
58 By.XPATH,
59 '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
60 )
61 build_state = last_build_state.get_attribute(
62 'data-build-state')
63 if 'failed' in str(build_state).lower():
64 break
65 if 'cancelling' in str(build_state).lower():
66 pass  # still cancelling; poll again
67 if 'cancelled' in str(build_state).lower():
68 break
69 except TimeoutException:
70 break
71 except NoSuchElementException:
72 pass
73 except StaleElementReferenceException:
74 pass
75 except WebDriverException:
76 pass
77 elapsed += 1
78 sleep(1) # take a breath and try again
79
80def get_projectId_from_url(url):
81 # url = 'http://domainname.com/toastergui/project/1656/whatever
82 # or url = 'http://domainname.com/toastergui/project/1/
83 # or url = 'http://domainname.com/toastergui/project/186
84 assert '/toastergui/project/' in url, "URL is not valid"
85 url_to_list = url.split('/toastergui/project/')
86 return int(url_to_list[1].split('/')[0]) # project_id
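
For reference, the behaviour of get_projectId_from_url on the three URL shapes listed in its comment (hostnames are placeholders):

    assert get_projectId_from_url('http://domainname.com/toastergui/project/1656/whatever') == 1656
    assert get_projectId_from_url('http://domainname.com/toastergui/project/1/') == 1
    assert get_projectId_from_url('http://domainname.com/toastergui/project/186') == 186
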
diff --git a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
index 4f9fcc46d2..6243c00a36 100644
--- a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
+++ b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
@@ -1 +1,9 @@
1selenium==2.49.2 1selenium>=4.13.0
2pytest==7.4.2
3pytest-django==4.5.2
4pytest-env==1.1.0
5pytest-html==4.0.2
6pytest-metadata==3.0.0
7pytest-order==1.1.0
8requests
9
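
The jump from selenium==2.49.2 to selenium>=4.13.0 is the change that forces the By-based locator style seen throughout these tests: the find_element_by_* shorthands from the 2.x API no longer exist in Selenium 4. A minimal before/after, assuming a local Firefox/geckodriver setup:

    from selenium import webdriver
    from selenium.webdriver.common.by import By

    driver = webdriver.Firefox()  # any Selenium 4 driver works here
    # Selenium 2.x style (removed in 4.x):
    #   element = driver.find_element_by_id('config-nav')
    # Selenium 4.x style, as used in the tests above:
    element = driver.find_element(By.ID, 'config-nav')
    driver.quit()
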
diff --git a/bitbake/lib/toaster/tests/views/test_views.py b/bitbake/lib/toaster/tests/views/test_views.py
index 735d596bcc..e1adfcf86a 100644
--- a/bitbake/lib/toaster/tests/views/test_views.py
+++ b/bitbake/lib/toaster/tests/views/test_views.py
@@ -9,6 +9,8 @@
9 9
10"""Test cases for Toaster GUI and ReST.""" 10"""Test cases for Toaster GUI and ReST."""
11 11
12import os
13import pytest
12from django.test import TestCase 14from django.test import TestCase
13from django.test.client import RequestFactory 15from django.test.client import RequestFactory
14from django.urls import reverse 16from django.urls import reverse
@@ -19,6 +21,7 @@ from orm.models import Layer_Version, Recipe
19from orm.models import CustomImageRecipe 21from orm.models import CustomImageRecipe
20from orm.models import CustomImagePackage 22from orm.models import CustomImagePackage
21 23
24from bldcontrol.models import BuildEnvironment
22import inspect 25import inspect
23import toastergui 26import toastergui
24 27
@@ -32,19 +35,32 @@ PROJECT_NAME2 = "test project 2"
32CLI_BUILDS_PROJECT_NAME = 'Command line builds' 35CLI_BUILDS_PROJECT_NAME = 'Command line builds'
33 36
34 37
38
35class ViewTests(TestCase): 39class ViewTests(TestCase):
36 """Tests to verify view APIs.""" 40 """Tests to verify view APIs."""
37 41
38 fixtures = ['toastergui-unittest-data'] 42 fixtures = ['toastergui-unittest-data']
43 builddir = os.environ.get('BUILDDIR')
39 44
40 def setUp(self): 45 def setUp(self):
41 46
42 self.project = Project.objects.first() 47 self.project = Project.objects.first()
48
43 self.recipe1 = Recipe.objects.get(pk=2) 49 self.recipe1 = Recipe.objects.get(pk=2)
50 # create a file and assign it to recipe1's file_path
51 file_path = f"{self.builldir}/{self.recipe1.name.strip().replace(' ', '-')}.bb"
52 with open(file_path, 'w') as f:
53 f.write('foo')
54 self.recipe1.file_path = file_path
55 self.recipe1.save()
56
44 self.customr = CustomImageRecipe.objects.first() 57 self.customr = CustomImageRecipe.objects.first()
45 self.cust_package = CustomImagePackage.objects.first() 58 self.cust_package = CustomImagePackage.objects.first()
46 self.package = Package.objects.first() 59 self.package = Package.objects.first()
47 self.lver = Layer_Version.objects.first() 60 self.lver = Layer_Version.objects.first()
61 if BuildEnvironment.objects.count() == 0:
62 BuildEnvironment.objects.create(betype=BuildEnvironment.TYPE_LOCAL)
63
48 64
49 def test_get_base_call_returns_html(self): 65 def test_get_base_call_returns_html(self):
50 """Basic test for all-projects view""" 66 """Basic test for all-projects view"""
@@ -226,7 +242,7 @@ class ViewTests(TestCase):
226 recipe = CustomImageRecipe.objects.create( 242 recipe = CustomImageRecipe.objects.create(
227 name=name, project=self.project, 243 name=name, project=self.project,
228 base_recipe=self.recipe1, 244 base_recipe=self.recipe1,
229 file_path="/tmp/testing", 245 file_path=f"{self.builldir}/testing",
230 layer_version=self.customr.layer_version) 246 layer_version=self.customr.layer_version)
231 url = reverse('xhr_customrecipe_id', args=(recipe.id,)) 247 url = reverse('xhr_customrecipe_id', args=(recipe.id,))
232 response = self.client.delete(url) 248 response = self.client.delete(url)
@@ -297,7 +313,7 @@ class ViewTests(TestCase):
297 """Download the recipe file generated for the custom image""" 313 """Download the recipe file generated for the custom image"""
298 314
299 # Create a dummy recipe file for the custom image generation to read 315 # Create a dummy recipe file for the custom image generation to read
300 open("/tmp/a_recipe.bb", 'a').close() 316 open(f"{self.builddir}/a_recipe.bb", 'a').close()
301 response = self.client.get(reverse('customrecipedownload', 317 response = self.client.get(reverse('customrecipedownload',
302 args=(self.project.id, 318 args=(self.project.id,
303 self.customr.id))) 319 self.customr.id)))
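
The setUp changes above write dummy recipe files under os.environ['BUILDDIR']; if the tests run without BUILDDIR exported, builddir is None and the generated paths become 'None/...'. A defensive variant (illustrative only, not part of the patch):

    import os
    import tempfile

    # Fall back to a throwaway directory when BUILDDIR is not exported
    builddir = os.environ.get('BUILDDIR') or tempfile.mkdtemp(prefix='toaster-tests-')
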
diff --git a/bitbake/lib/toaster/toastergui/api.py b/bitbake/lib/toaster/toastergui/api.py
index b4cdc335ef..e367bd910e 100644
--- a/bitbake/lib/toaster/toastergui/api.py
+++ b/bitbake/lib/toaster/toastergui/api.py
@@ -11,7 +11,7 @@ import os
11import re 11import re
12import logging 12import logging
13import json 13import json
14import subprocess 14import glob
15from collections import Counter 15from collections import Counter
16 16
17from orm.models import Project, ProjectTarget, Build, Layer_Version 17from orm.models import Project, ProjectTarget, Build, Layer_Version
@@ -227,20 +227,18 @@ class XhrSetDefaultImageUrl(View):
227# same logical name 227# same logical name
228# * Each project that uses a layer will have its own 228# * Each project that uses a layer will have its own
229# LayerVersion and Project Layer for it 229# LayerVersion and Project Layer for it
230# * During the Paroject delete process, when the last 230# * During the Project delete process, when the last
231# LayerVersion for a 'local_source_dir' layer is deleted 231# LayerVersion for a 'local_source_dir' layer is deleted
232# then the Layer record is deleted to remove orphans 232# then the Layer record is deleted to remove orphans
233# 233#
234 234
235def scan_layer_content(layer,layer_version): 235def scan_layer_content(layer,layer_version):
236 # if this is a local layer directory, we can immediately scan its content 236 # if this is a local layer directory, we can immediately scan its content
237 if layer.local_source_dir: 237 if os.path.isdir(layer.local_source_dir):
238 try: 238 try:
239 # recipes-*/*/*.bb 239 # recipes-*/*/*.bb
240 cmd = '%s %s' % ('ls', os.path.join(layer.local_source_dir,'recipes-*/*/*.bb')) 240 recipes_list = glob.glob(os.path.join(layer.local_source_dir, 'recipes-*/*/*.bb'))
241 recipes_list = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.read() 241 for recipe in recipes_list:
242 recipes_list = recipes_list.decode("utf-8").strip()
243 if recipes_list and 'No such' not in recipes_list:
244 for recipe in recipes_list.split('\n'): 242 for recipe in recipes_list.split('\n'):
245 recipe_path = recipe[recipe.rfind('recipes-'):] 243 recipe_path = recipe[recipe.rfind('recipes-'):]
246 recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','') 244 recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','')
@@ -260,6 +258,9 @@ def scan_layer_content(layer,layer_version):
260 258
261 except Exception as e: 259 except Exception as e:
262 logger.warning("ERROR:scan_layer_content: %s" % e) 260 logger.warning("ERROR:scan_layer_content: %s" % e)
261 else:
262 logger.warning("ERROR: wrong path given")
263 raise KeyError("local_source_dir")
263 264
264class XhrLayer(View): 265class XhrLayer(View):
265 """ Delete, Get, Add and Update Layer information 266 """ Delete, Get, Add and Update Layer information
@@ -456,15 +457,18 @@ class XhrLayer(View):
456 'layerdetailurl': 457 'layerdetailurl':
457 layer_dep.get_detailspage_url(project.pk)}) 458 layer_dep.get_detailspage_url(project.pk)})
458 459
459 # Scan the layer's content and update components 460 # Only scan_layer_content if layer is local
460 scan_layer_content(layer,layer_version) 461 if layer_data.get('local_source_dir', None):
462 # Scan the layer's content and update components
463 scan_layer_content(layer,layer_version)
461 464
462 except Layer_Version.DoesNotExist: 465 except Layer_Version.DoesNotExist:
463 return error_response("layer-dep-not-found") 466 return error_response("layer-dep-not-found")
464 except Project.DoesNotExist: 467 except Project.DoesNotExist:
465 return error_response("project-not-found") 468 return error_response("project-not-found")
466 except KeyError: 469 except KeyError as e:
467 return error_response("incorrect-parameters") 470 _log("KeyError: %s" % e)
471 return error_response("incorrect-parameters")
468 472
469 return JsonResponse({'error': "ok", 473 return JsonResponse({'error': "ok",
470 'imported_layer': { 474 'imported_layer': {
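
The api.py hunk above replaces a shell 'ls' run through subprocess with glob.glob, dropping both the subprocess and the fragile 'No such' output check. The same scan as a standalone sketch (the layer path is a placeholder):

    import glob
    import os

    layer_dir = '/path/to/meta-example'  # placeholder
    for recipe in glob.glob(os.path.join(layer_dir, 'recipes-*/*/*.bb')):
        recipe_path = recipe[recipe.rfind('recipes-'):]
        recipe_name = os.path.basename(recipe).replace('.bb', '')
        print(recipe_name, '->', recipe_path)
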
diff --git a/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml b/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
index 4517ed1765..f626572fd1 100644
--- a/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
+++ b/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
@@ -6,10 +6,22 @@
6 <field type="CharField" name="dirpath">b</field> 6 <field type="CharField" name="dirpath">b</field>
7 <field type="CharField" name="branch">a</field> 7 <field type="CharField" name="branch">a</field>
8 </object> 8 </object>
9 <object pk="1" model="orm.distro">
10 <field type="DateTimeField" name="up_date"><None></None></field>
11 <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">1</field>
12 <field type="CharField" name="name">poky_distro1</field>
13 <field type="CharField" name="description">poky_distro1 description</field>
14 </object>
15 <object pk="2" model="orm.distro">
16 <field type="DateTimeField" name="up_date"><None></None></field>
17 <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">2</field>
18 <field type="CharField" name="name">poky_distro2</field>
19 <field type="CharField" name="description">poky_distro2 description</field>
20 </object>
9 <object pk="1" model="orm.release"> 21 <object pk="1" model="orm.release">
10 <field type="CharField" name="name">master</field> 22 <field type="CharField" name="name">foo_master</field>
11 <field type="CharField" name="description">master project</field> 23 <field type="CharField" name="description">master project</field>
12 <field to="orm.bitbake_version" name="bitbake_version">1</field> 24 <field to="orm.bitbakeversion" name="bitbake_version">1</field>
13 </object> 25 </object>
14 <object pk="1" model="orm.project"> 26 <object pk="1" model="orm.project">
15 <field type="CharField" name="name">a test project</field> 27 <field type="CharField" name="name">a test project</field>
@@ -34,12 +46,12 @@
34 <object pk="1" model="orm.ProjectVariable"> 46 <object pk="1" model="orm.ProjectVariable">
35 <field to="orm.project" name="project" rel="ManyToOneRel">1</field> 47 <field to="orm.project" name="project" rel="ManyToOneRel">1</field>
36 <field type="CharField" name="name">MACHINE</field> 48 <field type="CharField" name="name">MACHINE</field>
37 <field type="TextField" name="value">qemux86</field> 49 <field type="TextField" name="value">qemux86-64</field>
38 </object> 50 </object>
39 <object pk="2" model="orm.ProjectVariable"> 51 <object pk="2" model="orm.ProjectVariable">
40 <field to="orm.project" name="project" rel="ManyToOneRel">2</field> 52 <field to="orm.project" name="project" rel="ManyToOneRel">2</field>
41 <field type="CharField" name="name">MACHINE</field> 53 <field type="CharField" name="name">MACHINE</field>
42 <field type="TextField" name="value">qemux86</field> 54 <field type="TextField" name="value">qemux86-64</field>
43 </object> 55 </object>
44 <object pk="1" model="orm.build"> 56 <object pk="1" model="orm.build">
45 <field to="orm.project" name="project" rel="ManyToOneRel">1</field> 57 <field to="orm.project" name="project" rel="ManyToOneRel">1</field>
@@ -67,7 +79,7 @@
67 </object> 79 </object>
68 <object pk="3" model="orm.build"> 80 <object pk="3" model="orm.build">
69 <field to="orm.project" name="project" rel="ManyToOneRel">1</field> 81 <field to="orm.project" name="project" rel="ManyToOneRel">1</field>
70 <field type="CharField" name="machine">qemux86</field> 82 <field type="CharField" name="machine">qemux86-64</field>
71 <field type="CharField" name="distro"></field> 83 <field type="CharField" name="distro"></field>
72 <field type="CharField" name="distro_version"></field> 84 <field type="CharField" name="distro_version"></field>
73 <field type="DateTimeField" name="started_on">2016-02-12T18:46:20.114530+00:00</field> 85 <field type="DateTimeField" name="started_on">2016-02-12T18:46:20.114530+00:00</field>
@@ -79,7 +91,7 @@
79 </object> 91 </object>
80 <object pk="4" model="orm.build"> 92 <object pk="4" model="orm.build">
81 <field to="orm.project" name="project" rel="ManyToOneRel">2</field> 93 <field to="orm.project" name="project" rel="ManyToOneRel">2</field>
82 <field type="CharField" name="machine">qemux86</field> 94 <field type="CharField" name="machine">qemux86-64</field>
83 <field type="CharField" name="distro"></field> 95 <field type="CharField" name="distro"></field>
84 <field type="CharField" name="distro_version"></field> 96 <field type="CharField" name="distro_version"></field>
85 <field type="DateTimeField" name="started_on">2016-02-11T18:46:20.114530+00:00</field> 97 <field type="DateTimeField" name="started_on">2016-02-11T18:46:20.114530+00:00</field>
diff --git a/bitbake/lib/toaster/toastergui/forms.py b/bitbake/lib/toaster/toastergui/forms.py
new file mode 100644
index 0000000000..0f279e06c5
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/forms.py
@@ -0,0 +1,14 @@
1#!/usr/bin/env python3
2# -*- coding: utf-8 -*-
3# BitBake Toaster UI tests implementation
4#
5# Copyright (C) 2023 Savoir-faire Linux
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10from django import forms
11from django.core.validators import FileExtensionValidator
12
13class LoadFileForm(forms.Form):
14 eventlog_file = forms.FileField(validators=[FileExtensionValidator(allowed_extensions=['json'])], widget=forms.FileInput(attrs={'accept': '.json'}))
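
A hypothetical view-side consumer of LoadFileForm, to show how the field is meant to be read; the actual Toaster view wiring is not part of this patch:

    from django.http import HttpResponse, HttpResponseBadRequest

    def import_eventlog(request):  # hypothetical view name
        form = LoadFileForm(request.POST, request.FILES)
        if form.is_valid():
            eventlog = form.cleaned_data['eventlog_file']
            # parse the uploaded .json event log here
            return HttpResponse('ok')
        return HttpResponseBadRequest('invalid or missing file')
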
diff --git a/bitbake/lib/toaster/toastergui/static/css/default.css b/bitbake/lib/toaster/toastergui/static/css/default.css
index 5cd7e211a0..284355e70b 100644
--- a/bitbake/lib/toaster/toastergui/static/css/default.css
+++ b/bitbake/lib/toaster/toastergui/static/css/default.css
@@ -367,3 +367,31 @@ h2.panel-title { font-size: 30px; }
367 } 367 }
368} 368}
369/* End copied in from newer version of Font-Awesome 4.3.0 */ 369/* End copied in from newer version of Font-Awesome 4.3.0 */
370
371
372#overlay {
373 display: flex;
374 position: fixed;
375 top: 0;
376 left: 0;
377 width: 100%;
378 height: 100%;
379 background-color: rgba(0, 0, 0, 0.7);
380 align-items: center;
381 justify-content: center;
382 z-index: 999;
383}
384
385.spinner {
386 border: 6px solid rgba(255, 255, 255, 0.3);
387 border-radius: 50%;
388 border-top: 6px solid #3498db;
389 width: 50px;
390 height: 50px;
391 animation: spin 1s linear infinite;
392}
393
394@keyframes spin {
395 0% { transform: rotate(0deg); }
396 100% { transform: rotate(360deg); }
397}
diff --git a/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css b/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css
new file mode 100644
index 0000000000..c0a442ce07
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css
@@ -0,0 +1 @@
:root{--dt-row-selected: 13, 110, 253;--dt-row-selected-text: 255, 255, 255;--dt-row-selected-link: 9, 10, 11;--dt-row-stripe: 0, 0, 0;--dt-row-hover: 0, 0, 0;--dt-column-ordering: 0, 0, 0;--dt-html-background: white}:root.dark{--dt-html-background: rgb(33, 37, 41)}table.dataTable td.dt-control{text-align:center;cursor:pointer}table.dataTable td.dt-control:before{display:inline-block;color:rgba(0, 0, 0, 0.5);content:"â–¶"}table.dataTable tr.dt-hasChild td.dt-control:before{content:"â–¼"}html.dark table.dataTable td.dt-control:before{color:rgba(255, 255, 255, 0.5)}html.dark table.dataTable tr.dt-hasChild td.dt-control:before{color:rgba(255, 255, 255, 0.5)}table.dataTable thead>tr>th.sorting,table.dataTable thead>tr>th.sorting_asc,table.dataTable thead>tr>th.sorting_desc,table.dataTable thead>tr>th.sorting_asc_disabled,table.dataTable thead>tr>th.sorting_desc_disabled,table.dataTable thead>tr>td.sorting,table.dataTable thead>tr>td.sorting_asc,table.dataTable thead>tr>td.sorting_desc,table.dataTable thead>tr>td.sorting_asc_disabled,table.dataTable thead>tr>td.sorting_desc_disabled{cursor:pointer;position:relative;padding-right:26px}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after{position:absolute;display:block;opacity:.125;right:10px;line-height:9px;font-size:.8em}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:before{bottom:50%;content:"â–²";content:"â–²"/""}table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:after{top:50%;content:"â–¼";content:"â–¼"/""}table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:after{opacity:.6}table.dataTable 
thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting_asc_disabled:before{display:none}table.dataTable thead>tr>th:active,table.dataTable thead>tr>td:active{outline:none}div.dataTables_scrollBody>table.dataTable>thead>tr>th:before,div.dataTables_scrollBody>table.dataTable>thead>tr>th:after,div.dataTables_scrollBody>table.dataTable>thead>tr>td:before,div.dataTables_scrollBody>table.dataTable>thead>tr>td:after{display:none}div.dataTables_processing{position:absolute;top:50%;left:50%;width:200px;margin-left:-100px;margin-top:-26px;text-align:center;padding:2px;z-index:10}div.dataTables_processing>div:last-child{position:relative;width:80px;height:15px;margin:1em auto}div.dataTables_processing>div:last-child>div{position:absolute;top:0;width:13px;height:13px;border-radius:50%;background:rgb(13, 110, 253);background:rgb(var(--dt-row-selected));animation-timing-function:cubic-bezier(0, 1, 1, 0)}div.dataTables_processing>div:last-child>div:nth-child(1){left:8px;animation:datatables-loader-1 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(2){left:8px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(3){left:32px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(4){left:56px;animation:datatables-loader-3 .6s infinite}@keyframes datatables-loader-1{0%{transform:scale(0)}100%{transform:scale(1)}}@keyframes datatables-loader-3{0%{transform:scale(1)}100%{transform:scale(0)}}@keyframes datatables-loader-2{0%{transform:translate(0, 0)}100%{transform:translate(24px, 0)}}table.dataTable.nowrap th,table.dataTable.nowrap td{white-space:nowrap}table.dataTable th.dt-left,table.dataTable td.dt-left{text-align:left}table.dataTable th.dt-center,table.dataTable td.dt-center,table.dataTable td.dataTables_empty{text-align:center}table.dataTable th.dt-right,table.dataTable td.dt-right{text-align:right}table.dataTable th.dt-justify,table.dataTable td.dt-justify{text-align:justify}table.dataTable th.dt-nowrap,table.dataTable td.dt-nowrap{white-space:nowrap}table.dataTable thead th,table.dataTable thead td,table.dataTable tfoot th,table.dataTable tfoot td{text-align:left}table.dataTable thead th.dt-head-left,table.dataTable thead td.dt-head-left,table.dataTable tfoot th.dt-head-left,table.dataTable tfoot td.dt-head-left{text-align:left}table.dataTable thead th.dt-head-center,table.dataTable thead td.dt-head-center,table.dataTable tfoot th.dt-head-center,table.dataTable tfoot td.dt-head-center{text-align:center}table.dataTable thead th.dt-head-right,table.dataTable thead td.dt-head-right,table.dataTable tfoot th.dt-head-right,table.dataTable tfoot td.dt-head-right{text-align:right}table.dataTable thead th.dt-head-justify,table.dataTable thead td.dt-head-justify,table.dataTable tfoot th.dt-head-justify,table.dataTable tfoot td.dt-head-justify{text-align:justify}table.dataTable thead th.dt-head-nowrap,table.dataTable thead td.dt-head-nowrap,table.dataTable tfoot th.dt-head-nowrap,table.dataTable tfoot td.dt-head-nowrap{white-space:nowrap}table.dataTable tbody th.dt-body-left,table.dataTable tbody td.dt-body-left{text-align:left}table.dataTable tbody th.dt-body-center,table.dataTable tbody td.dt-body-center{text-align:center}table.dataTable tbody th.dt-body-right,table.dataTable tbody td.dt-body-right{text-align:right}table.dataTable tbody th.dt-body-justify,table.dataTable 
tbody td.dt-body-justify{text-align:justify}table.dataTable tbody th.dt-body-nowrap,table.dataTable tbody td.dt-body-nowrap{white-space:nowrap}table.dataTable{width:100%;margin:0 auto;clear:both;border-collapse:separate;border-spacing:0}table.dataTable thead th,table.dataTable tfoot th{font-weight:bold}table.dataTable>thead>tr>th,table.dataTable>thead>tr>td{padding:10px;border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable>thead>tr>th:active,table.dataTable>thead>tr>td:active{outline:none}table.dataTable>tfoot>tr>th,table.dataTable>tfoot>tr>td{padding:10px 10px 6px 10px;border-top:1px solid rgba(0, 0, 0, 0.3)}table.dataTable tbody tr{background-color:transparent}table.dataTable tbody tr.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.9);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.9);color:rgb(255, 255, 255);color:rgb(var(--dt-row-selected-text))}table.dataTable tbody tr.selected a{color:rgb(9, 10, 11);color:rgb(var(--dt-row-selected-link))}table.dataTable tbody th,table.dataTable tbody td{padding:8px 10px}table.dataTable.row-border>tbody>tr>th,table.dataTable.row-border>tbody>tr>td,table.dataTable.display>tbody>tr>th,table.dataTable.display>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.row-border>tbody>tr:first-child>th,table.dataTable.row-border>tbody>tr:first-child>td,table.dataTable.display>tbody>tr:first-child>th,table.dataTable.display>tbody>tr:first-child>td{border-top:none}table.dataTable.row-border>tbody>tr.selected+tr.selected>td,table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0262ef}table.dataTable.cell-border>tbody>tr>th,table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15);border-right:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr>th:first-child,table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr:first-child>th,table.dataTable.cell-border>tbody>tr:first-child>td{border-top:none}table.dataTable.stripe>tbody>tr.odd>*,table.dataTable.display>tbody>tr.odd>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.023);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-stripe), 0.023)}table.dataTable.stripe>tbody>tr.odd.selected>*,table.dataTable.display>tbody>tr.odd.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.923);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.923)}table.dataTable.hover>tbody>tr:hover>*,table.dataTable.display>tbody>tr:hover>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.035);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.035)}table.dataTable.hover>tbody>tr.selected:hover>*,table.dataTable.display>tbody>tr.selected:hover>*{box-shadow:inset 0 0 0 9999px #0d6efd !important;box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 1) !important}table.dataTable.order-column>tbody tr>.sorting_1,table.dataTable.order-column>tbody tr>.sorting_2,table.dataTable.order-column>tbody tr>.sorting_3,table.dataTable.display>tbody tr>.sorting_1,table.dataTable.display>tbody tr>.sorting_2,table.dataTable.display>tbody tr>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.019)}table.dataTable.order-column>tbody tr.selected>.sorting_1,table.dataTable.order-column>tbody tr.selected>.sorting_2,table.dataTable.order-column>tbody tr.selected>.sorting_3,table.dataTable.display>tbody tr.selected>.sorting_1,table.dataTable.display>tbody 
tr.selected>.sorting_2,table.dataTable.display>tbody tr.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.odd>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.054);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.054)}table.dataTable.display>tbody>tr.odd>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.047);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.047)}table.dataTable.display>tbody>tr.odd>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.039);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.039)}table.dataTable.display>tbody>tr.odd.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.954);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.954)}table.dataTable.display>tbody>tr.odd.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.947);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.947)}table.dataTable.display>tbody>tr.odd.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.939);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.939)}table.dataTable.display>tbody>tr.even>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.019)}table.dataTable.display>tbody>tr.even>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.011);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.011)}table.dataTable.display>tbody>tr.even>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.003);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.003)}table.dataTable.display>tbody>tr.even.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.even.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.911);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.911)}table.dataTable.display>tbody>tr.even.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.903);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.903)}table.dataTable.display tbody tr:hover>.sorting_1,table.dataTable.order-column.hover tbody tr:hover>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.082);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.082)}table.dataTable.display tbody tr:hover>.sorting_2,table.dataTable.order-column.hover tbody tr:hover>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.074);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.074)}table.dataTable.display tbody 
tr:hover>.sorting_3,table.dataTable.order-column.hover tbody tr:hover>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.062);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.062)}table.dataTable.display tbody tr:hover.selected>.sorting_1,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.982);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.982)}table.dataTable.display tbody tr:hover.selected>.sorting_2,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.974);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.974)}table.dataTable.display tbody tr:hover.selected>.sorting_3,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.962);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.962)}table.dataTable.no-footer{border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable.compact thead th,table.dataTable.compact thead td,table.dataTable.compact tfoot th,table.dataTable.compact tfoot td,table.dataTable.compact tbody th,table.dataTable.compact tbody td{padding:4px}table.dataTable th,table.dataTable td{box-sizing:content-box}.dataTables_wrapper{position:relative;clear:both}.dataTables_wrapper .dataTables_length{float:left}.dataTables_wrapper .dataTables_length select{border:1px solid #aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;padding:4px}.dataTables_wrapper .dataTables_filter{float:right;text-align:right}.dataTables_wrapper .dataTables_filter input{border:1px solid #aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;margin-left:3px}.dataTables_wrapper .dataTables_info{clear:both;float:left;padding-top:.755em}.dataTables_wrapper .dataTables_paginate{float:right;text-align:right;padding-top:.25em}.dataTables_wrapper .dataTables_paginate .paginate_button{box-sizing:border-box;display:inline-block;min-width:1.5em;padding:.5em 1em;margin-left:2px;text-align:center;text-decoration:none !important;cursor:pointer;color:inherit !important;border:1px solid transparent;border-radius:2px;background:transparent}.dataTables_wrapper .dataTables_paginate .paginate_button.current,.dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{color:inherit !important;border:1px solid rgba(0, 0, 0, 0.3);background-color:rgba(0, 0, 0, 0.05);background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, rgba(230, 230, 230, 0.05)), color-stop(100%, rgba(0, 0, 0, 0.05)));background:-webkit-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-moz-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-ms-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-o-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:linear-gradient(to bottom, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button.disabled,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{cursor:default;color:#666 !important;border:1px solid transparent;background:transparent;box-shadow:none}.dataTables_wrapper .dataTables_paginate .paginate_button:hover{color:white !important;border:1px solid 
#111;background-color:#111;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #585858), color-stop(100%, #111));background:-webkit-linear-gradient(top, #585858 0%, #111 100%);background:-moz-linear-gradient(top, #585858 0%, #111 100%);background:-ms-linear-gradient(top, #585858 0%, #111 100%);background:-o-linear-gradient(top, #585858 0%, #111 100%);background:linear-gradient(to bottom, #585858 0%, #111 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button:active{outline:none;background-color:#0c0c0c;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #2b2b2b), color-stop(100%, #0c0c0c));background:-webkit-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-moz-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-ms-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-o-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:linear-gradient(to bottom, #2b2b2b 0%, #0c0c0c 100%);box-shadow:inset 0 0 3px #111}.dataTables_wrapper .dataTables_paginate .ellipsis{padding:0 1em}.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter,.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_processing,.dataTables_wrapper .dataTables_paginate{color:inherit}.dataTables_wrapper .dataTables_scroll{clear:both}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody{-webkit-overflow-scrolling:touch}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>td,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td{vertical-align:middle}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>td>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td>div.dataTables_sizing{height:0;overflow:hidden;margin:0 !important;padding:0 !important}.dataTables_wrapper.no-footer .dataTables_scrollBody{border-bottom:1px solid rgba(0, 0, 0, 0.3)}.dataTables_wrapper.no-footer div.dataTables_scrollHead table.dataTable,.dataTables_wrapper.no-footer div.dataTables_scrollBody>table{border-bottom:none}.dataTables_wrapper:after{visibility:hidden;display:block;content:"";clear:both;height:0}@media screen and (max-width: 767px){.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_paginate{float:none;text-align:center}.dataTables_wrapper .dataTables_paginate{margin-top:.5em}}@media screen and (max-width: 640px){.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter{float:none;text-align:center}.dataTables_wrapper .dataTables_filter{margin-top:.5em}}html.dark{--dt-row-hover: 255, 255, 255;--dt-row-stripe: 255, 255, 255;--dt-column-ordering: 255, 255, 255}html.dark table.dataTable>thead>tr>th,html.dark table.dataTable>thead>tr>td{border-bottom:1px solid rgb(89, 91, 94)}html.dark table.dataTable>thead>tr>th:active,html.dark table.dataTable>thead>tr>td:active{outline:none}html.dark table.dataTable>tfoot>tr>th,html.dark table.dataTable>tfoot>tr>td{border-top:1px solid rgb(89, 91, 94)}html.dark table.dataTable.row-border>tbody>tr>th,html.dark table.dataTable.row-border>tbody>tr>td,html.dark 
table.dataTable.display>tbody>tr>th,html.dark table.dataTable.display>tbody>tr>td{border-top:1px solid rgb(64, 67, 70)}html.dark table.dataTable.row-border>tbody>tr.selected+tr.selected>td,html.dark table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0257d5}html.dark table.dataTable.cell-border>tbody>tr>th,html.dark table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgb(64, 67, 70);border-right:1px solid rgb(64, 67, 70)}html.dark table.dataTable.cell-border>tbody>tr>th:first-child,html.dark table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgb(64, 67, 70)}html.dark .dataTables_wrapper .dataTables_filter input,html.dark .dataTables_wrapper .dataTables_length select{border:1px solid rgba(255, 255, 255, 0.2);background-color:var(--dt-html-background)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{border:1px solid rgb(89, 91, 94);background:rgba(255, 255, 255, 0.15)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{color:#666 !important}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:hover{border:1px solid rgb(53, 53, 53);background:rgb(53, 53, 53)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:active{background:#3a3a3a}
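The DataTables dark theme above keys off a single html.dark class plus RGB-triplet custom properties (--dt-row-hover, --dt-row-stripe, --dt-row-selected, ...), with every rule pairing a static rgba() value as a fallback for browsers without custom-property support. A minimal sketch of how a page could drive it (browser JavaScript; the #theme-toggle id is hypothetical):

    // Flip the class the html.dark rules key off; the alpha channel stays in
    // the rgba() call, so overriding a colour only needs the RGB triplet.
    document.getElementById('theme-toggle').addEventListener('click', function () {
      var root = document.documentElement;                          // <html>
      root.classList.toggle('dark');
      root.style.setProperty('--dt-row-selected', '13, 110, 253');  // optional override
    });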
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
index d47d640feb..170bd608f7 100644
--- a/bitbake/lib/toaster/toastergui/static/js/bootstrap.js
+++ b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
@@ -1,6 +1,6 @@
1 /*! 1 /*!
2 * Bootstrap v3.3.6 (http://getbootstrap.com) 2 * Bootstrap v3.4.1 (https://getbootstrap.com/)
3 * Copyright 2011-2016 Twitter, Inc. 3 * Copyright 2011-2019 Twitter, Inc.
4 * Licensed under the MIT license 4 * Licensed under the MIT license
5 */ 5 */
6 6
@@ -11,16 +11,16 @@ if (typeof jQuery === 'undefined') {
11 +function ($) { 11 +function ($) {
12 'use strict'; 12 'use strict';
13 var version = $.fn.jquery.split(' ')[0].split('.') 13 var version = $.fn.jquery.split(' ')[0].split('.')
14 if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 2)) { 14 if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) {
15 throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3') 15 throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4')
16 } 16 }
17 }(jQuery); 17 }(jQuery);
18 18
19/* ======================================================================== 19/* ========================================================================
20 * Bootstrap: transition.js v3.3.6 20 * Bootstrap: transition.js v3.4.1
21 * http://getbootstrap.com/javascript/#transitions 21 * https://getbootstrap.com/docs/3.4/javascript/#transitions
22 * ======================================================================== 22 * ========================================================================
23 * Copyright 2011-2015 Twitter, Inc. 23 * Copyright 2011-2019 Twitter, Inc.
24 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 24 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
25 * ======================================================================== */ 25 * ======================================================================== */
26 26
@@ -28,7 +28,7 @@ if (typeof jQuery === 'undefined') {
28 +function ($) { 28 +function ($) {
29 'use strict'; 29 'use strict';
30 30
31 // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/) 31 // CSS TRANSITION SUPPORT (Shoutout: https://modernizr.com/)
32 // ============================================================ 32 // ============================================================
33 33
34 function transitionEnd() { 34 function transitionEnd() {
@@ -50,7 +50,7 @@ if (typeof jQuery === 'undefined') {
50 return false // explicit for ie8 ( ._.) 50 return false // explicit for ie8 ( ._.)
51 } 51 }
52 52
53 // http://blog.alexmaccaw.com/css-transitions 53 // https://blog.alexmaccaw.com/css-transitions
54 $.fn.emulateTransitionEnd = function (duration) { 54 $.fn.emulateTransitionEnd = function (duration) {
55 var called = false 55 var called = false
56 var $el = this 56 var $el = this
@@ -77,10 +77,10 @@ if (typeof jQuery === 'undefined') {
77 }(jQuery); 77 }(jQuery);
78 78
79/* ======================================================================== 79/* ========================================================================
80 * Bootstrap: alert.js v3.3.6 80 * Bootstrap: alert.js v3.4.1
81 * http://getbootstrap.com/javascript/#alerts 81 * https://getbootstrap.com/docs/3.4/javascript/#alerts
82 * ======================================================================== 82 * ========================================================================
83 * Copyright 2011-2015 Twitter, Inc. 83 * Copyright 2011-2019 Twitter, Inc.
84 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 84 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
85 * ======================================================================== */ 85 * ======================================================================== */
86 86
@@ -96,7 +96,7 @@ if (typeof jQuery === 'undefined') {
96 $(el).on('click', dismiss, this.close) 96 $(el).on('click', dismiss, this.close)
97 } 97 }
98 98
99 Alert.VERSION = '3.3.6' 99 Alert.VERSION = '3.4.1'
100 100
101 Alert.TRANSITION_DURATION = 150 101 Alert.TRANSITION_DURATION = 150
102 102
@@ -109,7 +109,8 @@ if (typeof jQuery === 'undefined') {
109 selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 109 selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
110 } 110 }
111 111
112 var $parent = $(selector) 112 selector = selector === '#' ? [] : selector
113 var $parent = $(document).find(selector)
113 114
114 if (e) e.preventDefault() 115 if (e) e.preventDefault()
115 116
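The alert.js hunk above is the first instance of the pattern this whole 3.4.1 update applies: a data-target/href string is no longer handed straight to $(), which parses any HTML found in the string, but to $(document).find(), which only ever interprets it as a selector (the added selector === '#' check also keeps a bare href="#" from becoming an invalid selector). A stripped-down illustration, assuming jQuery is loaded; this is not the plugin code itself:

    // Pre-3.4.1: $(string) creates elements when the string contains markup,
    // so an attacker-controlled data-target could inject a live element:
    var tainted = '<img src=x onerror="alert(1)">';
    // $(tainted);                      // parses the HTML; onerror fires
    try {
      $(document).find(tainted);       // selector-only context
    } catch (e) {
      console.log('rejected as a selector:', e.message); // Sizzle syntax error
    }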
@@ -172,10 +173,10 @@ if (typeof jQuery === 'undefined') {
172 }(jQuery); 173 }(jQuery);
173 174
174/* ======================================================================== 175/* ========================================================================
175 * Bootstrap: button.js v3.3.6 176 * Bootstrap: button.js v3.4.1
176 * http://getbootstrap.com/javascript/#buttons 177 * https://getbootstrap.com/docs/3.4/javascript/#buttons
177 * ======================================================================== 178 * ========================================================================
178 * Copyright 2011-2015 Twitter, Inc. 179 * Copyright 2011-2019 Twitter, Inc.
179 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 180 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
180 * ======================================================================== */ 181 * ======================================================================== */
181 182
@@ -192,7 +193,7 @@ if (typeof jQuery === 'undefined') {
192 this.isLoading = false 193 this.isLoading = false
193 } 194 }
194 195
195 Button.VERSION = '3.3.6' 196 Button.VERSION = '3.4.1'
196 197
197 Button.DEFAULTS = { 198 Button.DEFAULTS = {
198 loadingText: 'loading...' 199 loadingText: 'loading...'
@@ -214,10 +215,10 @@ if (typeof jQuery === 'undefined') {
214 215
215 if (state == 'loadingText') { 216 if (state == 'loadingText') {
216 this.isLoading = true 217 this.isLoading = true
217 $el.addClass(d).attr(d, d) 218 $el.addClass(d).attr(d, d).prop(d, true)
218 } else if (this.isLoading) { 219 } else if (this.isLoading) {
219 this.isLoading = false 220 this.isLoading = false
220 $el.removeClass(d).removeAttr(d) 221 $el.removeClass(d).removeAttr(d).prop(d, false)
221 } 222 }
222 }, this), 0) 223 }, this), 0)
223 } 224 }
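In the button.js hunk above, every attr('disabled', ...) write now gets a matching prop() call. The serialized attribute and the live DOM property are distinct things for boolean attributes, and since the version guard at the top of the file now admits jQuery 3.x, the plugin keeps both in sync explicitly. A small sketch, assuming jQuery is loaded:

    var d = 'disabled';
    var $el = $('<button>save</button>').appendTo('body');
    // What setState('loadingText') now does:
    $el.addClass(d).attr(d, d).prop(d, true);    // attribute AND property
    console.log($el.prop(d));                    // true -> the browser ignores clicks
    // ...and the reset path mirrors it:
    $el.removeClass(d).removeAttr(d).prop(d, false);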
@@ -281,10 +282,15 @@ if (typeof jQuery === 'undefined') {
281 282
282 $(document) 283 $(document)
283 .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) { 284 .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
284 var $btn = $(e.target) 285 var $btn = $(e.target).closest('.btn')
285 if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn')
286 Plugin.call($btn, 'toggle') 286 Plugin.call($btn, 'toggle')
287 if (!($(e.target).is('input[type="radio"]') || $(e.target).is('input[type="checkbox"]'))) e.preventDefault() 287 if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) {
288 // Prevent double click on radios, and the double selections (so cancellation) on checkboxes
289 e.preventDefault()
290 // The target component still receive the focus
291 if ($btn.is('input,button')) $btn.trigger('focus')
292 else $btn.find('input:visible,button:visible').first().trigger('focus')
293 }
288 }) 294 })
289 .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) { 295 .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
290 $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type)) 296 $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
@@ -293,10 +299,10 @@ if (typeof jQuery === 'undefined') {
293 }(jQuery); 299 }(jQuery);
294 300
295/* ======================================================================== 301/* ========================================================================
296 * Bootstrap: carousel.js v3.3.6 302 * Bootstrap: carousel.js v3.4.1
297 * http://getbootstrap.com/javascript/#carousel 303 * https://getbootstrap.com/docs/3.4/javascript/#carousel
298 * ======================================================================== 304 * ========================================================================
299 * Copyright 2011-2015 Twitter, Inc. 305 * Copyright 2011-2019 Twitter, Inc.
300 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 306 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
301 * ======================================================================== */ 307 * ======================================================================== */
302 308
@@ -324,7 +330,7 @@ if (typeof jQuery === 'undefined') {
324 .on('mouseleave.bs.carousel', $.proxy(this.cycle, this)) 330 .on('mouseleave.bs.carousel', $.proxy(this.cycle, this))
325 } 331 }
326 332
327 Carousel.VERSION = '3.3.6' 333 Carousel.VERSION = '3.4.1'
328 334
329 Carousel.TRANSITION_DURATION = 600 335 Carousel.TRANSITION_DURATION = 600
330 336
@@ -438,7 +444,9 @@ if (typeof jQuery === 'undefined') {
438 var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid" 444 var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid"
439 if ($.support.transition && this.$element.hasClass('slide')) { 445 if ($.support.transition && this.$element.hasClass('slide')) {
440 $next.addClass(type) 446 $next.addClass(type)
441 $next[0].offsetWidth // force reflow 447 if (typeof $next === 'object' && $next.length) {
448 $next[0].offsetWidth // force reflow
449 }
442 $active.addClass(direction) 450 $active.addClass(direction)
443 $next.addClass(direction) 451 $next.addClass(direction)
444 $active 452 $active
@@ -500,10 +508,17 @@ if (typeof jQuery === 'undefined') {
500 // ================= 508 // =================
501 509
502 var clickHandler = function (e) { 510 var clickHandler = function (e) {
503 var href
504 var $this = $(this) 511 var $this = $(this)
505 var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7 512 var href = $this.attr('href')
513 if (href) {
514 href = href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
515 }
516
517 var target = $this.attr('data-target') || href
518 var $target = $(document).find(target)
519
506 if (!$target.hasClass('carousel')) return 520 if (!$target.hasClass('carousel')) return
521
507 var options = $.extend({}, $target.data(), $this.data()) 522 var options = $.extend({}, $target.data(), $this.data())
508 var slideIndex = $this.attr('data-slide-to') 523 var slideIndex = $this.attr('data-slide-to')
509 if (slideIndex) options.interval = false 524 if (slideIndex) options.interval = false
@@ -531,13 +546,14 @@ if (typeof jQuery === 'undefined') {
531 }(jQuery); 546 }(jQuery);
532 547
533/* ======================================================================== 548/* ========================================================================
534 * Bootstrap: collapse.js v3.3.6 549 * Bootstrap: collapse.js v3.4.1
535 * http://getbootstrap.com/javascript/#collapse 550 * https://getbootstrap.com/docs/3.4/javascript/#collapse
536 * ======================================================================== 551 * ========================================================================
537 * Copyright 2011-2015 Twitter, Inc. 552 * Copyright 2011-2019 Twitter, Inc.
538 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 553 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
539 * ======================================================================== */ 554 * ======================================================================== */
540 555
556 /* jshint latedef: false */
541 557
542 +function ($) { 558 +function ($) {
543 'use strict'; 559 'use strict';
@@ -561,7 +577,7 @@ if (typeof jQuery === 'undefined') {
561 if (this.options.toggle) this.toggle() 577 if (this.options.toggle) this.toggle()
562 } 578 }
563 579
564 Collapse.VERSION = '3.3.6' 580 Collapse.VERSION = '3.4.1'
565 581
566 Collapse.TRANSITION_DURATION = 350 582 Collapse.TRANSITION_DURATION = 350
567 583
@@ -668,7 +684,7 @@ if (typeof jQuery === 'undefined') {
668 } 684 }
669 685
670 Collapse.prototype.getParent = function () { 686 Collapse.prototype.getParent = function () {
671 return $(this.options.parent) 687 return $(document).find(this.options.parent)
672 .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]') 688 .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
673 .each($.proxy(function (i, element) { 689 .each($.proxy(function (i, element) {
674 var $element = $(element) 690 var $element = $(element)
@@ -691,7 +707,7 @@ if (typeof jQuery === 'undefined') {
691 var target = $trigger.attr('data-target') 707 var target = $trigger.attr('data-target')
692 || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 708 || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
693 709
694 return $(target) 710 return $(document).find(target)
695 } 711 }
696 712
697 713
@@ -743,10 +759,10 @@ if (typeof jQuery === 'undefined') {
743 }(jQuery); 759 }(jQuery);
744 760
745/* ======================================================================== 761/* ========================================================================
746 * Bootstrap: dropdown.js v3.3.6 762 * Bootstrap: dropdown.js v3.4.1
747 * http://getbootstrap.com/javascript/#dropdowns 763 * https://getbootstrap.com/docs/3.4/javascript/#dropdowns
748 * ======================================================================== 764 * ========================================================================
749 * Copyright 2011-2015 Twitter, Inc. 765 * Copyright 2011-2019 Twitter, Inc.
750 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 766 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
751 * ======================================================================== */ 767 * ======================================================================== */
752 768
@@ -763,7 +779,7 @@ if (typeof jQuery === 'undefined') {
763 $(element).on('click.bs.dropdown', this.toggle) 779 $(element).on('click.bs.dropdown', this.toggle)
764 } 780 }
765 781
766 Dropdown.VERSION = '3.3.6' 782 Dropdown.VERSION = '3.4.1'
767 783
768 function getParent($this) { 784 function getParent($this) {
769 var selector = $this.attr('data-target') 785 var selector = $this.attr('data-target')
@@ -773,7 +789,7 @@ if (typeof jQuery === 'undefined') {
773 selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 789 selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
774 } 790 }
775 791
776 var $parent = selector && $(selector) 792 var $parent = selector !== '#' ? $(document).find(selector) : null
777 793
778 return $parent && $parent.length ? $parent : $this.parent() 794 return $parent && $parent.length ? $parent : $this.parent()
779 } 795 }
@@ -909,10 +925,10 @@ if (typeof jQuery === 'undefined') {
909 }(jQuery); 925 }(jQuery);
910 926
911/* ======================================================================== 927/* ========================================================================
912 * Bootstrap: modal.js v3.3.6 928 * Bootstrap: modal.js v3.4.1
913 * http://getbootstrap.com/javascript/#modals 929 * https://getbootstrap.com/docs/3.4/javascript/#modals
914 * ======================================================================== 930 * ========================================================================
915 * Copyright 2011-2015 Twitter, Inc. 931 * Copyright 2011-2019 Twitter, Inc.
916 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 932 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
917 * ======================================================================== */ 933 * ======================================================================== */
918 934
@@ -924,15 +940,16 @@ if (typeof jQuery === 'undefined') {
924 // ====================== 940 // ======================
925 941
926 var Modal = function (element, options) { 942 var Modal = function (element, options) {
927 this.options = options 943 this.options = options
928 this.$body = $(document.body) 944 this.$body = $(document.body)
929 this.$element = $(element) 945 this.$element = $(element)
930 this.$dialog = this.$element.find('.modal-dialog') 946 this.$dialog = this.$element.find('.modal-dialog')
931 this.$backdrop = null 947 this.$backdrop = null
932 this.isShown = null 948 this.isShown = null
933 this.originalBodyPad = null 949 this.originalBodyPad = null
934 this.scrollbarWidth = 0 950 this.scrollbarWidth = 0
935 this.ignoreBackdropClick = false 951 this.ignoreBackdropClick = false
952 this.fixedContent = '.navbar-fixed-top, .navbar-fixed-bottom'
936 953
937 if (this.options.remote) { 954 if (this.options.remote) {
938 this.$element 955 this.$element
@@ -943,7 +960,7 @@ if (typeof jQuery === 'undefined') {
943 } 960 }
944 } 961 }
945 962
946 Modal.VERSION = '3.3.6' 963 Modal.VERSION = '3.4.1'
947 964
948 Modal.TRANSITION_DURATION = 300 965 Modal.TRANSITION_DURATION = 300
949 Modal.BACKDROP_TRANSITION_DURATION = 150 966 Modal.BACKDROP_TRANSITION_DURATION = 150
@@ -960,7 +977,7 @@ if (typeof jQuery === 'undefined') {
960 977
961 Modal.prototype.show = function (_relatedTarget) { 978 Modal.prototype.show = function (_relatedTarget) {
962 var that = this 979 var that = this
963 var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) 980 var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
964 981
965 this.$element.trigger(e) 982 this.$element.trigger(e)
966 983
@@ -1050,7 +1067,9 @@ if (typeof jQuery === 'undefined') {
1050 $(document) 1067 $(document)
1051 .off('focusin.bs.modal') // guard against infinite focus loop 1068 .off('focusin.bs.modal') // guard against infinite focus loop
1052 .on('focusin.bs.modal', $.proxy(function (e) { 1069 .on('focusin.bs.modal', $.proxy(function (e) {
1053 if (this.$element[0] !== e.target && !this.$element.has(e.target).length) { 1070 if (document !== e.target &&
1071 this.$element[0] !== e.target &&
1072 !this.$element.has(e.target).length) {
1054 this.$element.trigger('focus') 1073 this.$element.trigger('focus')
1055 } 1074 }
1056 }, this)) 1075 }, this))
@@ -1152,7 +1171,7 @@ if (typeof jQuery === 'undefined') {
1152 var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight 1171 var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
1153 1172
1154 this.$element.css({ 1173 this.$element.css({
1155 paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '', 1174 paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
1156 paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : '' 1175 paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
1157 }) 1176 })
1158 } 1177 }
@@ -1177,11 +1196,26 @@ if (typeof jQuery === 'undefined') {
1177 Modal.prototype.setScrollbar = function () { 1196 Modal.prototype.setScrollbar = function () {
1178 var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10) 1197 var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
1179 this.originalBodyPad = document.body.style.paddingRight || '' 1198 this.originalBodyPad = document.body.style.paddingRight || ''
1180 if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth) 1199 var scrollbarWidth = this.scrollbarWidth
1200 if (this.bodyIsOverflowing) {
1201 this.$body.css('padding-right', bodyPad + scrollbarWidth)
1202 $(this.fixedContent).each(function (index, element) {
1203 var actualPadding = element.style.paddingRight
1204 var calculatedPadding = $(element).css('padding-right')
1205 $(element)
1206 .data('padding-right', actualPadding)
1207 .css('padding-right', parseFloat(calculatedPadding) + scrollbarWidth + 'px')
1208 })
1209 }
1181 } 1210 }
1182 1211
1183 Modal.prototype.resetScrollbar = function () { 1212 Modal.prototype.resetScrollbar = function () {
1184 this.$body.css('padding-right', this.originalBodyPad) 1213 this.$body.css('padding-right', this.originalBodyPad)
1214 $(this.fixedContent).each(function (index, element) {
1215 var padding = $(element).data('padding-right')
1216 $(element).removeData('padding-right')
1217 element.style.paddingRight = padding ? padding : ''
1218 })
1185 } 1219 }
1186 1220
1187 Modal.prototype.measureScrollbar = function () { // thx walsh 1221 Modal.prototype.measureScrollbar = function () { // thx walsh
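The setScrollbar/resetScrollbar changes above extend the modal's scrollbar compensation to fixed navbars: opening a modal hides the body scrollbar and adds padding-right equal to its width so content does not shift, but position:fixed elements ignore body padding and used to jump. The width itself comes from measureScrollbar (the "// thx walsh" helper on the last row above); a rough equivalent of that measurement, as a sketch:

    // Insert an off-screen scrolling element and compare its outer and inner
    // widths; the difference is the scrollbar thickness on this platform.
    function measureScrollbar() {
      var probe = document.createElement('div');
      probe.style.cssText =
        'position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll';
      document.body.appendChild(probe);
      var width = probe.offsetWidth - probe.clientWidth;
      document.body.removeChild(probe);
      return width;
    }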
@@ -1199,8 +1233,8 @@ if (typeof jQuery === 'undefined') {
1199 1233
1200 function Plugin(option, _relatedTarget) { 1234 function Plugin(option, _relatedTarget) {
1201 return this.each(function () { 1235 return this.each(function () {
1202 var $this = $(this) 1236 var $this = $(this)
1203 var data = $this.data('bs.modal') 1237 var data = $this.data('bs.modal')
1204 var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option) 1238 var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
1205 1239
1206 if (!data) $this.data('bs.modal', (data = new Modal(this, options))) 1240 if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
@@ -1211,7 +1245,7 @@ if (typeof jQuery === 'undefined') {
1211 1245
1212 var old = $.fn.modal 1246 var old = $.fn.modal
1213 1247
1214 $.fn.modal = Plugin 1248 $.fn.modal = Plugin
1215 $.fn.modal.Constructor = Modal 1249 $.fn.modal.Constructor = Modal
1216 1250
1217 1251
@@ -1228,10 +1262,13 @@ if (typeof jQuery === 'undefined') {
1228 // ============== 1262 // ==============
1229 1263
1230 $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) { 1264 $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
1231 var $this = $(this) 1265 var $this = $(this)
1232 var href = $this.attr('href') 1266 var href = $this.attr('href')
1233 var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7 1267 var target = $this.attr('data-target') ||
1234 var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) 1268 (href && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
1269
1270 var $target = $(document).find(target)
1271 var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
1235 1272
1236 if ($this.is('a')) e.preventDefault() 1273 if ($this.is('a')) e.preventDefault()
1237 1274
@@ -1247,18 +1284,148 @@ if (typeof jQuery === 'undefined') {
1247 }(jQuery); 1284 }(jQuery);
1248 1285
1249/* ======================================================================== 1286/* ========================================================================
1250 * Bootstrap: tooltip.js v3.3.6 1287 * Bootstrap: tooltip.js v3.4.1
1251 * http://getbootstrap.com/javascript/#tooltip 1288 * https://getbootstrap.com/docs/3.4/javascript/#tooltip
1252 * Inspired by the original jQuery.tipsy by Jason Frame 1289 * Inspired by the original jQuery.tipsy by Jason Frame
1253 * ======================================================================== 1290 * ========================================================================
1254 * Copyright 2011-2015 Twitter, Inc. 1291 * Copyright 2011-2019 Twitter, Inc.
1255 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 1292 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
1256 * ======================================================================== */ 1293 * ======================================================================== */
1257 1294
1258
1259 +function ($) { 1295 +function ($) {
1260 'use strict'; 1296 'use strict';
1261 1297
1298 var DISALLOWED_ATTRIBUTES = ['sanitize', 'whiteList', 'sanitizeFn']
1299
1300 var uriAttrs = [
1301 'background',
1302 'cite',
1303 'href',
1304 'itemtype',
1305 'longdesc',
1306 'poster',
1307 'src',
1308 'xlink:href'
1309 ]
1310
1311 var ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i
1312
1313 var DefaultWhitelist = {
1314 // Global attributes allowed on any supplied element below.
1315 '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],
1316 a: ['target', 'href', 'title', 'rel'],
1317 area: [],
1318 b: [],
1319 br: [],
1320 col: [],
1321 code: [],
1322 div: [],
1323 em: [],
1324 hr: [],
1325 h1: [],
1326 h2: [],
1327 h3: [],
1328 h4: [],
1329 h5: [],
1330 h6: [],
1331 i: [],
1332 img: ['src', 'alt', 'title', 'width', 'height'],
1333 li: [],
1334 ol: [],
1335 p: [],
1336 pre: [],
1337 s: [],
1338 small: [],
1339 span: [],
1340 sub: [],
1341 sup: [],
1342 strong: [],
1343 u: [],
1344 ul: []
1345 }
1346
1347 /**
1348 * A pattern that recognizes a commonly useful subset of URLs that are safe.
1349 *
1350 * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
1351 */
1352 var SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi
1353
1354 /**
1355 * A pattern that matches safe data URLs. Only matches image, video and audio types.
1356 *
1357 * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
1358 */
1359 var DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i
1360
1361 function allowedAttribute(attr, allowedAttributeList) {
1362 var attrName = attr.nodeName.toLowerCase()
1363
1364 if ($.inArray(attrName, allowedAttributeList) !== -1) {
1365 if ($.inArray(attrName, uriAttrs) !== -1) {
1366 return Boolean(attr.nodeValue.match(SAFE_URL_PATTERN) || attr.nodeValue.match(DATA_URL_PATTERN))
1367 }
1368
1369 return true
1370 }
1371
1372 var regExp = $(allowedAttributeList).filter(function (index, value) {
1373 return value instanceof RegExp
1374 })
1375
1376 // Check if a regular expression validates the attribute.
1377 for (var i = 0, l = regExp.length; i < l; i++) {
1378 if (attrName.match(regExp[i])) {
1379 return true
1380 }
1381 }
1382
1383 return false
1384 }
1385
1386 function sanitizeHtml(unsafeHtml, whiteList, sanitizeFn) {
1387 if (unsafeHtml.length === 0) {
1388 return unsafeHtml
1389 }
1390
1391 if (sanitizeFn && typeof sanitizeFn === 'function') {
1392 return sanitizeFn(unsafeHtml)
1393 }
1394
1395 // IE 8 and below don't support createHTMLDocument
1396 if (!document.implementation || !document.implementation.createHTMLDocument) {
1397 return unsafeHtml
1398 }
1399
1400 var createdDocument = document.implementation.createHTMLDocument('sanitization')
1401 createdDocument.body.innerHTML = unsafeHtml
1402
1403 var whitelistKeys = $.map(whiteList, function (el, i) { return i })
1404 var elements = $(createdDocument.body).find('*')
1405
1406 for (var i = 0, len = elements.length; i < len; i++) {
1407 var el = elements[i]
1408 var elName = el.nodeName.toLowerCase()
1409
1410 if ($.inArray(elName, whitelistKeys) === -1) {
1411 el.parentNode.removeChild(el)
1412
1413 continue
1414 }
1415
1416 var attributeList = $.map(el.attributes, function (el) { return el })
1417 var whitelistedAttributes = [].concat(whiteList['*'] || [], whiteList[elName] || [])
1418
1419 for (var j = 0, len2 = attributeList.length; j < len2; j++) {
1420 if (!allowedAttribute(attributeList[j], whitelistedAttributes)) {
1421 el.removeAttribute(attributeList[j].nodeName)
1422 }
1423 }
1424 }
1425
1426 return createdDocument.body.innerHTML
1427 }
1428
1262 // TOOLTIP PUBLIC CLASS DEFINITION 1429 // TOOLTIP PUBLIC CLASS DEFINITION
1263 // =============================== 1430 // ===============================
1264 1431
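The block added above gives tooltip.js (and popover.js, which builds on it) a whitelist sanitizer: elements whose tag is not a DefaultWhitelist key are removed outright, remaining attributes must appear in the per-tag list, the global '*' list, or match the ARIA regex, and URI-bearing attributes must additionally pass SAFE_URL_PATTERN or DATA_URL_PATTERN. A usage sketch, assuming the names above are in scope (in reality they are private to the IIFE):

    var unsafe = '<div><a href="javascript:alert(1)" onclick="x()">hi</a>' +
                 '<script>steal()<\/script></div>';
    var clean = sanitizeHtml(unsafe, DefaultWhitelist, null);
    // -> '<div><a>hi</a></div>': <script> is not whitelisted and is removed,
    //    onclick is not an allowed attribute, and the javascript: URL fails
    //    SAFE_URL_PATTERN, so href is stripped as well.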
@@ -1274,7 +1441,7 @@ if (typeof jQuery === 'undefined') {
1274 this.init('tooltip', element, options) 1441 this.init('tooltip', element, options)
1275 } 1442 }
1276 1443
1277 Tooltip.VERSION = '3.3.6' 1444 Tooltip.VERSION = '3.4.1'
1278 1445
1279 Tooltip.TRANSITION_DURATION = 150 1446 Tooltip.TRANSITION_DURATION = 150
1280 1447
@@ -1291,7 +1458,10 @@ if (typeof jQuery === 'undefined') {
1291 viewport: { 1458 viewport: {
1292 selector: 'body', 1459 selector: 'body',
1293 padding: 0 1460 padding: 0
1294 } 1461 },
1462 sanitize : true,
1463 sanitizeFn : null,
1464 whiteList : DefaultWhitelist
1295 } 1465 }
1296 1466
1297 Tooltip.prototype.init = function (type, element, options) { 1467 Tooltip.prototype.init = function (type, element, options) {
@@ -1299,7 +1469,7 @@ if (typeof jQuery === 'undefined') {
1299 this.type = type 1469 this.type = type
1300 this.$element = $(element) 1470 this.$element = $(element)
1301 this.options = this.getOptions(options) 1471 this.options = this.getOptions(options)
1302 this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport)) 1472 this.$viewport = this.options.viewport && $(document).find($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
1303 this.inState = { click: false, hover: false, focus: false } 1473 this.inState = { click: false, hover: false, focus: false }
1304 1474
1305 if (this.$element[0] instanceof document.constructor && !this.options.selector) { 1475 if (this.$element[0] instanceof document.constructor && !this.options.selector) {
@@ -1332,7 +1502,15 @@ if (typeof jQuery === 'undefined') {
1332 } 1502 }
1333 1503
1334 Tooltip.prototype.getOptions = function (options) { 1504 Tooltip.prototype.getOptions = function (options) {
1335 options = $.extend({}, this.getDefaults(), this.$element.data(), options) 1505 var dataAttributes = this.$element.data()
1506
1507 for (var dataAttr in dataAttributes) {
1508 if (dataAttributes.hasOwnProperty(dataAttr) && $.inArray(dataAttr, DISALLOWED_ATTRIBUTES) !== -1) {
1509 delete dataAttributes[dataAttr]
1510 }
1511 }
1512
1513 options = $.extend({}, this.getDefaults(), dataAttributes, options)
1336 1514
1337 if (options.delay && typeof options.delay == 'number') { 1515 if (options.delay && typeof options.delay == 'number') {
1338 options.delay = { 1516 options.delay = {
@@ -1341,6 +1519,10 @@ if (typeof jQuery === 'undefined') {
1341 } 1519 }
1342 } 1520 }
1343 1521
1522 if (options.sanitize) {
1523 options.template = sanitizeHtml(options.template, options.whiteList, options.sanitizeFn)
1524 }
1525
1344 return options 1526 return options
1345 } 1527 }
1346 1528
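The getOptions change above explains the DISALLOWED_ATTRIBUTES list from the start of the block: tooltip options can come from data-* attributes, i.e. from markup, so without this filter injected HTML could simply switch the sanitizer off with data-sanitize="false". A condensed sketch of the same filtering:

    // Options read from markup (what $element.data() would return for e.g.
    // <a data-toggle="tooltip" data-sanitize="false" data-placement="top">):
    var dataAttributes = { sanitize: false, placement: 'top' };
    ['sanitize', 'whiteList', 'sanitizeFn'].forEach(function (key) {
      delete dataAttributes[key];                // markup cannot set these
    });
    console.log(dataAttributes);                 // { placement: 'top' }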
@@ -1452,7 +1634,7 @@ if (typeof jQuery === 'undefined') {
1452 .addClass(placement) 1634 .addClass(placement)
1453 .data('bs.' + this.type, this) 1635 .data('bs.' + this.type, this)
1454 1636
1455 this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element) 1637 this.options.container ? $tip.appendTo($(document).find(this.options.container)) : $tip.insertAfter(this.$element)
1456 this.$element.trigger('inserted.bs.' + this.type) 1638 this.$element.trigger('inserted.bs.' + this.type)
1457 1639
1458 var pos = this.getPosition() 1640 var pos = this.getPosition()
@@ -1554,7 +1736,16 @@ if (typeof jQuery === 'undefined') {
1554 var $tip = this.tip() 1736 var $tip = this.tip()
1555 var title = this.getTitle() 1737 var title = this.getTitle()
1556 1738
1557 $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title) 1739 if (this.options.html) {
1740 if (this.options.sanitize) {
1741 title = sanitizeHtml(title, this.options.whiteList, this.options.sanitizeFn)
1742 }
1743
1744 $tip.find('.tooltip-inner').html(title)
1745 } else {
1746 $tip.find('.tooltip-inner').text(title)
1747 }
1748
1558 $tip.removeClass('fade in top bottom left right') 1749 $tip.removeClass('fade in top bottom left right')
1559 } 1750 }
1560 1751
@@ -1565,9 +1756,11 @@ if (typeof jQuery === 'undefined') {
1565 1756
1566 function complete() { 1757 function complete() {
1567 if (that.hoverState != 'in') $tip.detach() 1758 if (that.hoverState != 'in') $tip.detach()
1568 that.$element 1759 if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary.
1569 .removeAttr('aria-describedby') 1760 that.$element
1570 .trigger('hidden.bs.' + that.type) 1761 .removeAttr('aria-describedby')
1762 .trigger('hidden.bs.' + that.type)
1763 }
1571 callback && callback() 1764 callback && callback()
1572 } 1765 }
1573 1766
@@ -1610,7 +1803,10 @@ if (typeof jQuery === 'undefined') {
1610 // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093 1803 // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
1611 elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top }) 1804 elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
1612 } 1805 }
1613 var elOffset = isBody ? { top: 0, left: 0 } : $element.offset() 1806 var isSvg = window.SVGElement && el instanceof window.SVGElement
1807 // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3.
1808 // See https://github.com/twbs/bootstrap/issues/20280
1809 var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset())
1614 var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() } 1810 var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
1615 var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null 1811 var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
1616 1812
@@ -1726,9 +1922,13 @@ if (typeof jQuery === 'undefined') {
1726 that.$tip = null 1922 that.$tip = null
1727 that.$arrow = null 1923 that.$arrow = null
1728 that.$viewport = null 1924 that.$viewport = null
1925 that.$element = null
1729 }) 1926 })
1730 } 1927 }
1731 1928
1929 Tooltip.prototype.sanitizeHtml = function (unsafeHtml) {
1930 return sanitizeHtml(unsafeHtml, this.options.whiteList, this.options.sanitizeFn)
1931 }
1732 1932
1733 // TOOLTIP PLUGIN DEFINITION 1933 // TOOLTIP PLUGIN DEFINITION
1734 // ========================= 1934 // =========================
@@ -1762,10 +1962,10 @@ if (typeof jQuery === 'undefined') {
1762 }(jQuery); 1962 }(jQuery);
1763 1963
1764/* ======================================================================== 1964/* ========================================================================
1765 * Bootstrap: popover.js v3.3.6 1965 * Bootstrap: popover.js v3.4.1
1766 * http://getbootstrap.com/javascript/#popovers 1966 * https://getbootstrap.com/docs/3.4/javascript/#popovers
1767 * ======================================================================== 1967 * ========================================================================
1768 * Copyright 2011-2015 Twitter, Inc. 1968 * Copyright 2011-2019 Twitter, Inc.
1769 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 1969 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
1770 * ======================================================================== */ 1970 * ======================================================================== */
1771 1971
@@ -1782,7 +1982,7 @@ if (typeof jQuery === 'undefined') {
1782 1982
1783 if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js') 1983 if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
1784 1984
1785 Popover.VERSION = '3.3.6' 1985 Popover.VERSION = '3.4.1'
1786 1986
1787 Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, { 1987 Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, {
1788 placement: 'right', 1988 placement: 'right',
@@ -1808,10 +2008,25 @@ if (typeof jQuery === 'undefined') {
1808 var title = this.getTitle() 2008 var title = this.getTitle()
1809 var content = this.getContent() 2009 var content = this.getContent()
1810 2010
1811 $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title) 2011 if (this.options.html) {
1812 $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events 2012 var typeContent = typeof content
1813 this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text' 2013
1814 ](content) 2014 if (this.options.sanitize) {
2015 title = this.sanitizeHtml(title)
2016
2017 if (typeContent === 'string') {
2018 content = this.sanitizeHtml(content)
2019 }
2020 }
2021
2022 $tip.find('.popover-title').html(title)
2023 $tip.find('.popover-content').children().detach().end()[
2024 typeContent === 'string' ? 'html' : 'append'
2025 ](content)
2026 } else {
2027 $tip.find('.popover-title').text(title)
2028 $tip.find('.popover-content').children().detach().end().text(content)
2029 }
1815 2030
1816 $tip.removeClass('fade top bottom left right in') 2031 $tip.removeClass('fade top bottom left right in')
1817 2032
@@ -1830,8 +2045,8 @@ if (typeof jQuery === 'undefined') {
1830 2045
1831 return $e.attr('data-content') 2046 return $e.attr('data-content')
1832 || (typeof o.content == 'function' ? 2047 || (typeof o.content == 'function' ?
1833 o.content.call($e[0]) : 2048 o.content.call($e[0]) :
1834 o.content) 2049 o.content)
1835 } 2050 }
1836 2051
1837 Popover.prototype.arrow = function () { 2052 Popover.prototype.arrow = function () {
@@ -1871,10 +2086,10 @@ if (typeof jQuery === 'undefined') {
1871 }(jQuery); 2086 }(jQuery);
1872 2087
1873/* ======================================================================== 2088/* ========================================================================
1874 * Bootstrap: scrollspy.js v3.3.6 2089 * Bootstrap: scrollspy.js v3.4.1
1875 * http://getbootstrap.com/javascript/#scrollspy 2090 * https://getbootstrap.com/docs/3.4/javascript/#scrollspy
1876 * ======================================================================== 2091 * ========================================================================
1877 * Copyright 2011-2015 Twitter, Inc. 2092 * Copyright 2011-2019 Twitter, Inc.
1878 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 2093 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
1879 * ======================================================================== */ 2094 * ======================================================================== */
1880 2095
@@ -1900,7 +2115,7 @@ if (typeof jQuery === 'undefined') {
1900 this.process() 2115 this.process()
1901 } 2116 }
1902 2117
1903 ScrollSpy.VERSION = '3.3.6' 2118 ScrollSpy.VERSION = '3.4.1'
1904 2119
1905 ScrollSpy.DEFAULTS = { 2120 ScrollSpy.DEFAULTS = {
1906 offset: 10 2121 offset: 10
@@ -2044,10 +2259,10 @@ if (typeof jQuery === 'undefined') {
2044 }(jQuery); 2259 }(jQuery);
2045 2260
2046/* ======================================================================== 2261/* ========================================================================
2047 * Bootstrap: tab.js v3.3.6 2262 * Bootstrap: tab.js v3.4.1
2048 * http://getbootstrap.com/javascript/#tabs 2263 * https://getbootstrap.com/docs/3.4/javascript/#tabs
2049 * ======================================================================== 2264 * ========================================================================
2050 * Copyright 2011-2015 Twitter, Inc. 2265 * Copyright 2011-2019 Twitter, Inc.
2051 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 2266 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
2052 * ======================================================================== */ 2267 * ======================================================================== */
2053 2268
@@ -2064,7 +2279,7 @@ if (typeof jQuery === 'undefined') {
2064 // jscs:enable requireDollarBeforejQueryAssignment 2279 // jscs:enable requireDollarBeforejQueryAssignment
2065 } 2280 }
2066 2281
2067 Tab.VERSION = '3.3.6' 2282 Tab.VERSION = '3.4.1'
2068 2283
2069 Tab.TRANSITION_DURATION = 150 2284 Tab.TRANSITION_DURATION = 150
2070 2285
@@ -2093,7 +2308,7 @@ if (typeof jQuery === 'undefined') {
2093 2308
2094 if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return 2309 if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
2095 2310
2096 var $target = $(selector) 2311 var $target = $(document).find(selector)
2097 2312
2098 this.activate($this.closest('li'), $ul) 2313 this.activate($this.closest('li'), $ul)
2099 this.activate($target, $target.parent(), function () { 2314 this.activate($target, $target.parent(), function () {
@@ -2118,15 +2333,15 @@ if (typeof jQuery === 'undefined') {
2118 $active 2333 $active
2119 .removeClass('active') 2334 .removeClass('active')
2120 .find('> .dropdown-menu > .active') 2335 .find('> .dropdown-menu > .active')
2121 .removeClass('active') 2336 .removeClass('active')
2122 .end() 2337 .end()
2123 .find('[data-toggle="tab"]') 2338 .find('[data-toggle="tab"]')
2124 .attr('aria-expanded', false) 2339 .attr('aria-expanded', false)
2125 2340
2126 element 2341 element
2127 .addClass('active') 2342 .addClass('active')
2128 .find('[data-toggle="tab"]') 2343 .find('[data-toggle="tab"]')
2129 .attr('aria-expanded', true) 2344 .attr('aria-expanded', true)
2130 2345
2131 if (transition) { 2346 if (transition) {
2132 element[0].offsetWidth // reflow for transition 2347 element[0].offsetWidth // reflow for transition
@@ -2138,10 +2353,10 @@ if (typeof jQuery === 'undefined') {
2138 if (element.parent('.dropdown-menu').length) { 2353 if (element.parent('.dropdown-menu').length) {
2139 element 2354 element
2140 .closest('li.dropdown') 2355 .closest('li.dropdown')
2141 .addClass('active') 2356 .addClass('active')
2142 .end() 2357 .end()
2143 .find('[data-toggle="tab"]') 2358 .find('[data-toggle="tab"]')
2144 .attr('aria-expanded', true) 2359 .attr('aria-expanded', true)
2145 } 2360 }
2146 2361
2147 callback && callback() 2362 callback && callback()
@@ -2200,10 +2415,10 @@ if (typeof jQuery === 'undefined') {
2200 }(jQuery); 2415 }(jQuery);
2201 2416
2202/* ======================================================================== 2417/* ========================================================================
2203 * Bootstrap: affix.js v3.3.6 2418 * Bootstrap: affix.js v3.4.1
2204 * http://getbootstrap.com/javascript/#affix 2419 * https://getbootstrap.com/docs/3.4/javascript/#affix
2205 * ======================================================================== 2420 * ========================================================================
2206 * Copyright 2011-2015 Twitter, Inc. 2421 * Copyright 2011-2019 Twitter, Inc.
2207 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 2422 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
2208 * ======================================================================== */ 2423 * ======================================================================== */
2209 2424
@@ -2217,7 +2432,9 @@ if (typeof jQuery === 'undefined') {
2217 var Affix = function (element, options) { 2432 var Affix = function (element, options) {
2218 this.options = $.extend({}, Affix.DEFAULTS, options) 2433 this.options = $.extend({}, Affix.DEFAULTS, options)
2219 2434
2220 this.$target = $(this.options.target) 2435 var target = this.options.target === Affix.DEFAULTS.target ? $(this.options.target) : $(document).find(this.options.target)
2436
2437 this.$target = target
2221 .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this)) 2438 .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this))
2222 .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this)) 2439 .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this))
2223 2440
@@ -2229,7 +2446,7 @@ if (typeof jQuery === 'undefined') {
2229 this.checkPosition() 2446 this.checkPosition()
2230 } 2447 }
2231 2448
2232 Affix.VERSION = '3.3.6' 2449 Affix.VERSION = '3.4.1'
2233 2450
2234 Affix.RESET = 'affix affix-top affix-bottom' 2451 Affix.RESET = 'affix affix-top affix-bottom'
2235 2452
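Taken together, alert, carousel, collapse, dropdown, modal, tab and affix all switch to the same target-resolution idiom in this patch. Extracted into one place it looks like the sketch below (a hypothetical helper; the plugins inline this logic rather than sharing it, and it assumes jQuery):

    function getTargetFromTrigger($trigger) {
      var href = $trigger.attr('href');
      var target = $trigger.attr('data-target') ||
        (href && href.replace(/.*(?=#[^\s]+$)/, '')); // keep only the #fragment
      return target ? $(document).find(target) : $(); // never parsed as HTML
    }
    // e.g. getTargetFromTrigger($('[data-toggle="modal"]').first())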
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js
new file mode 100644
index 0000000000..eb0a8b410f
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js
@@ -0,0 +1,6 @@
1 /*!
2 * Bootstrap v3.4.1 (https://getbootstrap.com/)
3 * Copyright 2011-2019 Twitter, Inc.
4 * Licensed under the MIT license
5 */
6if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");!function(t){"use strict";var e=jQuery.fn.jquery.split(" ")[0].split(".");if(e[0]<2&&e[1]<9||1==e[0]&&9==e[1]&&e[2]<1||3<e[0])throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(),function(n){"use strict";n.fn.emulateTransitionEnd=function(t){var e=!1,i=this;n(this).one("bsTransitionEnd",function(){e=!0});return setTimeout(function(){e||n(i).trigger(n.support.transition.end)},t),this},n(function(){n.support.transition=function o(){var t=document.createElement("bootstrap"),e={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var i in e)if(t.style[i]!==undefined)return{end:e[i]};return!1}(),n.support.transition&&(n.event.special.bsTransitionEnd={bindType:n.support.transition.end,delegateType:n.support.transition.end,handle:function(t){if(n(t.target).is(this))return t.handleObj.handler.apply(this,arguments)}})})}(jQuery),function(s){"use strict";var e='[data-dismiss="alert"]',a=function(t){s(t).on("click",e,this.close)};a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.close=function(t){var e=s(this),i=e.attr("data-target");i||(i=(i=e.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),i="#"===i?[]:i;var o=s(document).find(i);function n(){o.detach().trigger("closed.bs.alert").remove()}t&&t.preventDefault(),o.length||(o=e.closest(".alert")),o.trigger(t=s.Event("close.bs.alert")),t.isDefaultPrevented()||(o.removeClass("in"),s.support.transition&&o.hasClass("fade")?o.one("bsTransitionEnd",n).emulateTransitionEnd(a.TRANSITION_DURATION):n())};var t=s.fn.alert;s.fn.alert=function o(i){return this.each(function(){var t=s(this),e=t.data("bs.alert");e||t.data("bs.alert",e=new a(this)),"string"==typeof i&&e[i].call(t)})},s.fn.alert.Constructor=a,s.fn.alert.noConflict=function(){return s.fn.alert=t,this},s(document).on("click.bs.alert.data-api",e,a.prototype.close)}(jQuery),function(s){"use strict";var n=function(t,e){this.$element=s(t),this.options=s.extend({},n.DEFAULTS,e),this.isLoading=!1};function i(o){return this.each(function(){var t=s(this),e=t.data("bs.button"),i="object"==typeof o&&o;e||t.data("bs.button",e=new n(this,i)),"toggle"==o?e.toggle():o&&e.setState(o)})}n.VERSION="3.4.1",n.DEFAULTS={loadingText:"loading..."},n.prototype.setState=function(t){var e="disabled",i=this.$element,o=i.is("input")?"val":"html",n=i.data();t+="Text",null==n.resetText&&i.data("resetText",i[o]()),setTimeout(s.proxy(function(){i[o](null==n[t]?this.options[t]:n[t]),"loadingText"==t?(this.isLoading=!0,i.addClass(e).attr(e,e).prop(e,!0)):this.isLoading&&(this.isLoading=!1,i.removeClass(e).removeAttr(e).prop(e,!1))},this),0)},n.prototype.toggle=function(){var t=!0,e=this.$element.closest('[data-toggle="buttons"]');if(e.length){var i=this.$element.find("input");"radio"==i.prop("type")?(i.prop("checked")&&(t=!1),e.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==i.prop("type")&&(i.prop("checked")!==this.$element.hasClass("active")&&(t=!1),this.$element.toggleClass("active")),i.prop("checked",this.$element.hasClass("active")),t&&i.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var t=s.fn.button;s.fn.button=i,s.fn.button.Constructor=n,s.fn.button.noConflict=function(){return 
s.fn.button=t,this},s(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(t){var e=s(t.target).closest(".btn");i.call(e,"toggle"),s(t.target).is('input[type="radio"], input[type="checkbox"]')||(t.preventDefault(),e.is("input,button")?e.trigger("focus"):e.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(t){s(t.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(t.type))})}(jQuery),function(p){"use strict";var c=function(t,e){this.$element=p(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=e,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",p.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",p.proxy(this.pause,this)).on("mouseleave.bs.carousel",p.proxy(this.cycle,this))};function r(n){return this.each(function(){var t=p(this),e=t.data("bs.carousel"),i=p.extend({},c.DEFAULTS,t.data(),"object"==typeof n&&n),o="string"==typeof n?n:i.slide;e||t.data("bs.carousel",e=new c(this,i)),"number"==typeof n?e.to(n):o?e[o]():i.interval&&e.pause().cycle()})}c.VERSION="3.4.1",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(t){if(!/input|textarea/i.test(t.target.tagName)){switch(t.which){case 37:this.prev();break;case 39:this.next();break;default:return}t.preventDefault()}},c.prototype.cycle=function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(p.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(t){return this.$items=t.parent().children(".item"),this.$items.index(t||this.$active)},c.prototype.getItemForDirection=function(t,e){var i=this.getItemIndex(e);if(("prev"==t&&0===i||"next"==t&&i==this.$items.length-1)&&!this.options.wrap)return e;var o=(i+("prev"==t?-1:1))%this.$items.length;return this.$items.eq(o)},c.prototype.to=function(t){var e=this,i=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(t>this.$items.length-1||t<0))return this.sliding?this.$element.one("slid.bs.carousel",function(){e.to(t)}):i==t?this.pause().cycle():this.slide(i<t?"next":"prev",this.$items.eq(t))},c.prototype.pause=function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&p.support.transition&&(this.$element.trigger(p.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(t,e){var i=this.$element.find(".item.active"),o=e||this.getItemForDirection(t,i),n=this.interval,s="next"==t?"left":"right",a=this;if(o.hasClass("active"))return this.sliding=!1;var r=o[0],l=p.Event("slide.bs.carousel",{relatedTarget:r,direction:s});if(this.$element.trigger(l),!l.isDefaultPrevented()){if(this.sliding=!0,n&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var h=p(this.$indicators.children()[this.getItemIndex(o)]);h&&h.addClass("active")}var d=p.Event("slid.bs.carousel",{relatedTarget:r,direction:s});return p.support.transition&&this.$element.hasClass("slide")?(o.addClass(t),"object"==typeof 
o&&o.length&&o[0].offsetWidth,i.addClass(s),o.addClass(s),i.one("bsTransitionEnd",function(){o.removeClass([t,s].join(" ")).addClass("active"),i.removeClass(["active",s].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger(d)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(i.removeClass("active"),o.addClass("active"),this.sliding=!1,this.$element.trigger(d)),n&&this.cycle(),this}};var t=p.fn.carousel;p.fn.carousel=r,p.fn.carousel.Constructor=c,p.fn.carousel.noConflict=function(){return p.fn.carousel=t,this};var e=function(t){var e=p(this),i=e.attr("href");i&&(i=i.replace(/.*(?=#[^\s]+$)/,""));var o=e.attr("data-target")||i,n=p(document).find(o);if(n.hasClass("carousel")){var s=p.extend({},n.data(),e.data()),a=e.attr("data-slide-to");a&&(s.interval=!1),r.call(n,s),a&&n.data("bs.carousel").to(a),t.preventDefault()}};p(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),p(window).on("load",function(){p('[data-ride="carousel"]').each(function(){var t=p(this);r.call(t,t.data())})})}(jQuery),function(a){"use strict";var r=function(t,e){this.$element=a(t),this.options=a.extend({},r.DEFAULTS,e),this.$trigger=a('[data-toggle="collapse"][href="#'+t.id+'"],[data-toggle="collapse"][data-target="#'+t.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};function n(t){var e,i=t.attr("data-target")||(e=t.attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"");return a(document).find(i)}function l(o){return this.each(function(){var t=a(this),e=t.data("bs.collapse"),i=a.extend({},r.DEFAULTS,t.data(),"object"==typeof o&&o);!e&&i.toggle&&/show|hide/.test(o)&&(i.toggle=!1),e||t.data("bs.collapse",e=new r(this,i)),"string"==typeof o&&e[o]()})}r.VERSION="3.4.1",r.TRANSITION_DURATION=350,r.DEFAULTS={toggle:!0},r.prototype.dimension=function(){return this.$element.hasClass("width")?"width":"height"},r.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var t,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(t=e.data("bs.collapse"))&&t.transitioning)){var i=a.Event("show.bs.collapse");if(this.$element.trigger(i),!i.isDefaultPrevented()){e&&e.length&&(l.call(e,"hide"),t||e.data("bs.collapse",null));var o=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[o](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var n=function(){this.$element.removeClass("collapsing").addClass("collapse in")[o](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return n.call(this);var s=a.camelCase(["scroll",o].join("-"));this.$element.one("bsTransitionEnd",a.proxy(n,this)).emulateTransitionEnd(r.TRANSITION_DURATION)[o](this.$element[0][s])}}}},r.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var t=a.Event("hide.bs.collapse");if(this.$element.trigger(t),!t.isDefaultPrevented()){var e=this.dimension();this.$element[e](this.$element[e]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var i=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};if(!a.support.transition)return 
i.call(this);this.$element[e](0).one("bsTransitionEnd",a.proxy(i,this)).emulateTransitionEnd(r.TRANSITION_DURATION)}}},r.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},r.prototype.getParent=function(){return a(document).find(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(t,e){var i=a(e);this.addAriaAndCollapsedClass(n(i),i)},this)).end()},r.prototype.addAriaAndCollapsedClass=function(t,e){var i=t.hasClass("in");t.attr("aria-expanded",i),e.toggleClass("collapsed",!i).attr("aria-expanded",i)};var t=a.fn.collapse;a.fn.collapse=l,a.fn.collapse.Constructor=r,a.fn.collapse.noConflict=function(){return a.fn.collapse=t,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(t){var e=a(this);e.attr("data-target")||t.preventDefault();var i=n(e),o=i.data("bs.collapse")?"toggle":e.data();l.call(i,o)})}(jQuery),function(a){"use strict";var r='[data-toggle="dropdown"]',o=function(t){a(t).on("click.bs.dropdown",this.toggle)};function l(t){var e=t.attr("data-target");e||(e=(e=t.attr("href"))&&/#[A-Za-z]/.test(e)&&e.replace(/.*(?=#[^\s]*$)/,""));var i="#"!==e?a(document).find(e):null;return i&&i.length?i:t.parent()}function s(o){o&&3===o.which||(a(".dropdown-backdrop").remove(),a(r).each(function(){var t=a(this),e=l(t),i={relatedTarget:this};e.hasClass("open")&&(o&&"click"==o.type&&/input|textarea/i.test(o.target.tagName)&&a.contains(e[0],o.target)||(e.trigger(o=a.Event("hide.bs.dropdown",i)),o.isDefaultPrevented()||(t.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",i)))))}))}o.VERSION="3.4.1",o.prototype.toggle=function(t){var e=a(this);if(!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(s(),!o){"ontouchstart"in document.documentElement&&!i.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",s);var n={relatedTarget:this};if(i.trigger(t=a.Event("show.bs.dropdown",n)),t.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),i.toggleClass("open").trigger(a.Event("shown.bs.dropdown",n))}return!1}},o.prototype.keydown=function(t){if(/(38|40|27|32)/.test(t.which)&&!/input|textarea/i.test(t.target.tagName)){var e=a(this);if(t.preventDefault(),t.stopPropagation(),!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(!o&&27!=t.which||o&&27==t.which)return 27==t.which&&i.find(r).trigger("focus"),e.trigger("click");var n=i.find(".dropdown-menu li:not(.disabled):visible a");if(n.length){var s=n.index(t.target);38==t.which&&0<s&&s--,40==t.which&&s<n.length-1&&s++,~s||(s=0),n.eq(s).trigger("focus")}}}};var t=a.fn.dropdown;a.fn.dropdown=function e(i){return this.each(function(){var t=a(this),e=t.data("bs.dropdown");e||t.data("bs.dropdown",e=new o(this)),"string"==typeof i&&e[i].call(t)})},a.fn.dropdown.Constructor=o,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=t,this},a(document).on("click.bs.dropdown.data-api",s).on("click.bs.dropdown.data-api",".dropdown form",function(t){t.stopPropagation()}).on("click.bs.dropdown.data-api",r,o.prototype.toggle).on("keydown.bs.dropdown.data-api",r,o.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",o.prototype.keydown)}(jQuery),function(a){"use strict";var 
s=function(t,e){this.options=e,this.$body=a(document.body),this.$element=a(t),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.fixedContent=".navbar-fixed-top, .navbar-fixed-bottom",this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};function r(o,n){return this.each(function(){var t=a(this),e=t.data("bs.modal"),i=a.extend({},s.DEFAULTS,t.data(),"object"==typeof o&&o);e||t.data("bs.modal",e=new s(this,i)),"string"==typeof o?e[o](n):i.show&&e.show(n)})}s.VERSION="3.4.1",s.TRANSITION_DURATION=300,s.BACKDROP_TRANSITION_DURATION=150,s.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},s.prototype.toggle=function(t){return this.isShown?this.hide():this.show(t)},s.prototype.show=function(i){var o=this,t=a.Event("show.bs.modal",{relatedTarget:i});this.$element.trigger(t),this.isShown||t.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){o.$element.one("mouseup.dismiss.bs.modal",function(t){a(t.target).is(o.$element)&&(o.ignoreBackdropClick=!0)})}),this.backdrop(function(){var t=a.support.transition&&o.$element.hasClass("fade");o.$element.parent().length||o.$element.appendTo(o.$body),o.$element.show().scrollTop(0),o.adjustDialog(),t&&o.$element[0].offsetWidth,o.$element.addClass("in"),o.enforceFocus();var e=a.Event("shown.bs.modal",{relatedTarget:i});t?o.$dialog.one("bsTransitionEnd",function(){o.$element.trigger("focus").trigger(e)}).emulateTransitionEnd(s.TRANSITION_DURATION):o.$element.trigger("focus").trigger(e)}))},s.prototype.hide=function(t){t&&t.preventDefault(),t=a.Event("hide.bs.modal"),this.$element.trigger(t),this.isShown&&!t.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(s.TRANSITION_DURATION):this.hideModal())},s.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(t){document===t.target||this.$element[0]===t.target||this.$element.has(t.target).length||this.$element.trigger("focus")},this))},s.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(t){27==t.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},s.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},s.prototype.hideModal=function(){var t=this;this.$element.hide(),this.backdrop(function(){t.$body.removeClass("modal-open"),t.resetAdjustments(),t.resetScrollbar(),t.$element.trigger("hidden.bs.modal")})},s.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},s.prototype.backdrop=function(t){var e=this,i=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var 
o=a.support.transition&&i;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+i).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(t){this.ignoreBackdropClick?this.ignoreBackdropClick=!1:t.target===t.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide())},this)),o&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!t)return;o?this.$backdrop.one("bsTransitionEnd",t).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):t()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var n=function(){e.removeBackdrop(),t&&t()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",n).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):n()}else t&&t()},s.prototype.handleUpdate=function(){this.adjustDialog()},s.prototype.adjustDialog=function(){var t=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&t?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!t?this.scrollbarWidth:""})},s.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},s.prototype.checkScrollbar=function(){var t=window.innerWidth;if(!t){var e=document.documentElement.getBoundingClientRect();t=e.right-Math.abs(e.left)}this.bodyIsOverflowing=document.body.clientWidth<t,this.scrollbarWidth=this.measureScrollbar()},s.prototype.setScrollbar=function(){var t=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"";var n=this.scrollbarWidth;this.bodyIsOverflowing&&(this.$body.css("padding-right",t+n),a(this.fixedContent).each(function(t,e){var i=e.style.paddingRight,o=a(e).css("padding-right");a(e).data("padding-right",i).css("padding-right",parseFloat(o)+n+"px")}))},s.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad),a(this.fixedContent).each(function(t,e){var i=a(e).data("padding-right");a(e).removeData("padding-right"),e.style.paddingRight=i||""})},s.prototype.measureScrollbar=function(){var t=document.createElement("div");t.className="modal-scrollbar-measure",this.$body.append(t);var e=t.offsetWidth-t.clientWidth;return this.$body[0].removeChild(t),e};var t=a.fn.modal;a.fn.modal=r,a.fn.modal.Constructor=s,a.fn.modal.noConflict=function(){return a.fn.modal=t,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(t){var e=a(this),i=e.attr("href"),o=e.attr("data-target")||i&&i.replace(/.*(?=#[^\s]+$)/,""),n=a(document).find(o),s=n.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(i)&&i},n.data(),e.data());e.is("a")&&t.preventDefault(),n.one("show.bs.modal",function(t){t.isDefaultPrevented()||n.one("hidden.bs.modal",function(){e.is(":visible")&&e.trigger("focus")})}),r.call(n,s,this)})}(jQuery),function(g){"use strict";var o=["sanitize","whiteList","sanitizeFn"],a=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],t={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},r=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,l=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function u(t,e){var 
i=t.nodeName.toLowerCase();if(-1!==g.inArray(i,e))return-1===g.inArray(i,a)||Boolean(t.nodeValue.match(r)||t.nodeValue.match(l));for(var o=g(e).filter(function(t,e){return e instanceof RegExp}),n=0,s=o.length;n<s;n++)if(i.match(o[n]))return!0;return!1}function n(t,e,i){if(0===t.length)return t;if(i&&"function"==typeof i)return i(t);if(!document.implementation||!document.implementation.createHTMLDocument)return t;var o=document.implementation.createHTMLDocument("sanitization");o.body.innerHTML=t;for(var n=g.map(e,function(t,e){return e}),s=g(o.body).find("*"),a=0,r=s.length;a<r;a++){var l=s[a],h=l.nodeName.toLowerCase();if(-1!==g.inArray(h,n))for(var d=g.map(l.attributes,function(t){return t}),p=[].concat(e["*"]||[],e[h]||[]),c=0,f=d.length;c<f;c++)u(d[c],p)||l.removeAttribute(d[c].nodeName);else l.parentNode.removeChild(l)}return o.body.innerHTML}var m=function(t,e){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",t,e)};m.VERSION="3.4.1",m.TRANSITION_DURATION=150,m.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0},sanitize:!0,sanitizeFn:null,whiteList:t},m.prototype.init=function(t,e,i){if(this.enabled=!0,this.type=t,this.$element=g(e),this.options=this.getOptions(i),this.$viewport=this.options.viewport&&g(document).find(g.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var o=this.options.trigger.split(" "),n=o.length;n--;){var s=o[n];if("click"==s)this.$element.on("click."+this.type,this.options.selector,g.proxy(this.toggle,this));else if("manual"!=s){var a="hover"==s?"mouseenter":"focusin",r="hover"==s?"mouseleave":"focusout";this.$element.on(a+"."+this.type,this.options.selector,g.proxy(this.enter,this)),this.$element.on(r+"."+this.type,this.options.selector,g.proxy(this.leave,this))}}this.options.selector?this._options=g.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},m.prototype.getDefaults=function(){return m.DEFAULTS},m.prototype.getOptions=function(t){var e=this.$element.data();for(var i in e)e.hasOwnProperty(i)&&-1!==g.inArray(i,o)&&delete e[i];return(t=g.extend({},this.getDefaults(),e,t)).delay&&"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),t.sanitize&&(t.template=n(t.template,t.whiteList,t.sanitizeFn)),t},m.prototype.getDelegateOptions=function(){var i={},o=this.getDefaults();return this._options&&g.each(this._options,function(t,e){o[t]!=e&&(i[t]=e)}),i},m.prototype.enter=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusin"==t.type?"focus":"hover"]=!0),e.tip().hasClass("in")||"in"==e.hoverState)e.hoverState="in";else{if(clearTimeout(e.timeout),e.hoverState="in",!e.options.delay||!e.options.delay.show)return 
e.show();e.timeout=setTimeout(function(){"in"==e.hoverState&&e.show()},e.options.delay.show)}},m.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},m.prototype.leave=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusout"==t.type?"focus":"hover"]=!1),!e.isInStateTrue()){if(clearTimeout(e.timeout),e.hoverState="out",!e.options.delay||!e.options.delay.hide)return e.hide();e.timeout=setTimeout(function(){"out"==e.hoverState&&e.hide()},e.options.delay.hide)}},m.prototype.show=function(){var t=g.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(t);var e=g.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(t.isDefaultPrevented()||!e)return;var i=this,o=this.tip(),n=this.getUID(this.type);this.setContent(),o.attr("id",n),this.$element.attr("aria-describedby",n),this.options.animation&&o.addClass("fade");var s="function"==typeof this.options.placement?this.options.placement.call(this,o[0],this.$element[0]):this.options.placement,a=/\s?auto?\s?/i,r=a.test(s);r&&(s=s.replace(a,"")||"top"),o.detach().css({top:0,left:0,display:"block"}).addClass(s).data("bs."+this.type,this),this.options.container?o.appendTo(g(document).find(this.options.container)):o.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var l=this.getPosition(),h=o[0].offsetWidth,d=o[0].offsetHeight;if(r){var p=s,c=this.getPosition(this.$viewport);s="bottom"==s&&l.bottom+d>c.bottom?"top":"top"==s&&l.top-d<c.top?"bottom":"right"==s&&l.right+h>c.width?"left":"left"==s&&l.left-h<c.left?"right":s,o.removeClass(p).addClass(s)}var f=this.getCalculatedOffset(s,l,h,d);this.applyPlacement(f,s);var u=function(){var t=i.hoverState;i.$element.trigger("shown.bs."+i.type),i.hoverState=null,"out"==t&&i.leave(i)};g.support.transition&&this.$tip.hasClass("fade")?o.one("bsTransitionEnd",u).emulateTransitionEnd(m.TRANSITION_DURATION):u()}},m.prototype.applyPlacement=function(t,e){var i=this.tip(),o=i[0].offsetWidth,n=i[0].offsetHeight,s=parseInt(i.css("margin-top"),10),a=parseInt(i.css("margin-left"),10);isNaN(s)&&(s=0),isNaN(a)&&(a=0),t.top+=s,t.left+=a,g.offset.setOffset(i[0],g.extend({using:function(t){i.css({top:Math.round(t.top),left:Math.round(t.left)})}},t),0),i.addClass("in");var r=i[0].offsetWidth,l=i[0].offsetHeight;"top"==e&&l!=n&&(t.top=t.top+n-l);var h=this.getViewportAdjustedDelta(e,t,r,l);h.left?t.left+=h.left:t.top+=h.top;var d=/top|bottom/.test(e),p=d?2*h.left-o+r:2*h.top-n+l,c=d?"offsetWidth":"offsetHeight";i.offset(t),this.replaceArrow(p,i[0][c],d)},m.prototype.replaceArrow=function(t,e,i){this.arrow().css(i?"left":"top",50*(1-t/e)+"%").css(i?"top":"left","")},m.prototype.setContent=function(){var t=this.tip(),e=this.getTitle();this.options.html?(this.options.sanitize&&(e=n(e,this.options.whiteList,this.options.sanitizeFn)),t.find(".tooltip-inner").html(e)):t.find(".tooltip-inner").text(e),t.removeClass("fade in top bottom left right")},m.prototype.hide=function(t){var e=this,i=g(this.$tip),o=g.Event("hide.bs."+this.type);function n(){"in"!=e.hoverState&&i.detach(),e.$element&&e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),t&&t()}if(this.$element.trigger(o),!o.isDefaultPrevented())return 
i.removeClass("in"),g.support.transition&&i.hasClass("fade")?i.one("bsTransitionEnd",n).emulateTransitionEnd(m.TRANSITION_DURATION):n(),this.hoverState=null,this},m.prototype.fixTitle=function(){var t=this.$element;(t.attr("title")||"string"!=typeof t.attr("data-original-title"))&&t.attr("data-original-title",t.attr("title")||"").attr("title","")},m.prototype.hasContent=function(){return this.getTitle()},m.prototype.getPosition=function(t){var e=(t=t||this.$element)[0],i="BODY"==e.tagName,o=e.getBoundingClientRect();null==o.width&&(o=g.extend({},o,{width:o.right-o.left,height:o.bottom-o.top}));var n=window.SVGElement&&e instanceof window.SVGElement,s=i?{top:0,left:0}:n?null:t.offset(),a={scroll:i?document.documentElement.scrollTop||document.body.scrollTop:t.scrollTop()},r=i?{width:g(window).width(),height:g(window).height()}:null;return g.extend({},o,a,r,s)},m.prototype.getCalculatedOffset=function(t,e,i,o){return"bottom"==t?{top:e.top+e.height,left:e.left+e.width/2-i/2}:"top"==t?{top:e.top-o,left:e.left+e.width/2-i/2}:"left"==t?{top:e.top+e.height/2-o/2,left:e.left-i}:{top:e.top+e.height/2-o/2,left:e.left+e.width}},m.prototype.getViewportAdjustedDelta=function(t,e,i,o){var n={top:0,left:0};if(!this.$viewport)return n;var s=this.options.viewport&&this.options.viewport.padding||0,a=this.getPosition(this.$viewport);if(/right|left/.test(t)){var r=e.top-s-a.scroll,l=e.top+s-a.scroll+o;r<a.top?n.top=a.top-r:l>a.top+a.height&&(n.top=a.top+a.height-l)}else{var h=e.left-s,d=e.left+s+i;h<a.left?n.left=a.left-h:d>a.right&&(n.left=a.left+a.width-d)}return n},m.prototype.getTitle=function(){var t=this.$element,e=this.options;return t.attr("data-original-title")||("function"==typeof e.title?e.title.call(t[0]):e.title)},m.prototype.getUID=function(t){for(;t+=~~(1e6*Math.random()),document.getElementById(t););return t},m.prototype.tip=function(){if(!this.$tip&&(this.$tip=g(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},m.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},m.prototype.enable=function(){this.enabled=!0},m.prototype.disable=function(){this.enabled=!1},m.prototype.toggleEnabled=function(){this.enabled=!this.enabled},m.prototype.toggle=function(t){var e=this;t&&((e=g(t.currentTarget).data("bs."+this.type))||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e))),t?(e.inState.click=!e.inState.click,e.isInStateTrue()?e.enter(e):e.leave(e)):e.tip().hasClass("in")?e.leave(e):e.enter(e)},m.prototype.destroy=function(){var t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null,t.$element=null})},m.prototype.sanitizeHtml=function(t){return n(t,this.options.whiteList,this.options.sanitizeFn)};var e=g.fn.tooltip;g.fn.tooltip=function i(o){return this.each(function(){var t=g(this),e=t.data("bs.tooltip"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.tooltip",e=new m(this,i)),"string"==typeof o&&e[o]())})},g.fn.tooltip.Constructor=m,g.fn.tooltip.noConflict=function(){return g.fn.tooltip=e,this}}(jQuery),function(n){"use strict";var s=function(t,e){this.init("popover",t,e)};if(!n.fn.tooltip)throw new Error("Popover requires 
tooltip.js");s.VERSION="3.4.1",s.DEFAULTS=n.extend({},n.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),((s.prototype=n.extend({},n.fn.tooltip.Constructor.prototype)).constructor=s).prototype.getDefaults=function(){return s.DEFAULTS},s.prototype.setContent=function(){var t=this.tip(),e=this.getTitle(),i=this.getContent();if(this.options.html){var o=typeof i;this.options.sanitize&&(e=this.sanitizeHtml(e),"string"===o&&(i=this.sanitizeHtml(i))),t.find(".popover-title").html(e),t.find(".popover-content").children().detach().end()["string"===o?"html":"append"](i)}else t.find(".popover-title").text(e),t.find(".popover-content").children().detach().end().text(i);t.removeClass("fade top bottom left right in"),t.find(".popover-title").html()||t.find(".popover-title").hide()},s.prototype.hasContent=function(){return this.getTitle()||this.getContent()},s.prototype.getContent=function(){var t=this.$element,e=this.options;return t.attr("data-content")||("function"==typeof e.content?e.content.call(t[0]):e.content)},s.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var t=n.fn.popover;n.fn.popover=function e(o){return this.each(function(){var t=n(this),e=t.data("bs.popover"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.popover",e=new s(this,i)),"string"==typeof o&&e[o]())})},n.fn.popover.Constructor=s,n.fn.popover.noConflict=function(){return n.fn.popover=t,this}}(jQuery),function(s){"use strict";function n(t,e){this.$body=s(document.body),this.$scrollElement=s(t).is(document.body)?s(window):s(t),this.options=s.extend({},n.DEFAULTS,e),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",s.proxy(this.process,this)),this.refresh(),this.process()}function e(o){return this.each(function(){var t=s(this),e=t.data("bs.scrollspy"),i="object"==typeof o&&o;e||t.data("bs.scrollspy",e=new n(this,i)),"string"==typeof o&&e[o]()})}n.VERSION="3.4.1",n.DEFAULTS={offset:10},n.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},n.prototype.refresh=function(){var t=this,o="offset",n=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),s.isWindow(this.$scrollElement[0])||(o="position",n=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var t=s(this),e=t.data("target")||t.attr("href"),i=/^#./.test(e)&&s(e);return i&&i.length&&i.is(":visible")&&[[i[o]().top+n,e]]||null}).sort(function(t,e){return t[0]-e[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},n.prototype.process=function(){var t,e=this.$scrollElement.scrollTop()+this.options.offset,i=this.getScrollHeight(),o=this.options.offset+i-this.$scrollElement.height(),n=this.offsets,s=this.targets,a=this.activeTarget;if(this.scrollHeight!=i&&this.refresh(),o<=e)return a!=(t=s[s.length-1])&&this.activate(t);if(a&&e<n[0])return this.activeTarget=null,this.clear();for(t=n.length;t--;)a!=s[t]&&e>=n[t]&&(n[t+1]===undefined||e<n[t+1])&&this.activate(s[t])},n.prototype.activate=function(t){this.activeTarget=t,this.clear();var 
e=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',i=s(e).parents("li").addClass("active");i.parent(".dropdown-menu").length&&(i=i.closest("li.dropdown").addClass("active")),i.trigger("activate.bs.scrollspy")},n.prototype.clear=function(){s(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var t=s.fn.scrollspy;s.fn.scrollspy=e,s.fn.scrollspy.Constructor=n,s.fn.scrollspy.noConflict=function(){return s.fn.scrollspy=t,this},s(window).on("load.bs.scrollspy.data-api",function(){s('[data-spy="scroll"]').each(function(){var t=s(this);e.call(t,t.data())})})}(jQuery),function(r){"use strict";var a=function(t){this.element=r(t)};function e(i){return this.each(function(){var t=r(this),e=t.data("bs.tab");e||t.data("bs.tab",e=new a(this)),"string"==typeof i&&e[i]()})}a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.show=function(){var t=this.element,e=t.closest("ul:not(.dropdown-menu)"),i=t.data("target");if(i||(i=(i=t.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),!t.parent("li").hasClass("active")){var o=e.find(".active:last a"),n=r.Event("hide.bs.tab",{relatedTarget:t[0]}),s=r.Event("show.bs.tab",{relatedTarget:o[0]});if(o.trigger(n),t.trigger(s),!s.isDefaultPrevented()&&!n.isDefaultPrevented()){var a=r(document).find(i);this.activate(t.closest("li"),e),this.activate(a,a.parent(),function(){o.trigger({type:"hidden.bs.tab",relatedTarget:t[0]}),t.trigger({type:"shown.bs.tab",relatedTarget:o[0]})})}}},a.prototype.activate=function(t,e,i){var o=e.find("> .active"),n=i&&r.support.transition&&(o.length&&o.hasClass("fade")||!!e.find("> .fade").length);function s(){o.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),t.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),n?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu").length&&t.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),i&&i()}o.length&&n?o.one("bsTransitionEnd",s).emulateTransitionEnd(a.TRANSITION_DURATION):s(),o.removeClass("in")};var t=r.fn.tab;r.fn.tab=e,r.fn.tab.Constructor=a,r.fn.tab.noConflict=function(){return r.fn.tab=t,this};var i=function(t){t.preventDefault(),e.call(r(this),"show")};r(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',i).on("click.bs.tab.data-api",'[data-toggle="pill"]',i)}(jQuery),function(l){"use strict";var h=function(t,e){this.options=l.extend({},h.DEFAULTS,e);var i=this.options.target===h.DEFAULTS.target?l(this.options.target):l(document).find(this.options.target);this.$target=i.on("scroll.bs.affix.data-api",l.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",l.proxy(this.checkPositionWithEventLoop,this)),this.$element=l(t),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};function i(o){return this.each(function(){var t=l(this),e=t.data("bs.affix"),i="object"==typeof o&&o;e||t.data("bs.affix",e=new h(this,i)),"string"==typeof o&&e[o]()})}h.VERSION="3.4.1",h.RESET="affix affix-top affix-bottom",h.DEFAULTS={offset:0,target:window},h.prototype.getState=function(t,e,i,o){var n=this.$target.scrollTop(),s=this.$element.offset(),a=this.$target.height();if(null!=i&&"top"==this.affixed)return n<i&&"top";if("bottom"==this.affixed)return null!=i?!(n+this.unpin<=s.top)&&"bottom":!(n+a<=t-o)&&"bottom";var r=null==this.affixed,l=r?n:s.top;return 
null!=i&&n<=i?"top":null!=o&&t-o<=l+(r?a:e)&&"bottom"},h.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(h.RESET).addClass("affix");var t=this.$target.scrollTop(),e=this.$element.offset();return this.pinnedOffset=e.top-t},h.prototype.checkPositionWithEventLoop=function(){setTimeout(l.proxy(this.checkPosition,this),1)},h.prototype.checkPosition=function(){if(this.$element.is(":visible")){var t=this.$element.height(),e=this.options.offset,i=e.top,o=e.bottom,n=Math.max(l(document).height(),l(document.body).height());"object"!=typeof e&&(o=i=e),"function"==typeof i&&(i=e.top(this.$element)),"function"==typeof o&&(o=e.bottom(this.$element));var s=this.getState(n,t,i,o);if(this.affixed!=s){null!=this.unpin&&this.$element.css("top","");var a="affix"+(s?"-"+s:""),r=l.Event(a+".bs.affix");if(this.$element.trigger(r),r.isDefaultPrevented())return;this.affixed=s,this.unpin="bottom"==s?this.getPinnedOffset():null,this.$element.removeClass(h.RESET).addClass(a).trigger(a.replace("affix","affixed")+".bs.affix")}"bottom"==s&&this.$element.offset({top:n-t-o})}};var t=l.fn.affix;l.fn.affix=i,l.fn.affix.Constructor=h,l.fn.affix.noConflict=function(){return l.fn.affix=t,this},l(window).on("load",function(){l('[data-spy="affix"]').each(function(){var t=l(this),e=t.data();e.offset=e.offset||{},null!=e.offsetBottom&&(e.offset.bottom=e.offsetBottom),null!=e.offsetTop&&(e.offset.top=e.offsetTop),i.call(t,e)})})}(jQuery);
\ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js
deleted file mode 100644
index c4a924160d..0000000000
--- a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js
+++ /dev/null
@@ -1,7 +0,0 @@
/*!
 * Bootstrap v3.3.6 (http://getbootstrap.com)
 * Copyright 2011-2016 Twitter, Inc.
 * Licensed under the MIT license
 */
if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>2)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){return a(b.target).is(this)?b.handleObj.handler.apply(this,arguments):void 0}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.6",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a(f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.6",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var 
d=a(c.target);d.hasClass("btn")||(d=d.closest(".btn")),b.call(d,"toggle"),a(c.target).is('input[type="radio"]')||a(c.target).is('input[type="checkbox"]')||c.preventDefault()}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));return a>this.$items.length-1||0>a?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){return this.sliding?void 0:this.slide("next")},c.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" 
")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.6",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return 
a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.6",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&j<i.length-1&&j++,~j||(j=0),i.eq(j).trigger("focus")}}}};var h=a.fn.dropdown;a.fn.dropdown=d,a.fn.dropdown.Constructor=g,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=h,this},a(document).on("click.bs.dropdown.data-api",c).on("click.bs.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",f,g.prototype.toggle).on("keydown.bs.dropdown.data-api",f,g.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",g.prototype.keydown)}(jQuery),+function(a){"use strict";function b(b,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},c.DEFAULTS,e.data(),"object"==typeof b&&b);f||e.data("bs.modal",f=new c(this,g)),"string"==typeof b?f[b](d):g.show&&f.show(d)})}var 
c=function(b,c){this.options=c,this.$body=a(document.body),this.$element=a(b),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=300,c.BACKDROP_TRANSITION_DURATION=150,c.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},c.prototype.toggle=function(a){return this.isShown?this.hide():this.show(a)},c.prototype.show=function(b){var d=this,e=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(e),this.isShown||e.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){d.$element.one("mouseup.dismiss.bs.modal",function(b){a(b.target).is(d.$element)&&(d.ignoreBackdropClick=!0)})}),this.backdrop(function(){var e=a.support.transition&&d.$element.hasClass("fade");d.$element.parent().length||d.$element.appendTo(d.$body),d.$element.show().scrollTop(0),d.adjustDialog(),e&&d.$element[0].offsetWidth,d.$element.addClass("in"),d.enforceFocus();var f=a.Event("shown.bs.modal",{relatedTarget:b});e?d.$dialog.one("bsTransitionEnd",function(){d.$element.trigger("focus").trigger(f)}).emulateTransitionEnd(c.TRANSITION_DURATION):d.$element.trigger("focus").trigger(f)}))},c.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(c.TRANSITION_DURATION):this.hideModal())},c.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.trigger("focus")},this))},c.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},c.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},c.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.$body.removeClass("modal-open"),a.resetAdjustments(),a.resetScrollbar(),a.$element.trigger("hidden.bs.modal")})},c.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},c.prototype.backdrop=function(b){var d=this,e=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var f=a.support.transition&&e;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+e).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(a){return 
this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),f&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;f?this.$backdrop.one("bsTransitionEnd",b).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):b()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var g=function(){d.removeBackdrop(),b&&b()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",g).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):g()}else b&&b()},c.prototype.handleUpdate=function(){this.adjustDialog()},c.prototype.adjustDialog=function(){var a=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth<a,this.scrollbarWidth=this.measureScrollbar()},c.prototype.setScrollbar=function(){var a=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",a+this.scrollbarWidth)},c.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},c.prototype.measureScrollbar=function(){var a=document.createElement("div");a.className="modal-scrollbar-measure",this.$body.append(a);var b=a.offsetWidth-a.clientWidth;return this.$body[0].removeChild(a),b};var d=a.fn.modal;a.fn.modal=b,a.fn.modal.Constructor=c,a.fn.modal.noConflict=function(){return a.fn.modal=d,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(c){var d=a(this),e=d.attr("href"),f=a(d.attr("data-target")||e&&e.replace(/.*(?=#[^\s]+$)/,"")),g=f.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(e)&&e},f.data(),d.data());d.is("a")&&c.preventDefault(),f.one("show.bs.modal",function(a){a.isDefaultPrevented()||f.one("hidden.bs.modal",function(){d.is(":visible")&&d.trigger("focus")})}),b.call(f,g,this)})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.tooltip",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",a,b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when 
initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),c.isInStateTrue()?void 0:(clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide())},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-m<o.top?"bottom":"right"==h&&k.right+l>o.width?"left":"left"==h&&k.left-l<o.left?"right":h,f.removeClass(n).addClass(h)}var p=this.getCalculatedOffset(h,k,l,m);this.applyPlacement(p,h);var q=function(){var 
a=e.hoverState;e.$element.trigger("shown.bs."+e.type),e.hoverState=null,"out"==a&&e.leave(e)};a.support.transition&&this.$tip.hasClass("fade")?f.one("bsTransitionEnd",q).emulateTransitionEnd(c.TRANSITION_DURATION):q()}},c.prototype.applyPlacement=function(b,c){var d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),b.top+=g,b.left+=h,a.offset.setOffset(d[0],a.extend({using:function(a){d.css({top:Math.round(a.top),left:Math.round(a.left)})}},b),0),d.addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;"top"==c&&j!=f&&(b.top=b.top+f-j);var k=this.getViewportAdjustedDelta(c,b,i,j);k.left?b.left+=k.left:b.top+=k.top;var l=/top|bottom/.test(c),m=l?2*k.left-e+i:2*k.top-f+j,n=l?"offsetWidth":"offsetHeight";d.offset(b),this.replaceArrow(m,d[0][n],l)},c.prototype.replaceArrow=function(a,b,c){this.arrow().css(c?"left":"top",50*(1-a/b)+"%").css(c?"top":"left","")},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},c.prototype.hide=function(b){function d(){"in"!=e.hoverState&&f.detach(),e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),b&&b()}var e=this,f=a(this.$tip),g=a.Event("hide.bs."+this.type);return this.$element.trigger(g),g.isDefaultPrevented()?void 0:(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one("bsTransitionEnd",d).emulateTransitionEnd(c.TRANSITION_DURATION):d(),this.hoverState=null,this)},c.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},c.prototype.hasContent=function(){return this.getTitle()},c.prototype.getPosition=function(b){b=b||this.$element;var c=b[0],d="BODY"==c.tagName,e=c.getBoundingClientRect();null==e.width&&(e=a.extend({},e,{width:e.right-e.left,height:e.bottom-e.top}));var f=d?{top:0,left:0}:b.offset(),g={scroll:d?document.documentElement.scrollTop||document.body.scrollTop:b.scrollTop()},h=d?{width:a(window).width(),height:a(window).height()}:null;return a.extend({},e,g,h,f)},c.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},c.prototype.getViewportAdjustedDelta=function(a,b,c,d){var e={top:0,left:0};if(!this.$viewport)return e;var f=this.options.viewport&&this.options.viewport.padding||0,g=this.getPosition(this.$viewport);if(/right|left/.test(a)){var h=b.top-f-g.scroll,i=b.top+f-g.scroll+d;h<g.top?e.top=g.top-h:i>g.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;j<g.left?e.left=g.left-j:k>g.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return 
this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.6",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.6",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var 
b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b<e[0])return this.activeTarget=null,this.clear();for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(void 0===e[a+1]||b<e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,this.clear();var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),
d.trigger("activate.bs.scrollspy")},b.prototype.clear=function(){a(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var d=a.fn.scrollspy;a.fn.scrollspy=c,a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=d,this},a(window).on("load.bs.scrollspy.data-api",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);c.call(b,b.data())})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new c(this)),"string"==typeof b&&e[b]()})}var c=function(b){this.element=a(b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a"),f=a.Event("hide.bs.tab",{relatedTarget:b[0]}),g=a.Event("show.bs.tab",{relatedTarget:e[0]});if(e.trigger(f),b.trigger(g),!g.isDefaultPrevented()&&!f.isDefaultPrevented()){var h=a(d);this.activate(b.closest("li"),c),this.activate(h,h.parent(),function(){e.trigger({type:"hidden.bs.tab",relatedTarget:b[0]}),b.trigger({type:"shown.bs.tab",relatedTarget:e[0]})})}}},c.prototype.activate=function(b,d,e){function f(){g.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.6",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return c>e?"top":!1;if("bottom"==this.affixed)return null!=c?e+this.unpin<=f.top?!1:"bottom":a-d>=e+g?!1:"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&c>=e?"top":null!=d&&i+j>=a-d?"bottom":!1},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return
this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); \ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js
new file mode 100644
index 0000000000..7f37b5d991
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js
@@ -0,0 +1,2 @@
/*! jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */
!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}function fe(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}ce.fn=ce.prototype={jquery:t,constructor:ce,length:0,toArray:function(){return ae.call(this)},get:function(e){return null==e?ae.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=ce.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return ce.each(this,e)},map:function(n){return this.pushStack(ce.map(this,function(e,t){return n.call(e,t,e)}))},slice:function(){return this.pushStack(ae.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},even:function(){return this.pushStack(ce.grep(this,function(e,t){return(t+1)%2}))},odd:function(){return this.pushStack(ce.grep(this,function(e,t){return t%2}))},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:s,sort:oe.sort,splice:oe.splice},ce.extend=ce.fn.extend=function(){var e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||v(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)r=e[t],"__proto__"!==t&&a!==r&&(l&&r&&(ce.isPlainObject(r)||(i=Array.isArray(r)))?(n=a[t],o=i&&!Array.isArray(n)?[]:i||ce.isPlainObject(n)?n:{},i=!1,a[t]=ce.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},ce.extend({expando:"jQuery"+(t+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==i.call(e))&&(!(t=r(e))||"function"==typeof(n=ue.call(t,"constructor")&&t.constructor)&&o.call(n)===a)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t,n){m(e,{nonce:t&&t.nonce},n)},each:function(e,t){var n,r=0;if(c(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},text:function(e){var t,n="",r=0,i=e.nodeType;if(!i)while(t=e[r++])n+=ce.text(t);return 1===i||11===i?e.textContent:9===i?e.documentElement.textContent:3===i||4===i?e.nodeValue:n},makeArray:function(e,t){var n=t||[];return null!=e&&(c(Object(e))?ce.merge(n,"string"==typeof e?[e]:e):s.call(n,e)),n},inArray:function(e,t,n){return
null==t?-1:se.call(t,e,n)},isXMLDoc:function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!l.test(t||n&&n.nodeName||"HTML")},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r=[],i=0,o=e.length,a=!n;i<o;i++)!t(e[i],i)!==a&&r.push(e[i]);return r},map:function(e,t,n){var r,i,o=0,a=[];if(c(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&a.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&a.push(i);return g(a)},guid:1,support:le}),"function"==typeof Symbol&&(ce.fn[Symbol.iterator]=oe[Symbol.iterator]),ce.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){n["[object "+t+"]"]=t.toLowerCase()});var pe=oe.pop,de=oe.sort,he=oe.splice,ge="[\\x20\\t\\r\\n\\f]",ve=new RegExp("^"+ge+"+|((?:^|[^\\\\])(?:\\\\.)*)"+ge+"+$","g");ce.contains=function(e,t){var n=t&&t.parentNode;return e===n||!(!n||1!==n.nodeType||!(e.contains?e.contains(n):e.compareDocumentPosition&&16&e.compareDocumentPosition(n)))};var f=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\x80-\uFFFF\w-]/g;function p(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e}ce.escapeSelector=function(e){return(e+"").replace(f,p)};var ye=C,me=s;!function(){var e,b,w,o,a,T,r,C,d,i,k=me,S=ce.expando,E=0,n=0,s=W(),c=W(),u=W(),h=W(),l=function(e,t){return e===t&&(a=!0),0},f="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",t="(?:\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+",p="\\["+ge+"*("+t+")(?:"+ge+"*([*^$|!~]?=)"+ge+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+t+"))|)"+ge+"*\\]",g=":("+t+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+p+")*)|.*)\\)|)",v=new RegExp(ge+"+","g"),y=new RegExp("^"+ge+"*,"+ge+"*"),m=new RegExp("^"+ge+"*([>+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return 
k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var t;r.appendChild(e).innerHTML="<a id='"+S+"' href='' disabled='disabled'></a><select id='"+S+"-\r\\' disabled='disabled'><option 
selected=''></option></select>",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0<I(t,T,null,[e]).length},I.contains=function(e,t){return(e.ownerDocument||e)!=T&&V(e),ce.contains(e,t)},I.attr=function(e,t){(e.ownerDocument||e)!=T&&V(e);var n=b.attrHandle[t.toLowerCase()],r=n&&ue.call(b.attrHandle,t.toLowerCase())?n(e,t,!C):void 0;return void 0!==r?r:e.getAttribute(t)},I.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},ce.uniqueSort=function(e){var t,n=[],r=0,i=0;if(a=!le.sortStable,o=!le.sortStable&&ae.call(e,0),de.call(e,l),a){while(t=e[i++])t===e[i]&&(r=n.push(i));while(r--)he.call(e,n[r],1)}return o=null,e},ce.fn.uniqueSort=function(){return this.pushStack(ce.uniqueSort(ae.apply(this)))},(b=ce.expr={cacheLength:50,createPseudo:F,match:D,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1<t.indexOf(i):"$="===r?i&&t.slice(-i.length)===i:"~="===r?-1<(" "+t.replace(v," ")+" ").indexOf(i):"|="===r&&(t===i||t.slice(0,i.length+1)===i+"-"))}},CHILD:function(d,e,t,h,g){var v="nth"!==d.slice(0,3),y="last"!==d.slice(-4),m="of-type"===e;return 
1===h&&0===g?function(e){return!!e.parentNode}:function(e,t,n){var r,i,o,a,s,u=v!==y?"nextSibling":"previousSibling",l=e.parentNode,c=m&&e.nodeName.toLowerCase(),f=!n&&!m,p=!1;if(l){if(v){while(u){o=e;while(o=o[u])if(m?fe(o,c):1===o.nodeType)return!1;s=u="only"===d&&!s&&"nextSibling"}return!0}if(s=[y?l.firstChild:l.lastChild],y&&f){p=(a=(r=(i=l[S]||(l[S]={}))[d]||[])[0]===E&&r[1])&&r[2],o=a&&l.childNodes[a];while(o=++a&&o&&o[u]||(p=a=0)||s.pop())if(1===o.nodeType&&++p&&o===e){i[d]=[E,a,p];break}}else if(f&&(p=a=(r=(i=e[S]||(e[S]={}))[d]||[])[0]===E&&r[1]),!1===p)while(o=++a&&o&&o[u]||(p=a=0)||s.pop())if((m?fe(o,c):1===o.nodeType)&&++p&&(f&&((i=o[S]||(o[S]={}))[d]=[E,p]),o===e))break;return(p-=g)===h||p%h==0&&0<=p/h}}},PSEUDO:function(e,o){var t,a=b.pseudos[e]||b.setFilters[e.toLowerCase()]||I.error("unsupported pseudo: "+e);return a[S]?a(o):1<a.length?(t=[e,e,"",o],b.setFilters.hasOwnProperty(e.toLowerCase())?F(function(e,t){var n,r=a(e,o),i=r.length;while(i--)e[n=se.call(e,r[i])]=!(t[n]=r[i])}):function(e){return a(e,0,t)}):a}},pseudos:{not:F(function(e){var r=[],i=[],s=ne(e.replace(ve,"$1"));return s[S]?F(function(e,t,n,r){var i,o=s(e,null,r,[]),a=e.length;while(a--)(i=o[a])&&(e[a]=!(t[a]=i))}):function(e,t,n){return r[0]=e,s(r,null,n,i),r[0]=null,!i.pop()}}),has:F(function(t){return function(e){return 0<I(t,e).length}}),contains:F(function(t){return t=t.replace(O,P),function(e){return-1<(e.textContent||ce.text(e)).indexOf(t)}}),lang:F(function(n){return A.test(n||"")||I.error("unsupported lang: "+n),n=n.replace(O,P).toLowerCase(),function(e){var t;do{if(t=C?e.lang:e.getAttribute("xml:lang")||e.getAttribute("lang"))return(t=t.toLowerCase())===n||0===t.indexOf(n+"-")}while((e=e.parentNode)&&1===e.nodeType);return!1}}),target:function(e){var t=ie.location&&ie.location.hash;return t&&t.slice(1)===e.id},root:function(e){return e===r},focus:function(e){return e===function(){try{return T.activeElement}catch(e){}}()&&T.hasFocus()&&!!(e.type||e.href||~e.tabIndex)},enabled:z(!1),disabled:z(!0),checked:function(e){return fe(e,"input")&&!!e.checked||fe(e,"option")&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!b.pseudos.empty(e)},header:function(e){return q.test(e.nodeName)},input:function(e){return N.test(e.nodeName)},button:function(e){return fe(e,"input")&&"button"===e.type||fe(e,"button")},text:function(e){var t;return fe(e,"input")&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:X(function(){return[0]}),last:X(function(e,t){return[t-1]}),eq:X(function(e,t,n){return[n<0?n+t:n]}),even:X(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:X(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:X(function(e,t,n){var r;for(r=n<0?n+t:t<n?t:n;0<=--r;)e.push(r);return e}),gt:X(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}}).pseudos.nth=b.pseudos.eq,{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})b.pseudos[e]=B(e);for(e in{submit:!0,reset:!0})b.pseudos[e]=_(e);function G(){}function Y(e,t){var n,r,i,o,a,s,u,l=c[e+" "];if(l)return t?0:l.slice(0);a=e,s=[],u=b.preFilter;while(a){for(o in n&&!(r=y.exec(a))||(r&&(a=a.slice(r[0].length)||a),s.push(i=[])),n=!1,(r=m.exec(a))&&(n=r.shift(),i.push({value:n,type:r[0].replace(ve," 
")}),a=a.slice(n.length)),b.filter)!(r=D[o].exec(a))||u[o]&&!(r=u[o](r))||(n=r.shift(),i.push({value:n,type:o,matches:r}),a=a.slice(n.length));if(!n)break}return t?a.length:a?I.error(e):c(e,s).slice(0)}function Q(e){for(var t=0,n=e.length,r="";t<n;t++)r+=e[t].value;return r}function J(a,e,t){var s=e.dir,u=e.next,l=u||s,c=t&&"parentNode"===l,f=n++;return e.first?function(e,t,n){while(e=e[s])if(1===e.nodeType||c)return a(e,t,n);return!1}:function(e,t,n){var r,i,o=[E,f];if(n){while(e=e[s])if((1===e.nodeType||c)&&a(e,t,n))return!0}else while(e=e[s])if(1===e.nodeType||c)if(i=e[S]||(e[S]={}),u&&fe(e,u))e=e[s]||e;else{if((r=i[l])&&r[0]===E&&r[1]===f)return o[2]=r[2];if((i[l]=o)[2]=a(e,t,n))return!0}return!1}}function K(i){return 1<i.length?function(e,t,n){var r=i.length;while(r--)if(!i[r](e,t,n))return!1;return!0}:i[0]}function Z(e,t,n,r,i){for(var o,a=[],s=0,u=e.length,l=null!=t;s<u;s++)(o=e[s])&&(n&&!n(o,r,i)||(a.push(o),l&&t.push(s)));return a}function ee(d,h,g,v,y,e){return v&&!v[S]&&(v=ee(v)),y&&!y[S]&&(y=ee(y,e)),F(function(e,t,n,r){var i,o,a,s,u=[],l=[],c=t.length,f=e||function(e,t,n){for(var r=0,i=t.length;r<i;r++)I(e,t[r],n);return n}(h||"*",n.nodeType?[n]:n,[]),p=!d||!e&&h?f:Z(f,u,d,n,r);if(g?g(p,s=y||(e?d:c||v)?[]:t,n,r):s=p,v){i=Z(s,l),v(i,[],n,r),o=i.length;while(o--)(a=i[o])&&(s[l[o]]=!(p[l[o]]=a))}if(e){if(y||d){if(y){i=[],o=s.length;while(o--)(a=s[o])&&i.push(p[o]=a);y(null,s=[],i,r)}o=s.length;while(o--)(a=s[o])&&-1<(i=y?se.call(e,a):u[o])&&(e[i]=!(t[i]=a))}}else s=Z(s===t?s.splice(c,s.length):s),y?y(null,t,s,r):k.apply(t,s)})}function te(e){for(var i,t,n,r=e.length,o=b.relative[e[0].type],a=o||b.relative[" "],s=o?1:0,u=J(function(e){return e===i},a,!0),l=J(function(e){return-1<se.call(i,e)},a,!0),c=[function(e,t,n){var r=!o&&(n||t!=w)||((i=t).nodeType?u(e,t,n):l(e,t,n));return i=null,r}];s<r;s++)if(t=b.relative[e[s].type])c=[J(K(c),t)];else{if((t=b.filter[e[s].type].apply(null,e[s].matches))[S]){for(n=++s;n<r;n++)if(b.relative[e[n].type])break;return ee(1<s&&K(c),1<s&&Q(e.slice(0,s-1).concat({value:" "===e[s-2].type?"*":""})).replace(ve,"$1"),t,s<n&&te(e.slice(s,n)),n<r&&te(e=e.slice(n)),n<r&&Q(e))}c.push(t)}return K(c)}function ne(e,t){var n,v,y,m,x,r,i=[],o=[],a=u[e+" "];if(!a){t||(t=Y(e)),n=t.length;while(n--)(a=te(t[n]))[S]?i.push(a):o.push(a);(a=u(e,(v=o,m=0<(y=i).length,x=0<v.length,r=function(e,t,n,r,i){var o,a,s,u=0,l="0",c=e&&[],f=[],p=w,d=e||x&&b.find.TAG("*",i),h=E+=null==p?1:Math.random()||.1,g=d.length;for(i&&(w=t==T||t||i);l!==g&&null!=(o=d[l]);l++){if(x&&o){a=0,t||o.ownerDocument==T||(V(o),n=!C);while(s=v[a++])if(s(o,t||T,n)){k.call(r,o);break}i&&(E=h)}m&&((o=!s&&o)&&u--,e&&c.push(o))}if(u+=l,m&&l!==u){a=0;while(s=y[a++])s(c,f,t,n);if(e){if(0<u)while(l--)c[l]||f[l]||(f[l]=pe.call(r));f=Z(f)}k.apply(r,f),i&&!e&&0<f.length&&1<u+y.length&&ce.uniqueSort(r)}return i&&(E=h,w=p),c},m?F(r):r))).selector=e}return a}function re(e,t,n,r){var i,o,a,s,u,l="function"==typeof e&&e,c=!r&&Y(e=l.selector||e);if(n=n||[],1===c.length){if(2<(o=c[0]=c[0].slice(0)).length&&"ID"===(a=o[0]).type&&9===t.nodeType&&C&&b.relative[o[1].type]){if(!(t=(b.find.ID(a.matches[0].replace(O,P),t)||[])[0]))return n;l&&(t=t.parentNode),e=e.slice(o.shift().value.length)}i=D.needsContext.test(e)?0:o.length;while(i--){if(a=o[i],b.relative[s=a.type])break;if((u=b.find[s])&&(r=u(a.matches[0].replace(O,P),H.test(o[0].type)&&U(t.parentNode)||t))){if(o.splice(i,1),!(e=r.length&&Q(o)))return 
k.apply(n,r),n;break}}}return(l||ne(e,c))(r,t,!C,n,!t||H.test(e)&&U(t.parentNode)||t),n}G.prototype=b.filters=b.pseudos,b.setFilters=new G,le.sortStable=S.split("").sort(l).join("")===S,V(),le.sortDetached=$(function(e){return 1&e.compareDocumentPosition(T.createElement("fieldset"))}),ce.find=I,ce.expr[":"]=ce.expr.pseudos,ce.unique=ce.uniqueSort,I.compile=ne,I.select=re,I.setDocument=V,I.tokenize=Y,I.escape=ce.escapeSelector,I.getText=ce.text,I.isXML=ce.isXMLDoc,I.selectors=ce.expr,I.support=ce.support,I.uniqueSort=ce.uniqueSort}();var d=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&ce(e).is(n))break;r.push(e)}return r},h=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},b=ce.expr.match.needsContext,w=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1<se.call(n,e)!==r}):ce.filter(n,e,r)}ce.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?ce.find.matchesSelector(r,e)?[r]:[]:ce.find.matches(e,ce.grep(t,function(e){return 1===e.nodeType}))},ce.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(ce(e).filter(function(){for(t=0;t<r;t++)if(ce.contains(i[t],this))return!0}));for(n=this.pushStack([]),t=0;t<r;t++)ce.find(e,i[t],n);return 1<r?ce.uniqueSort(n):n},filter:function(e){return this.pushStack(T(this,e||[],!1))},not:function(e){return this.pushStack(T(this,e||[],!0))},is:function(e){return!!T(this,"string"==typeof e&&b.test(e)?ce(e):e||[],!1).length}});var k,S=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var e=0;e<n;e++)if(ce.contains(this,t[e]))return!0})},closest:function(e,t){var n,r=0,i=this.length,o=[],a="string"!=typeof e&&ce(e);if(!b.test(e))for(;r<i;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(a?-1<a.index(n):1===n.nodeType&&ce.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(1<o.length?ce.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?se.call(ce(e),this[0]):se.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(ce.uniqueSort(ce.merge(this.get(),ce(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),ce.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return d(e,"parentNode")},parentsUntil:function(e,t,n){return 
d(e,"parentNode",n)},next:function(e){return A(e,"nextSibling")},prev:function(e){return A(e,"previousSibling")},nextAll:function(e){return d(e,"nextSibling")},prevAll:function(e){return d(e,"previousSibling")},nextUntil:function(e,t,n){return d(e,"nextSibling",n)},prevUntil:function(e,t,n){return d(e,"previousSibling",n)},siblings:function(e){return h((e.parentNode||{}).firstChild,e)},children:function(e){return h(e.firstChild)},contents:function(e){return null!=e.contentDocument&&r(e.contentDocument)?e.contentDocument:(fe(e,"template")&&(e=e.content||e),ce.merge([],e.childNodes))}},function(r,i){ce.fn[r]=function(e,t){var n=ce.map(this,i,e);return"Until"!==r.slice(-5)&&(t=e),t&&"string"==typeof t&&(n=ce.filter(t,n)),1<this.length&&(j[r]||ce.uniqueSort(n),E.test(r)&&n.reverse()),this.pushStack(n)}});var D=/[^\x20\t\r\n\f]+/g;function N(e){return e}function q(e){throw e}function L(e,t,n,r){var i;try{e&&v(i=e.promise)?i.call(e).done(t).fail(n):e&&v(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}ce.Callbacks=function(r){var e,n;r="string"==typeof r?(e=r,n={},ce.each(e.match(D)||[],function(e,t){n[t]=!0}),n):ce.extend({},r);var i,t,o,a,s=[],u=[],l=-1,c=function(){for(a=a||r.once,o=i=!0;u.length;l=-1){t=u.shift();while(++l<s.length)!1===s[l].apply(t[0],t[1])&&r.stopOnFalse&&(l=s.length,t=!1)}r.memory||(t=!1),i=!1,a&&(s=t?[]:"")},f={add:function(){return s&&(t&&!i&&(l=s.length-1,u.push(t)),function n(e){ce.each(e,function(e,t){v(t)?r.unique&&f.has(t)||s.push(t):t&&t.length&&"string"!==x(t)&&n(t)})}(arguments),t&&!i&&c()),this},remove:function(){return ce.each(arguments,function(e,t){var n;while(-1<(n=ce.inArray(t,s,n)))s.splice(n,1),n<=l&&l--}),this},has:function(e){return e?-1<ce.inArray(e,s):0<s.length},empty:function(){return s&&(s=[]),this},disable:function(){return a=u=[],s=t="",this},disabled:function(){return!s},lock:function(){return a=u=[],t||i||(s=t=""),this},locked:function(){return!!a},fireWith:function(e,t){return a||(t=[e,(t=t||[]).slice?t.slice():t],u.push(t),i||c()),this},fire:function(){return f.fireWith(this,arguments),this},fired:function(){return!!o}};return f},ce.extend({Deferred:function(e){var o=[["notify","progress",ce.Callbacks("memory"),ce.Callbacks("memory"),2],["resolve","done",ce.Callbacks("once memory"),ce.Callbacks("once memory"),0,"resolved"],["reject","fail",ce.Callbacks("once memory"),ce.Callbacks("once memory"),1,"rejected"]],i="pending",a={state:function(){return i},always:function(){return s.done(arguments).fail(arguments),this},"catch":function(e){return a.then(null,e)},pipe:function(){var i=arguments;return ce.Deferred(function(r){ce.each(o,function(e,t){var n=v(i[t[4]])&&i[t[4]];s[t[1]](function(){var e=n&&n.apply(this,arguments);e&&v(e.promise)?e.promise().progress(r.notify).done(r.resolve).fail(r.reject):r[t[0]+"With"](this,n?[e]:arguments)})}),i=null}).promise()},then:function(t,n,r){var u=0;function l(i,o,a,s){return function(){var n=this,r=arguments,e=function(){var e,t;if(!(i<u)){if((e=a.apply(n,r))===o.promise())throw new TypeError("Thenable self-resolution");t=e&&("object"==typeof e||"function"==typeof e)&&e.then,v(t)?s?t.call(e,l(u,o,N,s),l(u,o,q,s)):(u++,t.call(e,l(u,o,N,s),l(u,o,q,s),l(u,o,N,o.notifyWith))):(a!==N&&(n=void 0,r=[e]),(s||o.resolveWith)(n,r))}},t=s?e:function(){try{e()}catch(e){ce.Deferred.exceptionHook&&ce.Deferred.exceptionHook(e,t.error),u<=i+1&&(a!==q&&(n=void 
0,r=[e]),o.rejectWith(n,r))}};i?t():(ce.Deferred.getErrorHook?t.error=ce.Deferred.getErrorHook():ce.Deferred.getStackHook&&(t.error=ce.Deferred.getStackHook()),ie.setTimeout(t))}}return ce.Deferred(function(e){o[0][3].add(l(0,e,v(r)?r:N,e.notifyWith)),o[1][3].add(l(0,e,v(t)?t:N)),o[2][3].add(l(0,e,v(n)?n:q))}).promise()},promise:function(e){return null!=e?ce.extend(e,a):a}},s={};return ce.each(o,function(e,t){var n=t[2],r=t[5];a[t[1]]=n.add,r&&n.add(function(){i=r},o[3-e][2].disable,o[3-e][3].disable,o[0][2].lock,o[0][3].lock),n.add(t[3].fire),s[t[0]]=function(){return s[t[0]+"With"](this===s?void 0:this,arguments),this},s[t[0]+"With"]=n.fireWith}),a.promise(s),e&&e.call(s,s),s},when:function(e){var n=arguments.length,t=n,r=Array(t),i=ae.call(arguments),o=ce.Deferred(),a=function(t){return function(e){r[t]=this,i[t]=1<arguments.length?ae.call(arguments):e,--n||o.resolveWith(r,i)}};if(n<=1&&(L(e,o.done(a(t)).resolve,o.reject,!n),"pending"===o.state()||v(i[t]&&i[t].then)))return o.then();while(t--)L(i[t],a(t),o.reject);return o.promise()}});var H=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;ce.Deferred.exceptionHook=function(e,t){ie.console&&ie.console.warn&&e&&H.test(e.name)&&ie.console.warn("jQuery.Deferred exception: "+e.message,e.stack,t)},ce.readyException=function(e){ie.setTimeout(function(){throw e})};var O=ce.Deferred();function P(){C.removeEventListener("DOMContentLoaded",P),ie.removeEventListener("load",P),ce.ready()}ce.fn.ready=function(e){return O.then(e)["catch"](function(e){ce.readyException(e)}),this},ce.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--ce.readyWait:ce.isReady)||(ce.isReady=!0)!==e&&0<--ce.readyWait||O.resolveWith(C,[ce])}}),ce.ready.then=O.then,"complete"===C.readyState||"loading"!==C.readyState&&!C.documentElement.doScroll?ie.setTimeout(ce.ready):(C.addEventListener("DOMContentLoaded",P),ie.addEventListener("load",P));var M=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===x(n))for(s in i=!0,n)M(e,t,s,n[s],!0,o,a);else if(void 0!==r&&(i=!0,v(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(ce(e),n)})),t))for(;s<u;s++)t(e[s],n,a?r:r.call(e[s],s,t(e[s],n)));return i?e:l?t.call(e):u?t(e[0],n):o},R=/^-ms-/,I=/-([a-z])/g;function W(e,t){return t.toUpperCase()}function F(e){return e.replace(R,"ms-").replace(I,W)}var $=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function B(){this.expando=ce.expando+B.uid++}B.uid=1,B.prototype={cache:function(e){var t=e[this.expando];return t||(t={},$(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var r,i=this.cache(e);if("string"==typeof t)i[F(t)]=n;else for(r in t)i[F(r)]=t[r];return i},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][F(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,r=e[this.expando];if(void 0!==r){if(void 0!==t){n=(t=Array.isArray(t)?t.map(F):(t=F(t))in r?[t]:t.match(D)||[]).length;while(n--)delete r[t[n]]}(void 0===t||ce.isEmptyObject(r))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!ce.isEmptyObject(t)}};var _=new B,z=new B,X=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,U=/[A-Z]/g;function V(e,t,n){var r,i;if(void 
0===n&&1===e.nodeType)if(r="data-"+t.replace(U,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(r))){try{n="true"===(i=n)||"false"!==i&&("null"===i?null:i===+i+""?+i:X.test(i)?JSON.parse(i):i)}catch(e){}z.set(e,t,n)}else n=void 0;return n}ce.extend({hasData:function(e){return z.hasData(e)||_.hasData(e)},data:function(e,t,n){return z.access(e,t,n)},removeData:function(e,t){z.remove(e,t)},_data:function(e,t,n){return _.access(e,t,n)},_removeData:function(e,t){_.remove(e,t)}}),ce.fn.extend({data:function(n,e){var t,r,i,o=this[0],a=o&&o.attributes;if(void 0===n){if(this.length&&(i=z.get(o),1===o.nodeType&&!_.get(o,"hasDataAttrs"))){t=a.length;while(t--)a[t]&&0===(r=a[t].name).indexOf("data-")&&(r=F(r.slice(5)),V(o,r,i[r]));_.set(o,"hasDataAttrs",!0)}return i}return"object"==typeof n?this.each(function(){z.set(this,n)}):M(this,function(e){var t;if(o&&void 0===e)return void 0!==(t=z.get(o,n))?t:void 0!==(t=V(o,n))?t:void 0;this.each(function(){z.set(this,n,e)})},null,e,1<arguments.length,null,!0)},removeData:function(e){return this.each(function(){z.remove(this,e)})}}),ce.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=_.get(e,t),n&&(!r||Array.isArray(n)?r=_.access(e,t,ce.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=ce.queue(e,t),r=n.length,i=n.shift(),o=ce._queueHooks(e,t);"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,function(){ce.dequeue(e,t)},o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return _.get(e,n)||_.access(e,n,{empty:ce.Callbacks("once memory").add(function(){_.remove(e,[t+"queue",n])})})}}),ce.fn.extend({queue:function(t,n){var e=2;return"string"!=typeof t&&(n=t,t="fx",e--),arguments.length<e?ce.queue(this[0],t):void 0===n?this:this.each(function(){var e=ce.queue(this,t,n);ce._queueHooks(this,t),"fx"===t&&"inprogress"!==e[0]&&ce.dequeue(this,t)})},dequeue:function(e){return this.each(function(){ce.dequeue(this,e)})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,r=1,i=ce.Deferred(),o=this,a=this.length,s=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=void 0),e=e||"fx";while(a--)(n=_.get(o[a],e+"queueHooks"))&&n.empty&&(r++,n.empty.add(s));return s(),i.promise(t)}});var G=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,Y=new RegExp("^(?:([+-])=|)("+G+")([a-z%]*)$","i"),Q=["Top","Right","Bottom","Left"],J=C.documentElement,K=function(e){return ce.contains(e.ownerDocument,e)},Z={composed:!0};J.getRootNode&&(K=function(e){return ce.contains(e.ownerDocument,e)||e.getRootNode(Z)===e.ownerDocument});var ee=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&K(e)&&"none"===ce.css(e,"display")};function te(e,t,n,r){var i,o,a=20,s=r?function(){return r.cur()}:function(){return ce.css(e,t,"")},u=s(),l=n&&n[3]||(ce.cssNumber[t]?"":"px"),c=e.nodeType&&(ce.cssNumber[t]||"px"!==l&&+u)&&Y.exec(ce.css(e,t));if(c&&c[3]!==l){u/=2,l=l||c[3],c=+u||1;while(a--)ce.style(e,t,c+l),(1-o)*(1-(o=s()/u||.5))<=0&&(a=0),c/=o;c*=2,ce.style(e,t,c+l),n=n||[]}return n&&(c=+c||+u||0,i=n[1]?c+(n[1]+1)*n[2]:+n[2],r&&(r.unit=l,r.start=c,r.end=i)),i}var ne={};function re(e,t){for(var n,r,i,o,a,s,u,l=[],c=0,f=e.length;c<f;c++)(r=e[c]).style&&(n=r.style.display,t?("none"===n&&(l[c]=_.get(r,"display")||null,l[c]||(r.style.display="")),""===r.style.display&&ee(r)&&(l[c]=(u=a=o=void 
0,a=(i=r).ownerDocument,s=i.nodeName,(u=ne[s])||(o=a.body.appendChild(a.createElement(s)),u=ce.css(o,"display"),o.parentNode.removeChild(o),"none"===u&&(u="block"),ne[s]=u)))):"none"!==n&&(l[c]="none",_.set(r,"display",n)));for(c=0;c<f;c++)null!=l[c]&&(e[c].style.display=l[c]);return e}ce.fn.extend({show:function(){return re(this,!0)},hide:function(){return re(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){ee(this)?ce(this).show():ce(this).hide()})}});var xe,be,we=/^(?:checkbox|radio)$/i,Te=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="<textarea>x</textarea>",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="<option></option>",le.option=!!xe.lastChild;var ke={thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n<r;n++)_.set(e[n],"globalEval",!t||_.get(t[n],"globalEval"))}ke.tbody=ke.tfoot=ke.colgroup=ke.caption=ke.thead,ke.th=ke.td,le.option||(ke.optgroup=ke.option=[1,"<select multiple='multiple'>","</select>"]);var je=/<|&#?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d<h;d++)if((o=e[d])||0===o)if("object"===x(o))ce.merge(p,o.nodeType?[o]:o);else if(je.test(o)){a=a||f.appendChild(t.createElement("div")),s=(Te.exec(o)||["",""])[1].toLowerCase(),u=ke[s]||ke._default,a.innerHTML=u[1]+ce.htmlPrefilter(o)+u[2],c=u[0];while(c--)a=a.lastChild;ce.merge(p,a.childNodes),(a=f.firstChild).textContent=""}else p.push(t.createTextNode(o));f.textContent="",d=0;while(o=p[d++])if(r&&-1<ce.inArray(o,r))i&&i.push(o);else if(l=K(o),a=Se(f.appendChild(o),"script"),l&&Ee(a),n){c=0;while(o=a[c++])Ce.test(o.type||"")&&n.push(o)}return f}var De=/^([^.]*)(?:\.(.+)|)/;function Ne(){return!0}function qe(){return!1}function Le(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Le(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=qe;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return ce().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=ce.guid++)),e.each(function(){ce.event.add(this,t,i,r,n)})}function He(e,r,t){t?(_.set(e,r,!1),ce.event.add(e,r,{namespace:!1,handler:function(e){var t,n=_.get(this,r);if(1&e.isTrigger&&this[r]){if(n)(ce.event.special[r]||{}).delegateType&&e.stopPropagation();else if(n=ae.call(arguments),_.set(this,r,n),this[r](),t=_.get(this,r),_.set(this,r,!1),n!==t)return e.stopImmediatePropagation(),e.preventDefault(),t}else n&&(_.set(this,r,ce.event.trigger(n[0],n.slice(1),this)),e.stopPropagation(),e.isImmediatePropagationStopped=Ne)}})):void 0===_.get(e,r)&&ce.event.add(e,r,Ne)}ce.event={global:{},add:function(t,e,n,r,i){var 
o,a,s,u,l,c,f,p,d,h,g,v=_.get(t);if($(t)){n.handler&&(n=(o=n).handler,i=o.selector),i&&ce.find.matchesSelector(J,i),n.guid||(n.guid=ce.guid++),(u=v.events)||(u=v.events=Object.create(null)),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof ce&&ce.event.triggered!==e.type?ce.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(D)||[""]).length;while(l--)d=g=(s=De.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=ce.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=ce.event.special[d]||{},c=ce.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&ce.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),ce.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=_.hasData(e)&&_.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(D)||[""]).length;while(l--)if(d=g=(s=De.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=ce.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||ce.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)ce.event.remove(e,d+t[l],n,r,!0);ce.isEmptyObject(u)&&_.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=new Array(arguments.length),u=ce.event.fix(e),l=(_.get(this,"events")||Object.create(null))[u.type]||[],c=ce.event.special[u.type]||{};for(s[0]=u,t=1;t<arguments.length;t++)s[t]=arguments[t];if(u.delegateTarget=this,!c.preDispatch||!1!==c.preDispatch.call(this,u)){a=ce.event.handlers.call(this,u,l),t=0;while((i=a[t++])&&!u.isPropagationStopped()){u.currentTarget=i.elem,n=0;while((o=i.handlers[n++])&&!u.isImmediatePropagationStopped())u.rnamespace&&!1!==o.namespace&&!u.rnamespace.test(o.namespace)||(u.handleObj=o,u.data=o.data,void 0!==(r=((ce.event.special[o.origType]||{}).handle||o.handler).apply(i.elem,s))&&!1===(u.result=r)&&(u.preventDefault(),u.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,u),u.result}},handlers:function(e,t){var n,r,i,o,a,s=[],u=t.delegateCount,l=e.target;if(u&&l.nodeType&&!("click"===e.type&&1<=e.button))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n<u;n++)void 0===a[i=(r=t[n]).selector+" "]&&(a[i]=r.needsContext?-1<ce(i,this).index(l):ce.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u<t.length&&s.push({elem:l,handlers:t.slice(u)}),s},addProp:function(t,e){Object.defineProperty(ce.Event.prototype,t,{enumerable:!0,configurable:!0,get:v(e)?function(){if(this.originalEvent)return e(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[t]},set:function(e){Object.defineProperty(this,t,{enumerable:!0,configurable:!0,writable:!0,value:e})}})},fix:function(e){return e[ce.expando]?e:new ce.Event(e)},special:{load:{noBubble:!0},click:{setup:function(e){var t=this||e;return we.test(t.type)&&t.click&&fe(t,"input")&&He(t,"click",!0),!1},trigger:function(e){var t=this||e;return 
we.test(t.type)&&t.click&&fe(t,"input")&&He(t,"click"),!0},_default:function(e){var t=e.target;return we.test(t.type)&&t.click&&fe(t,"input")&&_.get(t,"click")||fe(t,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},ce.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},ce.Event=function(e,t){if(!(this instanceof ce.Event))return new ce.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 0===e.defaultPrevented&&!1===e.returnValue?Ne:qe,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&ce.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[ce.expando]=!0},ce.Event.prototype={constructor:ce.Event,isDefaultPrevented:qe,isPropagationStopped:qe,isImmediatePropagationStopped:qe,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=Ne,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=Ne,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var e=this.originalEvent;this.isImmediatePropagationStopped=Ne,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},ce.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,"char":!0,code:!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:!0},ce.event.addProp),ce.each({focus:"focusin",blur:"focusout"},function(r,i){function o(e){if(C.documentMode){var t=_.get(this,"handle"),n=ce.event.fix(e);n.type="focusin"===e.type?"focus":"blur",n.isSimulated=!0,t(e),n.target===n.currentTarget&&t(n)}else ce.event.simulate(i,e.target,ce.event.fix(e))}ce.event.special[r]={setup:function(){var e;if(He(this,r,!0),!C.documentMode)return!1;(e=_.get(this,i))||this.addEventListener(i,o),_.set(this,i,(e||0)+1)},trigger:function(){return He(this,r),!0},teardown:function(){var e;if(!C.documentMode)return!1;(e=_.get(this,i)-1)?_.set(this,i,e):(this.removeEventListener(i,o),_.remove(this,i))},_default:function(e){return _.get(e.target,r)},delegateType:i},ce.event.special[i]={setup:function(){var e=this.ownerDocument||this.document||this,t=C.documentMode?this:e,n=_.get(t,i);n||(C.documentMode?this.addEventListener(i,o):e.addEventListener(r,o,!0)),_.set(t,i,(n||0)+1)},teardown:function(){var e=this.ownerDocument||this.document||this,t=C.documentMode?this:e,n=_.get(t,i)-1;n?_.set(t,i,n):(C.documentMode?this.removeEventListener(i,o):e.removeEventListener(r,o,!0),_.remove(t,i))}}}),ce.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(e,i){ce.event.special[e]={delegateType:i,bindType:i,handle:function(e){var t,n=e.relatedTarget,r=e.handleObj;return n&&(n===this||ce.contains(this,n))||(e.type=r.origType,t=r.handler.apply(this,arguments),e.type=i),t}}}),ce.fn.extend({on:function(e,t,n,r){return Le(this,e,t,n,r)},one:function(e,t,n,r){return Le(this,e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return r=e.handleObj,ce(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in 
e)this.off(i,t,e[i]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=qe),this.each(function(){ce.event.remove(this,e,n,t)})}});var Oe=/<script|<style|<link/i,Pe=/checked\s*(?:[^=]|=\s*.checked.)/i,Me=/^\s*<!\[CDATA\[|\]\]>\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n<r;n++)ce.event.add(t,i,s[i][n]);z.hasData(e)&&(o=z.access(e),a=ce.extend({},o),z.set(t,a))}}function $e(n,r,i,o){r=g(r);var e,t,a,s,u,l,c=0,f=n.length,p=f-1,d=r[0],h=v(d);if(h||1<f&&"string"==typeof d&&!le.checkClone&&Pe.test(d))return n.each(function(e){var t=n.eq(e);h&&(r[0]=d.call(this,e,t.html())),$e(t,r,i,o)});if(f&&(t=(e=Ae(r,n[0].ownerDocument,!1,n,o)).firstChild,1===e.childNodes.length&&(e=t),t||o)){for(s=(a=ce.map(Se(e,"script"),Ie)).length;c<f;c++)u=e,c!==p&&(u=ce.clone(u,!0,!0),s&&ce.merge(a,Se(u,"script"))),i.call(n[c],u,c);if(s)for(l=a[a.length-1].ownerDocument,ce.map(a,We),c=0;c<s;c++)u=a[c],Ce.test(u.type||"")&&!_.access(u,"globalEval")&&ce.contains(l,u)&&(u.src&&"module"!==(u.type||"").toLowerCase()?ce._evalUrl&&!u.noModule&&ce._evalUrl(u.src,{nonce:u.nonce||u.getAttribute("nonce")},l):m(u.textContent.replace(Me,""),u,l))}return n}function Be(e,t,n){for(var r,i=t?ce.filter(t,e):e,o=0;null!=(r=i[o]);o++)n||1!==r.nodeType||ce.cleanData(Se(r)),r.parentNode&&(n&&K(r)&&Ee(Se(r,"script")),r.parentNode.removeChild(r));return e}ce.extend({htmlPrefilter:function(e){return e},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=K(e);if(!(le.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||ce.isXMLDoc(e)))for(a=Se(c),r=0,i=(o=Se(e)).length;r<i;r++)s=o[r],u=a[r],void 0,"input"===(l=u.nodeName.toLowerCase())&&we.test(s.type)?u.checked=s.checked:"input"!==l&&"textarea"!==l||(u.defaultValue=s.defaultValue);if(t)if(n)for(o=o||Se(e),a=a||Se(c),r=0,i=o.length;r<i;r++)Fe(o[r],a[r]);else Fe(e,c);return 0<(a=Se(c,"script")).length&&Ee(a,!f&&Se(e,"script")),c},cleanData:function(e){for(var t,n,r,i=ce.event.special,o=0;void 0!==(n=e[o]);o++)if($(n)){if(t=n[_.expando]){if(t.events)for(r in t.events)i[r]?ce.event.remove(n,r):ce.removeEvent(n,r,t.handle);n[_.expando]=void 0}n[z.expando]&&(n[z.expando]=void 0)}}}),ce.fn.extend({detach:function(e){return Be(this,e,!0)},remove:function(e){return Be(this,e)},text:function(e){return M(this,function(e){return void 0===e?ce.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return $e(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Re(this,e).appendChild(e)})},prepend:function(){return $e(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Re(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return $e(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return $e(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(ce.cleanData(Se(e,!1)),e.textContent="");return this},clone:function(e,t){return 
e=null!=e&&e,t=null==t?e:t,this.map(function(){return ce.clone(this,e,t)})},html:function(e){return M(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!Oe.test(e)&&!ke[(Te.exec(e)||["",""])[1].toLowerCase()]){e=ce.htmlPrefilter(e);try{for(;n<r;n++)1===(t=this[n]||{}).nodeType&&(ce.cleanData(Se(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var n=[];return $e(this,arguments,function(e){var t=this.parentNode;ce.inArray(this,n)<0&&(ce.cleanData(Se(this)),t&&t.replaceChild(e,this))},n)}}),ce.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,a){ce.fn[e]=function(e){for(var t,n=[],r=ce(e),i=r.length-1,o=0;o<=i;o++)t=o===i?this:this.clone(!0),ce(r[o])[a](t),s.apply(n,t.get());return this.pushStack(n)}});var _e=new RegExp("^("+G+")(?!px)[a-z%]+$","i"),ze=/^--/,Xe=function(e){var t=e.ownerDocument.defaultView;return t&&t.opener||(t=ie),t.getComputedStyle(e)},Ue=function(e,t,n){var r,i,o={};for(i in t)o[i]=e.style[i],e.style[i]=t[i];for(i in r=n.call(e),t)e.style[i]=o[i];return r},Ve=new RegExp(Q.join("|"),"i");function Ge(e,t,n){var r,i,o,a,s=ze.test(t),u=e.style;return(n=n||Xe(e))&&(a=n.getPropertyValue(t)||n[t],s&&a&&(a=a.replace(ve,"$1")||void 0),""!==a||K(e)||(a=ce.style(e,t)),!le.pixelBoxStyles()&&_e.test(a)&&Ve.test(t)&&(r=u.width,i=u.minWidth,o=u.maxWidth,u.minWidth=u.maxWidth=u.width=a,a=n.width,u.width=r,u.minWidth=i,u.maxWidth=o)),void 0!==a?a+"":a}function Ye(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}!function(){function e(){if(l){u.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",l.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",J.appendChild(u).appendChild(l);var e=ie.getComputedStyle(l);n="1%"!==e.top,s=12===t(e.marginLeft),l.style.right="60%",o=36===t(e.right),r=36===t(e.width),l.style.position="absolute",i=12===t(l.offsetWidth/3),J.removeChild(u),l=null}}function t(e){return Math.round(parseFloat(e))}var n,r,i,o,a,s,u=C.createElement("div"),l=C.createElement("div");l.style&&(l.style.backgroundClip="content-box",l.cloneNode(!0).style.backgroundClip="",le.clearCloneStyle="content-box"===l.style.backgroundClip,ce.extend(le,{boxSizingReliable:function(){return e(),r},pixelBoxStyles:function(){return e(),o},pixelPosition:function(){return e(),n},reliableMarginLeft:function(){return e(),s},scrollboxSize:function(){return e(),i},reliableTrDimensions:function(){var e,t,n,r;return null==a&&(e=C.createElement("table"),t=C.createElement("tr"),n=C.createElement("div"),e.style.cssText="position:absolute;left:-11111px;border-collapse:separate",t.style.cssText="box-sizing:content-box;border:1px solid",t.style.height="1px",n.style.height="9px",n.style.display="block",J.appendChild(e).appendChild(t).appendChild(n),r=ie.getComputedStyle(t),a=parseInt(r.height,10)+parseInt(r.borderTopWidth,10)+parseInt(r.borderBottomWidth,10)===t.offsetHeight,J.removeChild(e)),a}}))}();var Qe=["Webkit","Moz","ms"],Je=C.createElement("div").style,Ke={};function Ze(e){var t=ce.cssProps[e]||Ke[e];return t||(e in Je?e:Ke[e]=function(e){var t=e[0].toUpperCase()+e.slice(1),n=Qe.length;while(n--)if((e=Qe[n]+t)in Je)return e}(e)||e)}var 
et=/^(none|table(?!-c[ea]).+)/,tt={position:"absolute",visibility:"hidden",display:"block"},nt={letterSpacing:"0",fontWeight:"400"};function rt(e,t,n){var r=Y.exec(t);return r?Math.max(0,r[2]-(n||0))+(r[3]||"px"):t}function it(e,t,n,r,i,o){var a="width"===t?1:0,s=0,u=0,l=0;if(n===(r?"border":"content"))return 0;for(;a<4;a+=2)"margin"===n&&(l+=ce.css(e,n+Q[a],!0,i)),r?("content"===n&&(u-=ce.css(e,"padding"+Q[a],!0,i)),"margin"!==n&&(u-=ce.css(e,"border"+Q[a]+"Width",!0,i))):(u+=ce.css(e,"padding"+Q[a],!0,i),"padding"!==n?u+=ce.css(e,"border"+Q[a]+"Width",!0,i):s+=ce.css(e,"border"+Q[a]+"Width",!0,i));return!r&&0<=o&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))||0),u+l}function ot(e,t,n){var r=Xe(e),i=(!le.boxSizingReliable()||n)&&"border-box"===ce.css(e,"boxSizing",!1,r),o=i,a=Ge(e,t,r),s="offset"+t[0].toUpperCase()+t.slice(1);if(_e.test(a)){if(!n)return a;a="auto"}return(!le.boxSizingReliable()&&i||!le.reliableTrDimensions()&&fe(e,"tr")||"auto"===a||!parseFloat(a)&&"inline"===ce.css(e,"display",!1,r))&&e.getClientRects().length&&(i="border-box"===ce.css(e,"boxSizing",!1,r),(o=s in e)&&(a=e[s])),(a=parseFloat(a)||0)+it(e,t,n||(i?"border":"content"),o,r,a)+"px"}function at(e,t,n,r,i){return new at.prototype.init(e,t,n,r,i)}ce.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Ge(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,aspectRatio:!0,borderImageSlice:!0,columnCount:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,gridArea:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnStart:!0,gridRow:!0,gridRowEnd:!0,gridRowStart:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,scale:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeMiterlimit:!0,strokeOpacity:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=F(t),u=ze.test(t),l=e.style;if(u||(t=Ze(s)),a=ce.cssHooks[t]||ce.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"===(o=typeof n)&&(i=Y.exec(n))&&i[1]&&(n=te(e,t,i),o="number"),null!=n&&n==n&&("number"!==o||u||(n+=i&&i[3]||(ce.cssNumber[s]?"":"px")),le.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=F(t);return ze.test(t)||(t=Ze(s)),(a=ce.cssHooks[t]||ce.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=Ge(e,t,r)),"normal"===i&&t in nt&&(i=nt[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),ce.each(["height","width"],function(e,u){ce.cssHooks[u]={get:function(e,t,n){if(t)return!et.test(ce.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?ot(e,u,n):Ue(e,tt,function(){return ot(e,u,n)})},set:function(e,t,n){var r,i=Xe(e),o=!le.scrollboxSize()&&"absolute"===i.position,a=(o||n)&&"border-box"===ce.css(e,"boxSizing",!1,i),s=n?it(e,u,n,a,i):0;return a&&o&&(s-=Math.ceil(e["offset"+u[0].toUpperCase()+u.slice(1)]-parseFloat(i[u])-it(e,u,"border",!1,i)-.5)),s&&(r=Y.exec(t))&&"px"!==(r[3]||"px")&&(e.style[u]=t,t=ce.css(e,u)),rt(0,t,s)}}}),ce.cssHooks.marginLeft=Ye(le.reliableMarginLeft,function(e,t){if(t)return(parseFloat(Ge(e,"marginLeft"))||e.getBoundingClientRect().left-Ue(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),ce.each({margin:"",padding:"",border:"Width"},function(i,o){ce.cssHooks[i+o]={expand:function(e){for(var t=0,n={},r="string"==typeof e?e.split(" "):[e];t<4;t++)n[i+Q[t]+o]=r[t]||r[t-2]||r[0];return 
n}},"margin"!==i&&(ce.cssHooks[i+o].set=rt)}),ce.fn.extend({css:function(e,t){return M(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=Xe(e),i=t.length;a<i;a++)o[t[a]]=ce.css(e,t[a],!1,r);return o}return void 0!==n?ce.style(e,t,n):ce.css(e,t)},e,t,1<arguments.length)}}),((ce.Tween=at).prototype={constructor:at,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||ce.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(ce.cssNumber[n]?"":"px")},cur:function(){var e=at.propHooks[this.prop];return e&&e.get?e.get(this):at.propHooks._default.get(this)},run:function(e){var t,n=at.propHooks[this.prop];return this.options.duration?this.pos=t=ce.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):at.propHooks._default.set(this),this}}).init.prototype=at.prototype,(at.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=ce.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){ce.fx.step[e.prop]?ce.fx.step[e.prop](e):1!==e.elem.nodeType||!ce.cssHooks[e.prop]&&null==e.elem.style[Ze(e.prop)]?e.elem[e.prop]=e.now:ce.style(e.elem,e.prop,e.now+e.unit)}}}).scrollTop=at.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},ce.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},ce.fx=at.prototype.init,ce.fx.step={};var st,ut,lt,ct,ft=/^(?:toggle|show|hide)$/,pt=/queueHooks$/;function dt(){ut&&(!1===C.hidden&&ie.requestAnimationFrame?ie.requestAnimationFrame(dt):ie.setTimeout(dt,ce.fx.interval),ce.fx.tick())}function ht(){return ie.setTimeout(function(){st=void 0}),st=Date.now()}function gt(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=Q[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function vt(e,t,n){for(var r,i=(yt.tweeners[t]||[]).concat(yt.tweeners["*"]),o=0,a=i.length;o<a;o++)if(r=i[o].call(n,t,e))return r}function yt(o,e,t){var n,a,r=0,i=yt.prefilters.length,s=ce.Deferred().always(function(){delete u.elem}),u=function(){if(a)return!1;for(var e=st||ht(),t=Math.max(0,l.startTime+l.duration-e),n=1-(t/l.duration||0),r=0,i=l.tweens.length;r<i;r++)l.tweens[r].run(n);return s.notifyWith(o,[l,n,t]),n<1&&i?t:(i||s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l]),!1)},l=s.promise({elem:o,props:ce.extend({},e),opts:ce.extend(!0,{specialEasing:{},easing:ce.easing._default},t),originalProperties:e,originalOptions:t,startTime:st||ht(),duration:t.duration,tweens:[],createTween:function(e,t){var n=ce.Tween(o,l.opts,e,t,l.opts.specialEasing[e]||l.opts.easing);return l.tweens.push(n),n},stop:function(e){var t=0,n=e?l.tweens.length:0;if(a)return this;for(a=!0;t<n;t++)l.tweens[t].run(1);return e?(s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l,e])):s.rejectWith(o,[l,e]),this}}),c=l.props;for(!function(e,t){var n,r,i,o,a;for(n in e)if(i=t[r=F(n)],o=e[n],Array.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),(a=ce.cssHooks[r])&&"expand"in a)for(n in o=a.expand(o),delete e[r],o)n in e||(e[n]=o[n],t[n]=i);else t[r]=i}(c,l.opts.specialEasing);r<i;r++)if(n=yt.prefilters[r].call(l,o,c,l.opts))return v(n.stop)&&(ce._queueHooks(l.elem,l.opts.queue).stop=n.stop.bind(n)),n;return 
ce.map(c,vt,l),v(l.opts.start)&&l.opts.start.call(o,l),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always),ce.fx.timer(ce.extend(u,{elem:o,anim:l,queue:l.opts.queue})),l}ce.Animation=ce.extend(yt,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return te(n.elem,e,Y.exec(t),n),n}]},tweener:function(e,t){v(e)?(t=e,e=["*"]):e=e.match(D);for(var n,r=0,i=e.length;r<i;r++)n=e[r],yt.tweeners[n]=yt.tweeners[n]||[],yt.tweeners[n].unshift(t)},prefilters:[function(e,t,n){var r,i,o,a,s,u,l,c,f="width"in t||"height"in t,p=this,d={},h=e.style,g=e.nodeType&&ee(e),v=_.get(e,"fxshow");for(r in n.queue||(null==(a=ce._queueHooks(e,"fx")).unqueued&&(a.unqueued=0,s=a.empty.fire,a.empty.fire=function(){a.unqueued||s()}),a.unqueued++,p.always(function(){p.always(function(){a.unqueued--,ce.queue(e,"fx").length||a.empty.fire()})})),t)if(i=t[r],ft.test(i)){if(delete t[r],o=o||"toggle"===i,i===(g?"hide":"show")){if("show"!==i||!v||void 0===v[r])continue;g=!0}d[r]=v&&v[r]||ce.style(e,r)}if((u=!ce.isEmptyObject(t))||!ce.isEmptyObject(d))for(r in f&&1===e.nodeType&&(n.overflow=[h.overflow,h.overflowX,h.overflowY],null==(l=v&&v.display)&&(l=_.get(e,"display")),"none"===(c=ce.css(e,"display"))&&(l?c=l:(re([e],!0),l=e.style.display||l,c=ce.css(e,"display"),re([e]))),("inline"===c||"inline-block"===c&&null!=l)&&"none"===ce.css(e,"float")&&(u||(p.done(function(){h.display=l}),null==l&&(c=h.display,l="none"===c?"":c)),h.display="inline-block")),n.overflow&&(h.overflow="hidden",p.always(function(){h.overflow=n.overflow[0],h.overflowX=n.overflow[1],h.overflowY=n.overflow[2]})),u=!1,d)u||(v?"hidden"in v&&(g=v.hidden):v=_.access(e,"fxshow",{display:l}),o&&(v.hidden=!g),g&&re([e],!0),p.done(function(){for(r in g||re([e]),_.remove(e,"fxshow"),d)ce.style(e,r,d[r])})),u=vt(g?v[r]:0,r,p),r in v||(v[r]=u.start,g&&(u.end=u.start,u.start=0))}],prefilter:function(e,t){t?yt.prefilters.unshift(e):yt.prefilters.push(e)}}),ce.speed=function(e,t,n){var r=e&&"object"==typeof e?ce.extend({},e):{complete:n||!n&&t||v(e)&&e,duration:e,easing:n&&t||t&&!v(t)&&t};return ce.fx.off?r.duration=0:"number"!=typeof r.duration&&(r.duration in ce.fx.speeds?r.duration=ce.fx.speeds[r.duration]:r.duration=ce.fx.speeds._default),null!=r.queue&&!0!==r.queue||(r.queue="fx"),r.old=r.complete,r.complete=function(){v(r.old)&&r.old.call(this),r.queue&&ce.dequeue(this,r.queue)},r},ce.fn.extend({fadeTo:function(e,t,n,r){return this.filter(ee).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(t,e,n,r){var i=ce.isEmptyObject(t),o=ce.speed(e,n,r),a=function(){var e=yt(this,ce.extend({},t),o);(i||_.get(this,"finish"))&&e.stop(!0)};return a.finish=a,i||!1===o.queue?this.each(a):this.queue(o.queue,a)},stop:function(i,e,o){var a=function(e){var t=e.stop;delete e.stop,t(o)};return"string"!=typeof i&&(o=e,e=i,i=void 0),e&&this.queue(i||"fx",[]),this.each(function(){var e=!0,t=null!=i&&i+"queueHooks",n=ce.timers,r=_.get(this);if(t)r[t]&&r[t].stop&&a(r[t]);else for(t in r)r[t]&&r[t].stop&&pt.test(t)&&a(r[t]);for(t=n.length;t--;)n[t].elem!==this||null!=i&&n[t].queue!==i||(n[t].anim.stop(o),e=!1,n.splice(t,1));!e&&o||ce.dequeue(this,i)})},finish:function(a){return!1!==a&&(a=a||"fx"),this.each(function(){var e,t=_.get(this),n=t[a+"queue"],r=t[a+"queueHooks"],i=ce.timers,o=n?n.length:0;for(t.finish=!0,ce.queue(this,a,[]),r&&r.stop&&r.stop.call(this,!0),e=i.length;e--;)i[e].elem===this&&i[e].queue===a&&(i[e].anim.stop(!0),i.splice(e,1));for(e=0;e<o;e++)n[e]&&n[e].finish&&n[e].finish.call(this);delete 
t.finish})}}),ce.each(["toggle","show","hide"],function(e,r){var i=ce.fn[r];ce.fn[r]=function(e,t,n){return null==e||"boolean"==typeof e?i.apply(this,arguments):this.animate(gt(r,!0),e,t,n)}}),ce.each({slideDown:gt("show"),slideUp:gt("hide"),slideToggle:gt("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,r){ce.fn[e]=function(e,t,n){return this.animate(r,e,t,n)}}),ce.timers=[],ce.fx.tick=function(){var e,t=0,n=ce.timers;for(st=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||ce.fx.stop(),st=void 0},ce.fx.timer=function(e){ce.timers.push(e),ce.fx.start()},ce.fx.interval=13,ce.fx.start=function(){ut||(ut=!0,dt())},ce.fx.stop=function(){ut=null},ce.fx.speeds={slow:600,fast:200,_default:400},ce.fn.delay=function(r,e){return r=ce.fx&&ce.fx.speeds[r]||r,e=e||"fx",this.queue(e,function(e,t){var n=ie.setTimeout(e,r);t.stop=function(){ie.clearTimeout(n)}})},lt=C.createElement("input"),ct=C.createElement("select").appendChild(C.createElement("option")),lt.type="checkbox",le.checkOn=""!==lt.value,le.optSelected=ct.selected,(lt=C.createElement("input")).value="t",lt.type="radio",le.radioValue="t"===lt.value;var mt,xt=ce.expr.attrHandle;ce.fn.extend({attr:function(e,t){return M(this,ce.attr,e,t,1<arguments.length)},removeAttr:function(e){return this.each(function(){ce.removeAttr(this,e)})}}),ce.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?ce.prop(e,t,n):(1===o&&ce.isXMLDoc(e)||(i=ce.attrHooks[t.toLowerCase()]||(ce.expr.match.bool.test(t)?mt:void 0)),void 0!==n?null===n?void ce.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=ce.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!le.radioValue&&"radio"===t&&fe(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(D);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),mt={set:function(e,t,n){return!1===t?ce.removeAttr(e,n):e.setAttribute(n,n),n}},ce.each(ce.expr.match.bool.source.match(/\w+/g),function(e,t){var a=xt[t]||ce.find.attr;xt[t]=function(e,t,n){var r,i,o=t.toLowerCase();return n||(i=xt[o],xt[o]=r,r=null!=a(e,t,n)?o:null,xt[o]=i),r}});var bt=/^(?:input|select|textarea|button)$/i,wt=/^(?:a|area)$/i;function Tt(e){return(e.match(D)||[]).join(" ")}function Ct(e){return e.getAttribute&&e.getAttribute("class")||""}function kt(e){return Array.isArray(e)?e:"string"==typeof e&&e.match(D)||[]}ce.fn.extend({prop:function(e,t){return M(this,ce.prop,e,t,1<arguments.length)},removeProp:function(e){return this.each(function(){delete this[ce.propFix[e]||e]})}}),ce.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&ce.isXMLDoc(e)||(t=ce.propFix[t]||t,i=ce.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=ce.find.attr(e,"tabindex");return t?parseInt(t,10):bt.test(e.nodeName)||wt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),le.optSelected||(ce.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var 
t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),ce.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){ce.propFix[this.toLowerCase()]=this}),ce.fn.extend({addClass:function(t){var e,n,r,i,o,a;return v(t)?this.each(function(e){ce(this).addClass(t.call(this,e,Ct(this)))}):(e=kt(t)).length?this.each(function(){if(r=Ct(this),n=1===this.nodeType&&" "+Tt(r)+" "){for(o=0;o<e.length;o++)i=e[o],n.indexOf(" "+i+" ")<0&&(n+=i+" ");a=Tt(n),r!==a&&this.setAttribute("class",a)}}):this},removeClass:function(t){var e,n,r,i,o,a;return v(t)?this.each(function(e){ce(this).removeClass(t.call(this,e,Ct(this)))}):arguments.length?(e=kt(t)).length?this.each(function(){if(r=Ct(this),n=1===this.nodeType&&" "+Tt(r)+" "){for(o=0;o<e.length;o++){i=e[o];while(-1<n.indexOf(" "+i+" "))n=n.replace(" "+i+" "," ")}a=Tt(n),r!==a&&this.setAttribute("class",a)}}):this:this.attr("class","")},toggleClass:function(t,n){var e,r,i,o,a=typeof t,s="string"===a||Array.isArray(t);return v(t)?this.each(function(e){ce(this).toggleClass(t.call(this,e,Ct(this),n),n)}):"boolean"==typeof n&&s?n?this.addClass(t):this.removeClass(t):(e=kt(t),this.each(function(){if(s)for(o=ce(this),i=0;i<e.length;i++)r=e[i],o.hasClass(r)?o.removeClass(r):o.addClass(r);else void 0!==t&&"boolean"!==a||((r=Ct(this))&&_.set(this,"__className__",r),this.setAttribute&&this.setAttribute("class",r||!1===t?"":_.get(this,"__className__")||""))}))},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&-1<(" "+Tt(Ct(n))+" ").indexOf(t))return!0;return!1}});var St=/\r/g;ce.fn.extend({val:function(n){var r,e,i,t=this[0];return arguments.length?(i=v(n),this.each(function(e){var t;1===this.nodeType&&(null==(t=i?n.call(this,e,ce(this).val()):n)?t="":"number"==typeof t?t+="":Array.isArray(t)&&(t=ce.map(t,function(e){return null==e?"":e+""})),(r=ce.valHooks[this.type]||ce.valHooks[this.nodeName.toLowerCase()])&&"set"in r&&void 0!==r.set(this,t,"value")||(this.value=t))})):t?(r=ce.valHooks[t.type]||ce.valHooks[t.nodeName.toLowerCase()])&&"get"in r&&void 0!==(e=r.get(t,"value"))?e:"string"==typeof(e=t.value)?e.replace(St,""):null==e?"":e:void 0}}),ce.extend({valHooks:{option:{get:function(e){var t=ce.find.attr(e,"value");return null!=t?t:Tt(ce.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r<u;r++)if(((n=i[r]).selected||r===o)&&!n.disabled&&(!n.parentNode.disabled||!fe(n.parentNode,"optgroup"))){if(t=ce(n).val(),a)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=ce.makeArray(t),a=i.length;while(a--)((r=i[a]).selected=-1<ce.inArray(ce.valHooks.option.get(r),o))&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),ce.each(["radio","checkbox"],function(){ce.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=-1<ce.inArray(ce(e).val(),t)}},le.checkOn||(ce.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})});var Et=ie.location,jt={guid:Date.now()},At=/\?/;ce.parseXML=function(e){var t,n;if(!e||"string"!=typeof e)return null;try{t=(new ie.DOMParser).parseFromString(e,"text/xml")}catch(e){}return n=t&&t.getElementsByTagName("parsererror")[0],t&&!n||ce.error("Invalid XML: "+(n?ce.map(n.childNodes,function(e){return e.textContent}).join("\n"):e)),t};var Dt=/^(?:focusinfocus|focusoutblur)$/,Nt=function(e){e.stopPropagation()};ce.extend(ce.event,{trigger:function(e,t,n,r){var 
i,o,a,s,u,l,c,f,p=[n||C],d=ue.call(e,"type")?e.type:e,h=ue.call(e,"namespace")?e.namespace.split("."):[];if(o=f=a=n=n||C,3!==n.nodeType&&8!==n.nodeType&&!Dt.test(d+ce.event.triggered)&&(-1<d.indexOf(".")&&(d=(h=d.split(".")).shift(),h.sort()),u=d.indexOf(":")<0&&"on"+d,(e=e[ce.expando]?e:new ce.Event(d,"object"==typeof e&&e)).isTrigger=r?2:3,e.namespace=h.join("."),e.rnamespace=e.namespace?new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,e.result=void 0,e.target||(e.target=n),t=null==t?[e]:ce.makeArray(t,[e]),c=ce.event.special[d]||{},r||!c.trigger||!1!==c.trigger.apply(n,t))){if(!r&&!c.noBubble&&!y(n)){for(s=c.delegateType||d,Dt.test(s+d)||(o=o.parentNode);o;o=o.parentNode)p.push(o),a=o;a===(n.ownerDocument||C)&&p.push(a.defaultView||a.parentWindow||ie)}i=0;while((o=p[i++])&&!e.isPropagationStopped())f=o,e.type=1<i?s:c.bindType||d,(l=(_.get(o,"events")||Object.create(null))[e.type]&&_.get(o,"handle"))&&l.apply(o,t),(l=u&&o[u])&&l.apply&&$(o)&&(e.result=l.apply(o,t),!1===e.result&&e.preventDefault());return e.type=d,r||e.isDefaultPrevented()||c._default&&!1!==c._default.apply(p.pop(),t)||!$(n)||u&&v(n[d])&&!y(n)&&((a=n[u])&&(n[u]=null),ce.event.triggered=d,e.isPropagationStopped()&&f.addEventListener(d,Nt),n[d](),e.isPropagationStopped()&&f.removeEventListener(d,Nt),ce.event.triggered=void 0,a&&(n[u]=a)),e.result}},simulate:function(e,t,n){var r=ce.extend(new ce.Event,n,{type:e,isSimulated:!0});ce.event.trigger(r,null,t)}}),ce.fn.extend({trigger:function(e,t){return this.each(function(){ce.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return ce.event.trigger(e,t,n,!0)}});var qt=/\[\]$/,Lt=/\r?\n/g,Ht=/^(?:submit|button|image|reset|file)$/i,Ot=/^(?:input|select|textarea|keygen)/i;function Pt(n,e,r,i){var t;if(Array.isArray(e))ce.each(e,function(e,t){r||qt.test(n)?i(n,t):Pt(n+"["+("object"==typeof t&&null!=t?e:"")+"]",t,r,i)});else if(r||"object"!==x(e))i(n,e);else for(t in e)Pt(n+"["+t+"]",e[t],r,i)}ce.param=function(e,t){var n,r=[],i=function(e,t){var n=v(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(null==e)return"";if(Array.isArray(e)||e.jquery&&!ce.isPlainObject(e))ce.each(e,function(){i(this.name,this.value)});else for(n in e)Pt(n,e[n],t,i);return r.join("&")},ce.fn.extend({serialize:function(){return ce.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=ce.prop(this,"elements");return e?ce.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!ce(this).is(":disabled")&&Ot.test(this.nodeName)&&!Ht.test(e)&&(this.checked||!we.test(e))}).map(function(e,t){var n=ce(this).val();return null==n?null:Array.isArray(n)?ce.map(n,function(e){return{name:t.name,value:e.replace(Lt,"\r\n")}}):{name:t.name,value:n.replace(Lt,"\r\n")}}).get()}});var Mt=/%20/g,Rt=/#.*$/,It=/([?&])_=[^&]*/,Wt=/^(.*?):[ \t]*([^\r\n]*)$/gm,Ft=/^(?:GET|HEAD)$/,$t=/^\/\//,Bt={},_t={},zt="*/".concat("*"),Xt=C.createElement("a");function Ut(o){return function(e,t){"string"!=typeof e&&(t=e,e="*");var n,r=0,i=e.toLowerCase().match(D)||[];if(v(t))while(n=i[r++])"+"===n[0]?(n=n.slice(1)||"*",(o[n]=o[n]||[]).unshift(t)):(o[n]=o[n]||[]).push(t)}}function Vt(t,i,o,a){var s={},u=t===_t;function l(e){var r;return s[e]=!0,ce.each(t[e]||[],function(e,t){var n=t(i,o,a);return"string"!=typeof n||u||s[n]?u?!(r=n):void 0:(i.dataTypes.unshift(n),l(n),!1)}),r}return l(i.dataTypes[0])||!s["*"]&&l("*")}function Gt(e,t){var n,r,i=ce.ajaxSettings.flatOptions||{};for(n in t)void 
0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&ce.extend(!0,e,r),e}Xt.href=Et.href,ce.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Et.href,type:"GET",isLocal:/^(?:about|app|app-storage|.+-extension|file|res|widget):$/.test(Et.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":zt,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":ce.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?Gt(Gt(e,ce.ajaxSettings),t):Gt(ce.ajaxSettings,e)},ajaxPrefilter:Ut(Bt),ajaxTransport:Ut(_t),ajax:function(e,t){"object"==typeof e&&(t=e,e=void 0),t=t||{};var c,f,p,n,d,r,h,g,i,o,v=ce.ajaxSetup({},t),y=v.context||v,m=v.context&&(y.nodeType||y.jquery)?ce(y):ce.event,x=ce.Deferred(),b=ce.Callbacks("once memory"),w=v.statusCode||{},a={},s={},u="canceled",T={readyState:0,getResponseHeader:function(e){var t;if(h){if(!n){n={};while(t=Wt.exec(p))n[t[1].toLowerCase()+" "]=(n[t[1].toLowerCase()+" "]||[]).concat(t[2])}t=n[e.toLowerCase()+" "]}return null==t?null:t.join(", ")},getAllResponseHeaders:function(){return h?p:null},setRequestHeader:function(e,t){return null==h&&(e=s[e.toLowerCase()]=s[e.toLowerCase()]||e,a[e]=t),this},overrideMimeType:function(e){return null==h&&(v.mimeType=e),this},statusCode:function(e){var t;if(e)if(h)T.always(e[T.status]);else for(t in e)w[t]=[w[t],e[t]];return this},abort:function(e){var t=e||u;return c&&c.abort(t),l(0,t),this}};if(x.promise(T),v.url=((e||v.url||Et.href)+"").replace($t,Et.protocol+"//"),v.type=t.method||t.type||v.method||v.type,v.dataTypes=(v.dataType||"*").toLowerCase().match(D)||[""],null==v.crossDomain){r=C.createElement("a");try{r.href=v.url,r.href=r.href,v.crossDomain=Xt.protocol+"//"+Xt.host!=r.protocol+"//"+r.host}catch(e){v.crossDomain=!0}}if(v.data&&v.processData&&"string"!=typeof v.data&&(v.data=ce.param(v.data,v.traditional)),Vt(Bt,v,t,T),h)return T;for(i in(g=ce.event&&v.global)&&0==ce.active++&&ce.event.trigger("ajaxStart"),v.type=v.type.toUpperCase(),v.hasContent=!Ft.test(v.type),f=v.url.replace(Rt,""),v.hasContent?v.data&&v.processData&&0===(v.contentType||"").indexOf("application/x-www-form-urlencoded")&&(v.data=v.data.replace(Mt,"+")):(o=v.url.slice(f.length),v.data&&(v.processData||"string"==typeof v.data)&&(f+=(At.test(f)?"&":"?")+v.data,delete v.data),!1===v.cache&&(f=f.replace(It,"$1"),o=(At.test(f)?"&":"?")+"_="+jt.guid+++o),v.url=f+o),v.ifModified&&(ce.lastModified[f]&&T.setRequestHeader("If-Modified-Since",ce.lastModified[f]),ce.etag[f]&&T.setRequestHeader("If-None-Match",ce.etag[f])),(v.data&&v.hasContent&&!1!==v.contentType||t.contentType)&&T.setRequestHeader("Content-Type",v.contentType),T.setRequestHeader("Accept",v.dataTypes[0]&&v.accepts[v.dataTypes[0]]?v.accepts[v.dataTypes[0]]+("*"!==v.dataTypes[0]?", "+zt+"; q=0.01":""):v.accepts["*"]),v.headers)T.setRequestHeader(i,v.headers[i]);if(v.beforeSend&&(!1===v.beforeSend.call(y,T,v)||h))return T.abort();if(u="abort",b.add(v.complete),T.done(v.success),T.fail(v.error),c=Vt(_t,v,t,T)){if(T.readyState=1,g&&m.trigger("ajaxSend",[T,v]),h)return T;v.async&&0<v.timeout&&(d=ie.setTimeout(function(){T.abort("timeout")},v.timeout));try{h=!1,c.send(a,l)}catch(e){if(h)throw e;l(-1,e)}}else l(-1,"No Transport");function l(e,t,n,r){var 
i,o,a,s,u,l=t;h||(h=!0,d&&ie.clearTimeout(d),c=void 0,p=r||"",T.readyState=0<e?4:0,i=200<=e&&e<300||304===e,n&&(s=function(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}(v,T,n)),!i&&-1<ce.inArray("script",v.dataTypes)&&ce.inArray("json",v.dataTypes)<0&&(v.converters["text script"]=function(){}),s=function(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}(v,s,T,i),i?(v.ifModified&&((u=T.getResponseHeader("Last-Modified"))&&(ce.lastModified[f]=u),(u=T.getResponseHeader("etag"))&&(ce.etag[f]=u)),204===e||"HEAD"===v.type?l="nocontent":304===e?l="notmodified":(l=s.state,o=s.data,i=!(a=s.error))):(a=l,!e&&l||(l="error",e<0&&(e=0))),T.status=e,T.statusText=(t||l)+"",i?x.resolveWith(y,[o,l,T]):x.rejectWith(y,[T,l,a]),T.statusCode(w),w=void 0,g&&m.trigger(i?"ajaxSuccess":"ajaxError",[T,v,i?o:a]),b.fireWith(y,[T,l]),g&&(m.trigger("ajaxComplete",[T,v]),--ce.active||ce.event.trigger("ajaxStop")))}return T},getJSON:function(e,t,n){return ce.get(e,t,n,"json")},getScript:function(e,t){return ce.get(e,void 0,t,"script")}}),ce.each(["get","post"],function(e,i){ce[i]=function(e,t,n,r){return v(t)&&(r=r||n,n=t,t=void 0),ce.ajax(ce.extend({url:e,type:i,dataType:r,data:t,success:n},ce.isPlainObject(e)&&e))}}),ce.ajaxPrefilter(function(e){var t;for(t in e.headers)"content-type"===t.toLowerCase()&&(e.contentType=e.headers[t]||"")}),ce._evalUrl=function(e,t,n){return ce.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,converters:{"text script":function(){}},dataFilter:function(e){ce.globalEval(e,t,n)}})},ce.fn.extend({wrapAll:function(e){var t;return this[0]&&(v(e)&&(e=e.call(this[0])),t=ce(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(n){return v(n)?this.each(function(e){ce(this).wrapInner(n.call(this,e))}):this.each(function(){var e=ce(this),t=e.contents();t.length?t.wrapAll(n):e.append(n)})},wrap:function(t){var n=v(t);return this.each(function(e){ce(this).wrapAll(n?t.call(this,e):t)})},unwrap:function(e){return this.parent(e).not("body").each(function(){ce(this).replaceWith(this.childNodes)}),this}}),ce.expr.pseudos.hidden=function(e){return!ce.expr.pseudos.visible(e)},ce.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},ce.ajaxSettings.xhr=function(){try{return new ie.XMLHttpRequest}catch(e){}};var Yt={0:200,1223:204},Qt=ce.ajaxSettings.xhr();le.cors=!!Qt&&"withCredentials"in Qt,le.ajax=Qt=!!Qt,ce.ajaxTransport(function(i){var o,a;if(le.cors||Qt&&!i.crossDomain)return{send:function(e,t){var 
n,r=i.xhr();if(r.open(i.type,i.url,i.async,i.username,i.password),i.xhrFields)for(n in i.xhrFields)r[n]=i.xhrFields[n];for(n in i.mimeType&&r.overrideMimeType&&r.overrideMimeType(i.mimeType),i.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest"),e)r.setRequestHeader(n,e[n]);o=function(e){return function(){o&&(o=a=r.onload=r.onerror=r.onabort=r.ontimeout=r.onreadystatechange=null,"abort"===e?r.abort():"error"===e?"number"!=typeof r.status?t(0,"error"):t(r.status,r.statusText):t(Yt[r.status]||r.status,r.statusText,"text"!==(r.responseType||"text")||"string"!=typeof r.responseText?{binary:r.response}:{text:r.responseText},r.getAllResponseHeaders()))}},r.onload=o(),a=r.onerror=r.ontimeout=o("error"),void 0!==r.onabort?r.onabort=a:r.onreadystatechange=function(){4===r.readyState&&ie.setTimeout(function(){o&&a()})},o=o("abort");try{r.send(i.hasContent&&i.data||null)}catch(e){if(o)throw e}},abort:function(){o&&o()}}}),ce.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),ce.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return ce.globalEval(e),e}}}),ce.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),ce.ajaxTransport("script",function(n){var r,i;if(n.crossDomain||n.scriptAttrs)return{send:function(e,t){r=ce("<script>").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1<s&&(r=Tt(e.slice(s)),e=e.slice(0,s)),v(t)?(n=t,t=void 0):t&&"object"==typeof t&&(i="POST"),0<a.length&&ce.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?ce("<div>").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return 
t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 0<arguments.length?this.on(n,null,e,t):this.trigger(n)}});var en=/^[\s\uFEFF\xA0]+|([^\s\uFEFF\xA0])[\s\uFEFF\xA0]+$/g;ce.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),v(e))return r=ae.call(arguments,2),(i=function(){return 
e.apply(t||this,r.concat(ae.call(arguments)))}).guid=e.guid=e.guid||ce.guid++,i},ce.holdReady=function(e){e?ce.readyWait++:ce.ready(!0)},ce.isArray=Array.isArray,ce.parseJSON=JSON.parse,ce.nodeName=fe,ce.isFunction=v,ce.isWindow=y,ce.camelCase=F,ce.type=x,ce.now=Date.now,ce.isNumeric=function(e){var t=ce.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},ce.trim=function(e){return null==e?"":(e+"").replace(en,"$1")},"function"==typeof define&&define.amd&&define("jquery",[],function(){return ce});var tn=ie.jQuery,nn=ie.$;return ce.noConflict=function(e){return ie.$===ce&&(ie.$=nn),e&&ie.jQuery===ce&&(ie.jQuery=tn),ce},"undefined"==typeof e&&(ie.jQuery=ie.$=ce),ce});
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map
new file mode 100644
index 0000000000..db38af5893
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map
@@ -0,0 +1 @@
{"version":3,"sources":["jquery-3.7.1.js"],"names":["global","factory","module","exports","document","w","Error","window","this","noGlobal","arr","getProto","Object","getPrototypeOf","slice","flat","array","call","concat","apply","push","indexOf","class2type","toString","hasOwn","hasOwnProperty","fnToString","ObjectFunctionString","support","isFunction","obj","nodeType","item","isWindow","preservedScriptAttributes","type","src","nonce","noModule","DOMEval","code","node","doc","i","val","script","createElement","text","getAttribute","setAttribute","head","appendChild","parentNode","removeChild","toType","version","rhtmlSuffix","jQuery","selector","context","fn","init","isArrayLike","length","nodeName","elem","name","toLowerCase","prototype","jquery","constructor","toArray","get","num","pushStack","elems","ret","merge","prevObject","each","callback","map","arguments","first","eq","last","even","grep","_elem","odd","len","j","end","sort","splice","extend","options","copy","copyIsArray","clone","target","deep","isPlainObject","Array","isArray","undefined","expando","Math","random","replace","isReady","error","msg","noop","proto","Ctor","isEmptyObject","globalEval","textContent","documentElement","nodeValue","makeArray","results","inArray","isXMLDoc","namespace","namespaceURI","docElem","ownerDocument","test","second","invert","matches","callbackExpect","arg","value","guid","Symbol","iterator","split","_i","pop","whitespace","rtrimCSS","RegExp","contains","a","b","bup","compareDocumentPosition","rcssescape","fcssescape","ch","asCodePoint","charCodeAt","escapeSelector","sel","preferredDoc","pushNative","Expr","outermostContext","sortInput","hasDuplicate","documentIsHTML","rbuggyQSA","dirruns","done","classCache","createCache","tokenCache","compilerCache","nonnativeSelectorCache","sortOrder","booleans","identifier","attributes","pseudos","rwhitespace","rcomma","rleadingCombinator","rdescend","rpseudo","ridentifier","matchExpr","ID","CLASS","TAG","ATTR","PSEUDO","CHILD","bool","needsContext","rinputs","rheader","rquickExpr","rsibling","runescape","funescape","escape","nonHex","high","String","fromCharCode","unloadHandler","setDocument","inDisabledFieldset","addCombinator","disabled","dir","next","childNodes","e","els","find","seed","m","nid","match","groups","newSelector","newContext","exec","getElementById","id","getElementsByTagName","getElementsByClassName","testContext","scope","tokenize","toSelector","join","querySelectorAll","qsaError","removeAttribute","select","keys","cache","key","cacheLength","shift","markFunction","assert","el","createInputPseudo","createButtonPseudo","createDisabledPseudo","isDisabled","createPositionalPseudo","argument","matchIndexes","subWindow","webkitMatchesSelector","msMatchesSelector","defaultView","top","addEventListener","getById","getElementsByName","disconnectedMatch","cssHas","querySelector","filter","attrId","getAttributeNode","tag","className","input","innerHTML","compare","sortDetached","expr","elements","matchesSelector","attr","attrHandle","uniqueSort","duplicates","sortStable","createPseudo","relative",">"," 
","+","~","preFilter","excess","unquoted","nodeNameSelector","expectedNodeName","pattern","operator","check","result","what","_argument","simple","forward","ofType","_context","xml","outerCache","nodeIndex","start","parent","useCache","diff","firstChild","lastChild","pseudo","args","setFilters","idx","matched","not","matcher","compile","unmatched","has","lang","elemLang","hash","location","root","focus","activeElement","err","safeActiveElement","hasFocus","href","tabIndex","enabled","checked","selected","selectedIndex","empty","nextSibling","header","button","_matchIndexes","lt","gt","nth","radio","checkbox","file","password","image","submit","reset","parseOnly","tokens","soFar","preFilters","cached","combinator","base","skip","checkNonElements","doneName","oldCache","newCache","elementMatcher","matchers","condense","newUnmatched","mapped","setMatcher","postFilter","postFinder","postSelector","temp","matcherOut","preMap","postMap","preexisting","contexts","multipleContexts","matcherIn","matcherFromTokens","checkContext","leadingRelative","implicitRelative","matchContext","matchAnyContext","elementMatchers","setMatchers","bySet","byElement","superMatcher","outermost","matchedCount","setMatched","contextBackup","dirrunsUnique","token","compiled","filters","unique","getText","isXML","selectors","until","truncate","is","siblings","n","rneedsContext","rsingleTag","winnow","qualifier","self","rootjQuery","parseHTML","ready","rparentsprev","guaranteedUnique","children","contents","prev","sibling","cur","targets","l","closest","index","prevAll","add","addBack","parents","parentsUntil","nextAll","nextUntil","prevUntil","contentDocument","content","reverse","rnothtmlwhite","Identity","v","Thrower","ex","adoptValue","resolve","reject","noValue","method","promise","fail","then","Callbacks","object","_","flag","firing","memory","fired","locked","list","queue","firingIndex","fire","once","stopOnFalse","remove","disable","lock","fireWith","Deferred","func","tuples","state","always","deferred","catch","pipe","fns","newDefer","tuple","returned","progress","notify","onFulfilled","onRejected","onProgress","maxDepth","depth","handler","special","that","mightThrow","TypeError","notifyWith","resolveWith","process","exceptionHook","rejectWith","getErrorHook","getStackHook","setTimeout","stateString","when","singleValue","remaining","resolveContexts","resolveValues","primary","updateFunc","rerrorNames","asyncError","console","warn","message","stack","readyException","readyList","completed","removeEventListener","readyWait","wait","readyState","doScroll","access","chainable","emptyGet","raw","bulk","_key","rmsPrefix","rdashAlpha","fcamelCase","_all","letter","toUpperCase","camelCase","string","acceptData","owner","Data","uid","defineProperty","configurable","set","data","prop","hasData","dataPriv","dataUser","rbrace","rmultiDash","dataAttr","JSON","parse","removeData","_data","_removeData","attrs","dequeue","startLength","hooks","_queueHooks","unshift","stop","setter","clearQueue","tmp","count","defer","pnum","source","rcssNum","cssExpand","isAttached","composed","getRootNode","isHiddenWithinTree","style","display","css","adjustCSS","valueParts","tween","adjusted","scale","maxIterations","currentValue","initial","unit","cssNumber","initialInUnit","defaultDisplayMap","showHide","show","values","body","hide","toggle","div","rcheckableType","rtagName","rscriptType","createDocumentFragment","checkClone","cloneNode","noCloneChecked","defaultValue","option","wrapMap","thead","col","tr","td","_default","getAll","setGlobal
Eval","refElements","tbody","tfoot","colgroup","caption","th","optgroup","rhtml","buildFragment","scripts","selection","ignored","wrap","attached","fragment","nodes","htmlPrefilter","createTextNode","rtypenamespace","returnTrue","returnFalse","on","types","one","origFn","event","off","leverageNative","isSetup","saved","isTrigger","delegateType","stopPropagation","stopImmediatePropagation","preventDefault","trigger","isImmediatePropagationStopped","handleObjIn","eventHandle","events","t","handleObj","handlers","namespaces","origType","elemData","create","handle","triggered","dispatch","bindType","delegateCount","setup","mappedTypes","origCount","teardown","removeEvent","nativeEvent","handlerQueue","fix","delegateTarget","preDispatch","isPropagationStopped","currentTarget","rnamespace","postDispatch","matchedHandlers","matchedSelectors","addProp","hook","Event","enumerable","originalEvent","writable","load","noBubble","click","beforeunload","returnValue","props","isDefaultPrevented","defaultPrevented","relatedTarget","timeStamp","Date","now","isSimulated","altKey","bubbles","cancelable","changedTouches","ctrlKey","detail","eventPhase","metaKey","pageX","pageY","shiftKey","view","char","charCode","keyCode","buttons","clientX","clientY","offsetX","offsetY","pointerId","pointerType","screenX","screenY","targetTouches","toElement","touches","which","blur","focusMappedHandler","documentMode","simulate","attaches","dataHolder","mouseenter","mouseleave","pointerenter","pointerleave","orig","related","rnoInnerhtml","rchecked","rcleanScript","manipulationTarget","disableScript","restoreScript","cloneCopyEvent","dest","udataOld","udataCur","domManip","collection","hasScripts","iNoClone","valueIsFunction","html","_evalUrl","keepData","cleanData","dataAndEvents","deepDataAndEvents","srcElements","destElements","inPage","detach","append","prepend","insertBefore","before","after","replaceWith","replaceChild","appendTo","prependTo","insertAfter","replaceAll","original","insert","rnumnonpx","rcustomProp","getStyles","opener","getComputedStyle","swap","old","rboxStyle","curCSS","computed","width","minWidth","maxWidth","isCustomProp","getPropertyValue","pixelBoxStyles","addGetHookIf","conditionFn","hookFn","computeStyleTests","container","cssText","divStyle","pixelPositionVal","reliableMarginLeftVal","roundPixelMeasures","marginLeft","right","pixelBoxStylesVal","boxSizingReliableVal","position","scrollboxSizeVal","offsetWidth","measure","round","parseFloat","reliableTrDimensionsVal","backgroundClip","clearCloneStyle","boxSizingReliable","pixelPosition","reliableMarginLeft","scrollboxSize","reliableTrDimensions","table","trChild","trStyle","height","parseInt","borderTopWidth","borderBottomWidth","offsetHeight","cssPrefixes","emptyStyle","vendorProps","finalPropName","final","cssProps","capName","vendorPropName","rdisplayswap","cssShow","visibility","cssNormalTransform","letterSpacing","fontWeight","setPositiveNumber","subtract","max","boxModelAdjustment","dimension","box","isBorderBox","styles","computedVal","extra","delta","marginDelta","ceil","getWidthOrHeight","valueIsBorderBox","offsetProp","getClientRects","Tween","easing","cssHooks","opacity","animationIterationCount","aspectRatio","borderImageSlice","columnCount","flexGrow","flexShrink","gridArea","gridColumn","gridColumnEnd","gridColumnStart","gridRow","gridRowEnd","gridRowStart","lineHeight","order","orphans","widows","zIndex","zoom","fillOpacity","floodOpacity","stopOpacity","strokeMiterlimit","strokeOpacity","origName","setProperty","isFinite","getBo
undingClientRect","scrollboxSizeBuggy","left","margin","padding","border","prefix","suffix","expand","expanded","parts","propHooks","run","percent","eased","duration","pos","step","fx","scrollTop","scrollLeft","linear","p","swing","cos","PI","fxNow","inProgress","opt","rfxtypes","rrun","schedule","hidden","requestAnimationFrame","interval","tick","createFxNow","genFx","includeWidth","createTween","animation","Animation","tweeners","properties","stopped","prefilters","currentTime","startTime","tweens","opts","specialEasing","originalProperties","originalOptions","gotoEnd","propFilter","bind","complete","timer","anim","*","tweener","oldfire","propTween","restoreDisplay","isBox","dataShow","unqueued","overflow","overflowX","overflowY","prefilter","speed","speeds","fadeTo","to","animate","optall","doAnimation","finish","stopQueue","timers","cssFn","slideDown","slideUp","slideToggle","fadeIn","fadeOut","fadeToggle","slow","fast","delay","time","timeout","clearTimeout","checkOn","optSelected","radioValue","boolHook","removeAttr","nType","attrHooks","attrNames","getter","lowercaseName","rfocusable","rclickable","stripAndCollapse","getClass","classesToArray","removeProp","propFix","tabindex","for","class","addClass","classNames","curValue","finalValue","removeClass","toggleClass","stateVal","isValidValue","hasClass","rreturn","valHooks","optionSet","rquery","parseXML","parserErrorElem","DOMParser","parseFromString","rfocusMorph","stopPropagationCallback","onlyHandlers","bubbleType","ontype","lastElement","eventPath","parentWindow","triggerHandler","rbracket","rCRLF","rsubmitterTypes","rsubmittable","buildParams","traditional","param","s","valueOrFunction","encodeURIComponent","serialize","serializeArray","r20","rhash","rantiCache","rheaders","rnoContent","rprotocol","transports","allTypes","originAnchor","addToPrefiltersOrTransports","structure","dataTypeExpression","dataType","dataTypes","inspectPrefiltersOrTransports","jqXHR","inspected","seekingTransport","inspect","prefilterOrFactory","dataTypeOrTransport","ajaxExtend","flatOptions","ajaxSettings","active","lastModified","etag","url","isLocal","protocol","processData","async","contentType","accepts","json","responseFields","converters","* text","text html","text json","text xml","ajaxSetup","settings","ajaxPrefilter","ajaxTransport","ajax","transport","cacheURL","responseHeadersString","responseHeaders","timeoutTimer","urlAnchor","fireGlobals","uncached","callbackContext","globalEventContext","completeDeferred","statusCode","requestHeaders","requestHeadersNames","strAbort","getResponseHeader","getAllResponseHeaders","setRequestHeader","overrideMimeType","mimeType","status","abort","statusText","finalText","crossDomain","host","hasContent","ifModified","headers","beforeSend","success","send","nativeStatusText","responses","isSuccess","response","modified","ct","finalDataType","firstDataType","ajaxHandleResponses","conv2","current","conv","dataFilter","throws","ajaxConvert","getJSON","getScript","text 
script","wrapAll","firstElementChild","wrapInner","htmlIsFunction","unwrap","visible","xhr","XMLHttpRequest","xhrSuccessStatus","0","1223","xhrSupported","cors","errorCallback","open","username","xhrFields","onload","onerror","onabort","ontimeout","onreadystatechange","responseType","responseText","binary","scriptAttrs","charset","scriptCharset","evt","oldCallbacks","rjsonp","jsonp","jsonpCallback","originalSettings","callbackName","overwritten","responseContainer","jsonProp","createHTMLDocument","implementation","keepScripts","parsed","params","animated","offset","setOffset","curPosition","curLeft","curCSSTop","curTop","curOffset","curCSSLeft","curElem","using","rect","win","pageYOffset","pageXOffset","offsetParent","parentOffset","scrollTo","Height","Width","","defaultExtra","funcName","unbind","delegate","undelegate","hover","fnOver","fnOut","rtrim","proxy","holdReady","hold","parseJSON","isNumeric","isNaN","trim","define","amd","_jQuery","_$","$","noConflict"],"mappings":";CAUA,SAAYA,EAAQC,GAEnB,aAEuB,iBAAXC,QAAiD,iBAAnBA,OAAOC,QAShDD,OAAOC,QAAUH,EAAOI,SACvBH,EAASD,GAAQ,GACjB,SAAUK,GACT,IAAMA,EAAED,SACP,MAAM,IAAIE,MAAO,4CAElB,OAAOL,EAASI,IAGlBJ,EAASD,GAtBX,CA0BuB,oBAAXO,OAAyBA,OAASC,KAAM,SAAUD,GAAQE,GAMtE,aAEA,IAAIC,GAAM,GAENC,EAAWC,OAAOC,eAElBC,GAAQJ,GAAII,MAEZC,EAAOL,GAAIK,KAAO,SAAUC,GAC/B,OAAON,GAAIK,KAAKE,KAAMD,IACnB,SAAUA,GACb,OAAON,GAAIQ,OAAOC,MAAO,GAAIH,IAI1BI,EAAOV,GAAIU,KAEXC,GAAUX,GAAIW,QAEdC,EAAa,GAEbC,EAAWD,EAAWC,SAEtBC,GAASF,EAAWG,eAEpBC,EAAaF,GAAOD,SAEpBI,EAAuBD,EAAWT,KAAML,QAExCgB,GAAU,GAEVC,EAAa,SAAqBC,GASpC,MAAsB,mBAARA,GAA8C,iBAAjBA,EAAIC,UAC1B,mBAAbD,EAAIE,MAIVC,EAAW,SAAmBH,GAChC,OAAc,MAAPA,GAAeA,IAAQA,EAAIvB,QAIhCH,EAAWG,GAAOH,SAIjB8B,EAA4B,CAC/BC,MAAM,EACNC,KAAK,EACLC,OAAO,EACPC,UAAU,GAGX,SAASC,EAASC,EAAMC,EAAMC,GAG7B,IAAIC,EAAGC,EACNC,GAHDH,EAAMA,GAAOtC,GAGC0C,cAAe,UAG7B,GADAD,EAAOE,KAAOP,EACTC,EACJ,IAAME,KAAKT,GAYVU,EAAMH,EAAME,IAAOF,EAAKO,cAAgBP,EAAKO,aAAcL,KAE1DE,EAAOI,aAAcN,EAAGC,GAI3BF,EAAIQ,KAAKC,YAAaN,GAASO,WAAWC,YAAaR,GAIzD,SAASS,EAAQxB,GAChB,OAAY,MAAPA,EACGA,EAAM,GAIQ,iBAARA,GAAmC,mBAARA,EACxCR,EAAYC,EAASN,KAAMa,KAAW,gBAC/BA,EAQT,IAAIyB,EAAU,QAEbC,EAAc,SAGdC,GAAS,SAAUC,EAAUC,GAI5B,OAAO,IAAIF,GAAOG,GAAGC,KAAMH,EAAUC,IAmYvC,SAASG,EAAahC,GAMrB,IAAIiC,IAAWjC,GAAO,WAAYA,GAAOA,EAAIiC,OAC5C5B,EAAOmB,EAAQxB,GAEhB,OAAKD,EAAYC,KAASG,EAAUH,KAIpB,UAATK,GAA+B,IAAX4B,GACR,iBAAXA,GAAgC,EAATA,GAAgBA,EAAS,KAAOjC,GAIhE,SAASkC,GAAUC,EAAMC,GAExB,OAAOD,EAAKD,UAAYC,EAAKD,SAASG,gBAAkBD,EAAKC,cApZ9DV,GAAOG,GAAKH,GAAOW,UAAY,CAG9BC,OAAQd,EAERe,YAAab,GAGbM,OAAQ,EAERQ,QAAS,WACR,OAAOzD,GAAMG,KAAMT,OAKpBgE,IAAK,SAAUC,GAGd,OAAY,MAAPA,EACG3D,GAAMG,KAAMT,MAIbiE,EAAM,EAAIjE,KAAMiE,EAAMjE,KAAKuD,QAAWvD,KAAMiE,IAKpDC,UAAW,SAAUC,GAGpB,IAAIC,EAAMnB,GAAOoB,MAAOrE,KAAK8D,cAAeK,GAM5C,OAHAC,EAAIE,WAAatE,KAGVoE,GAIRG,KAAM,SAAUC,GACf,OAAOvB,GAAOsB,KAAMvE,KAAMwE,IAG3BC,IAAK,SAAUD,GACd,OAAOxE,KAAKkE,UAAWjB,GAAOwB,IAAKzE,KAAM,SAAUyD,EAAMtB,GACxD,OAAOqC,EAAS/D,KAAMgD,EAAMtB,EAAGsB,OAIjCnD,MAAO,WACN,OAAON,KAAKkE,UAAW5D,GAAMK,MAAOX,KAAM0E,aAG3CC,MAAO,WACN,OAAO3E,KAAK4E,GAAI,IAGjBC,KAAM,WACL,OAAO7E,KAAK4E,IAAK,IAGlBE,KAAM,WACL,OAAO9E,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAASA,EAAI,GAAM,MAIrB8C,IAAK,WACJ,OAAOjF,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAAOA,EAAI,MAIbyC,GAAI,SAAUzC,GACb,IAAI+C,EAAMlF,KAAKuD,OACd4B,GAAKhD,GAAMA,EAAI,EAAI+C,EAAM,GAC1B,OAAOlF,KAAKkE,UAAgB,GAALiB,GAAUA,EAAID,EAAM,CAAElF,KAAMmF,IAAQ,KAG5DC,IAAK,WACJ,OAAOpF,KAAKsE,YAActE,KAAK8D,eAKhClD,KAAMA,EACNyE,KAAMnF,GAAImF,KACVC,OAAQpF,GAAIoF,QAGbrC,GAAOsC,OAAStC,GAAOG,GAAGmC,OAAS,WAClC,IAAIC,EAAS9B,EAAM9B,EAAK6D,
EAAMC,EAAaC,EAC1CC,EAASlB,UAAW,IAAO,GAC3BvC,EAAI,EACJoB,EAASmB,UAAUnB,OACnBsC,GAAO,EAsBR,IAnBuB,kBAAXD,IACXC,EAAOD,EAGPA,EAASlB,UAAWvC,IAAO,GAC3BA,KAIsB,iBAAXyD,GAAwBvE,EAAYuE,KAC/CA,EAAS,IAILzD,IAAMoB,IACVqC,EAAS5F,KACTmC,KAGOA,EAAIoB,EAAQpB,IAGnB,GAAqC,OAA9BqD,EAAUd,UAAWvC,IAG3B,IAAMuB,KAAQ8B,EACbC,EAAOD,EAAS9B,GAIF,cAATA,GAAwBkC,IAAWH,IAKnCI,GAAQJ,IAAUxC,GAAO6C,cAAeL,KAC1CC,EAAcK,MAAMC,QAASP,MAC/B7D,EAAMgE,EAAQlC,GAIbiC,EADID,IAAgBK,MAAMC,QAASpE,GAC3B,GACI8D,GAAgBzC,GAAO6C,cAAelE,GAG1CA,EAFA,GAIT8D,GAAc,EAGdE,EAAQlC,GAAST,GAAOsC,OAAQM,EAAMF,EAAOF,SAGzBQ,IAATR,IACXG,EAAQlC,GAAS+B,IAOrB,OAAOG,GAGR3C,GAAOsC,OAAQ,CAGdW,QAAS,UAAanD,EAAUoD,KAAKC,UAAWC,QAAS,MAAO,IAGhEC,SAAS,EAETC,MAAO,SAAUC,GAChB,MAAM,IAAI1G,MAAO0G,IAGlBC,KAAM,aAENX,cAAe,SAAUxE,GACxB,IAAIoF,EAAOC,EAIX,SAAMrF,GAAgC,oBAAzBP,EAASN,KAAMa,QAI5BoF,EAAQvG,EAAUmB,KASK,mBADvBqF,EAAO3F,GAAOP,KAAMiG,EAAO,gBAAmBA,EAAM5C,cACf5C,EAAWT,KAAMkG,KAAWxF,IAGlEyF,cAAe,SAAUtF,GACxB,IAAIoC,EAEJ,IAAMA,KAAQpC,EACb,OAAO,EAER,OAAO,GAKRuF,WAAY,SAAU7E,EAAMwD,EAAStD,GACpCH,EAASC,EAAM,CAAEH,MAAO2D,GAAWA,EAAQ3D,OAASK,IAGrDqC,KAAM,SAAUjD,EAAKkD,GACpB,IAAIjB,EAAQpB,EAAI,EAEhB,GAAKmB,EAAahC,IAEjB,IADAiC,EAASjC,EAAIiC,OACLpB,EAAIoB,EAAQpB,IACnB,IAAgD,IAA3CqC,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,WAIF,IAAMA,KAAKb,EACV,IAAgD,IAA3CkD,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,MAKH,OAAOb,GAKRiB,KAAM,SAAUkB,GACf,IAAIxB,EACHmC,EAAM,GACNjC,EAAI,EACJZ,EAAWkC,EAAKlC,SAEjB,IAAMA,EAGL,MAAUU,EAAOwB,EAAMtB,KAGtBiC,GAAOnB,GAAOV,KAAMN,GAGtB,OAAkB,IAAbV,GAA+B,KAAbA,EACfkC,EAAKqD,YAEK,IAAbvF,EACGkC,EAAKsD,gBAAgBD,YAEX,IAAbvF,GAA+B,IAAbA,EACfkC,EAAKuD,UAKN5C,GAIR6C,UAAW,SAAU/G,EAAKgH,GACzB,IAAI9C,EAAM8C,GAAW,GAarB,OAXY,MAAPhH,IACCoD,EAAalD,OAAQF,IACzB+C,GAAOoB,MAAOD,EACE,iBAARlE,EACN,CAAEA,GAAQA,GAGZU,EAAKH,KAAM2D,EAAKlE,IAIXkE,GAGR+C,QAAS,SAAU1D,EAAMvD,EAAKiC,GAC7B,OAAc,MAAPjC,GAAe,EAAIW,GAAQJ,KAAMP,EAAKuD,EAAMtB,IAGpDiF,SAAU,SAAU3D,GACnB,IAAI4D,EAAY5D,GAAQA,EAAK6D,aAC5BC,EAAU9D,IAAUA,EAAK+D,eAAiB/D,GAAOsD,gBAIlD,OAAQ/D,EAAYyE,KAAMJ,GAAaE,GAAWA,EAAQ/D,UAAY,SAKvEa,MAAO,SAAUM,EAAO+C,GAKvB,IAJA,IAAIxC,GAAOwC,EAAOnE,OACjB4B,EAAI,EACJhD,EAAIwC,EAAMpB,OAEH4B,EAAID,EAAKC,IAChBR,EAAOxC,KAAQuF,EAAQvC,GAKxB,OAFAR,EAAMpB,OAASpB,EAERwC,GAGRI,KAAM,SAAUZ,EAAOK,EAAUmD,GAShC,IARA,IACCC,EAAU,GACVzF,EAAI,EACJoB,EAASY,EAAMZ,OACfsE,GAAkBF,EAIXxF,EAAIoB,EAAQpB,KACAqC,EAAUL,EAAOhC,GAAKA,KAChB0F,GACxBD,EAAQhH,KAAMuD,EAAOhC,IAIvB,OAAOyF,GAIRnD,IAAK,SAAUN,EAAOK,EAAUsD,GAC/B,IAAIvE,EAAQwE,EACX5F,EAAI,EACJiC,EAAM,GAGP,GAAKd,EAAaa,GAEjB,IADAZ,EAASY,EAAMZ,OACPpB,EAAIoB,EAAQpB,IAGL,OAFd4F,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,QAMZ,IAAM5F,KAAKgC,EAGI,OAFd4D,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,GAMb,OAAOxH,EAAM6D,IAId4D,KAAM,EAIN5G,QAASA,KAGa,mBAAX6G,SACXhF,GAAOG,GAAI6E,OAAOC,UAAahI,GAAK+H,OAAOC,WAI5CjF,GAAOsB,KAAM,uEAAuE4D,MAAO,KAC1F,SAAUC,EAAI1E,GACb5C,EAAY,WAAa4C,EAAO,KAAQA,EAAKC,gBA0B/C,IAAI0E,GAAMnI,GAAImI,IAGVhD,GAAOnF,GAAImF,KAGXC,GAASpF,GAAIoF,OAGbgD,GAAa,sBAGbC,GAAW,IAAIC,OAClB,IAAMF,GAAa,8BAAgCA,GAAa,KAChE,KAODrF,GAAOwF,SAAW,SAAUC,EAAGC,GAC9B,IAAIC,EAAMD,GAAKA,EAAE/F,WAEjB,OAAO8F,IAAME,MAAWA,GAAwB,IAAjBA,EAAIrH,YAIlCmH,EAAED,SACDC,EAAED,SAAUG,GACZF,EAAEG,yBAA8D,GAAnCH,EAAEG,wBAAyBD,MAS3D,IAAIE,EAAa,+CAEjB,SAASC,EAAYC,EAAIC,GACxB,OAAKA,EAGQ,OAAPD,EACG,SAIDA,EAAG1I,MAAO,GAAI,GAAM,KAAO0I,EAAGE,WAAYF,EAAGzF,OAAS,GAAIxC,SAAU,IAAO,IAI5E,KAAOiI,EAGf/F,GAAOkG,eAAiB,SAAUC,GACjC,OAASA,EAAM,IAAK/C,QAASyC,EAAYC,IAM1C,IAAIM,GAAezJ,EAClB0J,GAAa1I,GAEd,WAEA,IAAIuB,EACHoH,EACAC,EACAC,EACAC,EAIA9J,EACAmH,EACA4C,EACAC,EACAhC,EAPAhH,EAAO0I,GAUPpD,EAAUjD,GAAOiD,QACjB2D,EAAU,EACVC,E
AAO,EACPC,EAAaC,IACbC,EAAaD,IACbE,EAAgBF,IAChBG,EAAyBH,IACzBI,EAAY,SAAU1B,EAAGC,GAIxB,OAHKD,IAAMC,IACVe,GAAe,GAET,GAGRW,EAAW,6HAMXC,EAAa,0BAA4BhC,GACxC,0CAGDiC,EAAa,MAAQjC,GAAa,KAAOgC,EAAa,OAAShC,GAG9D,gBAAkBA,GAGlB,2DAA6DgC,EAAa,OAC1EhC,GAAa,OAEdkC,EAAU,KAAOF,EAAa,wFAOAC,EAAa,eAO3CE,EAAc,IAAIjC,OAAQF,GAAa,IAAK,KAE5CoC,EAAS,IAAIlC,OAAQ,IAAMF,GAAa,KAAOA,GAAa,KAC5DqC,EAAqB,IAAInC,OAAQ,IAAMF,GAAa,WAAaA,GAAa,IAC7EA,GAAa,KACdsC,EAAW,IAAIpC,OAAQF,GAAa,MAEpCuC,EAAU,IAAIrC,OAAQgC,GACtBM,EAAc,IAAItC,OAAQ,IAAM8B,EAAa,KAE7CS,EAAY,CACXC,GAAI,IAAIxC,OAAQ,MAAQ8B,EAAa,KACrCW,MAAO,IAAIzC,OAAQ,QAAU8B,EAAa,KAC1CY,IAAK,IAAI1C,OAAQ,KAAO8B,EAAa,SACrCa,KAAM,IAAI3C,OAAQ,IAAM+B,GACxBa,OAAQ,IAAI5C,OAAQ,IAAMgC,GAC1Ba,MAAO,IAAI7C,OACV,yDACCF,GAAa,+BAAiCA,GAAa,cAC3DA,GAAa,aAAeA,GAAa,SAAU,KACrDgD,KAAM,IAAI9C,OAAQ,OAAS6B,EAAW,KAAM,KAI5CkB,aAAc,IAAI/C,OAAQ,IAAMF,GAC/B,mDAAqDA,GACrD,mBAAqBA,GAAa,mBAAoB,MAGxDkD,EAAU,sCACVC,EAAU,SAGVC,EAAa,mCAEbC,EAAW,OAIXC,EAAY,IAAIpD,OAAQ,uBAAyBF,GAChD,uBAAwB,KACzBuD,EAAY,SAAUC,EAAQC,GAC7B,IAAIC,EAAO,KAAOF,EAAOxL,MAAO,GAAM,MAEtC,OAAKyL,IAUEC,EAAO,EACbC,OAAOC,aAAcF,EAAO,OAC5BC,OAAOC,aAAcF,GAAQ,GAAK,MAAe,KAAPA,EAAe,SAO3DG,EAAgB,WACfC,KAGDC,EAAqBC,EACpB,SAAU7I,GACT,OAAyB,IAAlBA,EAAK8I,UAAqB/I,GAAUC,EAAM,aAElD,CAAE+I,IAAK,aAAcC,KAAM,WAa7B,IACC7L,EAAKD,MACFT,GAAMI,GAAMG,KAAM4I,GAAaqD,YACjCrD,GAAaqD,YAMdxM,GAAKmJ,GAAaqD,WAAWnJ,QAAShC,SACrC,MAAQoL,GACT/L,EAAO,CACND,MAAO,SAAUiF,EAAQgH,GACxBtD,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMmM,KAEvCnM,KAAM,SAAUmF,GACf0D,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMiE,UAAW,MAKpD,SAASmI,EAAM3J,EAAUC,EAAS+D,EAAS4F,GAC1C,IAAIC,EAAG5K,EAAGsB,EAAMuJ,EAAKC,EAAOC,EAAQC,EACnCC,EAAajK,GAAWA,EAAQqE,cAGhCjG,EAAW4B,EAAUA,EAAQ5B,SAAW,EAKzC,GAHA2F,EAAUA,GAAW,GAGI,iBAAbhE,IAA0BA,GACxB,IAAb3B,GAA+B,IAAbA,GAA+B,KAAbA,EAEpC,OAAO2F,EAIR,IAAM4F,IACLV,EAAajJ,GACbA,EAAUA,GAAWvD,EAEhB+J,GAAiB,CAIrB,GAAkB,KAAbpI,IAAqB0L,EAAQvB,EAAW2B,KAAMnK,IAGlD,GAAO6J,EAAIE,EAAO,IAGjB,GAAkB,IAAb1L,EAAiB,CACrB,KAAOkC,EAAON,EAAQmK,eAAgBP,IASrC,OAAO7F,EALP,GAAKzD,EAAK8J,KAAOR,EAEhB,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,OAWT,GAAKkG,IAAgB3J,EAAO2J,EAAWE,eAAgBP,KACtDF,EAAKpE,SAAUtF,EAASM,IACxBA,EAAK8J,KAAOR,EAGZ,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,MAKH,CAAA,GAAK+F,EAAO,GAElB,OADArM,EAAKD,MAAOuG,EAAS/D,EAAQqK,qBAAsBtK,IAC5CgE,EAGD,IAAO6F,EAAIE,EAAO,KAAS9J,EAAQsK,uBAEzC,OADA7M,EAAKD,MAAOuG,EAAS/D,EAAQsK,uBAAwBV,IAC9C7F,EAKT,KAAMiD,EAAwBjH,EAAW,MACrC0G,GAAcA,EAAUnC,KAAMvE,IAAe,CAYhD,GAVAiK,EAAcjK,EACdkK,EAAajK,EASK,IAAb5B,IACFqJ,EAASnD,KAAMvE,IAAcyH,EAAmBlD,KAAMvE,IAAe,EAGvEkK,EAAazB,EAASlE,KAAMvE,IAAcwK,EAAavK,EAAQP,aAC9DO,IAQkBA,GAAY/B,GAAQuM,SAG/BX,EAAM7J,EAAQX,aAAc,OAClCwK,EAAM/J,GAAOkG,eAAgB6D,GAE7B7J,EAAQV,aAAc,KAAQuK,EAAM9G,IAMtC/D,GADA+K,EAASU,EAAU1K,IACRK,OACX,MAAQpB,IACP+K,EAAQ/K,IAAQ6K,EAAM,IAAMA,EAAM,UAAa,IAC9Ca,EAAYX,EAAQ/K,IAEtBgL,EAAcD,EAAOY,KAAM,KAG5B,IAIC,OAHAlN,EAAKD,MAAOuG,EACXkG,EAAWW,iBAAkBZ,IAEvBjG,EACN,MAAQ8G,GACT7D,EAAwBjH,GAAU,GACjC,QACI8J,IAAQ9G,GACZ/C,EAAQ8K,gBAAiB,QAQ9B,OAAOC,GAAQhL,EAASmD,QAASkC,GAAU,MAAQpF,EAAS+D,EAAS4F,GAStE,SAAS9C,IACR,IAAImE,EAAO,GAaX,OAXA,SAASC,EAAOC,EAAKtG,GASpB,OALKoG,EAAKvN,KAAMyN,EAAM,KAAQ9E,EAAK+E,oBAG3BF,EAAOD,EAAKI,SAEXH,EAAOC,EAAM,KAAQtG,GAShC,SAASyG,EAAcpL,GAEtB,OADAA,EAAI8C,IAAY,EACT9C,EAOR,SAASqL,EAAQrL,GAChB,IAAIsL,EAAK9O,EAAS0C,cAAe,YAEjC,IACC,QAASc,EAAIsL,GACZ,MAAQ/B,GACT,OAAO,EACN,QAGI+B,EAAG9L,YACP8L,EAAG9L,WAAWC,YAAa6L,GAI5BA,EAAK,MAQP,SAASC,EAAmBhN,GAC3B,OAAO,SAAU8B,GAChB,OAAOD,GAAUC,EAAM,UAAaA,EAAK9B,OAASA,GAQpD,SAASiN,EAAoBjN,GAC5B,OAAO,SAAU8B,GAChB,OAASD,GAAUC,EAAM,UAAaD,GAAUC,EAAM,YACrDA,EAAK9B,OAASA,GAQjB,SAASkN,EAAsBtC,GAG9B,OAAO,SAAU9I,GAKhB,MAAK,SAAUA,EAST
A,EAAKb,aAAgC,IAAlBa,EAAK8I,SAGvB,UAAW9I,EACV,UAAWA,EAAKb,WACba,EAAKb,WAAW2J,WAAaA,EAE7B9I,EAAK8I,WAAaA,EAMpB9I,EAAKqL,aAAevC,GAG1B9I,EAAKqL,cAAgBvC,GACpBF,EAAoB5I,KAAW8I,EAG3B9I,EAAK8I,WAAaA,EAKd,UAAW9I,GACfA,EAAK8I,WAAaA,GAY5B,SAASwC,EAAwB3L,GAChC,OAAOoL,EAAc,SAAUQ,GAE9B,OADAA,GAAYA,EACLR,EAAc,SAAU1B,EAAMlF,GACpC,IAAIzC,EACH8J,EAAe7L,EAAI,GAAI0J,EAAKvJ,OAAQyL,GACpC7M,EAAI8M,EAAa1L,OAGlB,MAAQpB,IACF2K,EAAQ3H,EAAI8J,EAAc9M,MAC9B2K,EAAM3H,KAASyC,EAASzC,GAAM2H,EAAM3H,SAYzC,SAASuI,EAAavK,GACrB,OAAOA,GAAmD,oBAAjCA,EAAQqK,sBAAwCrK,EAQ1E,SAASiJ,EAAanK,GACrB,IAAIiN,EACHhN,EAAMD,EAAOA,EAAKuF,eAAiBvF,EAAOoH,GAO3C,OAAKnH,GAAOtC,GAA6B,IAAjBsC,EAAIX,UAAmBW,EAAI6E,kBAMnDA,GADAnH,EAAWsC,GACgB6E,gBAC3B4C,GAAkB1G,GAAOmE,SAAUxH,GAInCgI,EAAUb,EAAgBa,SACzBb,EAAgBoI,uBAChBpI,EAAgBqI,kBAOZrI,EAAgBqI,mBAMpB/F,IAAgBzJ,IACdsP,EAAYtP,EAASyP,cAAiBH,EAAUI,MAAQJ,GAG1DA,EAAUK,iBAAkB,SAAUpD,GAOvC/K,GAAQoO,QAAUf,EAAQ,SAAUC,GAEnC,OADA3H,EAAgBpE,YAAa+L,GAAKnB,GAAKtK,GAAOiD,SACtCtG,EAAS6P,oBACf7P,EAAS6P,kBAAmBxM,GAAOiD,SAAU3C,SAMhDnC,GAAQsO,kBAAoBjB,EAAQ,SAAUC,GAC7C,OAAO9G,EAAQnH,KAAMiO,EAAI,OAK1BtN,GAAQuM,MAAQc,EAAQ,WACvB,OAAO7O,EAASmO,iBAAkB,YAYnC3M,GAAQuO,OAASlB,EAAQ,WACxB,IAEC,OADA7O,EAASgQ,cAAe,oBACjB,EACN,MAAQjD,GACT,OAAO,KAKJvL,GAAQoO,SACZjG,EAAKsG,OAAO7E,GAAK,SAAUuC,GAC1B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,OAAOA,EAAKjB,aAAc,QAAWsN,IAGvCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAIlG,EAAON,EAAQmK,eAAgBC,GACnC,OAAO9J,EAAO,CAAEA,GAAS,OAI3B8F,EAAKsG,OAAO7E,GAAM,SAAUuC,GAC3B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,IAAIxB,EAAwC,oBAA1BwB,EAAKsM,kBACtBtM,EAAKsM,iBAAkB,MACxB,OAAO9N,GAAQA,EAAK8F,QAAU+H,IAMhCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAI1H,EAAME,EAAGgC,EACZV,EAAON,EAAQmK,eAAgBC,GAEhC,GAAK9J,EAAO,CAIX,IADAxB,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAIVU,EAAQhB,EAAQsM,kBAAmBlC,GACnCpL,EAAI,EACJ,MAAUsB,EAAOU,EAAOhC,KAEvB,IADAF,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAKZ,MAAO,MAMV8F,EAAKsD,KAAK3B,IAAM,SAAU8E,EAAK7M,GAC9B,MAA6C,oBAAjCA,EAAQqK,qBACZrK,EAAQqK,qBAAsBwC,GAI9B7M,EAAQ4K,iBAAkBiC,IAKnCzG,EAAKsD,KAAK5B,MAAQ,SAAUgF,EAAW9M,GACtC,GAA+C,oBAAnCA,EAAQsK,wBAA0C9D,EAC7D,OAAOxG,EAAQsK,uBAAwBwC,IASzCrG,EAAY,GAIZ6E,EAAQ,SAAUC,GAEjB,IAAIwB,EAEJnJ,EAAgBpE,YAAa+L,GAAKyB,UACjC,UAAYjK,EAAU,iDACLA,EAAU,oEAKtBwI,EAAGX,iBAAkB,cAAexK,QACzCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,aAAe+B,EAAW,KAI1DqE,EAAGX,iBAAkB,QAAU7H,EAAU,MAAO3C,QACrDqG,EAAUhJ,KAAM,MAMX8N,EAAGX,iBAAkB,KAAO7H,EAAU,MAAO3C,QAClDqG,EAAUhJ,KAAM,YAOX8N,EAAGX,iBAAkB,YAAaxK,QACvCqG,EAAUhJ,KAAM,aAKjBsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,UAC5BiM,EAAG/L,YAAauN,GAAQzN,aAAc,OAAQ,KAQ9CsE,EAAgBpE,YAAa+L,GAAKnC,UAAW,EACM,IAA9CmC,EAAGX,iBAAkB,aAAcxK,QACvCqG,EAAUhJ,KAAM,WAAY,cAQ7BsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,IAC5BiM,EAAG/L,YAAauN,GACVxB,EAAGX,iBAAkB,aAAcxK,QACxCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,QAAUA,GAAa,KAC3DA,GAAa,kBAIVlH,GAAQuO,QAQb/F,EAAUhJ,KAAM,QAGjBgJ,EAAYA,EAAUrG,QAAU,IAAIiF,OAAQoB,EAAUkE,KAAM,MAM5D1D,EAAY,SAAU1B,EAAGC,GAGxB,GAAKD,IAAMC,EAEV,OADAe,GAAe,EACR,EAIR,IAAI0G,GAAW1H,EAAEG,yBAA2BF,EAAEE,wBAC9C,OAAKuH,IAgBU,GAPfA,GAAY1H,EAAElB,eAAiBkB,KAASC,EAAEnB,eAAiBmB,GAC1DD,EAAEG,wBAAyBF,GAG3B,KAIGvH,GAAQiP,cAAgB1H,EAAEE,wBAAyBH,KAAQ0H,EAOzD1H,IAAM9I,GAAY8I,EAAElB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcX,IACrB,EAOJC,IAAM/I,GAAY+I,EAAEnB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcV,GACtB,EAIDc,EACJ5I,GAAQJ,KAAMgJ,EAAWf,GAAM7H,GAAQJ,KAAMgJ,EAAWd,GAC1D,EAGe,EAAVyH,GAAe,EAAI,KAGpBxQ,EAqpBR,IAAMuC,KAlpBN0K,EAAKjF,QAAU,SAAU0I,EAAMC,GAC9B
,OAAO1D,EAAMyD,EAAM,KAAM,KAAMC,IAGhC1D,EAAK2D,gBAAkB,SAAU/M,EAAM6M,GAGtC,GAFAlE,EAAa3I,GAERkG,IACHQ,EAAwBmG,EAAO,QAC7B1G,IAAcA,EAAUnC,KAAM6I,IAEjC,IACC,IAAIlM,EAAMwD,EAAQnH,KAAMgD,EAAM6M,GAG9B,GAAKlM,GAAOhD,GAAQsO,mBAIlBjM,EAAK7D,UAAuC,KAA3B6D,EAAK7D,SAAS2B,SAChC,OAAO6C,EAEP,MAAQuI,GACTxC,EAAwBmG,GAAM,GAIhC,OAAuD,EAAhDzD,EAAMyD,EAAM1Q,EAAU,KAAM,CAAE6D,IAASF,QAG/CsJ,EAAKpE,SAAW,SAAUtF,EAASM,GAUlC,OAHON,EAAQqE,eAAiBrE,IAAavD,GAC5CwM,EAAajJ,GAEPF,GAAOwF,SAAUtF,EAASM,IAIlCoJ,EAAK4D,KAAO,SAAUhN,EAAMC,IAOpBD,EAAK+D,eAAiB/D,IAAU7D,GACtCwM,EAAa3I,GAGd,IAAIL,EAAKmG,EAAKmH,WAAYhN,EAAKC,eAG9BvB,EAAMgB,GAAMpC,GAAOP,KAAM8I,EAAKmH,WAAYhN,EAAKC,eAC9CP,EAAIK,EAAMC,GAAOiG,QACjB1D,EAEF,YAAaA,IAAR7D,EACGA,EAGDqB,EAAKjB,aAAckB,IAG3BmJ,EAAKtG,MAAQ,SAAUC,GACtB,MAAM,IAAI1G,MAAO,0CAA4C0G,IAO9DvD,GAAO0N,WAAa,SAAUzJ,GAC7B,IAAIzD,EACHmN,EAAa,GACbzL,EAAI,EACJhD,EAAI,EAWL,GAJAuH,GAAgBtI,GAAQyP,WACxBpH,GAAarI,GAAQyP,YAAcvQ,GAAMG,KAAMyG,EAAS,GACxD7B,GAAK5E,KAAMyG,EAASkD,GAEfV,EAAe,CACnB,MAAUjG,EAAOyD,EAAS/E,KACpBsB,IAASyD,EAAS/E,KACtBgD,EAAIyL,EAAWhQ,KAAMuB,IAGvB,MAAQgD,IACPG,GAAO7E,KAAMyG,EAAS0J,EAAYzL,GAAK,GAQzC,OAFAsE,EAAY,KAELvC,GAGRjE,GAAOG,GAAGuN,WAAa,WACtB,OAAO3Q,KAAKkE,UAAWjB,GAAO0N,WAAYrQ,GAAMK,MAAOX,UAGxDuJ,EAAOtG,GAAOqN,KAAO,CAGpBhC,YAAa,GAEbwC,aAActC,EAEdvB,MAAOlC,EAEP2F,WAAY,GAEZ7D,KAAM,GAENkE,SAAU,CACTC,IAAK,CAAExE,IAAK,aAAc7H,OAAO,GACjCsM,IAAK,CAAEzE,IAAK,cACZ0E,IAAK,CAAE1E,IAAK,kBAAmB7H,OAAO,GACtCwM,IAAK,CAAE3E,IAAK,oBAGb4E,UAAW,CACVjG,KAAM,SAAU8B,GAWf,OAVAA,EAAO,GAAMA,EAAO,GAAI5G,QAASuF,EAAWC,GAG5CoB,EAAO,IAAQA,EAAO,IAAOA,EAAO,IAAOA,EAAO,IAAO,IACvD5G,QAASuF,EAAWC,GAEF,OAAfoB,EAAO,KACXA,EAAO,GAAM,IAAMA,EAAO,GAAM,KAG1BA,EAAM3M,MAAO,EAAG,IAGxB+K,MAAO,SAAU4B,GAkChB,OAtBAA,EAAO,GAAMA,EAAO,GAAItJ,cAEU,QAA7BsJ,EAAO,GAAI3M,MAAO,EAAG,IAGnB2M,EAAO,IACZJ,EAAKtG,MAAO0G,EAAO,IAKpBA,EAAO,KAASA,EAAO,GACtBA,EAAO,IAAQA,EAAO,IAAO,GAC7B,GAAqB,SAAfA,EAAO,IAAiC,QAAfA,EAAO,KAEvCA,EAAO,KAAWA,EAAO,GAAMA,EAAO,IAAwB,QAAfA,EAAO,KAG3CA,EAAO,IAClBJ,EAAKtG,MAAO0G,EAAO,IAGbA,GAGR7B,OAAQ,SAAU6B,GACjB,IAAIoE,EACHC,GAAYrE,EAAO,IAAOA,EAAO,GAElC,OAAKlC,EAAUM,MAAM5D,KAAMwF,EAAO,IAC1B,MAIHA,EAAO,GACXA,EAAO,GAAMA,EAAO,IAAOA,EAAO,IAAO,GAG9BqE,GAAYzG,EAAQpD,KAAM6J,KAGnCD,EAASzD,EAAU0D,GAAU,MAG7BD,EAASC,EAASzQ,QAAS,IAAKyQ,EAAS/N,OAAS8N,GAAWC,EAAS/N,UAGxE0J,EAAO,GAAMA,EAAO,GAAI3M,MAAO,EAAG+Q,GAClCpE,EAAO,GAAMqE,EAAShR,MAAO,EAAG+Q,IAI1BpE,EAAM3M,MAAO,EAAG,MAIzBuP,OAAQ,CAEP3E,IAAK,SAAUqG,GACd,IAAIC,EAAmBD,EAAiBlL,QAASuF,EAAWC,GAAYlI,cACxE,MAA4B,MAArB4N,EACN,WACC,OAAO,GAER,SAAU9N,GACT,OAAOD,GAAUC,EAAM+N,KAI1BvG,MAAO,SAAUgF,GAChB,IAAIwB,EAAU1H,EAAYkG,EAAY,KAEtC,OAAOwB,IACJA,EAAU,IAAIjJ,OAAQ,MAAQF,GAAa,IAAM2H,EAClD,IAAM3H,GAAa,SACpByB,EAAYkG,EAAW,SAAUxM,GAChC,OAAOgO,EAAQhK,KACY,iBAAnBhE,EAAKwM,WAA0BxM,EAAKwM,WACb,oBAAtBxM,EAAKjB,cACXiB,EAAKjB,aAAc,UACpB,OAKL2I,KAAM,SAAUzH,EAAMgO,EAAUC,GAC/B,OAAO,SAAUlO,GAChB,IAAImO,EAAS/E,EAAK4D,KAAMhN,EAAMC,GAE9B,OAAe,MAAVkO,EACgB,OAAbF,GAEFA,IAINE,GAAU,GAEQ,MAAbF,EACGE,IAAWD,EAED,OAAbD,EACGE,IAAWD,EAED,OAAbD,EACGC,GAAqC,IAA5BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,IAAoC,EAA3BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,GAASC,EAAOtR,OAAQqR,EAAMpO,UAAaoO,EAEjC,OAAbD,GAEkB,GADb,IAAME,EAAOvL,QAASoE,EAAa,KAAQ,KAClD5J,QAAS8Q,GAEM,OAAbD,IACGE,IAAWD,GAASC,EAAOtR,MAAO,EAAGqR,EAAMpO,OAAS,KAAQoO,EAAQ,QAO9EtG,MAAO,SAAU1J,EAAMkQ,EAAMC,EAAWnN,EAAOE,GAC9C,IAAIkN,EAAgC,QAAvBpQ,EAAKrB,MAAO,EAAG,GAC3B0R,EAA+B,SAArBrQ,EAAKrB,OAAQ,GACvB2R,EAAkB,YAATJ,EAEV,OAAiB,IAAVlN,GAAwB,IAATE,EAGrB,SAAUpB,GACT,QAASA,EAAKb,YAGf,SAAUa,EAAMyO,EAAUC,GACzB,IAAI/D,EAAOgE,EAAYnQ,EAAMoQ,EAAWC,EACvC9F,EAAMuF,IAAWC,EAAU,cAAgB,kBAC3CO,EAAS9O,EAAKb,WACdc,EAAOuO,GAAUxO,EAAKD,SAAS
G,cAC/B6O,GAAYL,IAAQF,EACpBQ,GAAO,EAER,GAAKF,EAAS,CAGb,GAAKR,EAAS,CACb,MAAQvF,EAAM,CACbvK,EAAOwB,EACP,MAAUxB,EAAOA,EAAMuK,GACtB,GAAKyF,EACJzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,SAEL,OAAO,EAKT+Q,EAAQ9F,EAAe,SAAT7K,IAAoB2Q,GAAS,cAE5C,OAAO,EAMR,GAHAA,EAAQ,CAAEN,EAAUO,EAAOG,WAAaH,EAAOI,WAG1CX,GAAWQ,EAAW,CAM1BC,GADAJ,GADAjE,GADAgE,EAAaG,EAAQrM,KAAeqM,EAAQrM,GAAY,KACpCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KACzBA,EAAO,GAC3BnM,EAAOoQ,GAAaE,EAAO7F,WAAY2F,GAEvC,MAAUpQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAG3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAGlC,GAAuB,IAAlBpG,EAAKV,YAAoBkR,GAAQxQ,IAASwB,EAAO,CACrD2O,EAAYzQ,GAAS,CAAEkI,EAASwI,EAAWI,GAC3C,YAgBF,GATKD,IAIJC,EADAJ,GADAjE,GADAgE,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,KAChCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KAMhC,IAATqE,EAGJ,MAAUxQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAC3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAElC,IAAO4J,EACNzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,aACHkR,IAGGD,KACJJ,EAAanQ,EAAMiE,KAChBjE,EAAMiE,GAAY,KACTvE,GAAS,CAAEkI,EAAS4I,IAG5BxQ,IAASwB,GACb,MASL,OADAgP,GAAQ5N,KACQF,GAAW8N,EAAO9N,GAAU,GAAqB,GAAhB8N,EAAO9N,KAK5DyG,OAAQ,SAAUwH,EAAQ5D,GAMzB,IAAI6D,EACHzP,EAAKmG,EAAKiB,QAASoI,IAAYrJ,EAAKuJ,WAAYF,EAAOjP,gBACtDkJ,EAAKtG,MAAO,uBAAyBqM,GAKvC,OAAKxP,EAAI8C,GACD9C,EAAI4L,GAIK,EAAZ5L,EAAGG,QACPsP,EAAO,CAAED,EAAQA,EAAQ,GAAI5D,GACtBzF,EAAKuJ,WAAW7R,eAAgB2R,EAAOjP,eAC7C6K,EAAc,SAAU1B,EAAMlF,GAC7B,IAAImL,EACHC,EAAU5P,EAAI0J,EAAMkC,GACpB7M,EAAI6Q,EAAQzP,OACb,MAAQpB,IAEP2K,EADAiG,EAAMlS,GAAQJ,KAAMqM,EAAMkG,EAAS7Q,OAClByF,EAASmL,GAAQC,EAAS7Q,MAG7C,SAAUsB,GACT,OAAOL,EAAIK,EAAM,EAAGoP,KAIhBzP,IAIToH,QAAS,CAGRyI,IAAKzE,EAAc,SAAUtL,GAK5B,IAAIgN,EAAQ,GACXhJ,EAAU,GACVgM,EAAUC,GAASjQ,EAASmD,QAASkC,GAAU,OAEhD,OAAO2K,EAAShN,GACfsI,EAAc,SAAU1B,EAAMlF,EAASsK,EAAUC,GAChD,IAAI1O,EACH2P,EAAYF,EAASpG,EAAM,KAAMqF,EAAK,IACtChQ,EAAI2K,EAAKvJ,OAGV,MAAQpB,KACAsB,EAAO2P,EAAWjR,MACxB2K,EAAM3K,KAASyF,EAASzF,GAAMsB,MAIjC,SAAUA,EAAMyO,EAAUC,GAOzB,OANAjC,EAAO,GAAMzM,EACbyP,EAAShD,EAAO,KAAMiC,EAAKjL,GAI3BgJ,EAAO,GAAM,MACLhJ,EAAQmB,SAInBgL,IAAK7E,EAAc,SAAUtL,GAC5B,OAAO,SAAUO,GAChB,OAAuC,EAAhCoJ,EAAM3J,EAAUO,GAAOF,UAIhCkF,SAAU+F,EAAc,SAAUjM,GAEjC,OADAA,EAAOA,EAAK8D,QAASuF,EAAWC,GACzB,SAAUpI,GAChB,OAAsE,GAA7DA,EAAKqD,aAAe7D,GAAOV,KAAMkB,IAAS5C,QAAS0B,MAW9D+Q,KAAM9E,EAAc,SAAU8E,GAO7B,OAJMxI,EAAYrD,KAAM6L,GAAQ,KAC/BzG,EAAKtG,MAAO,qBAAuB+M,GAEpCA,EAAOA,EAAKjN,QAASuF,EAAWC,GAAYlI,cACrC,SAAUF,GAChB,IAAI8P,EACJ,GACC,GAAOA,EAAW5J,EACjBlG,EAAK6P,KACL7P,EAAKjB,aAAc,aAAgBiB,EAAKjB,aAAc,QAGtD,OADA+Q,EAAWA,EAAS5P,iBACA2P,GAA2C,IAAnCC,EAAS1S,QAASyS,EAAO,YAE3C7P,EAAOA,EAAKb,aAAkC,IAAlBa,EAAKlC,UAC7C,OAAO,KAKTqE,OAAQ,SAAUnC,GACjB,IAAI+P,EAAOzT,GAAO0T,UAAY1T,GAAO0T,SAASD,KAC9C,OAAOA,GAAQA,EAAKlT,MAAO,KAAQmD,EAAK8J,IAGzCmG,KAAM,SAAUjQ,GACf,OAAOA,IAASsD,GAGjB4M,MAAO,SAAUlQ,GAChB,OAAOA,IA5oCV,WACC,IACC,OAAO7D,EAASgU,cACf,MAAQC,KAyoCQC,IACflU,EAASmU,eACLtQ,EAAK9B,MAAQ8B,EAAKuQ,OAASvQ,EAAKwQ,WAItCC,QAASrF,GAAsB,GAC/BtC,SAAUsC,GAAsB,GAEhCsF,QAAS,SAAU1Q,GAIlB,OAASD,GAAUC,EAAM,YAAeA,EAAK0Q,SAC1C3Q,GAAUC,EAAM,aAAgBA,EAAK2Q,UAGzCA,SAAU,SAAU3Q,GAWnB,OALKA,EAAKb,YAETa,EAAKb,WAAWyR,eAGQ,IAAlB5Q,EAAK2Q,UAIbE,MAAO,SAAU7Q,GAMhB,IAAMA,EAAOA,EAAKiP,WAAYjP,EAAMA,EAAOA,EAAK8Q,YAC/C,GAAK9Q,EAAKlC,SAAW,EACpB,OAAO,EAGT,OAAO,GAGRgR,OAAQ,SAAU9O,GACjB,OAAQ8F,EAAKiB,QAAQ8J,MAAO7Q,IAI7B+Q,OAAQ,SAAU/Q,GACjB,OAAOgI,EAAQhE,KAAMhE,EAAKD,WAG3B0M,MAAO,SAAUzM,GAChB,OAAO+H,EAAQ/D,KAAMhE,EAAKD,WAG3BiR,OAAQ,SAAUhR,GACjB,OAAOD,GAAUC,EAAM,UAA2B,WAAdA,EAAK9B,MACxC6B,GAAUC,EAAM,WAGlBlB,KAAM,SAAUkB,GACf,IAAIgN,EACJ,OAAOjN,GAAUC,EAAM,UAA2B,SAAdA,EAAK9B,OAKI,OAAxC8O,EAAOhN,EAAKjB,aAAc,UACN,SAAvBiO,EAAK9M,gBAIRgB,MAAOoK,EAAwB,WAC9B,MAAO,CAAE,KAGVlK,KAAMkK,EAAwB,SAAU2F,EAAenR,GACtD,MAAO,CAAEA,EAAS,
KAGnBqB,GAAImK,EAAwB,SAAU2F,EAAenR,EAAQyL,GAC5D,MAAO,CAAEA,EAAW,EAAIA,EAAWzL,EAASyL,KAG7ClK,KAAMiK,EAAwB,SAAUE,EAAc1L,GAErD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGRhK,IAAK8J,EAAwB,SAAUE,EAAc1L,GAEpD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR0F,GAAI5F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAC3D,IAAI7M,EAUJ,IAPCA,EADI6M,EAAW,EACXA,EAAWzL,EACOA,EAAXyL,EACPzL,EAEAyL,EAGU,KAAL7M,GACT8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR2F,GAAI7F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAE3D,IADA,IAAI7M,EAAI6M,EAAW,EAAIA,EAAWzL,EAASyL,IACjC7M,EAAIoB,GACb0L,EAAarO,KAAMuB,GAEpB,OAAO8M,OAKLzE,QAAQqK,IAAMtL,EAAKiB,QAAQ5F,GAGrB,CAAEkQ,OAAO,EAAMC,UAAU,EAAMC,MAAM,EAAMC,UAAU,EAAMC,OAAO,GAC5E3L,EAAKiB,QAASrI,GAAMwM,EAAmBxM,GAExC,IAAMA,IAAK,CAAEgT,QAAQ,EAAMC,OAAO,GACjC7L,EAAKiB,QAASrI,GAAMyM,EAAoBzM,GAIzC,SAAS2Q,KAIT,SAASlF,EAAU1K,EAAUmS,GAC5B,IAAIrC,EAAS/F,EAAOqI,EAAQ3T,EAC3B4T,EAAOrI,EAAQsI,EACfC,EAASxL,EAAY/G,EAAW,KAEjC,GAAKuS,EACJ,OAAOJ,EAAY,EAAII,EAAOnV,MAAO,GAGtCiV,EAAQrS,EACRgK,EAAS,GACTsI,EAAajM,EAAK6H,UAElB,MAAQmE,EAAQ,CA2Bf,IAAM5T,KAxBAqR,KAAa/F,EAAQvC,EAAO2C,KAAMkI,MAClCtI,IAGJsI,EAAQA,EAAMjV,MAAO2M,EAAO,GAAI1J,SAAYgS,GAE7CrI,EAAOtM,KAAQ0U,EAAS,KAGzBtC,GAAU,GAGH/F,EAAQtC,EAAmB0C,KAAMkI,MACvCvC,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EAGPrR,KAAMsL,EAAO,GAAI5G,QAASkC,GAAU,OAErCgN,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAIhBgG,EAAKsG,SACX5C,EAAQlC,EAAWpJ,GAAO0L,KAAMkI,KAAgBC,EAAY7T,MAChEsL,EAAQuI,EAAY7T,GAAQsL,MAC9B+F,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EACPrR,KAAMA,EACNiG,QAASqF,IAEVsI,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAI/B,IAAMyP,EACL,MAOF,OAAKqC,EACGE,EAAMhS,OAGPgS,EACN1I,EAAKtG,MAAOrD,GAGZ+G,EAAY/G,EAAUgK,GAAS5M,MAAO,GAGxC,SAASuN,EAAYyH,GAIpB,IAHA,IAAInT,EAAI,EACP+C,EAAMoQ,EAAO/R,OACbL,EAAW,GACJf,EAAI+C,EAAK/C,IAChBe,GAAYoS,EAAQnT,GAAI4F,MAEzB,OAAO7E,EAGR,SAASoJ,EAAe4G,EAASwC,EAAYC,GAC5C,IAAInJ,EAAMkJ,EAAWlJ,IACpBoJ,EAAOF,EAAWjJ,KAClB4B,EAAMuH,GAAQpJ,EACdqJ,EAAmBF,GAAgB,eAARtH,EAC3ByH,EAAWhM,IAEZ,OAAO4L,EAAW/Q,MAGjB,SAAUlB,EAAMN,EAASgP,GACxB,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAC3B,OAAO3C,EAASzP,EAAMN,EAASgP,GAGjC,OAAO,GAIR,SAAU1O,EAAMN,EAASgP,GACxB,IAAI4D,EAAU3D,EACb4D,EAAW,CAAEnM,EAASiM,GAGvB,GAAK3D,GACJ,MAAU1O,EAAOA,EAAM+I,GACtB,IAAuB,IAAlB/I,EAAKlC,UAAkBsU,IACtB3C,EAASzP,EAAMN,EAASgP,GAC5B,OAAO,OAKV,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAG3B,GAFAzD,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,IAE/C0P,GAAQpS,GAAUC,EAAMmS,GAC5BnS,EAAOA,EAAM+I,IAAS/I,MAChB,CAAA,IAAOsS,EAAW3D,EAAY/D,KACpC0H,EAAU,KAAQlM,GAAWkM,EAAU,KAAQD,EAG/C,OAASE,EAAU,GAAMD,EAAU,GAOnC,IAHA3D,EAAY/D,GAAQ2H,GAGH,GAAM9C,EAASzP,EAAMN,EAASgP,GAC9C,OAAO,EAMZ,OAAO,GAIV,SAAS8D,EAAgBC,GACxB,OAAyB,EAAlBA,EAAS3S,OACf,SAAUE,EAAMN,EAASgP,GACxB,IAAIhQ,EAAI+T,EAAS3S,OACjB,MAAQpB,IACP,IAAM+T,EAAU/T,GAAKsB,EAAMN,EAASgP,GACnC,OAAO,EAGT,OAAO,GAER+D,EAAU,GAYZ,SAASC,EAAU/C,EAAW3O,EAAKoL,EAAQ1M,EAASgP,GAOnD,IANA,IAAI1O,EACH2S,EAAe,GACfjU,EAAI,EACJ+C,EAAMkO,EAAU7P,OAChB8S,EAAgB,MAAP5R,EAEFtC,EAAI+C,EAAK/C,KACTsB,EAAO2P,EAAWjR,MAClB0N,IAAUA,EAAQpM,EAAMN,EAASgP,KACtCiE,EAAaxV,KAAM6C,GACd4S,GACJ5R,EAAI7D,KAAMuB,KAMd,OAAOiU,EAGR,SAASE,GAAYlF,EAAWlO,EAAUgQ,EAASqD,EAAYC,EAAYC,GAO1E,OANKF,IAAeA,EAAYrQ,KAC/BqQ,EAAaD,GAAYC,IAErBC,IAAeA,EAAYtQ,KAC/BsQ,EAAaF,GAAYE,EAAYC,IAE/BjI,EAAc,SAAU1B,EAAM5F,EAAS/D,EAASgP,GACtD,IAAIuE,EAAMvU,EAAGsB,EAAMkT,EAClBC,EAAS,GACTC,EAAU,GACVC,EAAc5P,EAAQ3D,OAGtBY,EAAQ2I,GA5CX,SAA2B5J,EAAU6T,EAAU7P,GAG9C,IAFA,IAAI/E,EAAI,EACP+C,EAAM6R,EAASxT,OACRpB,EAAI+C,EAAK/C,IAChB0K,EAAM3J,EAAU6T,EAAU5U,GAAK+E,GAEhC,OAAOA,EAuCJ8P,CAAkB9T,GAAY,IAC7BC,EAAQ5B,SAAW,CAAE4B,GAAYA,EAAS,IAG
5C8T,GAAY7F,IAAetE,GAAS5J,EAEnCiB,EADAgS,EAAUhS,EAAOyS,EAAQxF,EAAWjO,EAASgP,GAsB/C,GAnBKe,EAaJA,EAAS+D,EATTN,EAAaH,IAAgB1J,EAAOsE,EAAY0F,GAAeP,GAG9D,GAGArP,EAG+B/D,EAASgP,GAEzCwE,EAAaM,EAITV,EAAa,CACjBG,EAAOP,EAAUQ,EAAYE,GAC7BN,EAAYG,EAAM,GAAIvT,EAASgP,GAG/BhQ,EAAIuU,EAAKnT,OACT,MAAQpB,KACAsB,EAAOiT,EAAMvU,MACnBwU,EAAYE,EAAS1U,MAAW8U,EAAWJ,EAAS1U,IAAQsB,IAK/D,GAAKqJ,GACJ,GAAK0J,GAAcpF,EAAY,CAC9B,GAAKoF,EAAa,CAGjBE,EAAO,GACPvU,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,KAGzBuU,EAAK9V,KAAQqW,EAAW9U,GAAMsB,GAGhC+S,EAAY,KAAQG,EAAa,GAAMD,EAAMvE,GAI9ChQ,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,MAC2C,GAAlEuU,EAAOF,EAAa3V,GAAQJ,KAAMqM,EAAMrJ,GAASmT,EAAQzU,MAE3D2K,EAAM4J,KAAYxP,EAASwP,GAASjT,UAOvCkT,EAAaR,EACZQ,IAAezP,EACdyP,EAAWrR,OAAQwR,EAAaH,EAAWpT,QAC3CoT,GAEGH,EACJA,EAAY,KAAMtP,EAASyP,EAAYxE,GAEvCvR,EAAKD,MAAOuG,EAASyP,KAMzB,SAASO,GAAmB5B,GA+B3B,IA9BA,IAAI6B,EAAcjE,EAAS/N,EAC1BD,EAAMoQ,EAAO/R,OACb6T,EAAkB7N,EAAKwH,SAAUuE,EAAQ,GAAI3T,MAC7C0V,EAAmBD,GAAmB7N,EAAKwH,SAAU,KACrD5O,EAAIiV,EAAkB,EAAI,EAG1BE,EAAehL,EAAe,SAAU7I,GACvC,OAAOA,IAAS0T,GACdE,GAAkB,GACrBE,EAAkBjL,EAAe,SAAU7I,GAC1C,OAA6C,EAAtC5C,GAAQJ,KAAM0W,EAAc1T,IACjC4T,GAAkB,GACrBnB,EAAW,CAAE,SAAUzS,EAAMN,EAASgP,GAMrC,IAAI/N,GAASgT,IAAqBjF,GAAOhP,GAAWqG,MACjD2N,EAAehU,GAAU5B,SAC1B+V,EAAc7T,EAAMN,EAASgP,GAC7BoF,EAAiB9T,EAAMN,EAASgP,IAKlC,OADAgF,EAAe,KACR/S,IAGDjC,EAAI+C,EAAK/C,IAChB,GAAO+Q,EAAU3J,EAAKwH,SAAUuE,EAAQnT,GAAIR,MAC3CuU,EAAW,CAAE5J,EAAe2J,EAAgBC,GAAYhD,QAClD,CAIN,IAHAA,EAAU3J,EAAKsG,OAAQyF,EAAQnT,GAAIR,MAAOhB,MAAO,KAAM2U,EAAQnT,GAAIyF,UAGrD1B,GAAY,CAIzB,IADAf,IAAMhD,EACEgD,EAAID,EAAKC,IAChB,GAAKoE,EAAKwH,SAAUuE,EAAQnQ,GAAIxD,MAC/B,MAGF,OAAO2U,GACF,EAAJnU,GAAS8T,EAAgBC,GACrB,EAAJ/T,GAAS0L,EAGRyH,EAAOhV,MAAO,EAAG6B,EAAI,GACnBzB,OAAQ,CAAEqH,MAAgC,MAAzBuN,EAAQnT,EAAI,GAAIR,KAAe,IAAM,MACvD0E,QAASkC,GAAU,MACrB2K,EACA/Q,EAAIgD,GAAK+R,GAAmB5B,EAAOhV,MAAO6B,EAAGgD,IAC7CA,EAAID,GAAOgS,GAAqB5B,EAASA,EAAOhV,MAAO6E,IACvDA,EAAID,GAAO2I,EAAYyH,IAGzBY,EAAStV,KAAMsS,GAIjB,OAAO+C,EAAgBC,GAiIxB,SAAS/C,GAASjQ,EAAU+J,GAC3B,IAAI9K,EA/H8BqV,EAAiBC,EAC/CC,EACHC,EACAC,EA6HAH,EAAc,GACdD,EAAkB,GAClB/B,EAASvL,EAAehH,EAAW,KAEpC,IAAMuS,EAAS,CAGRxI,IACLA,EAAQW,EAAU1K,IAEnBf,EAAI8K,EAAM1J,OACV,MAAQpB,KACPsT,EAASyB,GAAmBjK,EAAO9K,KACtB+D,GACZuR,EAAY7W,KAAM6U,GAElB+B,EAAgB5W,KAAM6U,IAKxBA,EAASvL,EAAehH,GArJSsU,EAsJNA,EArJxBE,EAA6B,GADkBD,EAsJNA,GArJrBlU,OACvBoU,EAAqC,EAAzBH,EAAgBjU,OAC5BqU,EAAe,SAAU9K,EAAM3J,EAASgP,EAAKjL,EAAS2Q,GACrD,IAAIpU,EAAM0B,EAAG+N,EACZ4E,EAAe,EACf3V,EAAI,IACJiR,EAAYtG,GAAQ,GACpBiL,EAAa,GACbC,EAAgBxO,EAGhBrF,EAAQ2I,GAAQ6K,GAAapO,EAAKsD,KAAK3B,IAAK,IAAK2M,GAGjDI,EAAkBpO,GAA4B,MAAjBmO,EAAwB,EAAI7R,KAAKC,UAAY,GAC1ElB,EAAMf,EAAMZ,OAeb,IAbKsU,IAMJrO,EAAmBrG,GAAWvD,GAAYuD,GAAW0U,GAO9C1V,IAAM+C,GAAgC,OAAvBzB,EAAOU,EAAOhC,IAAeA,IAAM,CACzD,GAAKwV,GAAalU,EAAO,CACxB0B,EAAI,EAMEhC,GAAWM,EAAK+D,eAAiB5H,IACtCwM,EAAa3I,GACb0O,GAAOxI,GAER,MAAUuJ,EAAUsE,EAAiBrS,KACpC,GAAK+N,EAASzP,EAAMN,GAAWvD,EAAUuS,GAAQ,CAChDvR,EAAKH,KAAMyG,EAASzD,GACpB,MAGGoU,IACJhO,EAAUoO,GAKPP,KAGGjU,GAAQyP,GAAWzP,IACzBqU,IAIIhL,GACJsG,EAAUxS,KAAM6C,IAgBnB,GATAqU,GAAgB3V,EASXuV,GAASvV,IAAM2V,EAAe,CAClC3S,EAAI,EACJ,MAAU+N,EAAUuE,EAAatS,KAChC+N,EAASE,EAAW2E,EAAY5U,EAASgP,GAG1C,GAAKrF,EAAO,CAGX,GAAoB,EAAfgL,EACJ,MAAQ3V,IACCiR,EAAWjR,IAAO4V,EAAY5V,KACrC4V,EAAY5V,GAAMkG,GAAI5H,KAAMyG,IAM/B6Q,EAAa5B,EAAU4B,GAIxBnX,EAAKD,MAAOuG,EAAS6Q,GAGhBF,IAAc/K,GAA4B,EAApBiL,EAAWxU,QACG,EAAtCuU,EAAeL,EAAYlU,QAE7BN,GAAO0N,WAAYzJ,GAUrB,OALK2Q,IACJhO,EAAUoO,EACVzO,EAAmBwO,GAGb5E,GAGFsE,EACNlJ,EAAcoJ,GACdA,KA8BO1U,SAAWA,EAEnB,OAAOuS,EAYR,SAASvH,GAAQhL,EAAUC,EAAS+D,EAAS4F,GAC5C,IAAI3K,EAAGmT,EAAQ4C,EA
AOvW,EAAMkL,EAC3BsL,EAA+B,mBAAbjV,GAA2BA,EAC7C+J,GAASH,GAAQc,EAAY1K,EAAWiV,EAASjV,UAAYA,GAM9D,GAJAgE,EAAUA,GAAW,GAIC,IAAjB+F,EAAM1J,OAAe,CAIzB,GAAqB,GADrB+R,EAASrI,EAAO,GAAMA,EAAO,GAAI3M,MAAO,IAC5BiD,QAA+C,QAA/B2U,EAAQ5C,EAAQ,IAAM3T,MAC3B,IAArBwB,EAAQ5B,UAAkBoI,GAAkBJ,EAAKwH,SAAUuE,EAAQ,GAAI3T,MAAS,CAMjF,KAJAwB,GAAYoG,EAAKsD,KAAK7B,GACrBkN,EAAMtQ,QAAS,GAAIvB,QAASuF,EAAWC,GACvC1I,IACI,IAAM,IAEV,OAAO+D,EAGIiR,IACXhV,EAAUA,EAAQP,YAGnBM,EAAWA,EAAS5C,MAAOgV,EAAO/G,QAAQxG,MAAMxE,QAIjDpB,EAAI4I,EAAUQ,aAAa9D,KAAMvE,GAAa,EAAIoS,EAAO/R,OACzD,MAAQpB,IAAM,CAIb,GAHA+V,EAAQ5C,EAAQnT,GAGXoH,EAAKwH,SAAYpP,EAAOuW,EAAMvW,MAClC,MAED,IAAOkL,EAAOtD,EAAKsD,KAAMlL,MAGjBmL,EAAOD,EACbqL,EAAMtQ,QAAS,GAAIvB,QAASuF,EAAWC,GACvCF,EAASlE,KAAM6N,EAAQ,GAAI3T,OAC1B+L,EAAavK,EAAQP,aAAgBO,IACjC,CAKL,GAFAmS,EAAOhQ,OAAQnD,EAAG,KAClBe,EAAW4J,EAAKvJ,QAAUsK,EAAYyH,IAGrC,OADA1U,EAAKD,MAAOuG,EAAS4F,GACd5F,EAGR,QAeJ,OAPEiR,GAAYhF,GAASjQ,EAAU+J,IAChCH,EACA3J,GACCwG,EACDzC,GACC/D,GAAWwI,EAASlE,KAAMvE,IAAcwK,EAAavK,EAAQP,aAAgBO,GAExE+D,EArlBR4L,EAAWlP,UAAY2F,EAAK6O,QAAU7O,EAAKiB,QAC3CjB,EAAKuJ,WAAa,IAAIA,EA2lBtB1R,GAAQyP,WAAa3K,EAAQiC,MAAO,IAAK9C,KAAM+E,GAAY0D,KAAM,MAAS5H,EAG1EkG,IAIAhL,GAAQiP,aAAe5B,EAAQ,SAAUC,GAGxC,OAA4E,EAArEA,EAAG7F,wBAAyBjJ,EAAS0C,cAAe,eAG5DW,GAAO4J,KAAOA,EAGd5J,GAAOqN,KAAM,KAAQrN,GAAOqN,KAAK9F,QACjCvH,GAAOoV,OAASpV,GAAO0N,WAIvB9D,EAAKsG,QAAUA,GACftG,EAAKqB,OAASA,GACdrB,EAAKT,YAAcA,EACnBS,EAAKe,SAAWA,EAEhBf,EAAKf,OAAS7I,GAAOkG,eACrB0D,EAAKyL,QAAUrV,GAAOV,KACtBsK,EAAK0L,MAAQtV,GAAOmE,SACpByF,EAAK2L,UAAYvV,GAAOqN,KACxBzD,EAAKzL,QAAU6B,GAAO7B,QACtByL,EAAK8D,WAAa1N,GAAO0N,WAniEzB,GA0iEA,IAAInE,EAAM,SAAU/I,EAAM+I,EAAKiM,GAC9B,IAAIzF,EAAU,GACb0F,OAAqBzS,IAAVwS,EAEZ,OAAUhV,EAAOA,EAAM+I,KAA6B,IAAlB/I,EAAKlC,SACtC,GAAuB,IAAlBkC,EAAKlC,SAAiB,CAC1B,GAAKmX,GAAYzV,GAAQQ,GAAOkV,GAAIF,GACnC,MAEDzF,EAAQpS,KAAM6C,GAGhB,OAAOuP,GAIJ4F,EAAW,SAAUC,EAAGpV,GAG3B,IAFA,IAAIuP,EAAU,GAEN6F,EAAGA,EAAIA,EAAEtE,YACI,IAAfsE,EAAEtX,UAAkBsX,IAAMpV,GAC9BuP,EAAQpS,KAAMiY,GAIhB,OAAO7F,GAIJ8F,EAAgB7V,GAAOqN,KAAKrD,MAAM1B,aAElCwN,EAAa,kEAKjB,SAASC,EAAQzI,EAAU0I,EAAWhG,GACrC,OAAK5R,EAAY4X,GACThW,GAAO8B,KAAMwL,EAAU,SAAU9M,EAAMtB,GAC7C,QAAS8W,EAAUxY,KAAMgD,EAAMtB,EAAGsB,KAAWwP,IAK1CgG,EAAU1X,SACP0B,GAAO8B,KAAMwL,EAAU,SAAU9M,GACvC,OAASA,IAASwV,IAAgBhG,IAKV,iBAAdgG,EACJhW,GAAO8B,KAAMwL,EAAU,SAAU9M,GACvC,OAA4C,EAAnC5C,GAAQJ,KAAMwY,EAAWxV,KAAkBwP,IAK/ChQ,GAAO4M,OAAQoJ,EAAW1I,EAAU0C,GAG5ChQ,GAAO4M,OAAS,SAAUS,EAAMnM,EAAO8O,GACtC,IAAIxP,EAAOU,EAAO,GAMlB,OAJK8O,IACJ3C,EAAO,QAAUA,EAAO,KAGH,IAAjBnM,EAAMZ,QAAkC,IAAlBE,EAAKlC,SACxB0B,GAAO4J,KAAK2D,gBAAiB/M,EAAM6M,GAAS,CAAE7M,GAAS,GAGxDR,GAAO4J,KAAKjF,QAAS0I,EAAMrN,GAAO8B,KAAMZ,EAAO,SAAUV,GAC/D,OAAyB,IAAlBA,EAAKlC,aAId0B,GAAOG,GAAGmC,OAAQ,CACjBsH,KAAM,SAAU3J,GACf,IAAIf,EAAGiC,EACNc,EAAMlF,KAAKuD,OACX2V,EAAOlZ,KAER,GAAyB,iBAAbkD,EACX,OAAOlD,KAAKkE,UAAWjB,GAAQC,GAAW2M,OAAQ,WACjD,IAAM1N,EAAI,EAAGA,EAAI+C,EAAK/C,IACrB,GAAKc,GAAOwF,SAAUyQ,EAAM/W,GAAKnC,MAChC,OAAO,KAQX,IAFAoE,EAAMpE,KAAKkE,UAAW,IAEhB/B,EAAI,EAAGA,EAAI+C,EAAK/C,IACrBc,GAAO4J,KAAM3J,EAAUgW,EAAM/W,GAAKiC,GAGnC,OAAa,EAANc,EAAUjC,GAAO0N,WAAYvM,GAAQA,GAE7CyL,OAAQ,SAAU3M,GACjB,OAAOlD,KAAKkE,UAAW8U,EAAQhZ,KAAMkD,GAAY,IAAI,KAEtD+P,IAAK,SAAU/P,GACd,OAAOlD,KAAKkE,UAAW8U,EAAQhZ,KAAMkD,GAAY,IAAI,KAEtDyV,GAAI,SAAUzV,GACb,QAAS8V,EACRhZ,KAIoB,iBAAbkD,GAAyB4V,EAAcrR,KAAMvE,GACnDD,GAAQC,GACRA,GAAY,IACb,GACCK,UASJ,IAAI4V,EAMHzN,EAAa,uCAENzI,GAAOG,GAAGC,KAAO,SAAUH,EAAUC,EAASuQ,GACpD,IAAIzG,EAAOxJ,EAGX,IAAMP,EACL,OAAOlD,KAQR,GAHA0T,EAAOA,GAAQyF,EAGU,iBAAbjW,EAAwB,CAanC,KAPC+J,EALsB,MAAlB/J,EAAU,IACsB,MAApCA,EAAUA,EAASK,OAAS,IACT,GAAnBL,EAASK,OAGD,CAAE,KAAML,EAAU,MAGlBwI,EAAW2B,
KAAMnK,MAIV+J,EAAO,IAAQ9J,EA6CxB,OAAMA,GAAWA,EAAQU,QACtBV,GAAWuQ,GAAO7G,KAAM3J,GAK1BlD,KAAK8D,YAAaX,GAAU0J,KAAM3J,GAhDzC,GAAK+J,EAAO,GAAM,CAYjB,GAXA9J,EAAUA,aAAmBF,GAASE,EAAS,GAAMA,EAIrDF,GAAOoB,MAAOrE,KAAMiD,GAAOmW,UAC1BnM,EAAO,GACP9J,GAAWA,EAAQ5B,SAAW4B,EAAQqE,eAAiBrE,EAAUvD,GACjE,IAIImZ,EAAWtR,KAAMwF,EAAO,KAAShK,GAAO6C,cAAe3C,GAC3D,IAAM8J,KAAS9J,EAGT9B,EAAYrB,KAAMiN,IACtBjN,KAAMiN,GAAS9J,EAAS8J,IAIxBjN,KAAKyQ,KAAMxD,EAAO9J,EAAS8J,IAK9B,OAAOjN,KAYP,OARAyD,EAAO7D,EAAS0N,eAAgBL,EAAO,OAKtCjN,KAAM,GAAMyD,EACZzD,KAAKuD,OAAS,GAERvD,KAcH,OAAKkD,EAAS3B,UACpBvB,KAAM,GAAMkD,EACZlD,KAAKuD,OAAS,EACPvD,MAIIqB,EAAY6B,QACD+C,IAAfyN,EAAK2F,MACX3F,EAAK2F,MAAOnW,GAGZA,EAAUD,IAGLA,GAAOgE,UAAW/D,EAAUlD,QAIhC4D,UAAYX,GAAOG,GAGxB+V,EAAalW,GAAQrD,GAGrB,IAAI0Z,EAAe,iCAGlBC,EAAmB,CAClBC,UAAU,EACVC,UAAU,EACVhN,MAAM,EACNiN,MAAM,GAoFR,SAASC,EAASC,EAAKpN,GACtB,OAAUoN,EAAMA,EAAKpN,KAA4B,IAAjBoN,EAAIrY,UACpC,OAAOqY,EAnFR3W,GAAOG,GAAGmC,OAAQ,CACjB8N,IAAK,SAAUzN,GACd,IAAIiU,EAAU5W,GAAQ2C,EAAQ5F,MAC7B8Z,EAAID,EAAQtW,OAEb,OAAOvD,KAAK6P,OAAQ,WAEnB,IADA,IAAI1N,EAAI,EACAA,EAAI2X,EAAG3X,IACd,GAAKc,GAAOwF,SAAUzI,KAAM6Z,EAAS1X,IACpC,OAAO,KAMX4X,QAAS,SAAUvB,EAAWrV,GAC7B,IAAIyW,EACHzX,EAAI,EACJ2X,EAAI9Z,KAAKuD,OACTyP,EAAU,GACV6G,EAA+B,iBAAdrB,GAA0BvV,GAAQuV,GAGpD,IAAMM,EAAcrR,KAAM+Q,GACzB,KAAQrW,EAAI2X,EAAG3X,IACd,IAAMyX,EAAM5Z,KAAMmC,GAAKyX,GAAOA,IAAQzW,EAASyW,EAAMA,EAAIhX,WAGxD,GAAKgX,EAAIrY,SAAW,KAAQsY,GACH,EAAxBA,EAAQG,MAAOJ,GAGE,IAAjBA,EAAIrY,UACH0B,GAAO4J,KAAK2D,gBAAiBoJ,EAAKpB,IAAgB,CAEnDxF,EAAQpS,KAAMgZ,GACd,MAMJ,OAAO5Z,KAAKkE,UAA4B,EAAjB8O,EAAQzP,OAAaN,GAAO0N,WAAYqC,GAAYA,IAI5EgH,MAAO,SAAUvW,GAGhB,OAAMA,EAKe,iBAATA,EACJ5C,GAAQJ,KAAMwC,GAAQQ,GAAQzD,KAAM,IAIrCa,GAAQJ,KAAMT,KAGpByD,EAAKI,OAASJ,EAAM,GAAMA,GAZjBzD,KAAM,IAAOA,KAAM,GAAI4C,WAAe5C,KAAK2E,QAAQsV,UAAU1W,QAAU,GAgBlF2W,IAAK,SAAUhX,EAAUC,GACxB,OAAOnD,KAAKkE,UACXjB,GAAO0N,WACN1N,GAAOoB,MAAOrE,KAAKgE,MAAOf,GAAQC,EAAUC,OAK/CgX,QAAS,SAAUjX,GAClB,OAAOlD,KAAKka,IAAiB,MAAZhX,EAChBlD,KAAKsE,WAAatE,KAAKsE,WAAWuL,OAAQ3M,OAU7CD,GAAOsB,KAAM,CACZgO,OAAQ,SAAU9O,GACjB,IAAI8O,EAAS9O,EAAKb,WAClB,OAAO2P,GAA8B,KAApBA,EAAOhR,SAAkBgR,EAAS,MAEpD6H,QAAS,SAAU3W,GAClB,OAAO+I,EAAK/I,EAAM,eAEnB4W,aAAc,SAAU5W,EAAM2E,EAAIqQ,GACjC,OAAOjM,EAAK/I,EAAM,aAAcgV,IAEjChM,KAAM,SAAUhJ,GACf,OAAOkW,EAASlW,EAAM,gBAEvBiW,KAAM,SAAUjW,GACf,OAAOkW,EAASlW,EAAM,oBAEvB6W,QAAS,SAAU7W,GAClB,OAAO+I,EAAK/I,EAAM,gBAEnBwW,QAAS,SAAUxW,GAClB,OAAO+I,EAAK/I,EAAM,oBAEnB8W,UAAW,SAAU9W,EAAM2E,EAAIqQ,GAC9B,OAAOjM,EAAK/I,EAAM,cAAegV,IAElC+B,UAAW,SAAU/W,EAAM2E,EAAIqQ,GAC9B,OAAOjM,EAAK/I,EAAM,kBAAmBgV,IAEtCG,SAAU,SAAUnV,GACnB,OAAOmV,GAAYnV,EAAKb,YAAc,IAAK8P,WAAYjP,IAExD+V,SAAU,SAAU/V,GACnB,OAAOmV,EAAUnV,EAAKiP,aAEvB+G,SAAU,SAAUhW,GACnB,OAA6B,MAAxBA,EAAKgX,iBAKTta,EAAUsD,EAAKgX,iBAERhX,EAAKgX,iBAMRjX,GAAUC,EAAM,cACpBA,EAAOA,EAAKiX,SAAWjX,GAGjBR,GAAOoB,MAAO,GAAIZ,EAAKiJ,eAE7B,SAAUhJ,EAAMN,GAClBH,GAAOG,GAAIM,GAAS,SAAU+U,EAAOvV,GACpC,IAAI8P,EAAU/P,GAAOwB,IAAKzE,KAAMoD,EAAIqV,GAuBpC,MArB0B,UAArB/U,EAAKpD,OAAQ,KACjB4C,EAAWuV,GAGPvV,GAAgC,iBAAbA,IACvB8P,EAAU/P,GAAO4M,OAAQ3M,EAAU8P,IAGjB,EAAdhT,KAAKuD,SAGHgW,EAAkB7V,IACvBT,GAAO0N,WAAYqC,GAIfsG,EAAa7R,KAAM/D,IACvBsP,EAAQ2H,WAIH3a,KAAKkE,UAAW8O,MAGzB,IAAI4H,EAAgB,oBAsOpB,SAASC,EAAUC,GAClB,OAAOA,EAER,SAASC,EAASC,GACjB,MAAMA,EAGP,SAASC,EAAYlT,EAAOmT,EAASC,EAAQC,GAC5C,IAAIC,EAEJ,IAGMtT,GAAS1G,EAAcga,EAAStT,EAAMuT,SAC1CD,EAAO5a,KAAMsH,GAAQ+B,KAAMoR,GAAUK,KAAMJ,GAGhCpT,GAAS1G,EAAcga,EAAStT,EAAMyT,MACjDH,EAAO5a,KAAMsH,EAAOmT,EAASC,GAQ7BD,EAAQva,WAAOsF,EAAW,CAAE8B,GAAQzH,MAAO8a,IAM3C,MAAQrT,GAIToT,EAAOxa,WAAOsF,EAAW,CAAE8B,KAvO7B9E,GAAOwY,UAAY,SAAUjW,GA9B7B,IAAwBA,EACnBkW,EAiCJlW,EAA6B,iBAAZA,GAlCMA,
EAmCPA,EAlCZkW,EAAS,GACbzY,GAAOsB,KAAMiB,EAAQyH,MAAO2N,IAAmB,GAAI,SAAUe,EAAGC,GAC/DF,EAAQE,IAAS,IAEXF,GA+BNzY,GAAOsC,OAAQ,GAAIC,GAEpB,IACCqW,EAGAC,EAGAC,EAGAC,EAGAC,EAAO,GAGPC,EAAQ,GAGRC,GAAe,EAGfC,EAAO,WAQN,IALAJ,EAASA,GAAUxW,EAAQ6W,KAI3BN,EAAQF,GAAS,EACTK,EAAM3Y,OAAQ4Y,GAAe,EAAI,CACxCL,EAASI,EAAM3N,QACf,QAAU4N,EAAcF,EAAK1Y,QAGmC,IAA1D0Y,EAAME,GAAcxb,MAAOmb,EAAQ,GAAKA,EAAQ,KACpDtW,EAAQ8W,cAGRH,EAAcF,EAAK1Y,OACnBuY,GAAS,GAMNtW,EAAQsW,SACbA,GAAS,GAGVD,GAAS,EAGJG,IAIHC,EADIH,EACG,GAIA,KAMV5C,EAAO,CAGNgB,IAAK,WA2BJ,OA1BK+B,IAGCH,IAAWD,IACfM,EAAcF,EAAK1Y,OAAS,EAC5B2Y,EAAMtb,KAAMkb,IAGb,SAAW5B,EAAKrH,GACf5P,GAAOsB,KAAMsO,EAAM,SAAU8I,EAAG7T,GAC1BzG,EAAYyG,GACVtC,EAAQ6S,QAAWa,EAAK7F,IAAKvL,IAClCmU,EAAKrb,KAAMkH,GAEDA,GAAOA,EAAIvE,QAA4B,WAAlBT,EAAQgF,IAGxCoS,EAAKpS,KATR,CAYKpD,WAEAoX,IAAWD,GACfO,KAGKpc,MAIRuc,OAAQ,WAYP,OAXAtZ,GAAOsB,KAAMG,UAAW,SAAUiX,EAAG7T,GACpC,IAAIkS,EACJ,OAA0D,GAAhDA,EAAQ/W,GAAOkE,QAASW,EAAKmU,EAAMjC,IAC5CiC,EAAK3W,OAAQ0U,EAAO,GAGfA,GAASmC,GACbA,MAIInc,MAKRqT,IAAK,SAAUjQ,GACd,OAAOA,GACwB,EAA9BH,GAAOkE,QAAS/D,EAAI6Y,GACN,EAAdA,EAAK1Y,QAIP+Q,MAAO,WAIN,OAHK2H,IACJA,EAAO,IAEDjc,MAMRwc,QAAS,WAGR,OAFAR,EAASE,EAAQ,GACjBD,EAAOH,EAAS,GACT9b,MAERuM,SAAU,WACT,OAAQ0P,GAMTQ,KAAM,WAKL,OAJAT,EAASE,EAAQ,GACXJ,GAAWD,IAChBI,EAAOH,EAAS,IAEV9b,MAERgc,OAAQ,WACP,QAASA,GAIVU,SAAU,SAAUvZ,EAAS0P,GAS5B,OARMmJ,IAELnJ,EAAO,CAAE1P,GADT0P,EAAOA,GAAQ,IACQvS,MAAQuS,EAAKvS,QAAUuS,GAC9CqJ,EAAMtb,KAAMiS,GACNgJ,GACLO,KAGKpc,MAIRoc,KAAM,WAEL,OADAlD,EAAKwD,SAAU1c,KAAM0E,WACd1E,MAIR+b,MAAO,WACN,QAASA,IAIZ,OAAO7C,GA4CRjW,GAAOsC,OAAQ,CAEdoX,SAAU,SAAUC,GACnB,IAAIC,EAAS,CAIX,CAAE,SAAU,WAAY5Z,GAAOwY,UAAW,UACzCxY,GAAOwY,UAAW,UAAY,GAC/B,CAAE,UAAW,OAAQxY,GAAOwY,UAAW,eACtCxY,GAAOwY,UAAW,eAAiB,EAAG,YACvC,CAAE,SAAU,OAAQxY,GAAOwY,UAAW,eACrCxY,GAAOwY,UAAW,eAAiB,EAAG,aAExCqB,EAAQ,UACRxB,EAAU,CACTwB,MAAO,WACN,OAAOA,GAERC,OAAQ,WAEP,OADAC,EAASlT,KAAMpF,WAAY6W,KAAM7W,WAC1B1E,MAERid,QAAS,SAAU7Z,GAClB,OAAOkY,EAAQE,KAAM,KAAMpY,IAI5B8Z,KAAM,WACL,IAAIC,EAAMzY,UAEV,OAAOzB,GAAO0Z,SAAU,SAAUS,GACjCna,GAAOsB,KAAMsY,EAAQ,SAAUzU,EAAIiV,GAGlC,IAAIja,EAAK/B,EAAY8b,EAAKE,EAAO,MAAWF,EAAKE,EAAO,IAKxDL,EAAUK,EAAO,IAAO,WACvB,IAAIC,EAAWla,GAAMA,EAAGzC,MAAOX,KAAM0E,WAChC4Y,GAAYjc,EAAYic,EAAShC,SACrCgC,EAAShC,UACPiC,SAAUH,EAASI,QACnB1T,KAAMsT,EAASlC,SACfK,KAAM6B,EAASjC,QAEjBiC,EAAUC,EAAO,GAAM,QACtBrd,KACAoD,EAAK,CAAEka,GAAa5Y,eAKxByY,EAAM,OACH7B,WAELE,KAAM,SAAUiC,EAAaC,EAAYC,GACxC,IAAIC,EAAW,EACf,SAAS1C,EAAS2C,EAAOb,EAAUc,EAASC,GAC3C,OAAO,WACN,IAAIC,EAAOhe,KACV6S,EAAOnO,UACPuZ,EAAa,WACZ,IAAIX,EAAU9B,EAKd,KAAKqC,EAAQD,GAAb,CAQA,IAJAN,EAAWQ,EAAQnd,MAAOqd,EAAMnL,MAIdmK,EAAS1B,UAC1B,MAAM,IAAI4C,UAAW,4BAOtB1C,EAAO8B,IAKgB,iBAAbA,GACY,mBAAbA,IACRA,EAAS9B,KAGLna,EAAYma,GAGXuC,EACJvC,EAAK/a,KACJ6c,EACApC,EAAS0C,EAAUZ,EAAUnC,EAAUkD,GACvC7C,EAAS0C,EAAUZ,EAAUjC,EAASgD,KAOvCH,IAEApC,EAAK/a,KACJ6c,EACApC,EAAS0C,EAAUZ,EAAUnC,EAAUkD,GACvC7C,EAAS0C,EAAUZ,EAAUjC,EAASgD,GACtC7C,EAAS0C,EAAUZ,EAAUnC,EAC5BmC,EAASmB,eASPL,IAAYjD,IAChBmD,OAAO/X,EACP4M,EAAO,CAAEyK,KAKRS,GAAWf,EAASoB,aAAeJ,EAAMnL,MAK7CwL,EAAUN,EACTE,EACA,WACC,IACCA,IACC,MAAQtR,GAEJ1J,GAAO0Z,SAAS2B,eACpBrb,GAAO0Z,SAAS2B,cAAe3R,EAC9B0R,EAAQ9X,OAMQqX,GAAbC,EAAQ,IAIPC,IAAY/C,IAChBiD,OAAO/X,EACP4M,EAAO,CAAElG,IAGVqQ,EAASuB,WAAYP,EAAMnL,MAS3BgL,EACJQ,KAKKpb,GAAO0Z,SAAS6B,aACpBH,EAAQ9X,MAAQtD,GAAO0Z,SAAS6B,eAMrBvb,GAAO0Z,SAAS8B,eAC3BJ,EAAQ9X,MAAQtD,GAAO0Z,SAAS8B,gBAEjC1e,GAAO2e,WAAYL,KAKtB,OAAOpb,GAAO0Z,SAAU,SAAUS,GAGjCP,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYsc,GACXA,EACA9C,EACDuC,EAASe,aAKXtB,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYoc,GACXA,EACA5C,IAKHgC,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAA
Yqc,GACXA,EACA3C,MAGAO,WAKLA,QAAS,SAAUha,GAClB,OAAc,MAAPA,EAAc2B,GAAOsC,OAAQjE,EAAKga,GAAYA,IAGvD0B,EAAW,GAkEZ,OA/DA/Z,GAAOsB,KAAMsY,EAAQ,SAAU1a,EAAGkb,GACjC,IAAIpB,EAAOoB,EAAO,GACjBsB,EAActB,EAAO,GAKtB/B,EAAS+B,EAAO,IAAQpB,EAAK/B,IAGxByE,GACJ1C,EAAK/B,IACJ,WAIC4C,EAAQ6B,GAKT9B,EAAQ,EAAI1a,GAAK,GAAIqa,QAIrBK,EAAQ,EAAI1a,GAAK,GAAIqa,QAGrBK,EAAQ,GAAK,GAAIJ,KAGjBI,EAAQ,GAAK,GAAIJ,MAOnBR,EAAK/B,IAAKmD,EAAO,GAAIjB,MAKrBY,EAAUK,EAAO,IAAQ,WAExB,OADAL,EAAUK,EAAO,GAAM,QAAUrd,OAASgd,OAAW/W,EAAYjG,KAAM0E,WAChE1E,MAMRgd,EAAUK,EAAO,GAAM,QAAWpB,EAAKS,WAIxCpB,EAAQA,QAAS0B,GAGZJ,GACJA,EAAKnc,KAAMuc,EAAUA,GAIfA,GAIR4B,KAAM,SAAUC,GACf,IAGCC,EAAYpa,UAAUnB,OAGtBpB,EAAI2c,EAGJC,EAAkBhZ,MAAO5D,GACzB6c,EAAgB1e,GAAMG,KAAMiE,WAG5Bua,EAAUhc,GAAO0Z,WAGjBuC,EAAa,SAAU/c,GACtB,OAAO,SAAU4F,GAChBgX,EAAiB5c,GAAMnC,KACvBgf,EAAe7c,GAAyB,EAAnBuC,UAAUnB,OAAajD,GAAMG,KAAMiE,WAAcqD,IAC5D+W,GACTG,EAAQb,YAAaW,EAAiBC,KAM1C,GAAKF,GAAa,IACjB7D,EAAY4D,EAAaI,EAAQnV,KAAMoV,EAAY/c,IAAM+Y,QAAS+D,EAAQ9D,QACxE2D,GAGuB,YAApBG,EAAQnC,SACZzb,EAAY2d,EAAe7c,IAAO6c,EAAe7c,GAAIqZ,OAErD,OAAOyD,EAAQzD,OAKjB,MAAQrZ,IACP8Y,EAAY+D,EAAe7c,GAAK+c,EAAY/c,GAAK8c,EAAQ9D,QAG1D,OAAO8D,EAAQ3D,aAOjB,IAAI6D,EAAc,yDAKlBlc,GAAO0Z,SAAS2B,cAAgB,SAAU/X,EAAO6Y,GAI3Crf,GAAOsf,SAAWtf,GAAOsf,QAAQC,MAAQ/Y,GAAS4Y,EAAY1X,KAAMlB,EAAM7C,OAC9E3D,GAAOsf,QAAQC,KAAM,8BAAgC/Y,EAAMgZ,QAC1DhZ,EAAMiZ,MAAOJ,IAOhBnc,GAAOwc,eAAiB,SAAUlZ,GACjCxG,GAAO2e,WAAY,WAClB,MAAMnY,KAQR,IAAImZ,EAAYzc,GAAO0Z,WAkDvB,SAASgD,IACR/f,EAASggB,oBAAqB,mBAAoBD,GAClD5f,GAAO6f,oBAAqB,OAAQD,GACpC1c,GAAOoW,QAnDRpW,GAAOG,GAAGiW,MAAQ,SAAUjW,GAY3B,OAVAsc,EACElE,KAAMpY,GAKN6Z,SAAO,SAAU1W,GACjBtD,GAAOwc,eAAgBlZ,KAGlBvG,MAGRiD,GAAOsC,OAAQ,CAGde,SAAS,EAITuZ,UAAW,EAGXxG,MAAO,SAAUyG,KAGF,IAATA,IAAkB7c,GAAO4c,UAAY5c,GAAOqD,WAKjDrD,GAAOqD,SAAU,KAGZwZ,GAAsC,IAAnB7c,GAAO4c,WAK/BH,EAAUtB,YAAaxe,EAAU,CAAEqD,QAIrCA,GAAOoW,MAAMmC,KAAOkE,EAAUlE,KAaD,aAAxB5b,EAASmgB,YACa,YAAxBngB,EAASmgB,aAA6BngB,EAASmH,gBAAgBiZ,SAGjEjgB,GAAO2e,WAAYzb,GAAOoW,QAK1BzZ,EAAS2P,iBAAkB,mBAAoBoQ,GAG/C5f,GAAOwP,iBAAkB,OAAQoQ,IAQlC,IAAIM,EAAS,SAAU9b,EAAOf,EAAIiL,EAAKtG,EAAOmY,EAAWC,EAAUC,GAClE,IAAIje,EAAI,EACP+C,EAAMf,EAAMZ,OACZ8c,EAAc,MAAPhS,EAGR,GAAuB,WAAlBvL,EAAQuL,GAEZ,IAAMlM,KADN+d,GAAY,EACD7R,EACV4R,EAAQ9b,EAAOf,EAAIjB,EAAGkM,EAAKlM,IAAK,EAAMge,EAAUC,QAI3C,QAAena,IAAV8B,IACXmY,GAAY,EAEN7e,EAAY0G,KACjBqY,GAAM,GAGFC,IAGCD,GACJhd,EAAG3C,KAAM0D,EAAO4D,GAChB3E,EAAK,OAILid,EAAOjd,EACPA,EAAK,SAAUK,EAAM6c,EAAMvY,GAC1B,OAAOsY,EAAK5f,KAAMwC,GAAQQ,GAAQsE,MAKhC3E,GACJ,KAAQjB,EAAI+C,EAAK/C,IAChBiB,EACCe,EAAOhC,GAAKkM,EAAK+R,EAChBrY,EACAA,EAAMtH,KAAM0D,EAAOhC,GAAKA,EAAGiB,EAAIe,EAAOhC,GAAKkM,KAMhD,OAAK6R,EACG/b,EAIHkc,EACGjd,EAAG3C,KAAM0D,GAGVe,EAAM9B,EAAIe,EAAO,GAAKkK,GAAQ8R,GAKlCI,EAAY,QACfC,EAAa,YAGd,SAASC,EAAYC,EAAMC,GAC1B,OAAOA,EAAOC,cAMf,SAASC,EAAWC,GACnB,OAAOA,EAAOza,QAASka,EAAW,OAAQla,QAASma,EAAYC,GAEhE,IAAIM,EAAa,SAAUC,GAQ1B,OAA0B,IAAnBA,EAAMzf,UAAqC,IAAnByf,EAAMzf,YAAsByf,EAAMzf,UAMlE,SAAS0f,IACRjhB,KAAKkG,QAAUjD,GAAOiD,QAAU+a,EAAKC,MAGtCD,EAAKC,IAAM,EAEXD,EAAKrd,UAAY,CAEhBwK,MAAO,SAAU4S,GAGhB,IAAIjZ,EAAQiZ,EAAOhhB,KAAKkG,SA4BxB,OAzBM6B,IACLA,EAAQ,GAKHgZ,EAAYC,KAIXA,EAAMzf,SACVyf,EAAOhhB,KAAKkG,SAAY6B,EAMxB3H,OAAO+gB,eAAgBH,EAAOhhB,KAAKkG,QAAS,CAC3C6B,MAAOA,EACPqZ,cAAc,MAMXrZ,GAERsZ,IAAK,SAAUL,EAAOM,EAAMvZ,GAC3B,IAAIwZ,EACHnT,EAAQpO,KAAKoO,MAAO4S,GAIrB,GAAqB,iBAATM,EACXlT,EAAOyS,EAAWS,IAAWvZ,OAM7B,IAAMwZ,KAAQD,EACblT,EAAOyS,EAAWU,IAAWD,EAAMC,GAGrC,OAAOnT,GAERpK,IAAK,SAAUgd,EAAO3S,GACrB,YAAepI,IAARoI,EACNrO,KAAKoO,MAAO4S,GAGZA,EAAOhhB,KAAKkG,UAAa8a,EAAOhhB,KAAKkG,SAAW2a,EAAWxS,KAE7D4R,OAAQ,SAAUe,EAAO3S,EAAKtG,GAa7B,YAAa9B,IAARoI,GACCA,GAAsB,iBAARA,QAAgCpI,
IAAV8B,EAElC/H,KAAKgE,IAAKgd,EAAO3S,IASzBrO,KAAKqhB,IAAKL,EAAO3S,EAAKtG,QAIL9B,IAAV8B,EAAsBA,EAAQsG,IAEtCkO,OAAQ,SAAUyE,EAAO3S,GACxB,IAAIlM,EACHiM,EAAQ4S,EAAOhhB,KAAKkG,SAErB,QAAeD,IAAVmI,EAAL,CAIA,QAAanI,IAARoI,EAAoB,CAkBxBlM,GAXCkM,EAJItI,MAAMC,QAASqI,GAIbA,EAAI5J,IAAKoc,IAEfxS,EAAMwS,EAAWxS,MAIJD,EACZ,CAAEC,GACAA,EAAIpB,MAAO2N,IAAmB,IAG1BrX,OAER,MAAQpB,WACAiM,EAAOC,EAAKlM,UAKR8D,IAARoI,GAAqBpL,GAAO2D,cAAewH,MAM1C4S,EAAMzf,SACVyf,EAAOhhB,KAAKkG,cAAYD,SAEjB+a,EAAOhhB,KAAKkG,YAItBsb,QAAS,SAAUR,GAClB,IAAI5S,EAAQ4S,EAAOhhB,KAAKkG,SACxB,YAAiBD,IAAVmI,IAAwBnL,GAAO2D,cAAewH,KAGvD,IAAIqT,EAAW,IAAIR,EAEfS,EAAW,IAAIT,EAcfU,EAAS,gCACZC,EAAa,SA2Bd,SAASC,EAAUpe,EAAM4K,EAAKiT,GAC7B,IAAI5d,EA1Ba4d,EA8BjB,QAAcrb,IAATqb,GAAwC,IAAlB7d,EAAKlC,SAI/B,GAHAmC,EAAO,QAAU2K,EAAIhI,QAASub,EAAY,OAAQje,cAG7B,iBAFrB2d,EAAO7d,EAAKjB,aAAckB,IAEM,CAC/B,IACC4d,EAnCW,UADGA,EAoCEA,IA/BL,UAATA,IAIS,SAATA,EACG,KAIHA,KAAUA,EAAO,IACbA,EAGJK,EAAOla,KAAM6Z,GACVQ,KAAKC,MAAOT,GAGbA,GAeH,MAAQ3U,IAGV+U,EAASL,IAAK5d,EAAM4K,EAAKiT,QAEzBA,OAAOrb,EAGT,OAAOqb,EAGRre,GAAOsC,OAAQ,CACdic,QAAS,SAAU/d,GAClB,OAAOie,EAASF,QAAS/d,IAAUge,EAASD,QAAS/d,IAGtD6d,KAAM,SAAU7d,EAAMC,EAAM4d,GAC3B,OAAOI,EAASzB,OAAQxc,EAAMC,EAAM4d,IAGrCU,WAAY,SAAUve,EAAMC,GAC3Bge,EAASnF,OAAQ9Y,EAAMC,IAKxBue,MAAO,SAAUxe,EAAMC,EAAM4d,GAC5B,OAAOG,EAASxB,OAAQxc,EAAMC,EAAM4d,IAGrCY,YAAa,SAAUze,EAAMC,GAC5B+d,EAASlF,OAAQ9Y,EAAMC,MAIzBT,GAAOG,GAAGmC,OAAQ,CACjB+b,KAAM,SAAUjT,EAAKtG,GACpB,IAAI5F,EAAGuB,EAAM4d,EACZ7d,EAAOzD,KAAM,GACbmiB,EAAQ1e,GAAQA,EAAK8G,WAGtB,QAAatE,IAARoI,EAAoB,CACxB,GAAKrO,KAAKuD,SACT+d,EAAOI,EAAS1d,IAAKP,GAEE,IAAlBA,EAAKlC,WAAmBkgB,EAASzd,IAAKP,EAAM,iBAAmB,CACnEtB,EAAIggB,EAAM5e,OACV,MAAQpB,IAIFggB,EAAOhgB,IAEsB,KADjCuB,EAAOye,EAAOhgB,GAAIuB,MACR7C,QAAS,WAClB6C,EAAOmd,EAAWnd,EAAKpD,MAAO,IAC9BuhB,EAAUpe,EAAMC,EAAM4d,EAAM5d,KAI/B+d,EAASJ,IAAK5d,EAAM,gBAAgB,GAItC,OAAO6d,EAIR,MAAoB,iBAARjT,EACJrO,KAAKuE,KAAM,WACjBmd,EAASL,IAAKrhB,KAAMqO,KAIf4R,EAAQjgB,KAAM,SAAU+H,GAC9B,IAAIuZ,EAOJ,GAAK7d,QAAkBwC,IAAV8B,EAKZ,YAAc9B,KADdqb,EAAOI,EAAS1d,IAAKP,EAAM4K,IAEnBiT,OAMMrb,KADdqb,EAAOO,EAAUpe,EAAM4K,IAEfiT,OAIR,EAIDthB,KAAKuE,KAAM,WAGVmd,EAASL,IAAKrhB,KAAMqO,EAAKtG,MAExB,KAAMA,EAA0B,EAAnBrD,UAAUnB,OAAY,MAAM,IAG7Cye,WAAY,SAAU3T,GACrB,OAAOrO,KAAKuE,KAAM,WACjBmd,EAASnF,OAAQvc,KAAMqO,QAM1BpL,GAAOsC,OAAQ,CACd2W,MAAO,SAAUzY,EAAM9B,EAAM2f,GAC5B,IAAIpF,EAEJ,GAAKzY,EAYJ,OAXA9B,GAASA,GAAQ,MAAS,QAC1Bua,EAAQuF,EAASzd,IAAKP,EAAM9B,GAGvB2f,KACEpF,GAASnW,MAAMC,QAASsb,GAC7BpF,EAAQuF,EAASxB,OAAQxc,EAAM9B,EAAMsB,GAAOgE,UAAWqa,IAEvDpF,EAAMtb,KAAM0gB,IAGPpF,GAAS,IAIlBkG,QAAS,SAAU3e,EAAM9B,GACxBA,EAAOA,GAAQ,KAEf,IAAIua,EAAQjZ,GAAOiZ,MAAOzY,EAAM9B,GAC/B0gB,EAAcnG,EAAM3Y,OACpBH,EAAK8Y,EAAM3N,QACX+T,EAAQrf,GAAOsf,YAAa9e,EAAM9B,GAMvB,eAAPyB,IACJA,EAAK8Y,EAAM3N,QACX8T,KAGIjf,IAIU,OAATzB,GACJua,EAAMsG,QAAS,qBAITF,EAAMG,KACbrf,EAAG3C,KAAMgD,EApBF,WACNR,GAAOmf,QAAS3e,EAAM9B,IAmBF2gB,KAGhBD,GAAeC,GACpBA,EAAMhO,MAAM8H,QAKdmG,YAAa,SAAU9e,EAAM9B,GAC5B,IAAI0M,EAAM1M,EAAO,aACjB,OAAO8f,EAASzd,IAAKP,EAAM4K,IAASoT,EAASxB,OAAQxc,EAAM4K,EAAK,CAC/DiG,MAAOrR,GAAOwY,UAAW,eAAgBvB,IAAK,WAC7CuH,EAASlF,OAAQ9Y,EAAM,CAAE9B,EAAO,QAAS0M,WAM7CpL,GAAOG,GAAGmC,OAAQ,CACjB2W,MAAO,SAAUva,EAAM2f,GACtB,IAAIoB,EAAS,EAQb,MANqB,iBAAT/gB,IACX2f,EAAO3f,EACPA,EAAO,KACP+gB,KAGIhe,UAAUnB,OAASmf,EAChBzf,GAAOiZ,MAAOlc,KAAM,GAAK2B,QAGjBsE,IAATqb,EACNthB,KACAA,KAAKuE,KAAM,WACV,IAAI2X,EAAQjZ,GAAOiZ,MAAOlc,KAAM2B,EAAM2f,GAGtCre,GAAOsf,YAAaviB,KAAM2B,GAEZ,OAATA,GAAgC,eAAfua,EAAO,IAC5BjZ,GAAOmf,QAASpiB,KAAM2B,MAI1BygB,QAAS,SAAUzgB,GAClB,OAAO3B,KAAKuE,KAAM,WACjBtB,GAAOmf,QAASpiB,KAAM2B,MAGxBghB,WAAY,SAAUhhB,GACrB,OAAO3B,KAAKkc,MAAOva,GAAQ,KAAM,KAKlC2Z,
QAAS,SAAU3Z,EAAML,GACxB,IAAIshB,EACHC,EAAQ,EACRC,EAAQ7f,GAAO0Z,WACfpM,EAAWvQ,KACXmC,EAAInC,KAAKuD,OACT2X,EAAU,aACC2H,GACTC,EAAM1E,YAAa7N,EAAU,CAAEA,KAIb,iBAAT5O,IACXL,EAAMK,EACNA,OAAOsE,GAERtE,EAAOA,GAAQ,KAEf,MAAQQ,KACPygB,EAAMnB,EAASzd,IAAKuM,EAAUpO,GAAKR,EAAO,gBAC9BihB,EAAItO,QACfuO,IACAD,EAAItO,MAAM4F,IAAKgB,IAIjB,OADAA,IACO4H,EAAMxH,QAASha,MAGxB,IAAIyhB,EAAO,sCAA0CC,OAEjDC,EAAU,IAAIza,OAAQ,iBAAmBua,EAAO,cAAe,KAG/DG,EAAY,CAAE,MAAO,QAAS,SAAU,QAExCnc,EAAkBnH,EAASmH,gBAI1Boc,EAAa,SAAU1f,GACzB,OAAOR,GAAOwF,SAAUhF,EAAK+D,cAAe/D,IAE7C2f,EAAW,CAAEA,UAAU,GAOnBrc,EAAgBsc,cACpBF,EAAa,SAAU1f,GACtB,OAAOR,GAAOwF,SAAUhF,EAAK+D,cAAe/D,IAC3CA,EAAK4f,YAAaD,KAAe3f,EAAK+D,gBAG1C,IAAI8b,GAAqB,SAAU7f,EAAMiL,GAOvC,MAA8B,UAH9BjL,EAAOiL,GAAMjL,GAGD8f,MAAMC,SACM,KAAvB/f,EAAK8f,MAAMC,SAMXL,EAAY1f,IAEsB,SAAlCR,GAAOwgB,IAAKhgB,EAAM,YAKrB,SAASigB,GAAWjgB,EAAM8d,EAAMoC,EAAYC,GAC3C,IAAIC,EAAUC,EACbC,EAAgB,GAChBC,EAAeJ,EACd,WACC,OAAOA,EAAMhK,OAEd,WACC,OAAO3W,GAAOwgB,IAAKhgB,EAAM8d,EAAM,KAEjC0C,EAAUD,IACVE,EAAOP,GAAcA,EAAY,KAAS1gB,GAAOkhB,UAAW5C,GAAS,GAAK,MAG1E6C,EAAgB3gB,EAAKlC,WAClB0B,GAAOkhB,UAAW5C,IAAmB,OAAT2C,IAAkBD,IAChDhB,EAAQ5V,KAAMpK,GAAOwgB,IAAKhgB,EAAM8d,IAElC,GAAK6C,GAAiBA,EAAe,KAAQF,EAAO,CAInDD,GAAoB,EAGpBC,EAAOA,GAAQE,EAAe,GAG9BA,GAAiBH,GAAW,EAE5B,MAAQF,IAIP9gB,GAAOsgB,MAAO9f,EAAM8d,EAAM6C,EAAgBF,IACnC,EAAIJ,IAAY,GAAMA,EAAQE,IAAiBC,GAAW,MAAW,IAC3EF,EAAgB,GAEjBK,GAAgCN,EAIjCM,GAAgC,EAChCnhB,GAAOsgB,MAAO9f,EAAM8d,EAAM6C,EAAgBF,GAG1CP,EAAaA,GAAc,GAgB5B,OAbKA,IACJS,GAAiBA,IAAkBH,GAAW,EAG9CJ,EAAWF,EAAY,GACtBS,GAAkBT,EAAY,GAAM,GAAMA,EAAY,IACrDA,EAAY,GACTC,IACJA,EAAMM,KAAOA,EACbN,EAAMtR,MAAQ8R,EACdR,EAAMxe,IAAMye,IAGPA,EAIR,IAAIQ,GAAoB,GAyBxB,SAASC,GAAU/T,EAAUgU,GAO5B,IANA,IAAIf,EAAS/f,EAxBcA,EACvBiT,EACHxU,EACAsB,EACAggB,EAqBAgB,EAAS,GACTxK,EAAQ,EACRzW,EAASgN,EAAShN,OAGXyW,EAAQzW,EAAQyW,KACvBvW,EAAO8M,EAAUyJ,IACNuJ,QAIXC,EAAU/f,EAAK8f,MAAMC,QAChBe,GAKa,SAAZf,IACJgB,EAAQxK,GAAUyH,EAASzd,IAAKP,EAAM,YAAe,KAC/C+gB,EAAQxK,KACbvW,EAAK8f,MAAMC,QAAU,KAGK,KAAvB/f,EAAK8f,MAAMC,SAAkBF,GAAoB7f,KACrD+gB,EAAQxK,IA7CVwJ,EAFAthB,EADGwU,OAAAA,EACHxU,GAF0BuB,EAiDaA,GA/C5B+D,cACXhE,EAAWC,EAAKD,UAChBggB,EAAUa,GAAmB7gB,MAM9BkT,EAAOxU,EAAIuiB,KAAK9hB,YAAaT,EAAII,cAAekB,IAChDggB,EAAUvgB,GAAOwgB,IAAK/M,EAAM,WAE5BA,EAAK9T,WAAWC,YAAa6T,GAEZ,SAAZ8M,IACJA,EAAU,SAEXa,GAAmB7gB,GAAaggB,MAkCb,SAAZA,IACJgB,EAAQxK,GAAU,OAGlByH,EAASJ,IAAK5d,EAAM,UAAW+f,KAMlC,IAAMxJ,EAAQ,EAAGA,EAAQzW,EAAQyW,IACR,MAAnBwK,EAAQxK,KACZzJ,EAAUyJ,GAAQuJ,MAAMC,QAAUgB,EAAQxK,IAI5C,OAAOzJ,EAGRtN,GAAOG,GAAGmC,OAAQ,CACjBgf,KAAM,WACL,OAAOD,GAAUtkB,MAAM,IAExB0kB,KAAM,WACL,OAAOJ,GAAUtkB,OAElB2kB,OAAQ,SAAU7H,GACjB,MAAsB,kBAAVA,EACJA,EAAQ9c,KAAKukB,OAASvkB,KAAK0kB,OAG5B1kB,KAAKuE,KAAM,WACZ+e,GAAoBtjB,MACxBiD,GAAQjD,MAAOukB,OAEfthB,GAAQjD,MAAO0kB,YAKnB,IAUEE,GACA1U,GAXE2U,GAAiB,wBAEjBC,GAAW,iCAEXC,GAAc,qCAMhBH,GADchlB,EAASolB,yBACRriB,YAAa/C,EAAS0C,cAAe,SACpD4N,GAAQtQ,EAAS0C,cAAe,UAM3BG,aAAc,OAAQ,SAC5ByN,GAAMzN,aAAc,UAAW,WAC/ByN,GAAMzN,aAAc,OAAQ,KAE5BmiB,GAAIjiB,YAAauN,IAIjB9O,GAAQ6jB,WAAaL,GAAIM,WAAW,GAAOA,WAAW,GAAOvS,UAAUwB,QAIvEyQ,GAAIzU,UAAY,yBAChB/O,GAAQ+jB,iBAAmBP,GAAIM,WAAW,GAAOvS,UAAUyS,aAK3DR,GAAIzU,UAAY,oBAChB/O,GAAQikB,SAAWT,GAAIjS,UAKxB,IAAI2S,GAAU,CAKbC,MAAO,CAAE,EAAG,UAAW,YACvBC,IAAK,CAAE,EAAG,oBAAqB,uBAC/BC,GAAI,CAAE,EAAG,iBAAkB,oBAC3BC,GAAI,CAAE,EAAG,qBAAsB,yBAE/BC,SAAU,CAAE,EAAG,GAAI,KAYpB,SAASC,GAAQziB,EAAS6M,GAIzB,IAAI5L,EAYJ,OATCA,EAD4C,oBAAjCjB,EAAQqK,qBACbrK,EAAQqK,qBAAsBwC,GAAO,KAEI,oBAA7B7M,EAAQ4K,iBACpB5K,EAAQ4K,iBAAkBiC,GAAO,KAGjC,QAGM/J,IAAR+J,GAAqBA,GAAOxM,GAAUL,EAAS6M,GAC5C/M,GAAOoB,MAAO,CAAElB,GAAWiB,GAG5BA,EAKR,SAASyhB,GAAe1hB,EAAO2hB,GAI9B,IAHA,IAAI
3jB,EAAI,EACP2X,EAAI3V,EAAMZ,OAEHpB,EAAI2X,EAAG3X,IACdsf,EAASJ,IACRld,EAAOhC,GACP,cACC2jB,GAAerE,EAASzd,IAAK8hB,EAAa3jB,GAAK,eA1CnDmjB,GAAQS,MAAQT,GAAQU,MAAQV,GAAQW,SAAWX,GAAQY,QAAUZ,GAAQC,MAC7ED,GAAQa,GAAKb,GAAQI,GAGftkB,GAAQikB,SACbC,GAAQc,SAAWd,GAAQD,OAAS,CAAE,EAAG,+BAAgC,cA2C1E,IAAIgB,GAAQ,YAEZ,SAASC,GAAeniB,EAAOhB,EAASojB,EAASC,EAAWC,GAO3D,IANA,IAAIhjB,EAAMmf,EAAK5S,EAAK0W,EAAMC,EAAUxhB,EACnCyhB,EAAWzjB,EAAQ6hB,yBACnB6B,EAAQ,GACR1kB,EAAI,EACJ2X,EAAI3V,EAAMZ,OAEHpB,EAAI2X,EAAG3X,IAGd,IAFAsB,EAAOU,EAAOhC,KAEQ,IAATsB,EAGZ,GAAwB,WAAnBX,EAAQW,GAIZR,GAAOoB,MAAOwiB,EAAOpjB,EAAKlC,SAAW,CAAEkC,GAASA,QAG1C,GAAM4iB,GAAM5e,KAAMhE,GAIlB,CACNmf,EAAMA,GAAOgE,EAASjkB,YAAaQ,EAAQb,cAAe,QAG1D0N,GAAQ8U,GAASzX,KAAM5J,IAAU,CAAE,GAAI,KAAQ,GAAIE,cACnD+iB,EAAOpB,GAAStV,IAASsV,GAAQK,SACjC/C,EAAIzS,UAAYuW,EAAM,GAAMzjB,GAAO6jB,cAAerjB,GAASijB,EAAM,GAGjEvhB,EAAIuhB,EAAM,GACV,MAAQvhB,IACPyd,EAAMA,EAAIjQ,UAKX1P,GAAOoB,MAAOwiB,EAAOjE,EAAIlW,aAGzBkW,EAAMgE,EAASlU,YAGX5L,YAAc,QAzBlB+f,EAAMjmB,KAAMuC,EAAQ4jB,eAAgBtjB,IA+BvCmjB,EAAS9f,YAAc,GAEvB3E,EAAI,EACJ,MAAUsB,EAAOojB,EAAO1kB,KAGvB,GAAKqkB,IAAkD,EAArCvjB,GAAOkE,QAAS1D,EAAM+iB,GAClCC,GACJA,EAAQ7lB,KAAM6C,QAgBhB,GAXAkjB,EAAWxD,EAAY1f,GAGvBmf,EAAMgD,GAAQgB,EAASjkB,YAAac,GAAQ,UAGvCkjB,GACJd,GAAejD,GAIX2D,EAAU,CACdphB,EAAI,EACJ,MAAU1B,EAAOmf,EAAKzd,KAChB4f,GAAYtd,KAAMhE,EAAK9B,MAAQ,KACnC4kB,EAAQ3lB,KAAM6C,GAMlB,OAAOmjB,EAIR,IAAII,GAAiB,sBAErB,SAASC,KACR,OAAO,EAGR,SAASC,KACR,OAAO,EAGR,SAASC,GAAI1jB,EAAM2jB,EAAOlkB,EAAUoe,EAAMle,EAAIikB,GAC7C,IAAIC,EAAQ3lB,EAGZ,GAAsB,iBAAVylB,EAAqB,CAShC,IAAMzlB,IANmB,iBAAbuB,IAGXoe,EAAOA,GAAQpe,EACfA,OAAW+C,GAEEmhB,EACbD,GAAI1jB,EAAM9B,EAAMuB,EAAUoe,EAAM8F,EAAOzlB,GAAQ0lB,GAEhD,OAAO5jB,EAsBR,GAnBa,MAAR6d,GAAsB,MAANle,GAGpBA,EAAKF,EACLoe,EAAOpe,OAAW+C,GACD,MAAN7C,IACc,iBAAbF,GAGXE,EAAKke,EACLA,OAAOrb,IAIP7C,EAAKke,EACLA,EAAOpe,EACPA,OAAW+C,KAGD,IAAP7C,EACJA,EAAK8jB,QACC,IAAM9jB,EACZ,OAAOK,EAeR,OAZa,IAAR4jB,IACJC,EAASlkB,GACTA,EAAK,SAAUmkB,GAId,OADAtkB,KAASukB,IAAKD,GACPD,EAAO3mB,MAAOX,KAAM0E,aAIzBsD,KAAOsf,EAAOtf,OAAUsf,EAAOtf,KAAO/E,GAAO+E,SAE1CvE,EAAKc,KAAM,WACjBtB,GAAOskB,MAAMrN,IAAKla,KAAMonB,EAAOhkB,EAAIke,EAAMpe,KA+a3C,SAASukB,GAAgB/Y,EAAI/M,EAAM+lB,GAG5BA,GAQNjG,EAASJ,IAAK3S,EAAI/M,GAAM,GACxBsB,GAAOskB,MAAMrN,IAAKxL,EAAI/M,EAAM,CAC3B0F,WAAW,EACXyW,QAAS,SAAUyJ,GAClB,IAAI3V,EACH+V,EAAQlG,EAASzd,IAAKhE,KAAM2B,GAE7B,GAAyB,EAAlB4lB,EAAMK,WAAmB5nB,KAAM2B,IAGrC,GAAMgmB,GA4BQ1kB,GAAOskB,MAAMxJ,QAASpc,IAAU,IAAKkmB,cAClDN,EAAMO,uBAhBN,GARAH,EAAQrnB,GAAMG,KAAMiE,WACpB+c,EAASJ,IAAKrhB,KAAM2B,EAAMgmB,GAG1B3nB,KAAM2B,KACNiQ,EAAS6P,EAASzd,IAAKhE,KAAM2B,GAC7B8f,EAASJ,IAAKrhB,KAAM2B,GAAM,GAErBgmB,IAAU/V,EAMd,OAHA2V,EAAMQ,2BACNR,EAAMS,iBAECpW,OAeE+V,IAGXlG,EAASJ,IAAKrhB,KAAM2B,EAAMsB,GAAOskB,MAAMU,QACtCN,EAAO,GACPA,EAAMrnB,MAAO,GACbN,OAWDunB,EAAMO,kBACNP,EAAMW,8BAAgCjB,aArENhhB,IAA7Bwb,EAASzd,IAAK0K,EAAI/M,IACtBsB,GAAOskB,MAAMrN,IAAKxL,EAAI/M,EAAMslB,IA5a/BhkB,GAAOskB,MAAQ,CAEd/nB,OAAQ,GAER0a,IAAK,SAAUzW,EAAM2jB,EAAOtJ,EAASwD,EAAMpe,GAE1C,IAAIilB,EAAaC,EAAaxF,EAC7ByF,EAAQC,EAAGC,EACXxK,EAASyK,EAAU7mB,EAAM8mB,EAAYC,EACrCC,EAAWlH,EAASzd,IAAKP,GAG1B,GAAMsd,EAAYtd,GAAlB,CAKKqa,EAAQA,UAEZA,GADAqK,EAAcrK,GACQA,QACtB5a,EAAWilB,EAAYjlB,UAKnBA,GACJD,GAAO4J,KAAK2D,gBAAiBzJ,EAAiB7D,GAIzC4a,EAAQ9V,OACb8V,EAAQ9V,KAAO/E,GAAO+E,SAIfqgB,EAASM,EAASN,UACzBA,EAASM,EAASN,OAASjoB,OAAOwoB,OAAQ,QAEnCR,EAAcO,EAASE,UAC9BT,EAAcO,EAASE,OAAS,SAAUlc,GAIzC,MAAyB,oBAAX1J,IAA0BA,GAAOskB,MAAMuB,YAAcnc,EAAEhL,KACpEsB,GAAOskB,MAAMwB,SAASpoB,MAAO8C,EAAMiB,gBAAcuB,IAMpDqiB,GADAlB,GAAUA,GAAS,IAAKna,MAAO2N,IAAmB,CAAE,KAC1CrX,OACV,MAAQ+kB,IAEP3mB,EAAO+mB,GADP9F,EAAMoE,GAAe3Z,KAAM+Z,EAAOkB,K
AAS,IACpB,GACvBG,GAAe7F,EAAK,IAAO,IAAKza,MAAO,KAAM9C,OAGvC1D,IAKNoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAG1CA,GAASuB,EAAW6a,EAAQ8J,aAAe9J,EAAQiL,WAAcrnB,EAGjEoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAG1C4mB,EAAYtlB,GAAOsC,OAAQ,CAC1B5D,KAAMA,EACN+mB,SAAUA,EACVpH,KAAMA,EACNxD,QAASA,EACT9V,KAAM8V,EAAQ9V,KACd9E,SAAUA,EACVqI,aAAcrI,GAAYD,GAAOqN,KAAKrD,MAAM1B,aAAa9D,KAAMvE,GAC/DmE,UAAWohB,EAAW3a,KAAM,MAC1Bqa,IAGKK,EAAWH,EAAQ1mB,OAC1B6mB,EAAWH,EAAQ1mB,GAAS,IACnBsnB,cAAgB,EAGnBlL,EAAQmL,QACiD,IAA9DnL,EAAQmL,MAAMzoB,KAAMgD,EAAM6d,EAAMmH,EAAYL,IAEvC3kB,EAAK8L,kBACT9L,EAAK8L,iBAAkB5N,EAAMymB,IAK3BrK,EAAQ7D,MACZ6D,EAAQ7D,IAAIzZ,KAAMgD,EAAM8kB,GAElBA,EAAUzK,QAAQ9V,OACvBugB,EAAUzK,QAAQ9V,KAAO8V,EAAQ9V,OAK9B9E,EACJslB,EAASljB,OAAQkjB,EAASS,gBAAiB,EAAGV,GAE9CC,EAAS5nB,KAAM2nB,GAIhBtlB,GAAOskB,MAAM/nB,OAAQmC,IAAS,KAMhC4a,OAAQ,SAAU9Y,EAAM2jB,EAAOtJ,EAAS5a,EAAUimB,GAEjD,IAAIhkB,EAAGikB,EAAWxG,EACjByF,EAAQC,EAAGC,EACXxK,EAASyK,EAAU7mB,EAAM8mB,EAAYC,EACrCC,EAAWlH,EAASD,QAAS/d,IAAUge,EAASzd,IAAKP,GAEtD,GAAMklB,IAAeN,EAASM,EAASN,QAAvC,CAMAC,GADAlB,GAAUA,GAAS,IAAKna,MAAO2N,IAAmB,CAAE,KAC1CrX,OACV,MAAQ+kB,IAMP,GAJA3mB,EAAO+mB,GADP9F,EAAMoE,GAAe3Z,KAAM+Z,EAAOkB,KAAS,IACpB,GACvBG,GAAe7F,EAAK,IAAO,IAAKza,MAAO,KAAM9C,OAGvC1D,EAAN,CAOAoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAE1C6mB,EAAWH,EADX1mB,GAASuB,EAAW6a,EAAQ8J,aAAe9J,EAAQiL,WAAcrnB,IACpC,GAC7BihB,EAAMA,EAAK,IACV,IAAIpa,OAAQ,UAAYigB,EAAW3a,KAAM,iBAAoB,WAG9Dsb,EAAYjkB,EAAIqjB,EAASjlB,OACzB,MAAQ4B,IACPojB,EAAYC,EAAUrjB,IAEfgkB,GAAeT,IAAaH,EAAUG,UACzC5K,GAAWA,EAAQ9V,OAASugB,EAAUvgB,MACtC4a,IAAOA,EAAInb,KAAM8gB,EAAUlhB,YAC3BnE,GAAYA,IAAaqlB,EAAUrlB,WACxB,OAAbA,IAAqBqlB,EAAUrlB,YAChCslB,EAASljB,OAAQH,EAAG,GAEfojB,EAAUrlB,UACdslB,EAASS,gBAELlL,EAAQxB,QACZwB,EAAQxB,OAAO9b,KAAMgD,EAAM8kB,IAOzBa,IAAcZ,EAASjlB,SACrBwa,EAAQsL,WACkD,IAA/DtL,EAAQsL,SAAS5oB,KAAMgD,EAAMglB,EAAYE,EAASE,SAElD5lB,GAAOqmB,YAAa7lB,EAAM9B,EAAMgnB,EAASE,eAGnCR,EAAQ1mB,SA1Cf,IAAMA,KAAQ0mB,EACbplB,GAAOskB,MAAMhL,OAAQ9Y,EAAM9B,EAAOylB,EAAOkB,GAAKxK,EAAS5a,GAAU,GA8C/DD,GAAO2D,cAAeyhB,IAC1B5G,EAASlF,OAAQ9Y,EAAM,mBAIzBslB,SAAU,SAAUQ,GAEnB,IAAIpnB,EAAGgD,EAAGf,EAAK4O,EAASuV,EAAWiB,EAClC3W,EAAO,IAAI9M,MAAOrB,UAAUnB,QAG5BgkB,EAAQtkB,GAAOskB,MAAMkC,IAAKF,GAE1Bf,GACC/G,EAASzd,IAAKhE,KAAM,WAAcI,OAAOwoB,OAAQ,OAC/CrB,EAAM5lB,OAAU,GACnBoc,EAAU9a,GAAOskB,MAAMxJ,QAASwJ,EAAM5lB,OAAU,GAKjD,IAFAkR,EAAM,GAAM0U,EAENplB,EAAI,EAAGA,EAAIuC,UAAUnB,OAAQpB,IAClC0Q,EAAM1Q,GAAMuC,UAAWvC,GAMxB,GAHAolB,EAAMmC,eAAiB1pB,MAGlB+d,EAAQ4L,cAA2D,IAA5C5L,EAAQ4L,YAAYlpB,KAAMT,KAAMunB,GAA5D,CAKAiC,EAAevmB,GAAOskB,MAAMiB,SAAS/nB,KAAMT,KAAMunB,EAAOiB,GAGxDrmB,EAAI,EACJ,OAAU6Q,EAAUwW,EAAcrnB,QAAYolB,EAAMqC,uBAAyB,CAC5ErC,EAAMsC,cAAgB7W,EAAQvP,KAE9B0B,EAAI,EACJ,OAAUojB,EAAYvV,EAAQwV,SAAUrjB,QACtCoiB,EAAMW,gCAIDX,EAAMuC,aAAsC,IAAxBvB,EAAUlhB,YACnCkgB,EAAMuC,WAAWriB,KAAM8gB,EAAUlhB,aAEjCkgB,EAAMgB,UAAYA,EAClBhB,EAAMjG,KAAOiH,EAAUjH,UAKVrb,KAHb7B,IAAUnB,GAAOskB,MAAMxJ,QAASwK,EAAUG,WAAc,IAAKG,QAC5DN,EAAUzK,SAAUnd,MAAOqS,EAAQvP,KAAMoP,MAGT,KAAzB0U,EAAM3V,OAASxN,KACrBmjB,EAAMS,iBACNT,EAAMO,oBAYX,OAJK/J,EAAQgM,cACZhM,EAAQgM,aAAatpB,KAAMT,KAAMunB,GAG3BA,EAAM3V,SAGd4W,SAAU,SAAUjB,EAAOiB,GAC1B,IAAIrmB,EAAGomB,EAAWnf,EAAK4gB,EAAiBC,EACvCT,EAAe,GACfP,EAAgBT,EAASS,cACzBrP,EAAM2N,EAAM3hB,OAGb,GAAKqjB,GAIJrP,EAAIrY,YAOc,UAAfgmB,EAAM5lB,MAAoC,GAAhB4lB,EAAM9S,QAEnC,KAAQmF,IAAQ5Z,KAAM4Z,EAAMA,EAAIhX,YAAc5C,KAI7C,GAAsB,IAAjB4Z,EAAIrY,WAAoC,UAAfgmB,EAAM5lB,OAAqC,IAAjBiY,EAAIrN,UAAsB,CAGjF,IAFAyd,EAAkB,GAClBC,EAAmB,GACb9nB,EAAI,EAAGA,EAAI8mB,EAAe9mB,SAME8D,IAA5BgkB,EAFL7gB,GAHAmf,EAAYC,EAAUrmB,IAGNe,SAAW,OAG1B+mB,EAAkB7gB,GAAQmf,EAAUhd,cACC,EAApCtI,GAAQmG,EAAKpJ,MAAOga,MAAOJ
,GAC3B3W,GAAO4J,KAAMzD,EAAKpJ,KAAM,KAAM,CAAE4Z,IAAQrW,QAErC0mB,EAAkB7gB,IACtB4gB,EAAgBppB,KAAM2nB,GAGnByB,EAAgBzmB,QACpBimB,EAAa5oB,KAAM,CAAE6C,KAAMmW,EAAK4O,SAAUwB,IAY9C,OALApQ,EAAM5Z,KACDipB,EAAgBT,EAASjlB,QAC7BimB,EAAa5oB,KAAM,CAAE6C,KAAMmW,EAAK4O,SAAUA,EAASloB,MAAO2oB,KAGpDO,GAGRU,QAAS,SAAUxmB,EAAMymB,GACxB/pB,OAAO+gB,eAAgBle,GAAOmnB,MAAMxmB,UAAWF,EAAM,CACpD2mB,YAAY,EACZjJ,cAAc,EAEdpd,IAAK3C,EAAY8oB,GAChB,WACC,GAAKnqB,KAAKsqB,cACT,OAAOH,EAAMnqB,KAAKsqB,gBAGpB,WACC,GAAKtqB,KAAKsqB,cACT,OAAOtqB,KAAKsqB,cAAe5mB,IAI9B2d,IAAK,SAAUtZ,GACd3H,OAAO+gB,eAAgBnhB,KAAM0D,EAAM,CAClC2mB,YAAY,EACZjJ,cAAc,EACdmJ,UAAU,EACVxiB,MAAOA,QAMX0hB,IAAK,SAAUa,GACd,OAAOA,EAAernB,GAAOiD,SAC5BokB,EACA,IAAIrnB,GAAOmnB,MAAOE,IAGpBvM,QAAS,CACRyM,KAAM,CAGLC,UAAU,GAEXC,MAAO,CAGNxB,MAAO,SAAU5H,GAIhB,IAAI5S,EAAK1O,MAAQshB,EAWjB,OARKuD,GAAepd,KAAMiH,EAAG/M,OAC5B+M,EAAGgc,OAASlnB,GAAUkL,EAAI,UAG1B+Y,GAAgB/Y,EAAI,SAAS,IAIvB,GAERuZ,QAAS,SAAU3G,GAIlB,IAAI5S,EAAK1O,MAAQshB,EAUjB,OAPKuD,GAAepd,KAAMiH,EAAG/M,OAC5B+M,EAAGgc,OAASlnB,GAAUkL,EAAI,UAE1B+Y,GAAgB/Y,EAAI,UAId,GAKRiX,SAAU,SAAU4B,GACnB,IAAI3hB,EAAS2hB,EAAM3hB,OACnB,OAAOif,GAAepd,KAAM7B,EAAOjE,OAClCiE,EAAO8kB,OAASlnB,GAAUoC,EAAQ,UAClC6b,EAASzd,IAAK4B,EAAQ,UACtBpC,GAAUoC,EAAQ,OAIrB+kB,aAAc,CACbZ,aAAc,SAAUxC,QAIDthB,IAAjBshB,EAAM3V,QAAwB2V,EAAM+C,gBACxC/C,EAAM+C,cAAcM,YAAcrD,EAAM3V,YA0F7C3O,GAAOqmB,YAAc,SAAU7lB,EAAM9B,EAAMknB,GAGrCplB,EAAKmc,qBACTnc,EAAKmc,oBAAqBje,EAAMknB,IAIlC5lB,GAAOmnB,MAAQ,SAAUxoB,EAAKipB,GAG7B,KAAQ7qB,gBAAgBiD,GAAOmnB,OAC9B,OAAO,IAAInnB,GAAOmnB,MAAOxoB,EAAKipB,GAI1BjpB,GAAOA,EAAID,MACf3B,KAAKsqB,cAAgB1oB,EACrB5B,KAAK2B,KAAOC,EAAID,KAIhB3B,KAAK8qB,mBAAqBlpB,EAAImpB,uBACH9kB,IAAzBrE,EAAImpB,mBAGgB,IAApBnpB,EAAIgpB,YACL3D,GACAC,GAKDlnB,KAAK4F,OAAWhE,EAAIgE,QAAkC,IAAxBhE,EAAIgE,OAAOrE,SACxCK,EAAIgE,OAAOhD,WACXhB,EAAIgE,OAEL5F,KAAK6pB,cAAgBjoB,EAAIioB,cACzB7pB,KAAKgrB,cAAgBppB,EAAIopB,eAIzBhrB,KAAK2B,KAAOC,EAIRipB,GACJ5nB,GAAOsC,OAAQvF,KAAM6qB,GAItB7qB,KAAKirB,UAAYrpB,GAAOA,EAAIqpB,WAAaC,KAAKC,MAG9CnrB,KAAMiD,GAAOiD,UAAY,GAK1BjD,GAAOmnB,MAAMxmB,UAAY,CACxBE,YAAab,GAAOmnB,MACpBU,mBAAoB5D,GACpB0C,qBAAsB1C,GACtBgB,8BAA+BhB,GAC/BkE,aAAa,EAEbpD,eAAgB,WACf,IAAIrb,EAAI3M,KAAKsqB,cAEbtqB,KAAK8qB,mBAAqB7D,GAErBta,IAAM3M,KAAKorB,aACfze,EAAEqb,kBAGJF,gBAAiB,WAChB,IAAInb,EAAI3M,KAAKsqB,cAEbtqB,KAAK4pB,qBAAuB3C,GAEvBta,IAAM3M,KAAKorB,aACfze,EAAEmb,mBAGJC,yBAA0B,WACzB,IAAIpb,EAAI3M,KAAKsqB,cAEbtqB,KAAKkoB,8BAAgCjB,GAEhCta,IAAM3M,KAAKorB,aACfze,EAAEob,2BAGH/nB,KAAK8nB,oBAKP7kB,GAAOsB,KAAM,CACZ8mB,QAAQ,EACRC,SAAS,EACTC,YAAY,EACZC,gBAAgB,EAChBC,SAAS,EACTC,QAAQ,EACRC,YAAY,EACZC,SAAS,EACTC,OAAO,EACPC,OAAO,EACPC,UAAU,EACVC,MAAM,EACNC,QAAQ,EACRjqB,MAAM,EACNkqB,UAAU,EACV7d,KAAK,EACL8d,SAAS,EACT1X,QAAQ,EACR2X,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,WAAW,EACXC,aAAa,EACbC,SAAS,EACTC,SAAS,EACTC,eAAe,EACfC,WAAW,EACXC,SAAS,EACTC,OAAO,GACL/pB,GAAOskB,MAAM2C,SAEhBjnB,GAAOsB,KAAM,CAAEoP,MAAO,UAAWsZ,KAAM,YAAc,SAAUtrB,EAAMkmB,GAEpE,SAASqF,EAAoB3D,GAC5B,GAAK3pB,EAASutB,aAAe,CAS5B,IAAItE,EAASpH,EAASzd,IAAKhE,KAAM,UAChCunB,EAAQtkB,GAAOskB,MAAMkC,IAAKF,GAC3BhC,EAAM5lB,KAA4B,YAArB4nB,EAAY5nB,KAAqB,QAAU,OACxD4lB,EAAM6D,aAAc,EAGpBvC,EAAQU,GAMHhC,EAAM3hB,SAAW2hB,EAAMsC,eAK3BhB,EAAQtB,QAMTtkB,GAAOskB,MAAM6F,SAAUvF,EAAc0B,EAAY3jB,OAChD3C,GAAOskB,MAAMkC,IAAKF,IAIrBtmB,GAAOskB,MAAMxJ,QAASpc,GAAS,CAG9BunB,MAAO,WAEN,IAAImE,EAOJ,GAFA5F,GAAgBznB,KAAM2B,GAAM,IAEvB/B,EAASutB,aAcb,OAAO,GARPE,EAAW5L,EAASzd,IAAKhE,KAAM6nB,KAE9B7nB,KAAKuP,iBAAkBsY,EAAcqF,GAEtCzL,EAASJ,IAAKrhB,KAAM6nB,GAAgBwF,GAAY,GAAM,IAOxDpF,QAAS,WAMR,OAHAR,GAAgBznB,KAAM2B,IAGf,GAGR0nB,SAAU,WACT,IAAIgE,EAEJ,IAAKztB,EAASutB,aAWb,OAA
O,GAVPE,EAAW5L,EAASzd,IAAKhE,KAAM6nB,GAAiB,GAK/CpG,EAASJ,IAAKrhB,KAAM6nB,EAAcwF,IAHlCrtB,KAAK4f,oBAAqBiI,EAAcqF,GACxCzL,EAASlF,OAAQvc,KAAM6nB,KAa1BlC,SAAU,SAAU4B,GACnB,OAAO9F,EAASzd,IAAKujB,EAAM3hB,OAAQjE,IAGpCkmB,aAAcA,GAef5kB,GAAOskB,MAAMxJ,QAAS8J,GAAiB,CACtCqB,MAAO,WAIN,IAAIhnB,EAAMlC,KAAKwH,eAAiBxH,KAAKJ,UAAYI,KAChDstB,EAAa1tB,EAASutB,aAAentB,KAAOkC,EAC5CmrB,EAAW5L,EAASzd,IAAKspB,EAAYzF,GAMhCwF,IACAztB,EAASutB,aACbntB,KAAKuP,iBAAkBsY,EAAcqF,GAErChrB,EAAIqN,iBAAkB5N,EAAMurB,GAAoB,IAGlDzL,EAASJ,IAAKiM,EAAYzF,GAAgBwF,GAAY,GAAM,IAE7DhE,SAAU,WACT,IAAInnB,EAAMlC,KAAKwH,eAAiBxH,KAAKJ,UAAYI,KAChDstB,EAAa1tB,EAASutB,aAAentB,KAAOkC,EAC5CmrB,EAAW5L,EAASzd,IAAKspB,EAAYzF,GAAiB,EAEjDwF,EAQL5L,EAASJ,IAAKiM,EAAYzF,EAAcwF,IAPnCztB,EAASutB,aACbntB,KAAK4f,oBAAqBiI,EAAcqF,GAExChrB,EAAI0d,oBAAqBje,EAAMurB,GAAoB,GAEpDzL,EAASlF,OAAQ+Q,EAAYzF,QAgBjC5kB,GAAOsB,KAAM,CACZgpB,WAAY,YACZC,WAAY,WACZC,aAAc,cACdC,aAAc,cACZ,SAAUC,EAAMlE,GAClBxmB,GAAOskB,MAAMxJ,QAAS4P,GAAS,CAC9B9F,aAAc4B,EACdT,SAAUS,EAEVZ,OAAQ,SAAUtB,GACjB,IAAInjB,EAEHwpB,EAAUrG,EAAMyD,cAChBzC,EAAYhB,EAAMgB,UASnB,OALMqF,IAAaA,IANT5tB,MAMgCiD,GAAOwF,SANvCzI,KAMyD4tB,MAClErG,EAAM5lB,KAAO4mB,EAAUG,SACvBtkB,EAAMmkB,EAAUzK,QAAQnd,MAAOX,KAAM0E,WACrC6iB,EAAM5lB,KAAO8nB,GAEPrlB,MAKVnB,GAAOG,GAAGmC,OAAQ,CAEjB4hB,GAAI,SAAUC,EAAOlkB,EAAUoe,EAAMle,GACpC,OAAO+jB,GAAInnB,KAAMonB,EAAOlkB,EAAUoe,EAAMle,IAEzCikB,IAAK,SAAUD,EAAOlkB,EAAUoe,EAAMle,GACrC,OAAO+jB,GAAInnB,KAAMonB,EAAOlkB,EAAUoe,EAAMle,EAAI,IAE7CokB,IAAK,SAAUJ,EAAOlkB,EAAUE,GAC/B,IAAImlB,EAAW5mB,EACf,GAAKylB,GAASA,EAAMY,gBAAkBZ,EAAMmB,UAW3C,OARAA,EAAYnB,EAAMmB,UAClBtlB,GAAQmkB,EAAMsC,gBAAiBlC,IAC9Be,EAAUlhB,UACTkhB,EAAUG,SAAW,IAAMH,EAAUlhB,UACrCkhB,EAAUG,SACXH,EAAUrlB,SACVqlB,EAAUzK,SAEJ9d,KAER,GAAsB,iBAAVonB,EAAqB,CAGhC,IAAMzlB,KAAQylB,EACbpnB,KAAKwnB,IAAK7lB,EAAMuB,EAAUkkB,EAAOzlB,IAElC,OAAO3B,KAWR,OATkB,IAAbkD,GAA0C,mBAAbA,IAGjCE,EAAKF,EACLA,OAAW+C,IAEA,IAAP7C,IACJA,EAAK8jB,IAEClnB,KAAKuE,KAAM,WACjBtB,GAAOskB,MAAMhL,OAAQvc,KAAMonB,EAAOhkB,EAAIF,QAMzC,IAKC2qB,GAAe,wBAGfC,GAAW,oCAEXC,GAAe,6BAGhB,SAASC,GAAoBvqB,EAAMiX,GAClC,OAAKlX,GAAUC,EAAM,UACpBD,GAA+B,KAArBkX,EAAQnZ,SAAkBmZ,EAAUA,EAAQhI,WAAY,OAE3DzP,GAAQQ,GAAO+V,SAAU,SAAW,IAGrC/V,EAIR,SAASwqB,GAAexqB,GAEvB,OADAA,EAAK9B,MAAyC,OAAhC8B,EAAKjB,aAAc,SAAsB,IAAMiB,EAAK9B,KAC3D8B,EAER,SAASyqB,GAAezqB,GAOvB,MAN2C,WAApCA,EAAK9B,MAAQ,IAAKrB,MAAO,EAAG,GAClCmD,EAAK9B,KAAO8B,EAAK9B,KAAKrB,MAAO,GAE7BmD,EAAKwK,gBAAiB,QAGhBxK,EAGR,SAAS0qB,GAAgBvsB,EAAKwsB,GAC7B,IAAIjsB,EAAG2X,EAAGnY,EAAgB0sB,EAAUC,EAAUjG,EAE9C,GAAuB,IAAlB+F,EAAK7sB,SAAV,CAKA,GAAKkgB,EAASD,QAAS5f,KAEtBymB,EADW5G,EAASzd,IAAKpC,GACPymB,QAKjB,IAAM1mB,KAFN8f,EAASlF,OAAQ6R,EAAM,iBAET/F,EACb,IAAMlmB,EAAI,EAAG2X,EAAIuO,EAAQ1mB,GAAO4B,OAAQpB,EAAI2X,EAAG3X,IAC9Cc,GAAOskB,MAAMrN,IAAKkU,EAAMzsB,EAAM0mB,EAAQ1mB,GAAQQ,IAO7Cuf,EAASF,QAAS5f,KACtBysB,EAAW3M,EAASzB,OAAQre,GAC5B0sB,EAAWrrB,GAAOsC,OAAQ,GAAI8oB,GAE9B3M,EAASL,IAAK+M,EAAME,KAkBtB,SAASC,GAAUC,EAAY3b,EAAMrO,EAAUiiB,GAG9C5T,EAAOtS,EAAMsS,GAEb,IAAI+T,EAAUjiB,EAAO4hB,EAASkI,EAAYxsB,EAAMC,EAC/CC,EAAI,EACJ2X,EAAI0U,EAAWjrB,OACfmrB,EAAW5U,EAAI,EACf/R,EAAQ8K,EAAM,GACd8b,EAAkBttB,EAAY0G,GAG/B,GAAK4mB,GACG,EAAJ7U,GAA0B,iBAAV/R,IAChB3G,GAAQ6jB,YAAc6I,GAASrmB,KAAMM,GACxC,OAAOymB,EAAWjqB,KAAM,SAAUyV,GACjC,IAAId,EAAOsV,EAAW5pB,GAAIoV,GACrB2U,IACJ9b,EAAM,GAAM9K,EAAMtH,KAAMT,KAAMga,EAAOd,EAAK0V,SAE3CL,GAAUrV,EAAMrG,EAAMrO,EAAUiiB,KAIlC,GAAK3M,IAEJnV,GADAiiB,EAAWN,GAAezT,EAAM2b,EAAY,GAAIhnB,eAAe,EAAOgnB,EAAY/H,IACjE/T,WAEmB,IAA/BkU,EAASla,WAAWnJ,SACxBqjB,EAAWjiB,GAIPA,GAAS8hB,GAAU,CAOvB,IALAgI,GADAlI,EAAUtjB,GAAOwB,IAAKmhB,GAAQgB,EAAU,UAAYqH,KAC/B1qB,OAKbpB,EAAI2X,EAAG3X,IACdF,EAA
O2kB,EAEFzkB,IAAMusB,IACVzsB,EAAOgB,GAAO0C,MAAO1D,GAAM,GAAM,GAG5BwsB,GAIJxrB,GAAOoB,MAAOkiB,EAASX,GAAQ3jB,EAAM,YAIvCuC,EAAS/D,KAAM+tB,EAAYrsB,GAAKF,EAAME,GAGvC,GAAKssB,EAOJ,IANAvsB,EAAMqkB,EAASA,EAAQhjB,OAAS,GAAIiE,cAGpCvE,GAAOwB,IAAK8hB,EAAS2H,IAGf/rB,EAAI,EAAGA,EAAIssB,EAAYtsB,IAC5BF,EAAOskB,EAASpkB,GACX4iB,GAAYtd,KAAMxF,EAAKN,MAAQ,MAClC8f,EAASxB,OAAQhe,EAAM,eACxBgB,GAAOwF,SAAUvG,EAAKD,KAEjBA,EAAKL,KAA8C,YAArCK,EAAKN,MAAQ,IAAKgC,cAG/BV,GAAO4rB,WAAa5sB,EAAKH,UAC7BmB,GAAO4rB,SAAU5sB,EAAKL,IAAK,CAC1BC,MAAOI,EAAKJ,OAASI,EAAKO,aAAc,UACtCN,GASJH,EAASE,EAAK6E,YAAYT,QAAS0nB,GAAc,IAAM9rB,EAAMC,IAQnE,OAAOssB,EAGR,SAASjS,GAAQ9Y,EAAMP,EAAU4rB,GAKhC,IAJA,IAAI7sB,EACH4kB,EAAQ3jB,EAAWD,GAAO4M,OAAQ3M,EAAUO,GAASA,EACrDtB,EAAI,EAE4B,OAAvBF,EAAO4kB,EAAO1kB,IAAeA,IAChC2sB,GAA8B,IAAlB7sB,EAAKV,UACtB0B,GAAO8rB,UAAWnJ,GAAQ3jB,IAGtBA,EAAKW,aACJksB,GAAY3L,EAAYlhB,IAC5B4jB,GAAeD,GAAQ3jB,EAAM,WAE9BA,EAAKW,WAAWC,YAAaZ,IAI/B,OAAOwB,EAGRR,GAAOsC,OAAQ,CACduhB,cAAe,SAAU8H,GACxB,OAAOA,GAGRjpB,MAAO,SAAUlC,EAAMurB,EAAeC,GACrC,IAAI9sB,EAAG2X,EAAGoV,EAAaC,EA1INvtB,EAAKwsB,EACnB5qB,EA0IFmC,EAAQlC,EAAKyhB,WAAW,GACxBkK,EAASjM,EAAY1f,GAGtB,KAAMrC,GAAQ+jB,gBAAsC,IAAlB1hB,EAAKlC,UAAoC,KAAlBkC,EAAKlC,UAC3D0B,GAAOmE,SAAU3D,IAOnB,IAHA0rB,EAAevJ,GAAQjgB,GAGjBxD,EAAI,EAAG2X,GAFboV,EAActJ,GAAQniB,IAEOF,OAAQpB,EAAI2X,EAAG3X,IAvJ5BP,EAwJLstB,EAAa/sB,GAxJHisB,EAwJQe,EAAchtB,QAvJzCqB,EAGc,WAHdA,EAAW4qB,EAAK5qB,SAASG,gBAGAkhB,GAAepd,KAAM7F,EAAID,MACrDysB,EAAKja,QAAUvS,EAAIuS,QAGK,UAAb3Q,GAAqC,aAAbA,IACnC4qB,EAAKhJ,aAAexjB,EAAIwjB,cAoJxB,GAAK4J,EACJ,GAAKC,EAIJ,IAHAC,EAAcA,GAAetJ,GAAQniB,GACrC0rB,EAAeA,GAAgBvJ,GAAQjgB,GAEjCxD,EAAI,EAAG2X,EAAIoV,EAAY3rB,OAAQpB,EAAI2X,EAAG3X,IAC3CgsB,GAAgBe,EAAa/sB,GAAKgtB,EAAchtB,SAGjDgsB,GAAgB1qB,EAAMkC,GAWxB,OAL2B,GAD3BwpB,EAAevJ,GAAQjgB,EAAO,WACZpC,QACjBsiB,GAAesJ,GAAeC,GAAUxJ,GAAQniB,EAAM,WAIhDkC,GAGRopB,UAAW,SAAU5qB,GAKpB,IAJA,IAAImd,EAAM7d,EAAM9B,EACfoc,EAAU9a,GAAOskB,MAAMxJ,QACvB5b,EAAI,OAE6B8D,KAAxBxC,EAAOU,EAAOhC,IAAqBA,IAC5C,GAAK4e,EAAYtd,GAAS,CACzB,GAAO6d,EAAO7d,EAAMge,EAASvb,SAAc,CAC1C,GAAKob,EAAK+G,OACT,IAAM1mB,KAAQ2f,EAAK+G,OACbtK,EAASpc,GACbsB,GAAOskB,MAAMhL,OAAQ9Y,EAAM9B,GAI3BsB,GAAOqmB,YAAa7lB,EAAM9B,EAAM2f,EAAKuH,QAOxCplB,EAAMge,EAASvb,cAAYD,EAEvBxC,EAAMie,EAASxb,WAInBzC,EAAMie,EAASxb,cAAYD,OAOhChD,GAAOG,GAAGmC,OAAQ,CACjB8pB,OAAQ,SAAUnsB,GACjB,OAAOqZ,GAAQvc,KAAMkD,GAAU,IAGhCqZ,OAAQ,SAAUrZ,GACjB,OAAOqZ,GAAQvc,KAAMkD,IAGtBX,KAAM,SAAUwF,GACf,OAAOkY,EAAQjgB,KAAM,SAAU+H,GAC9B,YAAiB9B,IAAV8B,EACN9E,GAAOV,KAAMvC,MACbA,KAAKsU,QAAQ/P,KAAM,WACK,IAAlBvE,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,WACxDvB,KAAK8G,YAAciB,MAGpB,KAAMA,EAAOrD,UAAUnB,SAG3B+rB,OAAQ,WACP,OAAOf,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACpB,IAAlBzD,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,UAC3CysB,GAAoBhuB,KAAMyD,GAChCd,YAAac,MAKvB8rB,QAAS,WACR,OAAOhB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GAC3C,GAAuB,IAAlBzD,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,SAAiB,CACzE,IAAIqE,EAASooB,GAAoBhuB,KAAMyD,GACvCmC,EAAO4pB,aAAc/rB,EAAMmC,EAAO8M,gBAKrC+c,OAAQ,WACP,OAAOlB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACtCzD,KAAK4C,YACT5C,KAAK4C,WAAW4sB,aAAc/rB,EAAMzD,SAKvC0vB,MAAO,WACN,OAAOnB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACtCzD,KAAK4C,YACT5C,KAAK4C,WAAW4sB,aAAc/rB,EAAMzD,KAAKuU,gBAK5CD,MAAO,WAIN,IAHA,IAAI7Q,EACHtB,EAAI,EAE2B,OAAtBsB,EAAOzD,KAAMmC,IAAeA,IACd,IAAlBsB,EAAKlC,WAGT0B,GAAO8rB,UAAWnJ,GAAQniB,GAAM,IAGhCA,EAAKqD,YAAc,IAIrB,OAAO9G,MAGR2F,MAAO,SAAUqpB,EAAeC,GAI/B,OAHAD,EAAiC,MAAjBA,GAAgCA,EAChDC,EAAyC,MAArBA,EAA4BD,EAAgBC,EAEzDjvB,KAAKyE,IAAK,WAChB,OAAOxB,GAAO0C,MAAO3F,KAAMgvB,EAAeC,MAI5CL,KAAM,SAAU7mB,GACf,OAAOkY,EAAQjgB,KAAM,SAAU+H,GAC9B,IAAItE,EAAOzD,KAAM,IAAO,GA
CvBmC,EAAI,EACJ2X,EAAI9Z,KAAKuD,OAEV,QAAe0C,IAAV8B,GAAyC,IAAlBtE,EAAKlC,SAChC,OAAOkC,EAAK0M,UAIb,GAAsB,iBAAVpI,IAAuB8lB,GAAapmB,KAAMM,KACpDud,IAAWR,GAASzX,KAAMtF,IAAW,CAAE,GAAI,KAAQ,GAAIpE,eAAkB,CAE1EoE,EAAQ9E,GAAO6jB,cAAe/e,GAE9B,IACC,KAAQ5F,EAAI2X,EAAG3X,IAIS,KAHvBsB,EAAOzD,KAAMmC,IAAO,IAGVZ,WACT0B,GAAO8rB,UAAWnJ,GAAQniB,GAAM,IAChCA,EAAK0M,UAAYpI,GAInBtE,EAAO,EAGN,MAAQkJ,KAGNlJ,GACJzD,KAAKsU,QAAQgb,OAAQvnB,IAEpB,KAAMA,EAAOrD,UAAUnB,SAG3BosB,YAAa,WACZ,IAAIlJ,EAAU,GAGd,OAAO8H,GAAUvuB,KAAM0E,UAAW,SAAUjB,GAC3C,IAAI8O,EAASvS,KAAK4C,WAEbK,GAAOkE,QAASnH,KAAMymB,GAAY,IACtCxjB,GAAO8rB,UAAWnJ,GAAQ5lB,OACrBuS,GACJA,EAAOqd,aAAcnsB,EAAMzD,QAK3BymB,MAILxjB,GAAOsB,KAAM,CACZsrB,SAAU,SACVC,UAAW,UACXN,aAAc,SACdO,YAAa,QACbC,WAAY,eACV,SAAUtsB,EAAMusB,GAClBhtB,GAAOG,GAAIM,GAAS,SAAUR,GAO7B,IANA,IAAIiB,EACHC,EAAM,GACN8rB,EAASjtB,GAAQC,GACjB2B,EAAOqrB,EAAO3sB,OAAS,EACvBpB,EAAI,EAEGA,GAAK0C,EAAM1C,IAClBgC,EAAQhC,IAAM0C,EAAO7E,KAAOA,KAAK2F,OAAO,GACxC1C,GAAQitB,EAAQ/tB,IAAO8tB,GAAY9rB,GAInCvD,EAAKD,MAAOyD,EAAKD,EAAMH,OAGxB,OAAOhE,KAAKkE,UAAWE,MAGzB,IAAI+rB,GAAY,IAAI3nB,OAAQ,KAAOua,EAAO,kBAAmB,KAEzDqN,GAAc,MAGdC,GAAY,SAAU5sB,GAKxB,IAAIuoB,EAAOvoB,EAAK+D,cAAc6H,YAM9B,OAJM2c,GAASA,EAAKsE,SACnBtE,EAAOjsB,IAGDisB,EAAKuE,iBAAkB9sB,IAG5B+sB,GAAO,SAAU/sB,EAAM+B,EAAShB,GACnC,IAAIJ,EAAKV,EACR+sB,EAAM,GAGP,IAAM/sB,KAAQ8B,EACbirB,EAAK/sB,GAASD,EAAK8f,MAAO7f,GAC1BD,EAAK8f,MAAO7f,GAAS8B,EAAS9B,GAM/B,IAAMA,KAHNU,EAAMI,EAAS/D,KAAMgD,GAGP+B,EACb/B,EAAK8f,MAAO7f,GAAS+sB,EAAK/sB,GAG3B,OAAOU,GAIJssB,GAAY,IAAIloB,OAAQ0a,EAAUpV,KAAM,KAAO,KAiJnD,SAAS6iB,GAAQltB,EAAMC,EAAMktB,GAC5B,IAAIC,EAAOC,EAAUC,EAAU3sB,EAC9B4sB,EAAeZ,GAAY3oB,KAAM/D,GAMjC6f,EAAQ9f,EAAK8f,MAoEd,OAlEAqN,EAAWA,GAAYP,GAAW5sB,MAgBjCW,EAAMwsB,EAASK,iBAAkBvtB,IAAUktB,EAAUltB,GAEhDstB,GAAgB5sB,IAkBpBA,EAAMA,EAAIiC,QAASkC,GAAU,YAAUtC,GAG3B,KAAR7B,GAAe+e,EAAY1f,KAC/BW,EAAMnB,GAAOsgB,MAAO9f,EAAMC,KAQrBtC,GAAQ8vB,kBAAoBf,GAAU1oB,KAAMrD,IAASssB,GAAUjpB,KAAM/D,KAG1EmtB,EAAQtN,EAAMsN,MACdC,EAAWvN,EAAMuN,SACjBC,EAAWxN,EAAMwN,SAGjBxN,EAAMuN,SAAWvN,EAAMwN,SAAWxN,EAAMsN,MAAQzsB,EAChDA,EAAMwsB,EAASC,MAGftN,EAAMsN,MAAQA,EACdtN,EAAMuN,SAAWA,EACjBvN,EAAMwN,SAAWA,SAIJ9qB,IAAR7B,EAINA,EAAM,GACNA,EAIF,SAAS+sB,GAAcC,EAAaC,GAGnC,MAAO,CACNrtB,IAAK,WACJ,IAAKotB,IASL,OAASpxB,KAAKgE,IAAMqtB,GAAS1wB,MAAOX,KAAM0E,kBALlC1E,KAAKgE,OA3OhB,WAIC,SAASstB,IAGR,GAAM1M,EAAN,CAIA2M,EAAUhO,MAAMiO,QAAU,+EAE1B5M,EAAIrB,MAAMiO,QACT,4HAGDzqB,EAAgBpE,YAAa4uB,GAAY5uB,YAAaiiB,GAEtD,IAAI6M,EAAW1xB,GAAOwwB,iBAAkB3L,GACxC8M,EAAoC,OAAjBD,EAASniB,IAG5BqiB,EAAsE,KAA9CC,EAAoBH,EAASI,YAIrDjN,EAAIrB,MAAMuO,MAAQ,MAClBC,EAA6D,KAAzCH,EAAoBH,EAASK,OAIjDE,EAAgE,KAAzCJ,EAAoBH,EAASZ,OAMpDjM,EAAIrB,MAAM0O,SAAW,WACrBC,EAAiE,KAA9CN,EAAoBhN,EAAIuN,YAAc,GAEzDprB,EAAgBlE,YAAa0uB,GAI7B3M,EAAM,MAGP,SAASgN,EAAoBQ,GAC5B,OAAOjsB,KAAKksB,MAAOC,WAAYF,IAGhC,IAAIV,EAAkBM,EAAsBE,EAAkBH,EAC7DQ,EAAyBZ,EACzBJ,EAAY3xB,EAAS0C,cAAe,OACpCsiB,EAAMhlB,EAAS0C,cAAe,OAGzBsiB,EAAIrB,QAMVqB,EAAIrB,MAAMiP,eAAiB,cAC3B5N,EAAIM,WAAW,GAAO3B,MAAMiP,eAAiB,GAC7CpxB,GAAQqxB,gBAA+C,gBAA7B7N,EAAIrB,MAAMiP,eAEpCvvB,GAAOsC,OAAQnE,GAAS,CACvBsxB,kBAAmB,WAElB,OADApB,IACOU,GAERd,eAAgB,WAEf,OADAI,IACOS,GAERY,cAAe,WAEd,OADArB,IACOI,GAERkB,mBAAoB,WAEnB,OADAtB,IACOK,GAERkB,cAAe,WAEd,OADAvB,IACOY,GAYRY,qBAAsB,WACrB,IAAIC,EAAOtN,EAAIuN,EAASC,EAmCxB,OAlCgC,MAA3BV,IACJQ,EAAQnzB,EAAS0C,cAAe,SAChCmjB,EAAK7lB,EAAS0C,cAAe,MAC7B0wB,EAAUpzB,EAAS0C,cAAe,OAElCywB,EAAMxP,MAAMiO,QAAU,2DACtB/L,EAAGlC,MAAMiO,QAAU,0CAKnB/L,EAAGlC,MAAM2P,OAAS,MAClBF,EAAQzP,MAAM2P,OAAS,MAQvBF,EAAQzP,MAAMC,QAAU,QAExBzc,EACEpE,YAAaowB,GACbpwB,YAAa8iB,GACb9iB,YAAaqwB,GAEfC,EAAUlzB,GAAOwwB,iBAAkB9K,GACnC8M,EAA4BY,SAAUF,EAAQ
C,OAAQ,IACrDC,SAAUF,EAAQG,eAAgB,IAClCD,SAAUF,EAAQI,kBAAmB,MAAW5N,EAAG6N,aAEpDvsB,EAAgBlE,YAAakwB,IAEvBR,MAvIV,GAsPA,IAAIgB,GAAc,CAAE,SAAU,MAAO,MACpCC,GAAa5zB,EAAS0C,cAAe,OAAQihB,MAC7CkQ,GAAc,GAkBf,SAASC,GAAehwB,GACvB,IAAIiwB,EAAQ1wB,GAAO2wB,SAAUlwB,IAAU+vB,GAAa/vB,GAEpD,OAAKiwB,IAGAjwB,KAAQ8vB,GACL9vB,EAED+vB,GAAa/vB,GAxBrB,SAAyBA,GAGxB,IAAImwB,EAAUnwB,EAAM,GAAIkd,cAAgBld,EAAKpD,MAAO,GACnD6B,EAAIoxB,GAAYhwB,OAEjB,MAAQpB,IAEP,IADAuB,EAAO6vB,GAAapxB,GAAM0xB,KACbL,GACZ,OAAO9vB,EAeoBowB,CAAgBpwB,IAAUA,GAIxD,IAKCqwB,GAAe,4BACfC,GAAU,CAAE/B,SAAU,WAAYgC,WAAY,SAAUzQ,QAAS,SACjE0Q,GAAqB,CACpBC,cAAe,IACfC,WAAY,OAGd,SAASC,GAAmBrvB,EAAO+C,EAAOusB,GAIzC,IAAI1sB,EAAUqb,EAAQ5V,KAAMtF,GAC5B,OAAOH,EAGNzB,KAAKouB,IAAK,EAAG3sB,EAAS,IAAQ0sB,GAAY,KAAU1sB,EAAS,IAAO,MACpEG,EAGF,SAASysB,GAAoB/wB,EAAMgxB,EAAWC,EAAKC,EAAaC,EAAQC,GACvE,IAAI1yB,EAAkB,UAAdsyB,EAAwB,EAAI,EACnCK,EAAQ,EACRC,EAAQ,EACRC,EAAc,EAGf,GAAKN,KAAUC,EAAc,SAAW,WACvC,OAAO,EAGR,KAAQxyB,EAAI,EAAGA,GAAK,EAKN,WAARuyB,IACJM,GAAe/xB,GAAOwgB,IAAKhgB,EAAMixB,EAAMxR,EAAW/gB,IAAK,EAAMyyB,IAIxDD,GAmBQ,YAARD,IACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,UAAYyf,EAAW/gB,IAAK,EAAMyyB,IAIjD,WAARF,IACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,MAtBvEG,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,UAAYyf,EAAW/gB,IAAK,EAAMyyB,GAGhD,YAARF,EACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,GAItEE,GAAS7xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,IAoCzE,OAhBMD,GAA8B,GAAfE,IAIpBE,GAAS5uB,KAAKouB,IAAK,EAAGpuB,KAAK8uB,KAC1BxxB,EAAM,SAAWgxB,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,IACjEu0B,EACAE,EACAD,EACA,MAIM,GAGDC,EAAQC,EAGhB,SAASE,GAAkBzxB,EAAMgxB,EAAWK,GAG3C,IAAIF,EAASvE,GAAW5sB,GAKvBkxB,IADmBvzB,GAAQsxB,qBAAuBoC,IAEE,eAAnD7xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,GACvCO,EAAmBR,EAEnBvyB,EAAMuuB,GAAQltB,EAAMgxB,EAAWG,GAC/BQ,EAAa,SAAWX,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,GAIzE,GAAK6vB,GAAU1oB,KAAMrF,GAAQ,CAC5B,IAAM0yB,EACL,OAAO1yB,EAERA,EAAM,OAyCP,QAlCQhB,GAAQsxB,qBAAuBiC,IAMrCvzB,GAAQ0xB,wBAA0BtvB,GAAUC,EAAM,OAI3C,SAARrB,IAICkwB,WAAYlwB,IAA0D,WAAjDa,GAAOwgB,IAAKhgB,EAAM,WAAW,EAAOmxB,KAG1DnxB,EAAK4xB,iBAAiB9xB,SAEtBoxB,EAAiE,eAAnD1xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,IAKpDO,EAAmBC,KAAc3xB,KAEhCrB,EAAMqB,EAAM2xB,MAKdhzB,EAAMkwB,WAAYlwB,IAAS,GAI1BoyB,GACC/wB,EACAgxB,EACAK,IAAWH,EAAc,SAAW,WACpCQ,EACAP,EAGAxyB,GAEE,KAwTL,SAASkzB,GAAO7xB,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,GACzC,OAAO,IAAID,GAAM1xB,UAAUP,KAAMI,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,GAtT5DtyB,GAAOsC,OAAQ,CAIdiwB,SAAU,CACTC,QAAS,CACRzxB,IAAK,SAAUP,EAAMmtB,GACpB,GAAKA,EAAW,CAGf,IAAIxsB,EAAMusB,GAAQltB,EAAM,WACxB,MAAe,KAARW,EAAa,IAAMA,MAO9B+f,UAAW,CACVuR,yBAAyB,EACzBC,aAAa,EACbC,kBAAkB,EAClBC,aAAa,EACbC,UAAU,EACVC,YAAY,EACZ3B,YAAY,EACZ4B,UAAU,EACVC,YAAY,EACZC,eAAe,EACfC,iBAAiB,EACjBC,SAAS,EACTC,YAAY,EACZC,cAAc,EACdC,YAAY,EACZd,SAAS,EACTe,OAAO,EACPC,SAAS,EACT3S,OAAO,EACP4S,QAAQ,EACRC,QAAQ,EACRC,MAAM,EAGNC,aAAa,EACbC,cAAc,EACdC,aAAa,EACbC,kBAAkB,EAClBC,eAAe,GAKhBrD,SAAU,GAGVrQ,MAAO,SAAU9f,EAAMC,EAAMqE,EAAO+sB,GAGnC,GAAMrxB,GAA0B,IAAlBA,EAAKlC,UAAoC,IAAlBkC,EAAKlC,UAAmBkC,EAAK8f,MAAlE,CAKA,IAAInf,EAAKzC,EAAM2gB,EACd4U,EAAWrW,EAAWnd,GACtBstB,EAAeZ,GAAY3oB,KAAM/D,GACjC6f,EAAQ9f,EAAK8f,MAad,GARMyN,IACLttB,EAAOgwB,GAAewD,IAIvB5U,EAAQrf,GAAOuyB,SAAU9xB,IAAUT,GAAOuyB,SAAU0B,QAGrCjxB,IAAV8B,EA0CJ,OAAKua,GAAS,QAASA,QACwBrc,KAA5C7B,EAAMke,EAAMte,IAAKP,GAAM,EAAOqxB,IAEzB1wB,EAIDmf,EAAO7f,GA7CA,YAHd/B,SAAcoG,KAGc3D,EAAM6e,EAAQ5V,KAAMtF,KAAa3D,EAAK,KACjE2D,EAAQ2b,GAAWjgB,EAAMC,EAAMU,GAG/BzC,EAAO,UAIM,MAAToG,GAAiBA,GAAUA,IAOlB,WAATpG,GAAsBqvB,IAC1BjpB,GAAS3D,GAAOA,EAAK,KAASnB,GAAOkhB,UAAW+S,GAAa,GAAK,OAI7D91B,GAAQqxB,iBAA6B,KAAV1qB,GAAiD,IAAj
CrE,EAAK7C,QAAS,gBAC9D0iB,EAAO7f,GAAS,WAIX4e,GAAY,QAASA,QACsBrc,KAA9C8B,EAAQua,EAAMjB,IAAK5d,EAAMsE,EAAO+sB,MAE7B9D,EACJzN,EAAM4T,YAAazzB,EAAMqE,GAEzBwb,EAAO7f,GAASqE,MAkBpB0b,IAAK,SAAUhgB,EAAMC,EAAMoxB,EAAOF,GACjC,IAAIxyB,EAAK6B,EAAKqe,EACb4U,EAAWrW,EAAWnd,GA6BvB,OA5BgB0sB,GAAY3oB,KAAM/D,KAMjCA,EAAOgwB,GAAewD,KAIvB5U,EAAQrf,GAAOuyB,SAAU9xB,IAAUT,GAAOuyB,SAAU0B,KAGtC,QAAS5U,IACtBlgB,EAAMkgB,EAAMte,IAAKP,GAAM,EAAMqxB,SAIjB7uB,IAAR7D,IACJA,EAAMuuB,GAAQltB,EAAMC,EAAMkxB,IAId,WAARxyB,GAAoBsB,KAAQwwB,KAChC9xB,EAAM8xB,GAAoBxwB,IAIZ,KAAVoxB,GAAgBA,GACpB7wB,EAAMquB,WAAYlwB,IACD,IAAV0yB,GAAkBsC,SAAUnzB,GAAQA,GAAO,EAAI7B,GAGhDA,KAITa,GAAOsB,KAAM,CAAE,SAAU,SAAW,SAAU6D,EAAIqsB,GACjDxxB,GAAOuyB,SAAUf,GAAc,CAC9BzwB,IAAK,SAAUP,EAAMmtB,EAAUkE,GAC9B,GAAKlE,EAIJ,OAAOmD,GAAatsB,KAAMxE,GAAOwgB,IAAKhgB,EAAM,aAQxCA,EAAK4xB,iBAAiB9xB,QAAWE,EAAK4zB,wBAAwBxG,MAIjEqE,GAAkBzxB,EAAMgxB,EAAWK,GAHnCtE,GAAM/sB,EAAMuwB,GAAS,WACpB,OAAOkB,GAAkBzxB,EAAMgxB,EAAWK,MAM9CzT,IAAK,SAAU5d,EAAMsE,EAAO+sB,GAC3B,IAAIltB,EACHgtB,EAASvE,GAAW5sB,GAIpB6zB,GAAsBl2B,GAAQyxB,iBACT,aAApB+B,EAAO3C,SAIR0C,GADkB2C,GAAsBxC,IAEY,eAAnD7xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,GACvCN,EAAWQ,EACVN,GACC/wB,EACAgxB,EACAK,EACAH,EACAC,GAED,EAqBF,OAjBKD,GAAe2C,IACnBhD,GAAYnuB,KAAK8uB,KAChBxxB,EAAM,SAAWgxB,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,IACjEgyB,WAAYsC,EAAQH,IACpBD,GAAoB/wB,EAAMgxB,EAAW,UAAU,EAAOG,GACtD,KAKGN,IAAc1sB,EAAUqb,EAAQ5V,KAAMtF,KACb,QAA3BH,EAAS,IAAO,QAElBnE,EAAK8f,MAAOkR,GAAc1sB,EAC1BA,EAAQ9E,GAAOwgB,IAAKhgB,EAAMgxB,IAGpBJ,GAAmB5wB,EAAMsE,EAAOusB,OAK1CrxB,GAAOuyB,SAAS3D,WAAaV,GAAc/vB,GAAQwxB,mBAClD,SAAUnvB,EAAMmtB,GACf,GAAKA,EACJ,OAAS0B,WAAY3B,GAAQltB,EAAM,gBAClCA,EAAK4zB,wBAAwBE,KAC5B/G,GAAM/sB,EAAM,CAAEouB,WAAY,GAAK,WAC9B,OAAOpuB,EAAK4zB,wBAAwBE,QAEnC,OAMPt0B,GAAOsB,KAAM,CACZizB,OAAQ,GACRC,QAAS,GACTC,OAAQ,SACN,SAAUC,EAAQC,GACpB30B,GAAOuyB,SAAUmC,EAASC,GAAW,CACpCC,OAAQ,SAAU9vB,GAOjB,IANA,IAAI5F,EAAI,EACP21B,EAAW,GAGXC,EAAyB,iBAAVhwB,EAAqBA,EAAMI,MAAO,KAAQ,CAAEJ,GAEpD5F,EAAI,EAAGA,IACd21B,EAAUH,EAASzU,EAAW/gB,GAAMy1B,GACnCG,EAAO51B,IAAO41B,EAAO51B,EAAI,IAAO41B,EAAO,GAGzC,OAAOD,IAIO,WAAXH,IACJ10B,GAAOuyB,SAAUmC,EAASC,GAASvW,IAAMgT,MAI3CpxB,GAAOG,GAAGmC,OAAQ,CACjBke,IAAK,SAAU/f,EAAMqE,GACpB,OAAOkY,EAAQjgB,KAAM,SAAUyD,EAAMC,EAAMqE,GAC1C,IAAI6sB,EAAQ1vB,EACXT,EAAM,GACNtC,EAAI,EAEL,GAAK4D,MAAMC,QAAStC,GAAS,CAI5B,IAHAkxB,EAASvE,GAAW5sB,GACpByB,EAAMxB,EAAKH,OAEHpB,EAAI+C,EAAK/C,IAChBsC,EAAKf,EAAMvB,IAAQc,GAAOwgB,IAAKhgB,EAAMC,EAAMvB,IAAK,EAAOyyB,GAGxD,OAAOnwB,EAGR,YAAiBwB,IAAV8B,EACN9E,GAAOsgB,MAAO9f,EAAMC,EAAMqE,GAC1B9E,GAAOwgB,IAAKhgB,EAAMC,IACjBA,EAAMqE,EAA0B,EAAnBrD,UAAUnB,aAQ5BN,GAAOqyB,MAAQA,IAET1xB,UAAY,CACjBE,YAAawxB,GACbjyB,KAAM,SAAUI,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,EAAQrR,GACjDlkB,KAAKyD,KAAOA,EACZzD,KAAKuhB,KAAOA,EACZvhB,KAAKu1B,OAASA,GAAUtyB,GAAOsyB,OAAO5P,SACtC3lB,KAAKwF,QAAUA,EACfxF,KAAKsS,MAAQtS,KAAKmrB,IAAMnrB,KAAK4Z,MAC7B5Z,KAAKoF,IAAMA,EACXpF,KAAKkkB,KAAOA,IAAUjhB,GAAOkhB,UAAW5C,GAAS,GAAK,OAEvD3H,IAAK,WACJ,IAAI0I,EAAQgT,GAAM0C,UAAWh4B,KAAKuhB,MAElC,OAAOe,GAASA,EAAMte,IACrBse,EAAMte,IAAKhE,MACXs1B,GAAM0C,UAAUrS,SAAS3hB,IAAKhE,OAEhCi4B,IAAK,SAAUC,GACd,IAAIC,EACH7V,EAAQgT,GAAM0C,UAAWh4B,KAAKuhB,MAoB/B,OAlBKvhB,KAAKwF,QAAQ4yB,SACjBp4B,KAAKq4B,IAAMF,EAAQl1B,GAAOsyB,OAAQv1B,KAAKu1B,QACtC2C,EAASl4B,KAAKwF,QAAQ4yB,SAAWF,EAAS,EAAG,EAAGl4B,KAAKwF,QAAQ4yB,UAG9Dp4B,KAAKq4B,IAAMF,EAAQD,EAEpBl4B,KAAKmrB,KAAQnrB,KAAKoF,IAAMpF,KAAKsS,OAAU6lB,EAAQn4B,KAAKsS,MAE/CtS,KAAKwF,QAAQ8yB,MACjBt4B,KAAKwF,QAAQ8yB,KAAK73B,KAAMT,KAAKyD,KAAMzD,KAAKmrB,IAAKnrB,MAGzCsiB,GAASA,EAAMjB,IACnBiB,EAAMjB,IAAKrhB,MAEXs1B,GAAM0C,UAAUrS,SAAStE,IAAKrhB,MAExBA,QAIOqD,KAAKO,UAAY0xB,GAA
M1xB,WAEvC0xB,GAAM0C,UAAY,CACjBrS,SAAU,CACT3hB,IAAK,SAAU4f,GACd,IAAIhS,EAIJ,OAA6B,IAAxBgS,EAAMngB,KAAKlC,UACa,MAA5BqiB,EAAMngB,KAAMmgB,EAAMrC,OAAoD,MAAlCqC,EAAMngB,KAAK8f,MAAOK,EAAMrC,MACrDqC,EAAMngB,KAAMmgB,EAAMrC,OAO1B3P,EAAS3O,GAAOwgB,IAAKG,EAAMngB,KAAMmgB,EAAMrC,KAAM,MAGhB,SAAX3P,EAAwBA,EAAJ,GAEvCyP,IAAK,SAAUuC,GAKT3gB,GAAOs1B,GAAGD,KAAM1U,EAAMrC,MAC1Bte,GAAOs1B,GAAGD,KAAM1U,EAAMrC,MAAQqC,GACK,IAAxBA,EAAMngB,KAAKlC,WACtB0B,GAAOuyB,SAAU5R,EAAMrC,OAC6B,MAAnDqC,EAAMngB,KAAK8f,MAAOmQ,GAAe9P,EAAMrC,OAGxCqC,EAAMngB,KAAMmgB,EAAMrC,MAASqC,EAAMuH,IAFjCloB,GAAOsgB,MAAOK,EAAMngB,KAAMmgB,EAAMrC,KAAMqC,EAAMuH,IAAMvH,EAAMM,UAU5CsU,UAAYlD,GAAM0C,UAAUS,WAAa,CACxDpX,IAAK,SAAUuC,GACTA,EAAMngB,KAAKlC,UAAYqiB,EAAMngB,KAAKb,aACtCghB,EAAMngB,KAAMmgB,EAAMrC,MAASqC,EAAMuH,OAKpCloB,GAAOsyB,OAAS,CACfmD,OAAQ,SAAUC,GACjB,OAAOA,GAERC,MAAO,SAAUD,GAChB,MAAO,GAAMxyB,KAAK0yB,IAAKF,EAAIxyB,KAAK2yB,IAAO,GAExCnT,SAAU,SAGX1iB,GAAOs1B,GAAKjD,GAAM1xB,UAAUP,KAG5BJ,GAAOs1B,GAAGD,KAAO,GAKjB,IACCS,GAAOC,GAkrBH9oB,GAEH+oB,GAnrBDC,GAAW,yBACXC,GAAO,cAER,SAASC,KACHJ,MACqB,IAApBp5B,EAASy5B,QAAoBt5B,GAAOu5B,sBACxCv5B,GAAOu5B,sBAAuBF,IAE9Br5B,GAAO2e,WAAY0a,GAAUn2B,GAAOs1B,GAAGgB,UAGxCt2B,GAAOs1B,GAAGiB,QAKZ,SAASC,KAIR,OAHA15B,GAAO2e,WAAY,WAClBqa,QAAQ9yB,IAEA8yB,GAAQ7N,KAAKC,MAIvB,SAASuO,GAAO/3B,EAAMg4B,GACrB,IAAI3M,EACH7qB,EAAI,EACJggB,EAAQ,CAAE+Q,OAAQvxB,GAKnB,IADAg4B,EAAeA,EAAe,EAAI,EAC1Bx3B,EAAI,EAAGA,GAAK,EAAIw3B,EAEvBxX,EAAO,UADP6K,EAAQ9J,EAAW/gB,KACSggB,EAAO,UAAY6K,GAAUrrB,EAO1D,OAJKg4B,IACJxX,EAAMsT,QAAUtT,EAAM0O,MAAQlvB,GAGxBwgB,EAGR,SAASyX,GAAa7xB,EAAOwZ,EAAMsY,GAKlC,IAJA,IAAIjW,EACH4K,GAAesL,GAAUC,SAAUxY,IAAU,IAAK7gB,OAAQo5B,GAAUC,SAAU,MAC9E/f,EAAQ,EACRzW,EAASirB,EAAWjrB,OACbyW,EAAQzW,EAAQyW,IACvB,GAAO4J,EAAQ4K,EAAYxU,GAAQvZ,KAAMo5B,EAAWtY,EAAMxZ,GAGzD,OAAO6b,EAsNV,SAASkW,GAAWr2B,EAAMu2B,EAAYx0B,GACrC,IAAIoM,EACHqoB,EACAjgB,EAAQ,EACRzW,EAASu2B,GAAUI,WAAW32B,OAC9ByZ,EAAW/Z,GAAO0Z,WAAWI,OAAQ,kBAG7Byc,EAAK/1B,OAEb+1B,EAAO,WACN,GAAKS,EACJ,OAAO,EAYR,IAVA,IAAIE,EAAcpB,IAASU,KAC1B3a,EAAY3Y,KAAKouB,IAAK,EAAGsF,EAAUO,UAAYP,EAAUzB,SAAW+B,GAKpEjC,EAAU,GADHpZ,EAAY+a,EAAUzB,UAAY,GAEzCpe,EAAQ,EACRzW,EAASs2B,EAAUQ,OAAO92B,OAEnByW,EAAQzW,EAAQyW,IACvB6f,EAAUQ,OAAQrgB,GAAQie,IAAKC,GAMhC,OAHAlb,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW3B,EAASpZ,IAG5CoZ,EAAU,GAAK30B,EACZub,GAIFvb,GACLyZ,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW,EAAG,IAI5C7c,EAASoB,YAAa3a,EAAM,CAAEo2B,KACvB,IAERA,EAAY7c,EAAS1B,QAAS,CAC7B7X,KAAMA,EACNonB,MAAO5nB,GAAOsC,OAAQ,GAAIy0B,GAC1BM,KAAMr3B,GAAOsC,QAAQ,EAAM,CAC1Bg1B,cAAe,GACfhF,OAAQtyB,GAAOsyB,OAAO5P,UACpBngB,GACHg1B,mBAAoBR,EACpBS,gBAAiBj1B,EACjB40B,UAAWrB,IAASU,KACpBrB,SAAU5yB,EAAQ4yB,SAClBiC,OAAQ,GACRT,YAAa,SAAUrY,EAAMnc,GAC5B,IAAIwe,EAAQ3gB,GAAOqyB,MAAO7xB,EAAMo2B,EAAUS,KAAM/Y,EAAMnc,EACrDy0B,EAAUS,KAAKC,cAAehZ,IAAUsY,EAAUS,KAAK/E,QAExD,OADAsE,EAAUQ,OAAOz5B,KAAMgjB,GAChBA,GAERnB,KAAM,SAAUiY,GACf,IAAI1gB,EAAQ,EAIXzW,EAASm3B,EAAUb,EAAUQ,OAAO92B,OAAS,EAC9C,GAAK02B,EACJ,OAAOj6B,KAGR,IADAi6B,GAAU,EACFjgB,EAAQzW,EAAQyW,IACvB6f,EAAUQ,OAAQrgB,GAAQie,IAAK,GAUhC,OANKyC,GACJ1d,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW,EAAG,IAC3C7c,EAASoB,YAAa3a,EAAM,CAAEo2B,EAAWa,KAEzC1d,EAASuB,WAAY9a,EAAM,CAAEo2B,EAAWa,IAElC16B,QAGT6qB,EAAQgP,EAAUhP,MAInB,KA/HD,SAAqBA,EAAO0P,GAC3B,IAAIvgB,EAAOtW,EAAM6xB,EAAQxtB,EAAOua,EAGhC,IAAMtI,KAAS6Q,EAed,GAbA0K,EAASgF,EADT72B,EAAOmd,EAAW7G,IAElBjS,EAAQ8iB,EAAO7Q,GACVjU,MAAMC,QAAS+B,KACnBwtB,EAASxtB,EAAO,GAChBA,EAAQ8iB,EAAO7Q,GAAUjS,EAAO,IAG5BiS,IAAUtW,IACdmnB,EAAOnnB,GAASqE,SACT8iB,EAAO7Q,KAGfsI,EAAQrf,GAAOuyB,SAAU9xB,KACX,WAAY4e,EAMzB,IAAMtI,KALNjS,EAAQua,EAAMuV,OAAQ9vB,UACf8iB,EAAOnnB,GAICqE,EACNiS,KAAS6Q,IAChBA,EAAO7Q,GAAUjS,EAAOiS,GAC
xBugB,EAAevgB,GAAUub,QAI3BgF,EAAe72B,GAAS6xB,EA6F1BoF,CAAY9P,EAAOgP,EAAUS,KAAKC,eAE1BvgB,EAAQzW,EAAQyW,IAEvB,GADApI,EAASkoB,GAAUI,WAAYlgB,GAAQvZ,KAAMo5B,EAAWp2B,EAAMonB,EAAOgP,EAAUS,MAM9E,OAJKj5B,EAAYuQ,EAAO6Q,QACvBxf,GAAOsf,YAAasX,EAAUp2B,KAAMo2B,EAAUS,KAAKpe,OAAQuG,KAC1D7Q,EAAO6Q,KAAKmY,KAAMhpB,IAEbA,EAyBT,OArBA3O,GAAOwB,IAAKomB,EAAO+O,GAAaC,GAE3Bx4B,EAAYw4B,EAAUS,KAAKhoB,QAC/BunB,EAAUS,KAAKhoB,MAAM7R,KAAMgD,EAAMo2B,GAIlCA,EACEtc,SAAUsc,EAAUS,KAAK/c,UACzBzT,KAAM+vB,EAAUS,KAAKxwB,KAAM+vB,EAAUS,KAAKO,UAC1Ctf,KAAMse,EAAUS,KAAK/e,MACrBwB,OAAQ8c,EAAUS,KAAKvd,QAEzB9Z,GAAOs1B,GAAGuC,MACT73B,GAAOsC,OAAQi0B,EAAM,CACpB/1B,KAAMA,EACNs3B,KAAMlB,EACN3d,MAAO2d,EAAUS,KAAKpe,SAIjB2d,EAGR52B,GAAO62B,UAAY72B,GAAOsC,OAAQu0B,GAAW,CAE5CC,SAAU,CACTiB,IAAK,CAAE,SAAUzZ,EAAMxZ,GACtB,IAAI6b,EAAQ5jB,KAAK45B,YAAarY,EAAMxZ,GAEpC,OADA2b,GAAWE,EAAMngB,KAAM8d,EAAM0B,EAAQ5V,KAAMtF,GAAS6b,GAC7CA,KAITqX,QAAS,SAAUpQ,EAAOrmB,GACpBnD,EAAYwpB,IAChBrmB,EAAWqmB,EACXA,EAAQ,CAAE,MAEVA,EAAQA,EAAM5d,MAAO2N,GAOtB,IAJA,IAAI2G,EACHvH,EAAQ,EACRzW,EAASsnB,EAAMtnB,OAERyW,EAAQzW,EAAQyW,IACvBuH,EAAOsJ,EAAO7Q,GACd8f,GAAUC,SAAUxY,GAASuY,GAAUC,SAAUxY,IAAU,GAC3DuY,GAAUC,SAAUxY,GAAOiB,QAAShe,IAItC01B,WAAY,CA3Wb,SAA2Bz2B,EAAMonB,EAAOyP,GACvC,IAAI/Y,EAAMxZ,EAAO4c,EAAQrC,EAAO4Y,EAASC,EAAWC,EAAgB5X,EACnE6X,EAAQ,UAAWxQ,GAAS,WAAYA,EACxCkQ,EAAO/6B,KACP2tB,EAAO,GACPpK,EAAQ9f,EAAK8f,MACb8V,EAAS51B,EAAKlC,UAAY+hB,GAAoB7f,GAC9C63B,EAAW7Z,EAASzd,IAAKP,EAAM,UA6BhC,IAAM8d,KA1BA+Y,EAAKpe,QAEa,OADvBoG,EAAQrf,GAAOsf,YAAa9e,EAAM,OACvB83B,WACVjZ,EAAMiZ,SAAW,EACjBL,EAAU5Y,EAAMhO,MAAM8H,KACtBkG,EAAMhO,MAAM8H,KAAO,WACZkG,EAAMiZ,UACXL,MAIH5Y,EAAMiZ,WAENR,EAAKhe,OAAQ,WAGZge,EAAKhe,OAAQ,WACZuF,EAAMiZ,WACAt4B,GAAOiZ,MAAOzY,EAAM,MAAOF,QAChC+e,EAAMhO,MAAM8H,YAOFyO,EAEb,GADA9iB,EAAQ8iB,EAAOtJ,GACV2X,GAASzxB,KAAMM,GAAU,CAG7B,UAFO8iB,EAAOtJ,GACdoD,EAASA,GAAoB,WAAV5c,EACdA,KAAYsxB,EAAS,OAAS,QAAW,CAI7C,GAAe,SAAVtxB,IAAoBuzB,QAAiCr1B,IAArBq1B,EAAU/Z,GAK9C,SAJA8X,GAAS,EAOX1L,EAAMpM,GAAS+Z,GAAYA,EAAU/Z,IAAUte,GAAOsgB,MAAO9f,EAAM8d,GAMrE,IADA4Z,GAAal4B,GAAO2D,cAAeikB,MAChB5nB,GAAO2D,cAAe+mB,GA8DzC,IAAMpM,KAzDD8Z,GAA2B,IAAlB53B,EAAKlC,WAMlB+4B,EAAKkB,SAAW,CAAEjY,EAAMiY,SAAUjY,EAAMkY,UAAWlY,EAAMmY,WAIlC,OADvBN,EAAiBE,GAAYA,EAAS9X,WAErC4X,EAAiB3Z,EAASzd,IAAKP,EAAM,YAGrB,UADjB+f,EAAUvgB,GAAOwgB,IAAKhgB,EAAM,cAEtB23B,EACJ5X,EAAU4X,GAIV9W,GAAU,CAAE7gB,IAAQ,GACpB23B,EAAiB33B,EAAK8f,MAAMC,SAAW4X,EACvC5X,EAAUvgB,GAAOwgB,IAAKhgB,EAAM,WAC5B6gB,GAAU,CAAE7gB,OAKG,WAAZ+f,GAAoC,iBAAZA,GAAgD,MAAlB4X,IACrB,SAAhCn4B,GAAOwgB,IAAKhgB,EAAM,WAGhB03B,IACLJ,EAAKjxB,KAAM,WACVyZ,EAAMC,QAAU4X,IAEM,MAAlBA,IACJ5X,EAAUD,EAAMC,QAChB4X,EAA6B,SAAZ5X,EAAqB,GAAKA,IAG7CD,EAAMC,QAAU,iBAKd8W,EAAKkB,WACTjY,EAAMiY,SAAW,SACjBT,EAAKhe,OAAQ,WACZwG,EAAMiY,SAAWlB,EAAKkB,SAAU,GAChCjY,EAAMkY,UAAYnB,EAAKkB,SAAU,GACjCjY,EAAMmY,UAAYpB,EAAKkB,SAAU,MAKnCL,GAAY,EACExN,EAGPwN,IACAG,EACC,WAAYA,IAChBjC,EAASiC,EAASjC,QAGnBiC,EAAW7Z,EAASxB,OAAQxc,EAAM,SAAU,CAAE+f,QAAS4X,IAInDzW,IACJ2W,EAASjC,QAAUA,GAIfA,GACJ/U,GAAU,CAAE7gB,IAAQ,GAKrBs3B,EAAKjxB,KAAM,WASV,IAAMyX,KAJA8X,GACL/U,GAAU,CAAE7gB,IAEbge,EAASlF,OAAQ9Y,EAAM,UACTkqB,EACb1qB,GAAOsgB,MAAO9f,EAAM8d,EAAMoM,EAAMpM,OAMnC4Z,EAAYvB,GAAaP,EAASiC,EAAU/Z,GAAS,EAAGA,EAAMwZ,GACtDxZ,KAAQ+Z,IACfA,EAAU/Z,GAAS4Z,EAAU7oB,MACxB+mB,IACJ8B,EAAU/1B,IAAM+1B,EAAU7oB,MAC1B6oB,EAAU7oB,MAAQ,MAuMrBqpB,UAAW,SAAUn3B,EAAU+qB,GACzBA,EACJuK,GAAUI,WAAW1X,QAAShe,GAE9Bs1B,GAAUI,WAAWt5B,KAAM4D,MAK9BvB,GAAO24B,MAAQ,SAAUA,EAAOrG,EAAQnyB,GACvC,IAAI61B,EAAM2C,GAA0B,iBAAVA,EAAqB34B,GAAOsC,OAAQ,GAAIq2B,GAAU,CAC3Ef,SAAUz3B,IAAOA,GAAMmyB,GACtBl0B,EAAYu6B,IAAWA,EACxBxD,SAAUwD,EACVrG,OAAQnyB,GAAMmyB,GAAUA,IAAWl0B,EAAYk0B,IAAYA,GA
oC5D,OAhCKtyB,GAAOs1B,GAAG/Q,IACdyR,EAAIb,SAAW,EAGc,iBAAjBa,EAAIb,WACVa,EAAIb,YAAYn1B,GAAOs1B,GAAGsD,OAC9B5C,EAAIb,SAAWn1B,GAAOs1B,GAAGsD,OAAQ5C,EAAIb,UAGrCa,EAAIb,SAAWn1B,GAAOs1B,GAAGsD,OAAOlW,UAMjB,MAAbsT,EAAI/c,QAA+B,IAAd+c,EAAI/c,QAC7B+c,EAAI/c,MAAQ,MAIb+c,EAAIxI,IAAMwI,EAAI4B,SAEd5B,EAAI4B,SAAW,WACTx5B,EAAY43B,EAAIxI,MACpBwI,EAAIxI,IAAIhwB,KAAMT,MAGVi5B,EAAI/c,OACRjZ,GAAOmf,QAASpiB,KAAMi5B,EAAI/c,QAIrB+c,GAGRh2B,GAAOG,GAAGmC,OAAQ,CACjBu2B,OAAQ,SAAUF,EAAOG,EAAIxG,EAAQ/wB,GAGpC,OAAOxE,KAAK6P,OAAQyT,IAAqBG,IAAK,UAAW,GAAIc,OAG3Dnf,MAAM42B,QAAS,CAAEvG,QAASsG,GAAMH,EAAOrG,EAAQ/wB,IAElDw3B,QAAS,SAAUza,EAAMqa,EAAOrG,EAAQ/wB,GACvC,IAAI8P,EAAQrR,GAAO2D,cAAe2a,GACjC0a,EAASh5B,GAAO24B,MAAOA,EAAOrG,EAAQ/wB,GACtC03B,EAAc,WAGb,IAAInB,EAAOjB,GAAW95B,KAAMiD,GAAOsC,OAAQ,GAAIgc,GAAQ0a,IAGlD3nB,GAASmN,EAASzd,IAAKhE,KAAM,YACjC+6B,EAAKtY,MAAM,IAMd,OAFAyZ,EAAYC,OAASD,EAEd5nB,IAA0B,IAAjB2nB,EAAO/f,MACtBlc,KAAKuE,KAAM23B,GACXl8B,KAAKkc,MAAO+f,EAAO/f,MAAOggB,IAE5BzZ,KAAM,SAAU9gB,EAAMghB,EAAY+X,GACjC,IAAI0B,EAAY,SAAU9Z,GACzB,IAAIG,EAAOH,EAAMG,YACVH,EAAMG,KACbA,EAAMiY,IAYP,MATqB,iBAAT/4B,IACX+4B,EAAU/X,EACVA,EAAahhB,EACbA,OAAOsE,GAEH0c,GACJ3iB,KAAKkc,MAAOva,GAAQ,KAAM,IAGpB3B,KAAKuE,KAAM,WACjB,IAAI6d,GAAU,EACbpI,EAAgB,MAARrY,GAAgBA,EAAO,aAC/B06B,EAASp5B,GAAOo5B,OAChB/a,EAAOG,EAASzd,IAAKhE,MAEtB,GAAKga,EACCsH,EAAMtH,IAAWsH,EAAMtH,GAAQyI,MACnC2Z,EAAW9a,EAAMtH,SAGlB,IAAMA,KAASsH,EACTA,EAAMtH,IAAWsH,EAAMtH,GAAQyI,MAAQ0W,GAAK1xB,KAAMuS,IACtDoiB,EAAW9a,EAAMtH,IAKpB,IAAMA,EAAQqiB,EAAO94B,OAAQyW,KACvBqiB,EAAQriB,GAAQvW,OAASzD,MACnB,MAAR2B,GAAgB06B,EAAQriB,GAAQkC,QAAUva,IAE5C06B,EAAQriB,GAAQ+gB,KAAKtY,KAAMiY,GAC3BtY,GAAU,EACVia,EAAO/2B,OAAQ0U,EAAO,KAOnBoI,GAAYsY,GAChBz3B,GAAOmf,QAASpiB,KAAM2B,MAIzBw6B,OAAQ,SAAUx6B,GAIjB,OAHc,IAATA,IACJA,EAAOA,GAAQ,MAET3B,KAAKuE,KAAM,WACjB,IAAIyV,EACHsH,EAAOG,EAASzd,IAAKhE,MACrBkc,EAAQoF,EAAM3f,EAAO,SACrB2gB,EAAQhB,EAAM3f,EAAO,cACrB06B,EAASp5B,GAAOo5B,OAChB94B,EAAS2Y,EAAQA,EAAM3Y,OAAS,EAajC,IAVA+d,EAAK6a,QAAS,EAGdl5B,GAAOiZ,MAAOlc,KAAM2B,EAAM,IAErB2gB,GAASA,EAAMG,MACnBH,EAAMG,KAAKhiB,KAAMT,MAAM,GAIlBga,EAAQqiB,EAAO94B,OAAQyW,KACvBqiB,EAAQriB,GAAQvW,OAASzD,MAAQq8B,EAAQriB,GAAQkC,QAAUva,IAC/D06B,EAAQriB,GAAQ+gB,KAAKtY,MAAM,GAC3B4Z,EAAO/2B,OAAQ0U,EAAO,IAKxB,IAAMA,EAAQ,EAAGA,EAAQzW,EAAQyW,IAC3BkC,EAAOlC,IAAWkC,EAAOlC,GAAQmiB,QACrCjgB,EAAOlC,GAAQmiB,OAAO17B,KAAMT,aAKvBshB,EAAK6a,YAKfl5B,GAAOsB,KAAM,CAAE,SAAU,OAAQ,QAAU,SAAU6D,EAAI1E,GACxD,IAAI44B,EAAQr5B,GAAOG,GAAIM,GACvBT,GAAOG,GAAIM,GAAS,SAAUk4B,EAAOrG,EAAQ/wB,GAC5C,OAAgB,MAATo3B,GAAkC,kBAAVA,EAC9BU,EAAM37B,MAAOX,KAAM0E,WACnB1E,KAAKg8B,QAAStC,GAAOh2B,GAAM,GAAQk4B,EAAOrG,EAAQ/wB,MAKrDvB,GAAOsB,KAAM,CACZg4B,UAAW7C,GAAO,QAClB8C,QAAS9C,GAAO,QAChB+C,YAAa/C,GAAO,UACpBgD,OAAQ,CAAEjH,QAAS,QACnBkH,QAAS,CAAElH,QAAS,QACpBmH,WAAY,CAAEnH,QAAS,WACrB,SAAU/xB,EAAMmnB,GAClB5nB,GAAOG,GAAIM,GAAS,SAAUk4B,EAAOrG,EAAQ/wB,GAC5C,OAAOxE,KAAKg8B,QAASnR,EAAO+Q,EAAOrG,EAAQ/wB,MAI7CvB,GAAOo5B,OAAS,GAChBp5B,GAAOs1B,GAAGiB,KAAO,WAChB,IAAIsB,EACH34B,EAAI,EACJk6B,EAASp5B,GAAOo5B,OAIjB,IAFAtD,GAAQ7N,KAAKC,MAELhpB,EAAIk6B,EAAO94B,OAAQpB,KAC1B24B,EAAQuB,EAAQl6B,OAGCk6B,EAAQl6B,KAAQ24B,GAChCuB,EAAO/2B,OAAQnD,IAAK,GAIhBk6B,EAAO94B,QACZN,GAAOs1B,GAAG9V,OAEXsW,QAAQ9yB,GAGThD,GAAOs1B,GAAGuC,MAAQ,SAAUA,GAC3B73B,GAAOo5B,OAAOz7B,KAAMk6B,GACpB73B,GAAOs1B,GAAGjmB,SAGXrP,GAAOs1B,GAAGgB,SAAW,GACrBt2B,GAAOs1B,GAAGjmB,MAAQ,WACZ0mB,KAILA,IAAa,EACbI,OAGDn2B,GAAOs1B,GAAG9V,KAAO,WAChBuW,GAAa,MAGd/1B,GAAOs1B,GAAGsD,OAAS,CAClBgB,KAAM,IACNC,KAAM,IAGNnX,SAAU,KAKX1iB,GAAOG,GAAG25B,MAAQ,SAAUC,EAAMr7B,GAIjC,OAHAq7B,EAAO/5B,GAAOs1B,IAAKt1B,GAAOs1B,GAAGsD,OAAQmB,IAAiBA,EACtDr7B,EAAOA,GAAQ,KAER3B,KAAKkc,MAA
Ova,EAAM,SAAU8K,EAAM6V,GACxC,IAAI2a,EAAUl9B,GAAO2e,WAAYjS,EAAMuwB,GACvC1a,EAAMG,KAAO,WACZ1iB,GAAOm9B,aAAcD,OAOnB/sB,GAAQtQ,EAAS0C,cAAe,SAEnC22B,GADSr5B,EAAS0C,cAAe,UACpBK,YAAa/C,EAAS0C,cAAe,WAEnD4N,GAAMvO,KAAO,WAIbP,GAAQ+7B,QAA0B,KAAhBjtB,GAAMnI,MAIxB3G,GAAQg8B,YAAcnE,GAAI7kB,UAI1BlE,GAAQtQ,EAAS0C,cAAe,UAC1ByF,MAAQ,IACdmI,GAAMvO,KAAO,QACbP,GAAQi8B,WAA6B,MAAhBntB,GAAMnI,MAI5B,IAAIu1B,GACH5sB,GAAazN,GAAOqN,KAAKI,WAE1BzN,GAAOG,GAAGmC,OAAQ,CACjBkL,KAAM,SAAU/M,EAAMqE,GACrB,OAAOkY,EAAQjgB,KAAMiD,GAAOwN,KAAM/M,EAAMqE,EAA0B,EAAnBrD,UAAUnB,SAG1Dg6B,WAAY,SAAU75B,GACrB,OAAO1D,KAAKuE,KAAM,WACjBtB,GAAOs6B,WAAYv9B,KAAM0D,QAK5BT,GAAOsC,OAAQ,CACdkL,KAAM,SAAUhN,EAAMC,EAAMqE,GAC3B,IAAI3D,EAAKke,EACRkb,EAAQ/5B,EAAKlC,SAGd,GAAe,IAAVi8B,GAAyB,IAAVA,GAAyB,IAAVA,EAKnC,MAAkC,oBAAtB/5B,EAAKjB,aACTS,GAAOse,KAAM9d,EAAMC,EAAMqE,IAKlB,IAAVy1B,GAAgBv6B,GAAOmE,SAAU3D,KACrC6e,EAAQrf,GAAOw6B,UAAW/5B,EAAKC,iBAC5BV,GAAOqN,KAAKrD,MAAM3B,KAAK7D,KAAM/D,GAAS45B,QAAWr3B,SAGtCA,IAAV8B,EACW,OAAVA,OACJ9E,GAAOs6B,WAAY95B,EAAMC,GAIrB4e,GAAS,QAASA,QACuBrc,KAA3C7B,EAAMke,EAAMjB,IAAK5d,EAAMsE,EAAOrE,IACzBU,GAGRX,EAAKhB,aAAciB,EAAMqE,EAAQ,IAC1BA,GAGHua,GAAS,QAASA,GAA+C,QAApCle,EAAMke,EAAMte,IAAKP,EAAMC,IACjDU,EAMM,OAHdA,EAAMnB,GAAO4J,KAAK4D,KAAMhN,EAAMC,SAGTuC,EAAY7B,IAGlCq5B,UAAW,CACV97B,KAAM,CACL0f,IAAK,SAAU5d,EAAMsE,GACpB,IAAM3G,GAAQi8B,YAAwB,UAAVt1B,GAC3BvE,GAAUC,EAAM,SAAY,CAC5B,IAAIrB,EAAMqB,EAAKsE,MAKf,OAJAtE,EAAKhB,aAAc,OAAQsF,GACtB3F,IACJqB,EAAKsE,MAAQ3F,GAEP2F,MAMXw1B,WAAY,SAAU95B,EAAMsE,GAC3B,IAAIrE,EACHvB,EAAI,EAIJu7B,EAAY31B,GAASA,EAAMkF,MAAO2N,GAEnC,GAAK8iB,GAA+B,IAAlBj6B,EAAKlC,SACtB,MAAUmC,EAAOg6B,EAAWv7B,KAC3BsB,EAAKwK,gBAAiBvK,MAO1B45B,GAAW,CACVjc,IAAK,SAAU5d,EAAMsE,EAAOrE,GAQ3B,OAPe,IAAVqE,EAGJ9E,GAAOs6B,WAAY95B,EAAMC,GAEzBD,EAAKhB,aAAciB,EAAMA,GAEnBA,IAITT,GAAOsB,KAAMtB,GAAOqN,KAAKrD,MAAM3B,KAAK0X,OAAO/V,MAAO,QAAU,SAAU7E,EAAI1E,GACzE,IAAIi6B,EAASjtB,GAAYhN,IAAUT,GAAO4J,KAAK4D,KAE/CC,GAAYhN,GAAS,SAAUD,EAAMC,EAAM6U,GAC1C,IAAInU,EAAKykB,EACR+U,EAAgBl6B,EAAKC,cAYtB,OAVM4U,IAGLsQ,EAASnY,GAAYktB,GACrBltB,GAAYktB,GAAkBx5B,EAC9BA,EAAqC,MAA/Bu5B,EAAQl6B,EAAMC,EAAM6U,GACzBqlB,EACA,KACDltB,GAAYktB,GAAkB/U,GAExBzkB,KAOT,IAAIy5B,GAAa,sCAChBC,GAAa,gBAwIb,SAASC,GAAkBh2B,GAE1B,OADaA,EAAMkF,MAAO2N,IAAmB,IAC/B9M,KAAM,KAItB,SAASkwB,GAAUv6B,GAClB,OAAOA,EAAKjB,cAAgBiB,EAAKjB,aAAc,UAAa,GAG7D,SAASy7B,GAAgBl2B,GACxB,OAAKhC,MAAMC,QAAS+B,GACZA,EAEc,iBAAVA,GACJA,EAAMkF,MAAO2N,IAEd,GAvJR3X,GAAOG,GAAGmC,OAAQ,CACjBgc,KAAM,SAAU7d,EAAMqE,GACrB,OAAOkY,EAAQjgB,KAAMiD,GAAOse,KAAM7d,EAAMqE,EAA0B,EAAnBrD,UAAUnB,SAG1D26B,WAAY,SAAUx6B,GACrB,OAAO1D,KAAKuE,KAAM,kBACVvE,KAAMiD,GAAOk7B,QAASz6B,IAAUA,QAK1CT,GAAOsC,OAAQ,CACdgc,KAAM,SAAU9d,EAAMC,EAAMqE,GAC3B,IAAI3D,EAAKke,EACRkb,EAAQ/5B,EAAKlC,SAGd,GAAe,IAAVi8B,GAAyB,IAAVA,GAAyB,IAAVA,EAWnC,OAPe,IAAVA,GAAgBv6B,GAAOmE,SAAU3D,KAGrCC,EAAOT,GAAOk7B,QAASz6B,IAAUA,EACjC4e,EAAQrf,GAAO+0B,UAAWt0B,SAGZuC,IAAV8B,EACCua,GAAS,QAASA,QACuBrc,KAA3C7B,EAAMke,EAAMjB,IAAK5d,EAAMsE,EAAOrE,IACzBU,EAGCX,EAAMC,GAASqE,EAGpBua,GAAS,QAASA,GAA+C,QAApCle,EAAMke,EAAMte,IAAKP,EAAMC,IACjDU,EAGDX,EAAMC,IAGds0B,UAAW,CACV/jB,SAAU,CACTjQ,IAAK,SAAUP,GAMd,IAAI26B,EAAWn7B,GAAO4J,KAAK4D,KAAMhN,EAAM,YAEvC,OAAK26B,EACGjL,SAAUiL,EAAU,IAI3BP,GAAWp2B,KAAMhE,EAAKD,WACtBs6B,GAAWr2B,KAAMhE,EAAKD,WACtBC,EAAKuQ,KAEE,GAGA,KAKXmqB,QAAS,CACRE,MAAO,UACPC,QAAS,eAYLl9B,GAAQg8B,cACbn6B,GAAO+0B,UAAU5jB,SAAW,CAC3BpQ,IAAK,SAAUP,GAId,IAAI8O,EAAS9O,EAAKb,WAIlB,OAHK2P,GAAUA,EAAO3P,YACrB2P,EAAO3P,WAAWyR,cAEZ,MAERgN,IAAK,SAAU5d,GAId,IAAI8O,EAAS9O,EAAKb,WACb2P,IACJA,EAAO8B,cAEF9B,EAAO3P,YACX2P,EAAO3P,WAAWyR,kBAOvBpR,GAAOsB,KAAM,CACZ,WACA,WACA,YACA,cACA,cACA,UACA,UACA,SACA,cACA,
mBACE,WACFtB,GAAOk7B,QAASn+B,KAAK2D,eAAkB3D,OA4BxCiD,GAAOG,GAAGmC,OAAQ,CACjBg5B,SAAU,SAAUx2B,GACnB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAOu+B,SAAUx2B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,WAI1Dw+B,EAAaP,GAAgBl2B,IAEbxE,OACRvD,KAAKuE,KAAM,WAIjB,GAHAk6B,EAAWT,GAAUh+B,MACrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GACnByX,EAAI/Y,QAAS,IAAMoP,EAAY,KAAQ,IAC3C2J,GAAO3J,EAAY,KAKrByuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,MAGR2+B,YAAa,SAAU52B,GACtB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAO2+B,YAAa52B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,UAIvD0E,UAAUnB,QAIhBi7B,EAAaP,GAAgBl2B,IAEbxE,OACRvD,KAAKuE,KAAM,WAMjB,GALAk6B,EAAWT,GAAUh+B,MAGrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IAAM,CACzC8N,EAAYuuB,EAAYr8B,GAGxB,OAAgD,EAAxCyX,EAAI/Y,QAAS,IAAMoP,EAAY,KACtC2J,EAAMA,EAAIvT,QAAS,IAAM4J,EAAY,IAAK,KAK5CyuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,KA/BCA,KAAKyQ,KAAM,QAAS,KAkC7BmuB,YAAa,SAAU72B,EAAO82B,GAC7B,IAAIL,EAAYvuB,EAAW9N,EAAG+W,EAC7BvX,SAAcoG,EACd+2B,EAAwB,WAATn9B,GAAqBoE,MAAMC,QAAS+B,GAEpD,OAAK1G,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO4+B,YACd72B,EAAMtH,KAAMT,KAAMmC,EAAG67B,GAAUh+B,MAAQ6+B,GACvCA,KAKsB,kBAAbA,GAA0BC,EAC9BD,EAAW7+B,KAAKu+B,SAAUx2B,GAAU/H,KAAK2+B,YAAa52B,IAG9Dy2B,EAAaP,GAAgBl2B,GAEtB/H,KAAKuE,KAAM,WACjB,GAAKu6B,EAKJ,IAFA5lB,EAAOjW,GAAQjD,MAETmC,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GAGnB+W,EAAK6lB,SAAU9uB,GACnBiJ,EAAKylB,YAAa1uB,GAElBiJ,EAAKqlB,SAAUtuB,aAKIhK,IAAV8B,GAAgC,YAATpG,KAClCsO,EAAY+tB,GAAUh+B,QAIrByhB,EAASJ,IAAKrhB,KAAM,gBAAiBiQ,GAOjCjQ,KAAKyC,cACTzC,KAAKyC,aAAc,QAClBwN,IAAuB,IAAVlI,EACZ,GACA0Z,EAASzd,IAAKhE,KAAM,kBAAqB,SAO/C++B,SAAU,SAAU77B,GACnB,IAAI+M,EAAWxM,EACdtB,EAAI,EAEL8N,EAAY,IAAM/M,EAAW,IAC7B,MAAUO,EAAOzD,KAAMmC,KACtB,GAAuB,IAAlBsB,EAAKlC,WACoE,GAA3E,IAAMw8B,GAAkBC,GAAUv6B,IAAW,KAAM5C,QAASoP,GAC9D,OAAO,EAIT,OAAO,KAOT,IAAI+uB,GAAU,MAEd/7B,GAAOG,GAAGmC,OAAQ,CACjBnD,IAAK,SAAU2F,GACd,IAAIua,EAAOle,EAAKuqB,EACflrB,EAAOzD,KAAM,GAEd,OAAM0E,UAAUnB,QA0BhBorB,EAAkBttB,EAAY0G,GAEvB/H,KAAKuE,KAAM,SAAUpC,GAC3B,IAAIC,EAEmB,IAAlBpC,KAAKuB,WAWE,OANXa,EADIusB,EACE5mB,EAAMtH,KAAMT,KAAMmC,EAAGc,GAAQjD,MAAOoC,OAEpC2F,GAKN3F,EAAM,GAEoB,iBAARA,EAClBA,GAAO,GAEI2D,MAAMC,QAAS5D,KAC1BA,EAAMa,GAAOwB,IAAKrC,EAAK,SAAU2F,GAChC,OAAgB,MAATA,EAAgB,GAAKA,EAAQ,OAItCua,EAAQrf,GAAOg8B,SAAUj/B,KAAK2B,OAAUsB,GAAOg8B,SAAUj/B,KAAKwD,SAASG,iBAGrD,QAAS2e,QAA+Crc,IAApCqc,EAAMjB,IAAKrhB,KAAMoC,EAAK,WAC3DpC,KAAK+H,MAAQ3F,OAzDTqB,GACJ6e,EAAQrf,GAAOg8B,SAAUx7B,EAAK9B,OAC7BsB,GAAOg8B,SAAUx7B,EAAKD,SAASG,iBAG/B,QAAS2e,QACgCrc,KAAvC7B,EAAMke,EAAMte,IAAKP,EAAM,UAElBW,EAMY,iBAHpBA,EAAMX,EAAKsE,OAIH3D,EAAIiC,QAAS24B,GAAS,IAIhB,MAAP56B,EAAc,GAAKA,OAG3B,KAyCHnB,GAAOsC,OAAQ,CACd05B,SAAU,CACT5Z,OAAQ,CACPrhB,IAAK,SAAUP,GAEd,IAAIrB,EAAMa,GAAO4J,KAAK4D,KAAMhN,EAAM,SAClC,OAAc,MAAPrB,EACNA,EAMA27B,GAAkB96B,GAAOV,KAAMkB,MAGlCyK,OAAQ,CACPlK,IAAK,SAAUP,GACd,IAAIsE,EAAOsd,EAAQljB,EAClBqD,EAAU/B,EAAK+B,QACfwU,EAAQvW,EAAK4Q,cACbgT,EAAoB,eAAd5jB,EAAK9B,KACX6iB,EAAS6C,EAAM,KAAO,GACtBkN,EAAMlN,EAAMrN,EAAQ,EAAIxU,EAAQjC,OAUjC,IAPCpB,EADI6X,EAAQ,EACRua,EAGAlN,EAAMrN,EAAQ,EAIX7X,EAAIoyB,EAAKpyB,IAKhB,KAJAkjB,EAAS7f,EAASrD,IAIJiS,UAAYjS,IAAM6X,KAG7BqL,EAAO9Y,YACL8Y,EAAOziB,WAAW2J,WACnB/I,GAAU6hB,EAAOziB,WAAY,aAAiB,CAMjD,GAHAmF,EAAQ9E,GAAQoiB,GAASjjB,MAGpBilB,EACJ,OAAOtf,EAI
Ryc,EAAO5jB,KAAMmH,GAIf,OAAOyc,GAGRnD,IAAK,SAAU5d,EAAMsE,GACpB,IAAIm3B,EAAW7Z,EACd7f,EAAU/B,EAAK+B,QACfgf,EAASvhB,GAAOgE,UAAWc,GAC3B5F,EAAIqD,EAAQjC,OAEb,MAAQpB,MACPkjB,EAAS7f,EAASrD,IAINiS,UACuD,EAAlEnR,GAAOkE,QAASlE,GAAOg8B,SAAS5Z,OAAOrhB,IAAKqhB,GAAUb,MAEtD0a,GAAY,GAUd,OAHMA,IACLz7B,EAAK4Q,eAAiB,GAEhBmQ,OAOXvhB,GAAOsB,KAAM,CAAE,QAAS,YAAc,WACrCtB,GAAOg8B,SAAUj/B,MAAS,CACzBqhB,IAAK,SAAU5d,EAAMsE,GACpB,GAAKhC,MAAMC,QAAS+B,GACnB,OAAStE,EAAK0Q,SAA2D,EAAjDlR,GAAOkE,QAASlE,GAAQQ,GAAOrB,MAAO2F,KAI3D3G,GAAQ+7B,UACbl6B,GAAOg8B,SAAUj/B,MAAOgE,IAAM,SAAUP,GACvC,OAAwC,OAAjCA,EAAKjB,aAAc,SAAqB,KAAOiB,EAAKsE,UAS9D,IAAI0L,GAAW1T,GAAO0T,SAElB5R,GAAQ,CAAEmG,KAAMkjB,KAAKC,OAErBgU,GAAS,KAKbl8B,GAAOm8B,SAAW,SAAU9d,GAC3B,IAAInP,EAAKktB,EACT,IAAM/d,GAAwB,iBAATA,EACpB,OAAO,KAKR,IACCnP,GAAM,IAAMpS,GAAOu/B,WAAcC,gBAAiBje,EAAM,YACvD,MAAQ3U,IAYV,OAVA0yB,EAAkBltB,GAAOA,EAAI3E,qBAAsB,eAAiB,GAC9D2E,IAAOktB,GACZp8B,GAAOsD,MAAO,iBACb84B,EACCp8B,GAAOwB,IAAK46B,EAAgB3yB,WAAY,SAAUgC,GACjD,OAAOA,EAAG5H,cACPgH,KAAM,MACVwT,IAGInP,GAIR,IAAIqtB,GAAc,kCACjBC,GAA0B,SAAU9yB,GACnCA,EAAEmb,mBAGJ7kB,GAAOsC,OAAQtC,GAAOskB,MAAO,CAE5BU,QAAS,SAAUV,EAAOjG,EAAM7d,EAAMi8B,GAErC,IAAIv9B,EAAGyX,EAAKgJ,EAAK+c,EAAYC,EAAQ/W,EAAQ9K,EAAS8hB,EACrDC,EAAY,CAAEr8B,GAAQ7D,GACtB+B,EAAOX,GAAOP,KAAM8mB,EAAO,QAAWA,EAAM5lB,KAAO4lB,EACnDkB,EAAaznB,GAAOP,KAAM8mB,EAAO,aAAgBA,EAAMlgB,UAAUc,MAAO,KAAQ,GAKjF,GAHAyR,EAAMimB,EAAcjd,EAAMnf,EAAOA,GAAQ7D,EAGlB,IAAlB6D,EAAKlC,UAAoC,IAAlBkC,EAAKlC,WAK5Bi+B,GAAY/3B,KAAM9F,EAAOsB,GAAOskB,MAAMuB,cAIf,EAAvBnnB,EAAKd,QAAS,OAIlBc,GADA8mB,EAAa9mB,EAAKwG,MAAO,MACPoG,QAClBka,EAAWpjB,QAEZu6B,EAASj+B,EAAKd,QAAS,KAAQ,GAAK,KAAOc,GAG3C4lB,EAAQA,EAAOtkB,GAAOiD,SACrBqhB,EACA,IAAItkB,GAAOmnB,MAAOzoB,EAAuB,iBAAV4lB,GAAsBA,IAGhDK,UAAY8X,EAAe,EAAI,EACrCnY,EAAMlgB,UAAYohB,EAAW3a,KAAM,KACnCyZ,EAAMuC,WAAavC,EAAMlgB,UACxB,IAAImB,OAAQ,UAAYigB,EAAW3a,KAAM,iBAAoB,WAC7D,KAGDyZ,EAAM3V,YAAS3L,EACTshB,EAAM3hB,SACX2hB,EAAM3hB,OAASnC,GAIhB6d,EAAe,MAARA,EACN,CAAEiG,GACFtkB,GAAOgE,UAAWqa,EAAM,CAAEiG,IAG3BxJ,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GACpC+9B,IAAgB3hB,EAAQkK,UAAmD,IAAxClK,EAAQkK,QAAQtnB,MAAO8C,EAAM6d,IAAtE,CAMA,IAAMoe,IAAiB3hB,EAAQ0M,WAAahpB,EAAUgC,GAAS,CAM9D,IAJAk8B,EAAa5hB,EAAQ8J,cAAgBlmB,EAC/B69B,GAAY/3B,KAAMk4B,EAAah+B,KACpCiY,EAAMA,EAAIhX,YAEHgX,EAAKA,EAAMA,EAAIhX,WACtBk9B,EAAUl/B,KAAMgZ,GAChBgJ,EAAMhJ,EAIFgJ,KAAUnf,EAAK+D,eAAiB5H,IACpCkgC,EAAUl/B,KAAMgiB,EAAIvT,aAAeuT,EAAImd,cAAgBhgC,IAKzDoC,EAAI,EACJ,OAAUyX,EAAMkmB,EAAW39B,QAAYolB,EAAMqC,uBAC5CiW,EAAcjmB,EACd2N,EAAM5lB,KAAW,EAAJQ,EACZw9B,EACA5hB,EAAQiL,UAAYrnB,GAGrBknB,GAAWpH,EAASzd,IAAK4V,EAAK,WAAcxZ,OAAOwoB,OAAQ,OAAUrB,EAAM5lB,OAC1E8f,EAASzd,IAAK4V,EAAK,YAEnBiP,EAAOloB,MAAOiZ,EAAK0H,IAIpBuH,EAAS+W,GAAUhmB,EAAKgmB,KACT/W,EAAOloB,OAASogB,EAAYnH,KAC1C2N,EAAM3V,OAASiX,EAAOloB,MAAOiZ,EAAK0H,IACZ,IAAjBiG,EAAM3V,QACV2V,EAAMS,kBA8CT,OA1CAT,EAAM5lB,KAAOA,EAGP+9B,GAAiBnY,EAAMuD,sBAEpB/M,EAAQ4H,WACqC,IAApD5H,EAAQ4H,SAAShlB,MAAOm/B,EAAUz3B,MAAOiZ,KACzCP,EAAYtd,IAIPm8B,GAAUv+B,EAAYoC,EAAM9B,MAAaF,EAAUgC,MAGvDmf,EAAMnf,EAAMm8B,MAGXn8B,EAAMm8B,GAAW,MAIlB38B,GAAOskB,MAAMuB,UAAYnnB,EAEpB4lB,EAAMqC,wBACViW,EAAYtwB,iBAAkB5N,EAAM89B,IAGrCh8B,EAAM9B,KAED4lB,EAAMqC,wBACViW,EAAYjgB,oBAAqBje,EAAM89B,IAGxCx8B,GAAOskB,MAAMuB,eAAY7iB,EAEpB2c,IACJnf,EAAMm8B,GAAWhd,IAMd2E,EAAM3V,SAKdwb,SAAU,SAAUzrB,EAAM8B,EAAM8jB,GAC/B,IAAI5a,EAAI1J,GAAOsC,OACd,IAAItC,GAAOmnB,MACX7C,EACA,CACC5lB,KAAMA,EACNypB,aAAa,IAIfnoB,GAAOskB,MAAMU,QAAStb,EAAG,KAAMlJ,MAKjCR,GAAOG,GAAGmC,OAAQ,CAEjB0iB,QAAS,SAAUtmB,EAAM2f,GACxB,OAAOthB,KAAKuE,KAAM,WACjBtB,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAMthB,SAGpCggC,eAAgB,SAAUr+B,EAAM2f,GAC/B,IAAI7d,EAAOzD,KAAM,GACj
B,GAAKyD,EACJ,OAAOR,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAM7d,GAAM,MAMlD,IACCw8B,GAAW,QACXC,GAAQ,SACRC,GAAkB,wCAClBC,GAAe,qCAEhB,SAASC,GAAa1I,EAAQr2B,EAAKg/B,EAAapmB,GAC/C,IAAIxW,EAEJ,GAAKqC,MAAMC,QAAS1E,GAGnB2B,GAAOsB,KAAMjD,EAAK,SAAUa,EAAG2Y,GACzBwlB,GAAeL,GAASx4B,KAAMkwB,GAGlCzd,EAAKyd,EAAQ7c,GAKbulB,GACC1I,EAAS,KAAqB,iBAAN7c,GAAuB,MAALA,EAAY3Y,EAAI,IAAO,IACjE2Y,EACAwlB,EACApmB,UAKG,GAAMomB,GAAiC,WAAlBx9B,EAAQxB,GAUnC4Y,EAAKyd,EAAQr2B,QAPb,IAAMoC,KAAQpC,EACb++B,GAAa1I,EAAS,IAAMj0B,EAAO,IAAKpC,EAAKoC,GAAQ48B,EAAapmB,GAYrEjX,GAAOs9B,MAAQ,SAAU73B,EAAG43B,GAC3B,IAAI3I,EACH6I,EAAI,GACJtmB,EAAM,SAAU7L,EAAKoyB,GAGpB,IAAI14B,EAAQ1G,EAAYo/B,GACvBA,IACAA,EAEDD,EAAGA,EAAEj9B,QAAWm9B,mBAAoBryB,GAAQ,IAC3CqyB,mBAA6B,MAAT34B,EAAgB,GAAKA,IAG5C,GAAU,MAALW,EACJ,MAAO,GAIR,GAAK3C,MAAMC,QAAS0C,IAASA,EAAE7E,SAAWZ,GAAO6C,cAAe4C,GAG/DzF,GAAOsB,KAAMmE,EAAG,WACfwR,EAAKla,KAAK0D,KAAM1D,KAAK+H,cAOtB,IAAM4vB,KAAUjvB,EACf23B,GAAa1I,EAAQjvB,EAAGivB,GAAU2I,EAAapmB,GAKjD,OAAOsmB,EAAE1yB,KAAM,MAGhB7K,GAAOG,GAAGmC,OAAQ,CACjBo7B,UAAW,WACV,OAAO19B,GAAOs9B,MAAOvgC,KAAK4gC,mBAE3BA,eAAgB,WACf,OAAO5gC,KAAKyE,IAAK,WAGhB,IAAI8L,EAAWtN,GAAOse,KAAMvhB,KAAM,YAClC,OAAOuQ,EAAWtN,GAAOgE,UAAWsJ,GAAavQ,OAC9C6P,OAAQ,WACX,IAAIlO,EAAO3B,KAAK2B,KAGhB,OAAO3B,KAAK0D,OAAST,GAAQjD,MAAO2Y,GAAI,cACvCynB,GAAa34B,KAAMzH,KAAKwD,YAAe28B,GAAgB14B,KAAM9F,KAC3D3B,KAAKmU,UAAY0Q,GAAepd,KAAM9F,MACtC8C,IAAK,SAAU2D,EAAI3E,GACtB,IAAIrB,EAAMa,GAAQjD,MAAOoC,MAEzB,OAAY,MAAPA,EACG,KAGH2D,MAAMC,QAAS5D,GACZa,GAAOwB,IAAKrC,EAAK,SAAUA,GACjC,MAAO,CAAEsB,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAIhD,CAAEx8B,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAClDl8B,SAKN,IACC68B,GAAM,OACNC,GAAQ,OACRC,GAAa,gBACbC,GAAW,6BAIXC,GAAa,iBACbC,GAAY,QAWZhH,GAAa,GAObiH,GAAa,GAGbC,GAAW,KAAK1gC,OAAQ,KAGxB2gC,GAAezhC,EAAS0C,cAAe,KAKxC,SAASg/B,GAA6BC,GAGrC,OAAO,SAAUC,EAAoB5kB,GAED,iBAAvB4kB,IACX5kB,EAAO4kB,EACPA,EAAqB,KAGtB,IAAIC,EACHt/B,EAAI,EACJu/B,EAAYF,EAAmB79B,cAAcsJ,MAAO2N,IAAmB,GAExE,GAAKvZ,EAAYub,GAGhB,MAAU6kB,EAAWC,EAAWv/B,KAGR,MAAlBs/B,EAAU,IACdA,EAAWA,EAASnhC,MAAO,IAAO,KAChCihC,EAAWE,GAAaF,EAAWE,IAAc,IAAKjf,QAAS5F,KAI/D2kB,EAAWE,GAAaF,EAAWE,IAAc,IAAK7gC,KAAMgc,IAQnE,SAAS+kB,GAA+BJ,EAAW/7B,EAASi1B,EAAiBmH,GAE5E,IAAIC,EAAY,GACfC,EAAqBP,IAAcJ,GAEpC,SAASY,EAASN,GACjB,IAAIrtB,EAcJ,OAbAytB,EAAWJ,IAAa,EACxBx+B,GAAOsB,KAAMg9B,EAAWE,IAAc,GAAI,SAAU9lB,EAAGqmB,GACtD,IAAIC,EAAsBD,EAAoBx8B,EAASi1B,EAAiBmH,GACxE,MAAoC,iBAAxBK,GACVH,GAAqBD,EAAWI,GAKtBH,IACD1tB,EAAW6tB,QADf,GAHNz8B,EAAQk8B,UAAUlf,QAASyf,GAC3BF,EAASE,IACF,KAKF7tB,EAGR,OAAO2tB,EAASv8B,EAAQk8B,UAAW,MAAUG,EAAW,MAASE,EAAS,KAM3E,SAASG,GAAYt8B,EAAQhE,GAC5B,IAAIyM,EAAKxI,EACRs8B,EAAcl/B,GAAOm/B,aAAaD,aAAe,GAElD,IAAM9zB,KAAOzM,OACQqE,IAAfrE,EAAKyM,MACP8zB,EAAa9zB,GAAQzI,EAAWC,IAAUA,EAAO,KAAUwI,GAAQzM,EAAKyM,IAO5E,OAJKxI,GACJ5C,GAAOsC,QAAQ,EAAMK,EAAQC,GAGvBD,EA/ERy7B,GAAartB,KAAOP,GAASO,KAgP7B/Q,GAAOsC,OAAQ,CAGd88B,OAAQ,EAGRC,aAAc,GACdC,KAAM,GAENH,aAAc,CACbI,IAAK/uB,GAASO,KACdrS,KAAM,MACN8gC,QAxRgB,4DAwRQh7B,KAAMgM,GAASivB,UACvCljC,QAAQ,EACRmjC,aAAa,EACbC,OAAO,EACPC,YAAa,mDAcbC,QAAS,CACR9H,IAAKoG,GACL7+B,KAAM,aACNqsB,KAAM,YACNzc,IAAK,4BACL4wB,KAAM,qCAGPtpB,SAAU,CACTtH,IAAK,UACLyc,KAAM,SACNmU,KAAM,YAGPC,eAAgB,CACf7wB,IAAK,cACL5P,KAAM,eACNwgC,KAAM,gBAKPE,WAAY,CAGXC,SAAUj3B,OAGVk3B,aAAa,EAGbC,YAAathB,KAAKC,MAGlBshB,WAAYpgC,GAAOm8B,UAOpB+C,YAAa,CACZK,KAAK,EACLr/B,SAAS,IAOXmgC,UAAW,SAAU19B,EAAQ29B,GAC5B,OAAOA,EAGNrB,GAAYA,GAAYt8B,EAAQ3C,GAAOm/B,cAAgBmB,GAGvDrB,GAAYj/B,GAAOm/B,aAAcx8B,IAGnC49B,cAAelC,GAA6BpH,IAC5CuJ,cAAenC,GAA6BH,IAG5CuC,KAAM,SAAUlB,EAAKh9B,GAGA,iBAARg9B,IACXh9B,EAAUg9B,EACVA,OAAMv8B,GAIPT,EAAUA,GAAW,GAErB,IAAIm+B,EAGHC,EAGAC
,EACAC,EAGAC,EAGAC,EAGArkB,EAGAskB,EAGA9hC,EAGA+hC,EAGA1D,EAAIv9B,GAAOqgC,UAAW,GAAI99B,GAG1B2+B,EAAkB3D,EAAEr9B,SAAWq9B,EAG/B4D,EAAqB5D,EAAEr9B,UACpBghC,EAAgB5iC,UAAY4iC,EAAgBtgC,QAC9CZ,GAAQkhC,GACRlhC,GAAOskB,MAGRvK,EAAW/Z,GAAO0Z,WAClB0nB,EAAmBphC,GAAOwY,UAAW,eAGrC6oB,EAAa9D,EAAE8D,YAAc,GAG7BC,EAAiB,GACjBC,EAAsB,GAGtBC,EAAW,WAGX7C,EAAQ,CACP7hB,WAAY,EAGZ2kB,kBAAmB,SAAUr2B,GAC5B,IAAIpB,EACJ,GAAK0S,EAAY,CAChB,IAAMmkB,EAAkB,CACvBA,EAAkB,GAClB,MAAU72B,EAAQ+zB,GAAS3zB,KAAMw2B,GAChCC,EAAiB72B,EAAO,GAAItJ,cAAgB,MACzCmgC,EAAiB72B,EAAO,GAAItJ,cAAgB,MAAS,IACrDjD,OAAQuM,EAAO,IAGpBA,EAAQ62B,EAAiBz1B,EAAI1K,cAAgB,KAE9C,OAAgB,MAATsJ,EAAgB,KAAOA,EAAMa,KAAM,OAI3C62B,sBAAuB,WACtB,OAAOhlB,EAAYkkB,EAAwB,MAI5Ce,iBAAkB,SAAUlhC,EAAMqE,GAMjC,OALkB,MAAb4X,IACJjc,EAAO8gC,EAAqB9gC,EAAKC,eAChC6gC,EAAqB9gC,EAAKC,gBAAmBD,EAC9C6gC,EAAgB7gC,GAASqE,GAEnB/H,MAIR6kC,iBAAkB,SAAUljC,GAI3B,OAHkB,MAAbge,IACJ6gB,EAAEsE,SAAWnjC,GAEP3B,MAIRskC,WAAY,SAAU7/B,GACrB,IAAIzC,EACJ,GAAKyC,EACJ,GAAKkb,EAGJiiB,EAAM7kB,OAAQtY,EAAKm9B,EAAMmD,cAIzB,IAAM/iC,KAAQyC,EACb6/B,EAAYtiC,GAAS,CAAEsiC,EAAYtiC,GAAQyC,EAAKzC,IAInD,OAAOhC,MAIRglC,MAAO,SAAUC,GAChB,IAAIC,EAAYD,GAAcR,EAK9B,OAJKd,GACJA,EAAUqB,MAAOE,GAElBp7B,EAAM,EAAGo7B,GACFllC,OAoBV,GAfAgd,EAAS1B,QAASsmB,GAKlBpB,EAAEgC,MAAUA,GAAOhC,EAAEgC,KAAO/uB,GAASO,MAAS,IAC5C3N,QAAS66B,GAAWztB,GAASivB,SAAW,MAG1ClC,EAAE7+B,KAAO6D,EAAQ6V,QAAU7V,EAAQ7D,MAAQ6+B,EAAEnlB,QAAUmlB,EAAE7+B,KAGzD6+B,EAAEkB,WAAclB,EAAEiB,UAAY,KAAM99B,cAAcsJ,MAAO2N,IAAmB,CAAE,IAGxD,MAAjB4lB,EAAE2E,YAAsB,CAC5BnB,EAAYpkC,EAAS0C,cAAe,KAKpC,IACC0hC,EAAUhwB,KAAOwsB,EAAEgC,IAInBwB,EAAUhwB,KAAOgwB,EAAUhwB,KAC3BwsB,EAAE2E,YAAc9D,GAAaqB,SAAW,KAAOrB,GAAa+D,MAC3DpB,EAAUtB,SAAW,KAAOsB,EAAUoB,KACtC,MAAQz4B,GAIT6zB,EAAE2E,aAAc,GAalB,GARK3E,EAAElf,MAAQkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,OACxCkf,EAAElf,KAAOre,GAAOs9B,MAAOC,EAAElf,KAAMkf,EAAEF,cAIlCqB,GAA+BzH,GAAYsG,EAAGh7B,EAASo8B,GAGlDjiB,EACJ,OAAOiiB,EA8ER,IAAMz/B,KAzEN8hC,EAAchhC,GAAOskB,OAASiZ,EAAEhhC,SAGQ,GAApByD,GAAOo/B,UAC1Bp/B,GAAOskB,MAAMU,QAAS,aAIvBuY,EAAE7+B,KAAO6+B,EAAE7+B,KAAKif,cAGhB4f,EAAE6E,YAAcpE,GAAWx5B,KAAM+4B,EAAE7+B,MAKnCiiC,EAAWpD,EAAEgC,IAAIn8B,QAASy6B,GAAO,IAG3BN,EAAE6E,WAwBI7E,EAAElf,MAAQkf,EAAEmC,aACoD,KAAzEnC,EAAEqC,aAAe,IAAKhiC,QAAS,uCACjC2/B,EAAElf,KAAOkf,EAAElf,KAAKjb,QAASw6B,GAAK,OAvB9BqD,EAAW1D,EAAEgC,IAAIliC,MAAOsjC,EAASrgC,QAG5Bi9B,EAAElf,OAAUkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,QAC1CsiB,IAAczE,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQpD,EAAElf,YAGjDkf,EAAElf,OAIO,IAAZkf,EAAEpyB,QACNw1B,EAAWA,EAASv9B,QAAS06B,GAAY,MACzCmD,GAAa/E,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQ,KAAS/hC,GAAMmG,OACnEk8B,GAIF1D,EAAEgC,IAAMoB,EAAWM,GASf1D,EAAE8E,aACDriC,GAAOq/B,aAAcsB,IACzBhC,EAAMgD,iBAAkB,oBAAqB3hC,GAAOq/B,aAAcsB,IAE9D3gC,GAAOs/B,KAAMqB,IACjBhC,EAAMgD,iBAAkB,gBAAiB3hC,GAAOs/B,KAAMqB,MAKnDpD,EAAElf,MAAQkf,EAAE6E,aAAgC,IAAlB7E,EAAEqC,aAAyBr9B,EAAQq9B,cACjEjB,EAAMgD,iBAAkB,eAAgBpE,EAAEqC,aAI3CjB,EAAMgD,iBACL,SACApE,EAAEkB,UAAW,IAAOlB,EAAEsC,QAAStC,EAAEkB,UAAW,IAC3ClB,EAAEsC,QAAStC,EAAEkB,UAAW,KACA,MAArBlB,EAAEkB,UAAW,GAAc,KAAON,GAAW,WAAa,IAC7DZ,EAAEsC,QAAS,MAIFtC,EAAE+E,QACZ3D,EAAMgD,iBAAkBziC,EAAGq+B,EAAE+E,QAASpjC,IAIvC,GAAKq+B,EAAEgF,cAC+C,IAAnDhF,EAAEgF,WAAW/kC,KAAM0jC,EAAiBvC,EAAOpB,IAAiB7gB,GAG9D,OAAOiiB,EAAMoD,QAed,GAXAP,EAAW,QAGXJ,EAAiBnqB,IAAKsmB,EAAE3F,UACxB+G,EAAM93B,KAAM02B,EAAEiF,SACd7D,EAAMrmB,KAAMilB,EAAEj6B,OAGdo9B,EAAYhC,GAA+BR,GAAYX,EAAGh7B,EAASo8B,GAK5D,CASN,GARAA,EAAM7hB,WAAa,EAGdkkB,GACJG,EAAmBnc,QAAS,WAAY,CAAE2Z,EAAOpB,IAI7C7gB,EACJ,OAAOiiB,EAIHpB,EAAEoC,OAAqB,EAAZpC,EAAEvD,UACjB8G,EAAehkC,GAAO2e,WAAY,WACjCkjB,EAAMoD,MAAO,YACXxE,EAAEvD,UAGN,IACCtd,GAAY,EACZgkB,EAAU+B,KAAMnB,EAAgBz6B,GAC/B,MA
AQ6C,GAGT,GAAKgT,EACJ,MAAMhT,EAIP7C,GAAO,EAAG6C,SAhCX7C,GAAO,EAAG,gBAqCX,SAASA,EAAMi7B,EAAQY,EAAkBC,EAAWL,GACnD,IAAIM,EAAWJ,EAASl/B,EAAOu/B,EAAUC,EACxCd,EAAaU,EAGThmB,IAILA,GAAY,EAGPokB,GACJhkC,GAAOm9B,aAAc6G,GAKtBJ,OAAY19B,EAGZ49B,EAAwB0B,GAAW,GAGnC3D,EAAM7hB,WAAsB,EAATglB,EAAa,EAAI,EAGpCc,EAAsB,KAAVd,GAAiBA,EAAS,KAAkB,MAAXA,EAGxCa,IACJE,EA7lBJ,SAA8BtF,EAAGoB,EAAOgE,GAEvC,IAAII,EAAIrkC,EAAMskC,EAAeC,EAC5BzsB,EAAW+mB,EAAE/mB,SACbioB,EAAYlB,EAAEkB,UAGf,MAA2B,MAAnBA,EAAW,GAClBA,EAAUnzB,aACEtI,IAAP+/B,IACJA,EAAKxF,EAAEsE,UAAYlD,EAAM8C,kBAAmB,iBAK9C,GAAKsB,EACJ,IAAMrkC,KAAQ8X,EACb,GAAKA,EAAU9X,IAAU8X,EAAU9X,GAAO8F,KAAMu+B,GAAO,CACtDtE,EAAUlf,QAAS7gB,GACnB,MAMH,GAAK+/B,EAAW,KAAOkE,EACtBK,EAAgBvE,EAAW,OACrB,CAGN,IAAM//B,KAAQikC,EAAY,CACzB,IAAMlE,EAAW,IAAOlB,EAAEyC,WAAYthC,EAAO,IAAM+/B,EAAW,IAAQ,CACrEuE,EAAgBtkC,EAChB,MAEKukC,IACLA,EAAgBvkC,GAKlBskC,EAAgBA,GAAiBC,EAMlC,GAAKD,EAIJ,OAHKA,IAAkBvE,EAAW,IACjCA,EAAUlf,QAASyjB,GAEbL,EAAWK,GA0iBLE,CAAqB3F,EAAGoB,EAAOgE,KAIrCC,IACsC,EAA3C5iC,GAAOkE,QAAS,SAAUq5B,EAAEkB,YAC5Bz+B,GAAOkE,QAAS,OAAQq5B,EAAEkB,WAAc,IACxClB,EAAEyC,WAAY,eAAkB,cAIjC6C,EA9iBH,SAAsBtF,EAAGsF,EAAUlE,EAAOiE,GACzC,IAAIO,EAAOC,EAASC,EAAM1jB,EAAKlJ,EAC9BupB,EAAa,GAGbvB,EAAYlB,EAAEkB,UAAUphC,QAGzB,GAAKohC,EAAW,GACf,IAAM4E,KAAQ9F,EAAEyC,WACfA,EAAYqD,EAAK3iC,eAAkB68B,EAAEyC,WAAYqD,GAInDD,EAAU3E,EAAUnzB,QAGpB,MAAQ83B,EAcP,GAZK7F,EAAEwC,eAAgBqD,KACtBzE,EAAOpB,EAAEwC,eAAgBqD,IAAcP,IAIlCpsB,GAAQmsB,GAAarF,EAAE+F,aAC5BT,EAAWtF,EAAE+F,WAAYT,EAAUtF,EAAEiB,WAGtC/nB,EAAO2sB,EACPA,EAAU3E,EAAUnzB,QAKnB,GAAiB,MAAZ83B,EAEJA,EAAU3sB,OAGJ,GAAc,MAATA,GAAgBA,IAAS2sB,EAAU,CAM9C,KAHAC,EAAOrD,EAAYvpB,EAAO,IAAM2sB,IAAapD,EAAY,KAAOoD,IAI/D,IAAMD,KAASnD,EAId,IADArgB,EAAMwjB,EAAMj+B,MAAO,MACT,KAAQk+B,IAGjBC,EAAOrD,EAAYvpB,EAAO,IAAMkJ,EAAK,KACpCqgB,EAAY,KAAOrgB,EAAK,KACb,EAGG,IAAT0jB,EACJA,EAAOrD,EAAYmD,IAGgB,IAAxBnD,EAAYmD,KACvBC,EAAUzjB,EAAK,GACf8e,EAAUlf,QAASI,EAAK,KAEzB,MAOJ,IAAc,IAAT0jB,EAGJ,GAAKA,GAAQ9F,EAAEgG,UACdV,EAAWQ,EAAMR,QAEjB,IACCA,EAAWQ,EAAMR,GAChB,MAAQn5B,GACT,MAAO,CACNmQ,MAAO,cACPvW,MAAO+/B,EAAO35B,EAAI,sBAAwB+M,EAAO,OAAS2sB,IASjE,MAAO,CAAEvpB,MAAO,UAAWwE,KAAMwkB,GAidpBW,CAAajG,EAAGsF,EAAUlE,EAAOiE,GAGvCA,GAGCrF,EAAE8E,cACNS,EAAWnE,EAAM8C,kBAAmB,oBAEnCzhC,GAAOq/B,aAAcsB,GAAamC,IAEnCA,EAAWnE,EAAM8C,kBAAmB,WAEnCzhC,GAAOs/B,KAAMqB,GAAamC,IAKZ,MAAXhB,GAA6B,SAAXvE,EAAE7+B,KACxBsjC,EAAa,YAGS,MAAXF,EACXE,EAAa,eAIbA,EAAaa,EAAShpB,MACtB2oB,EAAUK,EAASxkB,KAEnBukB,IADAt/B,EAAQu/B,EAASv/B,UAMlBA,EAAQ0+B,GACHF,GAAWE,IACfA,EAAa,QACRF,EAAS,IACbA,EAAS,KAMZnD,EAAMmD,OAASA,EACfnD,EAAMqD,YAAeU,GAAoBV,GAAe,GAGnDY,EACJ7oB,EAASoB,YAAa+lB,EAAiB,CAAEsB,EAASR,EAAYrD,IAE9D5kB,EAASuB,WAAY4lB,EAAiB,CAAEvC,EAAOqD,EAAY1+B,IAI5Dq7B,EAAM0C,WAAYA,GAClBA,OAAar+B,EAERg+B,GACJG,EAAmBnc,QAAS4d,EAAY,cAAgB,YACvD,CAAEjE,EAAOpB,EAAGqF,EAAYJ,EAAUl/B,IAIpC89B,EAAiB3nB,SAAUynB,EAAiB,CAAEvC,EAAOqD,IAEhDhB,IACJG,EAAmBnc,QAAS,eAAgB,CAAE2Z,EAAOpB,MAG3Cv9B,GAAOo/B,QAChBp/B,GAAOskB,MAAMU,QAAS,cAKzB,OAAO2Z,GAGR8E,QAAS,SAAUlE,EAAKlhB,EAAM9c,GAC7B,OAAOvB,GAAOe,IAAKw+B,EAAKlhB,EAAM9c,EAAU,SAGzCmiC,UAAW,SAAUnE,EAAKh+B,GACzB,OAAOvB,GAAOe,IAAKw+B,OAAKv8B,EAAWzB,EAAU,aAI/CvB,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAIiT,GAC7CpY,GAAQoY,GAAW,SAAUmnB,EAAKlhB,EAAM9c,EAAU7C,GAUjD,OAPKN,EAAYigB,KAChB3f,EAAOA,GAAQ6C,EACfA,EAAW8c,EACXA,OAAOrb,GAIDhD,GAAOygC,KAAMzgC,GAAOsC,OAAQ,CAClCi9B,IAAKA,EACL7gC,KAAM0Z,EACNomB,SAAU9/B,EACV2f,KAAMA,EACNmkB,QAASjhC,GACPvB,GAAO6C,cAAe08B,IAASA,OAIpCv/B,GAAOugC,cAAe,SAAUhD,GAC/B,IAAIr+B,EACJ,IAAMA,KAAKq+B,EAAE+E,QACa,iBAApBpjC,EAAEwB,gBACN68B,EAAEqC,YAAcrC,EAAE+E,QAASpjC,IAAO,MAMrCc,GAAO4rB,SAAW,SAAU2T,EAAKh9B,EAAStD,GACzC,OAAOe,GAAOygC
,KAAM,CACnBlB,IAAKA,EAGL7gC,KAAM,MACN8/B,SAAU,SACVrzB,OAAO,EACPw0B,OAAO,EACPpjC,QAAQ,EAKRyjC,WAAY,CACX2D,cAAe,cAEhBL,WAAY,SAAUT,GACrB7iC,GAAO4D,WAAYi/B,EAAUtgC,EAAStD,OAMzCe,GAAOG,GAAGmC,OAAQ,CACjBshC,QAAS,SAAUjY,GAClB,IAAIlI,EAyBJ,OAvBK1mB,KAAM,KACLqB,EAAYutB,KAChBA,EAAOA,EAAKnuB,KAAMT,KAAM,KAIzB0mB,EAAOzjB,GAAQ2rB,EAAM5uB,KAAM,GAAIwH,eAAgB5C,GAAI,GAAIe,OAAO,GAEzD3F,KAAM,GAAI4C,YACd8jB,EAAK8I,aAAcxvB,KAAM,IAG1B0mB,EAAKjiB,IAAK,WACT,IAAIhB,EAAOzD,KAEX,MAAQyD,EAAKqjC,kBACZrjC,EAAOA,EAAKqjC,kBAGb,OAAOrjC,IACJ6rB,OAAQtvB,OAGNA,MAGR+mC,UAAW,SAAUnY,GACpB,OAAKvtB,EAAYutB,GACT5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO+mC,UAAWnY,EAAKnuB,KAAMT,KAAMmC,MAItCnC,KAAKuE,KAAM,WACjB,IAAI2U,EAAOjW,GAAQjD,MAClByZ,EAAWP,EAAKO,WAEZA,EAASlW,OACbkW,EAASotB,QAASjY,GAGlB1V,EAAKoW,OAAQV,MAKhBlI,KAAM,SAAUkI,GACf,IAAIoY,EAAiB3lC,EAAYutB,GAEjC,OAAO5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO6mC,QAASG,EAAiBpY,EAAKnuB,KAAMT,KAAMmC,GAAMysB,MAIlEqY,OAAQ,SAAU/jC,GAIjB,OAHAlD,KAAKuS,OAAQrP,GAAW+P,IAAK,QAAS1O,KAAM,WAC3CtB,GAAQjD,MAAO2vB,YAAa3vB,KAAK0M,cAE3B1M,QAKTiD,GAAOqN,KAAK9F,QAAQ6uB,OAAS,SAAU51B,GACtC,OAAQR,GAAOqN,KAAK9F,QAAQ08B,QAASzjC,IAEtCR,GAAOqN,KAAK9F,QAAQ08B,QAAU,SAAUzjC,GACvC,SAAWA,EAAK0uB,aAAe1uB,EAAK6vB,cAAgB7vB,EAAK4xB,iBAAiB9xB,SAM3EN,GAAOm/B,aAAa+E,IAAM,WACzB,IACC,OAAO,IAAIpnC,GAAOqnC,eACjB,MAAQz6B,MAGX,IAAI06B,GAAmB,CAGrBC,EAAG,IAIHC,KAAM,KAEPC,GAAevkC,GAAOm/B,aAAa+E,MAEpC/lC,GAAQqmC,OAASD,IAAkB,oBAAqBA,GACxDpmC,GAAQsiC,KAAO8D,KAAiBA,GAEhCvkC,GAAOwgC,cAAe,SAAUj+B,GAC/B,IAAIhB,EAAUkjC,EAGd,GAAKtmC,GAAQqmC,MAAQD,KAAiBhiC,EAAQ2/B,YAC7C,MAAO,CACNO,KAAM,SAAUH,EAAS1K,GACxB,IAAI14B,EACHglC,EAAM3hC,EAAQ2hC,MAWf,GATAA,EAAIQ,KACHniC,EAAQ7D,KACR6D,EAAQg9B,IACRh9B,EAAQo9B,MACRp9B,EAAQoiC,SACRpiC,EAAQyP,UAIJzP,EAAQqiC,UACZ,IAAM1lC,KAAKqD,EAAQqiC,UAClBV,EAAKhlC,GAAMqD,EAAQqiC,UAAW1lC,GAmBhC,IAAMA,KAdDqD,EAAQs/B,UAAYqC,EAAItC,kBAC5BsC,EAAItC,iBAAkBr/B,EAAQs/B,UAQzBt/B,EAAQ2/B,aAAgBI,EAAS,sBACtCA,EAAS,oBAAuB,kBAItBA,EACV4B,EAAIvC,iBAAkBziC,EAAGojC,EAASpjC,IAInCqC,EAAW,SAAU7C,GACpB,OAAO,WACD6C,IACJA,EAAWkjC,EAAgBP,EAAIW,OAC9BX,EAAIY,QAAUZ,EAAIa,QAAUb,EAAIc,UAC/Bd,EAAIe,mBAAqB,KAEb,UAATvmC,EACJwlC,EAAInC,QACgB,UAATrjC,EAKgB,iBAAfwlC,EAAIpC,OACflK,EAAU,EAAG,SAEbA,EAGCsM,EAAIpC,OACJoC,EAAIlC,YAINpK,EACCwM,GAAkBF,EAAIpC,SAAYoC,EAAIpC,OACtCoC,EAAIlC,WAK+B,UAAjCkC,EAAIgB,cAAgB,SACM,iBAArBhB,EAAIiB,aACV,CAAEC,OAAQlB,EAAIrB,UACd,CAAEvjC,KAAM4kC,EAAIiB,cACbjB,EAAIxC,4BAQTwC,EAAIW,OAAStjC,IACbkjC,EAAgBP,EAAIY,QAAUZ,EAAIc,UAAYzjC,EAAU,cAKnCyB,IAAhBkhC,EAAIa,QACRb,EAAIa,QAAUN,EAEdP,EAAIe,mBAAqB,WAGA,IAAnBf,EAAIpnB,YAMRhgB,GAAO2e,WAAY,WACbla,GACJkjC,OAQLljC,EAAWA,EAAU,SAErB,IAGC2iC,EAAIzB,KAAMlgC,EAAQ6/B,YAAc7/B,EAAQ8b,MAAQ,MAC/C,MAAQ3U,GAGT,GAAKnI,EACJ,MAAMmI,IAKTq4B,MAAO,WACDxgC,GACJA,QAWLvB,GAAOugC,cAAe,SAAUhD,GAC1BA,EAAE2E,cACN3E,EAAE/mB,SAASpX,QAAS,KAKtBY,GAAOqgC,UAAW,CACjBR,QAAS,CACRzgC,OAAQ,6FAGToX,SAAU,CACTpX,OAAQ,2BAET4gC,WAAY,CACX2D,cAAe,SAAUrkC,GAExB,OADAU,GAAO4D,WAAYtE,GACZA,MAMVU,GAAOugC,cAAe,SAAU,SAAUhD,QACxBv6B,IAAZu6B,EAAEpyB,QACNoyB,EAAEpyB,OAAQ,GAENoyB,EAAE2E,cACN3E,EAAE7+B,KAAO,SAKXsB,GAAOwgC,cAAe,SAAU,SAAUjD,GAIxC,IAAIn+B,EAAQmC,EADb,GAAKg8B,EAAE2E,aAAe3E,EAAE8H,YAEvB,MAAO,CACN5C,KAAM,SAAU/pB,EAAGkf,GAClBx4B,EAASY,GAAQ,YACfwN,KAAM+vB,EAAE8H,aAAe,IACvB/mB,KAAM,CAAEgnB,QAAS/H,EAAEgI,cAAe5mC,IAAK4+B,EAAEgC,MACzCrb,GAAI,aAAc3iB,EAAW,SAAUikC,GACvCpmC,EAAOka,SACP/X,EAAW,KACNikC,GACJ5N,EAAuB,UAAb4N,EAAI9mC,KAAmB,IAAM,IAAK8mC,EAAI9mC,QAKnD/B,EAAS8C,KAAKC,YAAaN,EAAQ,KAEpC2iC,MAAO,WACDxgC,GACJA,QAUL,IAqGKigB,GArGDikB,GAAe,GAClBC,GAAS,oBAGV1lC,GAAOqgC,UAAW,CACjBsF,MAAO,WACPC,cAAe,WACd,IAAIrkC,EAAWkkC,GAAargC,OAAWpF,GAAOiD,QAAU,IAA
QrE,GAAMmG,OAEtE,OADAhI,KAAMwE,IAAa,EACZA,KAKTvB,GAAOugC,cAAe,aAAc,SAAUhD,EAAGsI,EAAkBlH,GAElE,IAAImH,EAAcC,EAAaC,EAC9BC,GAAuB,IAAZ1I,EAAEoI,QAAqBD,GAAOlhC,KAAM+4B,EAAEgC,KAChD,MACkB,iBAAXhC,EAAElf,MAE6C,KADnDkf,EAAEqC,aAAe,IACjBhiC,QAAS,sCACX8nC,GAAOlhC,KAAM+4B,EAAElf,OAAU,QAI5B,GAAK4nB,GAAiC,UAArB1I,EAAEkB,UAAW,GA8D7B,OA3DAqH,EAAevI,EAAEqI,cAAgBxnC,EAAYm/B,EAAEqI,eAC9CrI,EAAEqI,gBACFrI,EAAEqI,cAGEK,EACJ1I,EAAG0I,GAAa1I,EAAG0I,GAAW7iC,QAASsiC,GAAQ,KAAOI,IAC/B,IAAZvI,EAAEoI,QACbpI,EAAEgC,MAASrD,GAAO13B,KAAM+4B,EAAEgC,KAAQ,IAAM,KAAQhC,EAAEoI,MAAQ,IAAMG,GAIjEvI,EAAEyC,WAAY,eAAkB,WAI/B,OAHMgG,GACLhmC,GAAOsD,MAAOwiC,EAAe,mBAEvBE,EAAmB,IAI3BzI,EAAEkB,UAAW,GAAM,OAGnBsH,EAAcjpC,GAAQgpC,GACtBhpC,GAAQgpC,GAAiB,WACxBE,EAAoBvkC,WAIrBk9B,EAAM7kB,OAAQ,gBAGQ9W,IAAhB+iC,EACJ/lC,GAAQlD,IAASm+B,WAAY6K,GAI7BhpC,GAAQgpC,GAAiBC,EAIrBxI,EAAGuI,KAGPvI,EAAEqI,cAAgBC,EAAiBD,cAGnCH,GAAa9nC,KAAMmoC,IAIfE,GAAqB5nC,EAAY2nC,IACrCA,EAAaC,EAAmB,IAGjCA,EAAoBD,OAAc/iC,IAI5B,WAYT7E,GAAQ+nC,qBACH1kB,GAAO7kB,EAASwpC,eAAeD,mBAAoB,IAAK1kB,MACvDtU,UAAY,6BACiB,IAA3BsU,GAAK/X,WAAWnJ,QAQxBN,GAAOmW,UAAY,SAAUkI,EAAMne,EAASkmC,GAC3C,MAAqB,iBAAT/nB,EACJ,IAEgB,kBAAZne,IACXkmC,EAAclmC,EACdA,GAAU,GAKLA,IAIA/B,GAAQ+nC,qBAMZxzB,GALAxS,EAAUvD,EAASwpC,eAAeD,mBAAoB,KAKvC7mC,cAAe,SACzB0R,KAAOpU,EAAS6T,SAASO,KAC9B7Q,EAAQT,KAAKC,YAAagT,IAE1BxS,EAAUvD,GAKZ2mB,GAAW8iB,GAAe,IAD1BC,EAASvwB,EAAW1L,KAAMiU,IAKlB,CAAEne,EAAQb,cAAegnC,EAAQ,MAGzCA,EAAShjB,GAAe,CAAEhF,GAAQne,EAASojB,GAEtCA,GAAWA,EAAQhjB,QACvBN,GAAQsjB,GAAUhK,SAGZtZ,GAAOoB,MAAO,GAAIilC,EAAO58B,cAlChC,IAAIiJ,EAAM2zB,EAAQ/iB,GAyCnBtjB,GAAOG,GAAGonB,KAAO,SAAUgY,EAAK+G,EAAQ/kC,GACvC,IAAItB,EAAUvB,EAAMmkC,EACnB5sB,EAAOlZ,KACPwnB,EAAMgb,EAAI3hC,QAAS,KAsDpB,OApDY,EAAP2mB,IACJtkB,EAAW66B,GAAkByE,EAAIliC,MAAOknB,IACxCgb,EAAMA,EAAIliC,MAAO,EAAGknB,IAIhBnmB,EAAYkoC,IAGhB/kC,EAAW+kC,EACXA,OAAStjC,GAGEsjC,GAA4B,iBAAXA,IAC5B5nC,EAAO,QAIW,EAAduX,EAAK3V,QACTN,GAAOygC,KAAM,CACZlB,IAAKA,EAKL7gC,KAAMA,GAAQ,MACd8/B,SAAU,OACVngB,KAAMioB,IACHz/B,KAAM,SAAUs+B,GAGnBtC,EAAWphC,UAEXwU,EAAK0V,KAAM1rB,EAIVD,GAAQ,SAAUqsB,OAAQrsB,GAAOmW,UAAWgvB,IAAiBv7B,KAAM3J,GAGnEklC,KAKErrB,OAAQvY,GAAY,SAAUo9B,EAAOmD,GACxC7rB,EAAK3U,KAAM,WACVC,EAAS7D,MAAOX,KAAM8lC,GAAY,CAAElE,EAAMwG,aAAcrD,EAAQnD,QAK5D5hC,MAMRiD,GAAOqN,KAAK9F,QAAQg/B,SAAW,SAAU/lC,GACxC,OAAOR,GAAO8B,KAAM9B,GAAOo5B,OAAQ,SAAUj5B,GAC5C,OAAOK,IAASL,EAAGK,OAChBF,QAMLN,GAAOwmC,OAAS,CACfC,UAAW,SAAUjmC,EAAM+B,EAASrD,GACnC,IAAIwnC,EAAaC,EAASC,EAAWC,EAAQC,EAAWC,EACvD/X,EAAWhvB,GAAOwgB,IAAKhgB,EAAM,YAC7BwmC,EAAUhnC,GAAQQ,GAClBonB,EAAQ,GAGS,WAAboH,IACJxuB,EAAK8f,MAAM0O,SAAW,YAGvB8X,EAAYE,EAAQR,SACpBI,EAAY5mC,GAAOwgB,IAAKhgB,EAAM,OAC9BumC,EAAa/mC,GAAOwgB,IAAKhgB,EAAM,SACI,aAAbwuB,GAAwC,UAAbA,KACA,GAA9C4X,EAAYG,GAAanpC,QAAS,SAMpCipC,GADAH,EAAcM,EAAQhY,YACD3iB,IACrBs6B,EAAUD,EAAYpS,OAGtBuS,EAASxX,WAAYuX,IAAe,EACpCD,EAAUtX,WAAY0X,IAAgB,GAGlC3oC,EAAYmE,KAGhBA,EAAUA,EAAQ/E,KAAMgD,EAAMtB,EAAGc,GAAOsC,OAAQ,GAAIwkC,KAGjC,MAAfvkC,EAAQ8J,MACZub,EAAMvb,IAAQ9J,EAAQ8J,IAAMy6B,EAAUz6B,IAAQw6B,GAE1B,MAAhBtkC,EAAQ+xB,OACZ1M,EAAM0M,KAAS/xB,EAAQ+xB,KAAOwS,EAAUxS,KAASqS,GAG7C,UAAWpkC,EACfA,EAAQ0kC,MAAMzpC,KAAMgD,EAAMonB,GAG1Bof,EAAQxmB,IAAKoH,KAKhB5nB,GAAOG,GAAGmC,OAAQ,CAGjBkkC,OAAQ,SAAUjkC,GAGjB,GAAKd,UAAUnB,OACd,YAAmB0C,IAAZT,EACNxF,KACAA,KAAKuE,KAAM,SAAUpC,GACpBc,GAAOwmC,OAAOC,UAAW1pC,KAAMwF,EAASrD,KAI3C,IAAIgoC,EAAMC,EACT3mC,EAAOzD,KAAM,GAEd,OAAMyD,EAQAA,EAAK4xB,iBAAiB9xB,QAK5B4mC,EAAO1mC,EAAK4zB,wBACZ+S,EAAM3mC,EAAK+D,cAAc6H,YAClB,CACNC,IAAK66B,EAAK76B,IAAM86B,EAAIC,YACpB9S,KAAM4S,EAAK5S,KAAO6S,EAAIE,cARf,CAAEh7B,IAAK,EAAGioB,KAAM,QATxB,GAuBDtF,SAAU,WACT,GAAMjyB,KAAM,GAAZ,CAIA,IAAIuqC,EAAcd,EAAQ
vnC,EACzBuB,EAAOzD,KAAM,GACbwqC,EAAe,CAAEl7B,IAAK,EAAGioB,KAAM,GAGhC,GAAwC,UAAnCt0B,GAAOwgB,IAAKhgB,EAAM,YAGtBgmC,EAAShmC,EAAK4zB,4BAER,CACNoS,EAASzpC,KAAKypC,SAIdvnC,EAAMuB,EAAK+D,cACX+iC,EAAe9mC,EAAK8mC,cAAgBroC,EAAI6E,gBACxC,MAAQwjC,IACLA,IAAiBroC,EAAIuiB,MAAQ8lB,IAAiBroC,EAAI6E,kBACT,WAA3C9D,GAAOwgB,IAAK8mB,EAAc,YAE1BA,EAAeA,EAAa3nC,WAExB2nC,GAAgBA,IAAiB9mC,GAAkC,IAA1B8mC,EAAahpC,YAG1DipC,EAAevnC,GAAQsnC,GAAed,UACzBn6B,KAAOrM,GAAOwgB,IAAK8mB,EAAc,kBAAkB,GAChEC,EAAajT,MAAQt0B,GAAOwgB,IAAK8mB,EAAc,mBAAmB,IAKpE,MAAO,CACNj7B,IAAKm6B,EAAOn6B,IAAMk7B,EAAal7B,IAAMrM,GAAOwgB,IAAKhgB,EAAM,aAAa,GACpE8zB,KAAMkS,EAAOlS,KAAOiT,EAAajT,KAAOt0B,GAAOwgB,IAAKhgB,EAAM,cAAc,MAc1E8mC,aAAc,WACb,OAAOvqC,KAAKyE,IAAK,WAChB,IAAI8lC,EAAevqC,KAAKuqC,aAExB,MAAQA,GAA2D,WAA3CtnC,GAAOwgB,IAAK8mB,EAAc,YACjDA,EAAeA,EAAaA,aAG7B,OAAOA,GAAgBxjC,OAM1B9D,GAAOsB,KAAM,CAAEk0B,WAAY,cAAeD,UAAW,eAAiB,SAAUnd,EAAQkG,GACvF,IAAIjS,EAAM,gBAAkBiS,EAE5Bte,GAAOG,GAAIiY,GAAW,SAAUjZ,GAC/B,OAAO6d,EAAQjgB,KAAM,SAAUyD,EAAM4X,EAAQjZ,GAG5C,IAAIgoC,EAOJ,GANK3oC,EAAUgC,GACd2mC,EAAM3mC,EACuB,IAAlBA,EAAKlC,WAChB6oC,EAAM3mC,EAAK4L,kBAGCpJ,IAAR7D,EACJ,OAAOgoC,EAAMA,EAAK7oB,GAAS9d,EAAM4X,GAG7B+uB,EACJA,EAAIK,SACFn7B,EAAY86B,EAAIE,YAAVloC,EACPkN,EAAMlN,EAAMgoC,EAAIC,aAIjB5mC,EAAM4X,GAAWjZ,GAEhBiZ,EAAQjZ,EAAKsC,UAAUnB,WAU5BN,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAImZ,GAC7Cte,GAAOuyB,SAAUjU,GAAS4P,GAAc/vB,GAAQuxB,cAC/C,SAAUlvB,EAAMmtB,GACf,GAAKA,EAIJ,OAHAA,EAAWD,GAAQltB,EAAM8d,GAGlB4O,GAAU1oB,KAAMmpB,GACtB3tB,GAAQQ,GAAOwuB,WAAY1Q,GAAS,KACpCqP,MAQL3tB,GAAOsB,KAAM,CAAEmmC,OAAQ,SAAUC,MAAO,SAAW,SAAUjnC,EAAM/B,GAClEsB,GAAOsB,KAAM,CACZkzB,QAAS,QAAU/zB,EACnBgX,QAAS/Y,EACTipC,GAAI,QAAUlnC,GACZ,SAAUmnC,EAAcC,GAG1B7nC,GAAOG,GAAI0nC,GAAa,SAAUtT,EAAQzvB,GACzC,IAAImY,EAAYxb,UAAUnB,SAAYsnC,GAAkC,kBAAXrT,GAC5D1C,EAAQ+V,KAA6B,IAAXrT,IAA6B,IAAVzvB,EAAiB,SAAW,UAE1E,OAAOkY,EAAQjgB,KAAM,SAAUyD,EAAM9B,EAAMoG,GAC1C,IAAI7F,EAEJ,OAAKT,EAAUgC,GAGyB,IAAhCqnC,EAASjqC,QAAS,SACxB4C,EAAM,QAAUC,GAChBD,EAAK7D,SAASmH,gBAAiB,SAAWrD,GAIrB,IAAlBD,EAAKlC,UACTW,EAAMuB,EAAKsD,gBAIJZ,KAAKouB,IACX9wB,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CD,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CxB,EAAK,SAAWwB,UAIDuC,IAAV8B,EAGN9E,GAAOwgB,IAAKhgB,EAAM9B,EAAMmzB,GAGxB7xB,GAAOsgB,MAAO9f,EAAM9B,EAAMoG,EAAO+sB,IAChCnzB,EAAMue,EAAYsX,OAASvxB,EAAWia,QAM5Cjd,GAAOsB,KAAM,CACZ,YACA,WACA,eACA,YACA,cACA,YACE,SAAU6D,EAAIzG,GAChBsB,GAAOG,GAAIzB,GAAS,SAAUyB,GAC7B,OAAOpD,KAAKmnB,GAAIxlB,EAAMyB,MAOxBH,GAAOG,GAAGmC,OAAQ,CAEjBq1B,KAAM,SAAUxT,EAAO9F,EAAMle,GAC5B,OAAOpD,KAAKmnB,GAAIC,EAAO,KAAM9F,EAAMle,IAEpC2nC,OAAQ,SAAU3jB,EAAOhkB,GACxB,OAAOpD,KAAKwnB,IAAKJ,EAAO,KAAMhkB,IAG/B4nC,SAAU,SAAU9nC,EAAUkkB,EAAO9F,EAAMle,GAC1C,OAAOpD,KAAKmnB,GAAIC,EAAOlkB,EAAUoe,EAAMle,IAExC6nC,WAAY,SAAU/nC,EAAUkkB,EAAOhkB,GAGtC,OAA4B,IAArBsB,UAAUnB,OAChBvD,KAAKwnB,IAAKtkB,EAAU,MACpBlD,KAAKwnB,IAAKJ,EAAOlkB,GAAY,KAAME,IAGrC8nC,MAAO,SAAUC,EAAQC,GACxB,OAAOprC,KACLmnB,GAAI,aAAcgkB,GAClBhkB,GAAI,aAAcikB,GAASD,MAI/BloC,GAAOsB,KACN,wLAE4D4D,MAAO,KACnE,SAAUC,EAAI1E,GAGbT,GAAOG,GAAIM,GAAS,SAAU4d,EAAMle,GACnC,OAA0B,EAAnBsB,UAAUnB,OAChBvD,KAAKmnB,GAAIzjB,EAAM,KAAM4d,EAAMle,GAC3BpD,KAAKioB,QAASvkB,MAYlB,IAAI2nC,GAAQ,sDAMZpoC,GAAOqoC,MAAQ,SAAUloC,EAAID,GAC5B,IAAIyf,EAAK/P,EAAMy4B,EAUf,GARwB,iBAAZnoC,IACXyf,EAAMxf,EAAID,GACVA,EAAUC,EACVA,EAAKwf,GAKAvhB,EAAY+B,GAalB,OARAyP,EAAOvS,GAAMG,KAAMiE,UAAW,IAC9B4mC,EAAQ,WACP,OAAOloC,EAAGzC,MAAOwC,GAAWnD,KAAM6S,EAAKnS,OAAQJ,GAAMG,KAAMiE,eAItDsD,KAAO5E,EAAG4E,KAAO5E,EAAG4E,MAAQ/E,GAAO+E,OAElCsjC,GAGRroC,GAAOsoC,UAAY,SAAUC,GACvBA,EACJvoC,GAAO4c,YAEP5c,GAAOoW,OAAO,IAGhBpW,GAAO+C,QAAUD,MAAMC,QACvB/C,GAAOwoC,UAAY3pB,KAAKC,
MACxB9e,GAAOO,SAAWA,GAClBP,GAAO5B,WAAaA,EACpB4B,GAAOxB,SAAWA,EAClBwB,GAAO4d,UAAYA,EACnB5d,GAAOtB,KAAOmB,EAEdG,GAAOkoB,IAAMD,KAAKC,IAElBloB,GAAOyoC,UAAY,SAAUpqC,GAK5B,IAAIK,EAAOsB,GAAOtB,KAAML,GACxB,OAAkB,WAATK,GAA8B,WAATA,KAK5BgqC,MAAOrqC,EAAMgxB,WAAYhxB,KAG5B2B,GAAO2oC,KAAO,SAAUrpC,GACvB,OAAe,MAARA,EACN,IACEA,EAAO,IAAK8D,QAASglC,GAAO,OAkBT,mBAAXQ,QAAyBA,OAAOC,KAC3CD,OAAQ,SAAU,GAAI,WACrB,OAAO5oC,KAOT,IAGC8oC,GAAUhsC,GAAOkD,OAGjB+oC,GAAKjsC,GAAOksC,EAwBb,OAtBAhpC,GAAOipC,WAAa,SAAUrmC,GAS7B,OARK9F,GAAOksC,IAAMhpC,KACjBlD,GAAOksC,EAAID,IAGPnmC,GAAQ9F,GAAOkD,SAAWA,KAC9BlD,GAAOkD,OAAS8oC,IAGV9oC,IAMiB,oBAAbhD,IACXF,GAAOkD,OAASlD,GAAOksC,EAAIhpC,IAMrBA","file":"jquery-3.7.1.min.js"}
\ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js b/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js
new file mode 100644
index 0000000000..b6d9aa8c79
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js
@@ -0,0 +1,4 @@
+/*! DataTables 1.13.8
+ * ©2008-2023 SpryMedia Ltd - datatables.net/license
+ */
+!function(n){"use strict";var a;"function"==typeof define&&define.amd?define(["jquery"],function(t){return n(t,window,document)}):"object"==typeof exports?(a=require("jquery"),"undefined"==typeof window?module.exports=function(t,e){return t=t||window,e=e||a(t),n(e,t,t.document)}:module.exports=n(a,window,window.document)):window.DataTable=n(jQuery,window,document)}(function(P,j,v,H){"use strict";function d(t){var e=parseInt(t,10);return!isNaN(e)&&isFinite(t)?e:null}function l(t,e,n){var a=typeof t,r="string"==a;return"number"==a||"bigint"==a||!!h(t)||(e&&r&&(t=$(t,e)),n&&r&&(t=t.replace(q,"")),!isNaN(parseFloat(t))&&isFinite(t))}function a(t,e,n){var a;return!!h(t)||(h(a=t)||"string"==typeof a)&&!!l(t.replace(V,"").replace(/<script/i,""),e,n)||null}function m(t,e,n,a){var r=[],o=0,i=e.length;if(a!==H)for(;o<i;o++)t[e[o]][n]&&r.push(t[e[o]][n][a]);else for(;o<i;o++)r.push(t[e[o]][n]);return r}function f(t,e){var n,a=[];e===H?(e=0,n=t):(n=e,e=t);for(var r=e;r<n;r++)a.push(r);return a}function _(t){for(var e=[],n=0,a=t.length;n<a;n++)t[n]&&e.push(t[n]);return e}function s(t,e){return-1!==this.indexOf(t,e=e===H?0:e)}var p,e,t,w=function(t,v){if(w.factory(t,v))return w;if(this instanceof w)return P(t).DataTable(v);v=t,this.$=function(t,e){return this.api(!0).$(t,e)},this._=function(t,e){return this.api(!0).rows(t,e).data()},this.api=function(t){return new B(t?ge(this[p.iApiIndex]):this)},this.fnAddData=function(t,e){var n=this.api(!0),t=(Array.isArray(t)&&(Array.isArray(t[0])||P.isPlainObject(t[0]))?n.rows:n.row).add(t);return e!==H&&!e||n.draw(),t.flatten().toArray()},this.fnAdjustColumnSizing=function(t){var e=this.api(!0).columns.adjust(),n=e.settings()[0],a=n.oScroll;t===H||t?e.draw(!1):""===a.sX&&""===a.sY||Qt(n)},this.fnClearTable=function(t){var e=this.api(!0).clear();t!==H&&!t||e.draw()},this.fnClose=function(t){this.api(!0).row(t).child.hide()},this.fnDeleteRow=function(t,e,n){var a=this.api(!0),t=a.rows(t),r=t.settings()[0],o=r.aoData[t[0][0]];return t.remove(),e&&e.call(this,r,o),n!==H&&!n||a.draw(),o},this.fnDestroy=function(t){this.api(!0).destroy(t)},this.fnDraw=function(t){this.api(!0).draw(t)},this.fnFilter=function(t,e,n,a,r,o){var i=this.api(!0);(null===e||e===H?i:i.column(e)).search(t,n,a,o),i.draw()},this.fnGetData=function(t,e){var n,a=this.api(!0);return t!==H?(n=t.nodeName?t.nodeName.toLowerCase():"",e!==H||"td"==n||"th"==n?a.cell(t,e).data():a.row(t).data()||null):a.data().toArray()},this.fnGetNodes=function(t){var e=this.api(!0);return t!==H?e.row(t).node():e.rows().nodes().flatten().toArray()},this.fnGetPosition=function(t){var e=this.api(!0),n=t.nodeName.toUpperCase();return"TR"==n?e.row(t).index():"TD"==n||"TH"==n?[(n=e.cell(t).index()).row,n.columnVisible,n.column]:null},this.fnIsOpen=function(t){return this.api(!0).row(t).child.isShown()},this.fnOpen=function(t,e,n){return this.api(!0).row(t).child(e,n).show().child()[0]},this.fnPageChange=function(t,e){t=this.api(!0).page(t);e!==H&&!e||t.draw(!1)},this.fnSetColumnVis=function(t,e,n){t=this.api(!0).column(t).visible(e);n!==H&&!n||t.columns.adjust().draw()},this.fnSettings=function(){return ge(this[p.iApiIndex])},this.fnSort=function(t){this.api(!0).order(t).draw()},this.fnSortListener=function(t,e,n){this.api(!0).order.listener(t,e,n)},this.fnUpdate=function(t,e,n,a,r){var o=this.api(!0);return(n===H||null===n?o.row(e):o.cell(e,n)).data(t),r!==H&&!r||o.columns.adjust(),a!==H&&!a||o.draw(),0},this.fnVersionCheck=p.fnVersionCheck;var e,y=this,D=v===H,_=this.length;for(e in 
D&&(v={}),this.oApi=this.internal=p.internal,w.ext.internal)e&&(this[e]=$e(e));return this.each(function(){var r=1<_?be({},v,!0):v,o=0,t=this.getAttribute("id"),i=!1,e=w.defaults,l=P(this);if("table"!=this.nodeName.toLowerCase())W(null,0,"Non-table node initialisation ("+this.nodeName+")",2);else{K(e),Q(e.column),C(e,e,!0),C(e.column,e.column,!0),C(e,P.extend(r,l.data()),!0);for(var n=w.settings,o=0,s=n.length;o<s;o++){var a=n[o];if(a.nTable==this||a.nTHead&&a.nTHead.parentNode==this||a.nTFoot&&a.nTFoot.parentNode==this){var u=(r.bRetrieve!==H?r:e).bRetrieve,c=(r.bDestroy!==H?r:e).bDestroy;if(D||u)return a.oInstance;if(c){a.oInstance.fnDestroy();break}return void W(a,0,"Cannot reinitialise DataTable",3)}if(a.sTableId==this.id){n.splice(o,1);break}}null!==t&&""!==t||(t="DataTables_Table_"+w.ext._unique++,this.id=t);var f,d,h=P.extend(!0,{},w.models.oSettings,{sDestroyWidth:l[0].style.width,sInstance:t,sTableId:t}),p=(h.nTable=this,h.oApi=y.internal,h.oInit=r,n.push(h),h.oInstance=1===y.length?y:l.dataTable(),K(r),Z(r.oLanguage),r.aLengthMenu&&!r.iDisplayLength&&(r.iDisplayLength=(Array.isArray(r.aLengthMenu[0])?r.aLengthMenu[0]:r.aLengthMenu)[0]),r=be(P.extend(!0,{},e),r),F(h.oFeatures,r,["bPaginate","bLengthChange","bFilter","bSort","bSortMulti","bInfo","bProcessing","bAutoWidth","bSortClasses","bServerSide","bDeferRender"]),F(h,r,["asStripeClasses","ajax","fnServerData","fnFormatNumber","sServerMethod","aaSorting","aaSortingFixed","aLengthMenu","sPaginationType","sAjaxSource","sAjaxDataProp","iStateDuration","sDom","bSortCellsTop","iTabIndex","fnStateLoadCallback","fnStateSaveCallback","renderer","searchDelay","rowId",["iCookieDuration","iStateDuration"],["oSearch","oPreviousSearch"],["aoSearchCols","aoPreSearchCols"],["iDisplayLength","_iDisplayLength"]]),F(h.oScroll,r,[["sScrollX","sX"],["sScrollXInner","sXInner"],["sScrollY","sY"],["bScrollCollapse","bCollapse"]]),F(h.oLanguage,r,"fnInfoCallback"),L(h,"aoDrawCallback",r.fnDrawCallback,"user"),L(h,"aoServerParams",r.fnServerParams,"user"),L(h,"aoStateSaveParams",r.fnStateSaveParams,"user"),L(h,"aoStateLoadParams",r.fnStateLoadParams,"user"),L(h,"aoStateLoaded",r.fnStateLoaded,"user"),L(h,"aoRowCallback",r.fnRowCallback,"user"),L(h,"aoRowCreatedCallback",r.fnCreatedRow,"user"),L(h,"aoHeaderCallback",r.fnHeaderCallback,"user"),L(h,"aoFooterCallback",r.fnFooterCallback,"user"),L(h,"aoInitComplete",r.fnInitComplete,"user"),L(h,"aoPreDrawCallback",r.fnPreDrawCallback,"user"),h.rowIdFn=A(r.rowId),tt(h),h.oClasses),g=(P.extend(p,w.ext.classes,r.oClasses),l.addClass(p.sTable),h.iInitDisplayStart===H&&(h.iInitDisplayStart=r.iDisplayStart,h._iDisplayStart=r.iDisplayStart),null!==r.iDeferLoading&&(h.bDeferLoading=!0,t=Array.isArray(r.iDeferLoading),h._iRecordsDisplay=t?r.iDeferLoading[0]:r.iDeferLoading,h._iRecordsTotal=t?r.iDeferLoading[1]:r.iDeferLoading),h.oLanguage),t=(P.extend(!0,g,r.oLanguage),g.sUrl?(P.ajax({dataType:"json",url:g.sUrl,success:function(t){C(e.oLanguage,t),Z(t),P.extend(!0,g,t,h.oInit.oLanguage),R(h,null,"i18n",[h]),Jt(h)},error:function(){Jt(h)}}),i=!0):R(h,null,"i18n",[h]),null===r.asStripeClasses&&(h.asStripeClasses=[p.sStripeOdd,p.sStripeEven]),h.asStripeClasses),b=l.children("tbody").find("tr").eq(0),m=(-1!==P.inArray(!0,P.map(t,function(t,e){return b.hasClass(t)}))&&(P("tbody tr",this).removeClass(t.join(" ")),h.asDestroyStripes=t.slice()),[]),t=this.getElementsByTagName("thead");if(0!==t.length&&(wt(h.aoHeader,t[0]),m=Ct(h)),null===r.aoColumns)for(f=[],o=0,s=m.length;o<s;o++)f.push(null);else 
f=r.aoColumns;for(o=0,s=f.length;o<s;o++)nt(h,m?m[o]:null);st(h,r.aoColumnDefs,f,function(t,e){at(h,t,e)}),b.length&&(d=function(t,e){return null!==t.getAttribute("data-"+e)?e:null},P(b[0]).children("th, td").each(function(t,e){var n,a=h.aoColumns[t];a||W(h,0,"Incorrect column count",18),a.mData===t&&(n=d(e,"sort")||d(e,"order"),e=d(e,"filter")||d(e,"search"),null===n&&null===e||(a.mData={_:t+".display",sort:null!==n?t+".@data-"+n:H,type:null!==n?t+".@data-"+n:H,filter:null!==e?t+".@data-"+e:H},a._isArrayHost=!0,at(h,t)))}));var S=h.oFeatures,t=function(){if(r.aaSorting===H){var t=h.aaSorting;for(o=0,s=t.length;o<s;o++)t[o][1]=h.aoColumns[o].asSorting[0]}ce(h),S.bSort&&L(h,"aoDrawCallback",function(){var t,n;h.bSorted&&(t=I(h),n={},P.each(t,function(t,e){n[e.src]=e.dir}),R(h,null,"order",[h,t,n]),le(h))}),L(h,"aoDrawCallback",function(){(h.bSorted||"ssp"===E(h)||S.bDeferRender)&&ce(h)},"sc");var e=l.children("caption").each(function(){this._captionSide=P(this).css("caption-side")}),n=l.children("thead"),a=(0===n.length&&(n=P("<thead/>").appendTo(l)),h.nTHead=n[0],l.children("tbody")),n=(0===a.length&&(a=P("<tbody/>").insertAfter(n)),h.nTBody=a[0],l.children("tfoot"));if(0===(n=0===n.length&&0<e.length&&(""!==h.oScroll.sX||""!==h.oScroll.sY)?P("<tfoot/>").appendTo(l):n).length||0===n.children().length?l.addClass(p.sNoFooter):0<n.length&&(h.nTFoot=n[0],wt(h.aoFooter,h.nTFoot)),r.aaData)for(o=0;o<r.aaData.length;o++)x(h,r.aaData[o]);else!h.bDeferLoading&&"dom"!=E(h)||ut(h,P(h.nTBody).children("tr"));h.aiDisplay=h.aiDisplayMaster.slice(),!(h.bInitialised=!0)===i&&Jt(h)};L(h,"aoDrawCallback",de,"state_save"),r.bStateSave?(S.bStateSave=!0,he(h,0,t)):t()}}),y=null,this},c={},U=/[\r\n\u2028]/g,V=/<.*?>/g,X=/^\d{2,4}[\.\/\-]\d{1,2}[\.\/\-]\d{1,2}([T ]{1}\d{1,2}[:\.]\d{2}([\.:]\d{2})?)?$/,J=new RegExp("(\\"+["/",".","*","+","?","|","(",")","[","]","{","}","\\","$","^","-"].join("|\\")+")","g"),q=/['\u00A0,$£€¥%\u2009\u202F\u20BD\u20a9\u20BArfkɃΞ]/gi,h=function(t){return!t||!0===t||"-"===t},$=function(t,e){return c[e]||(c[e]=new RegExp(Ot(e),"g")),"string"==typeof t&&"."!==e?t.replace(/\./g,"").replace(c[e],"."):t},N=function(t,e,n){var a=[],r=0,o=t.length;if(n!==H)for(;r<o;r++)t[r]&&t[r][e]&&a.push(t[r][e][n]);else for(;r<o;r++)t[r]&&a.push(t[r][e]);return a},G=function(t){if(!(t.length<2))for(var e=t.slice().sort(),n=e[0],a=1,r=e.length;a<r;a++){if(e[a]===n)return!1;n=e[a]}return!0},z=function(t){if(G(t))return t.slice();var e,n,a,r=[],o=t.length,i=0;t:for(n=0;n<o;n++){for(e=t[n],a=0;a<i;a++)if(r[a]===e)continue t;r.push(e),i++}return r},Y=function(t,e){if(Array.isArray(e))for(var n=0;n<e.length;n++)Y(t,e[n]);else t.push(e);return t};function i(n){var a,r,o={};P.each(n,function(t,e){(a=t.match(/^([^A-Z]+?)([A-Z])/))&&-1!=="a aa ai ao as b fn i m o s ".indexOf(a[1]+" ")&&(r=t.replace(a[0],a[2].toLowerCase()),o[r]=t,"o"===a[1])&&i(n[t])}),n._hungarianMap=o}function C(n,a,r){var o;n._hungarianMap||i(n),P.each(a,function(t,e){(o=n._hungarianMap[t])===H||!r&&a[o]!==H||("o"===o.charAt(0)?(a[o]||(a[o]={}),P.extend(!0,a[o],a[t]),C(n[o],a[o],r)):a[o]=a[t])})}function Z(t){var e,n=w.defaults.oLanguage,a=n.sDecimal;a&&Me(a),t&&(e=t.sZeroRecords,!t.sEmptyTable&&e&&"No data available in table"===n.sEmptyTable&&F(t,t,"sZeroRecords","sEmptyTable"),!t.sLoadingRecords&&e&&"Loading..."===n.sLoadingRecords&&F(t,t,"sZeroRecords","sLoadingRecords"),t.sInfoThousands&&(t.sThousands=t.sInfoThousands),e=t.sDecimal)&&a!==e&&Me(e)}Array.isArray||(Array.isArray=function(t){return"[object 
Array]"===Object.prototype.toString.call(t)}),Array.prototype.includes||(Array.prototype.includes=s),String.prototype.trim||(String.prototype.trim=function(){return this.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,"")}),String.prototype.includes||(String.prototype.includes=s),w.util={throttle:function(a,t){var r,o,i=t!==H?t:200;return function(){var t=this,e=+new Date,n=arguments;r&&e<r+i?(clearTimeout(o),o=setTimeout(function(){r=H,a.apply(t,n)},i)):(r=e,a.apply(t,n))}},escapeRegex:function(t){return t.replace(J,"\\$1")},set:function(a){var d;return P.isPlainObject(a)?w.util.set(a._):null===a?function(){}:"function"==typeof a?function(t,e,n){a(t,"set",e,n)}:"string"!=typeof a||-1===a.indexOf(".")&&-1===a.indexOf("[")&&-1===a.indexOf("(")?function(t,e){t[a]=e}:(d=function(t,e,n){for(var a,r,o,i,l=dt(n),n=l[l.length-1],s=0,u=l.length-1;s<u;s++){if("__proto__"===l[s]||"constructor"===l[s])throw new Error("Cannot set prototype values");if(a=l[s].match(ft),r=l[s].match(g),a){if(l[s]=l[s].replace(ft,""),t[l[s]]=[],(a=l.slice()).splice(0,s+1),i=a.join("."),Array.isArray(e))for(var c=0,f=e.length;c<f;c++)d(o={},e[c],i),t[l[s]].push(o);else t[l[s]]=e;return}r&&(l[s]=l[s].replace(g,""),t=t[l[s]](e)),null!==t[l[s]]&&t[l[s]]!==H||(t[l[s]]={}),t=t[l[s]]}n.match(g)?t[n.replace(g,"")](e):t[n.replace(ft,"")]=e},function(t,e){return d(t,e,a)})},get:function(r){var o,d;return P.isPlainObject(r)?(o={},P.each(r,function(t,e){e&&(o[t]=w.util.get(e))}),function(t,e,n,a){var r=o[e]||o._;return r!==H?r(t,e,n,a):t}):null===r?function(t){return t}:"function"==typeof r?function(t,e,n,a){return r(t,e,n,a)}:"string"!=typeof r||-1===r.indexOf(".")&&-1===r.indexOf("[")&&-1===r.indexOf("(")?function(t,e){return t[r]}:(d=function(t,e,n){var a,r,o;if(""!==n)for(var i=dt(n),l=0,s=i.length;l<s;l++){if(f=i[l].match(ft),a=i[l].match(g),f){if(i[l]=i[l].replace(ft,""),""!==i[l]&&(t=t[i[l]]),r=[],i.splice(0,l+1),o=i.join("."),Array.isArray(t))for(var u=0,c=t.length;u<c;u++)r.push(d(t[u],e,o));var f=f[0].substring(1,f[0].length-1);t=""===f?r:r.join(f);break}if(a)i[l]=i[l].replace(g,""),t=t[i[l]]();else{if(null===t||null===t[i[l]])return null;if(t===H||t[i[l]]===H)return H;t=t[i[l]]}}return t},function(t,e){return d(t,e,r)})}};var r=function(t,e,n){t[e]!==H&&(t[n]=t[e])};function K(t){r(t,"ordering","bSort"),r(t,"orderMulti","bSortMulti"),r(t,"orderClasses","bSortClasses"),r(t,"orderCellsTop","bSortCellsTop"),r(t,"order","aaSorting"),r(t,"orderFixed","aaSortingFixed"),r(t,"paging","bPaginate"),r(t,"pagingType","sPaginationType"),r(t,"pageLength","iDisplayLength"),r(t,"searching","bFilter"),"boolean"==typeof t.sScrollX&&(t.sScrollX=t.sScrollX?"100%":""),"boolean"==typeof t.scrollX&&(t.scrollX=t.scrollX?"100%":"");var e=t.aoSearchCols;if(e)for(var n=0,a=e.length;n<a;n++)e[n]&&C(w.models.oSearch,e[n])}function Q(t){r(t,"orderable","bSortable"),r(t,"orderData","aDataSort"),r(t,"orderSequence","asSorting"),r(t,"orderDataType","sortDataType");var e=t.aDataSort;"number"!=typeof e||Array.isArray(e)||(t.aDataSort=[e])}function tt(t){var 
e,n,a,r;w.__browser||(w.__browser=e={},r=(a=(n=P("<div/>").css({position:"fixed",top:0,left:-1*P(j).scrollLeft(),height:1,width:1,overflow:"hidden"}).append(P("<div/>").css({position:"absolute",top:1,left:1,width:100,overflow:"scroll"}).append(P("<div/>").css({width:"100%",height:10}))).appendTo("body")).children()).children(),e.barWidth=a[0].offsetWidth-a[0].clientWidth,e.bScrollOversize=100===r[0].offsetWidth&&100!==a[0].clientWidth,e.bScrollbarLeft=1!==Math.round(r.offset().left),e.bBounding=!!n[0].getBoundingClientRect().width,n.remove()),P.extend(t.oBrowser,w.__browser),t.oScroll.iBarWidth=w.__browser.barWidth}function et(t,e,n,a,r,o){var i,l=a,s=!1;for(n!==H&&(i=n,s=!0);l!==r;)t.hasOwnProperty(l)&&(i=s?e(i,t[l],l,t):t[l],s=!0,l+=o);return i}function nt(t,e){var n=w.defaults.column,a=t.aoColumns.length,n=P.extend({},w.models.oColumn,n,{nTh:e||v.createElement("th"),sTitle:n.sTitle||(e?e.innerHTML:""),aDataSort:n.aDataSort||[a],mData:n.mData||a,idx:a}),n=(t.aoColumns.push(n),t.aoPreSearchCols);n[a]=P.extend({},w.models.oSearch,n[a]),at(t,a,P(e).data())}function at(t,e,n){function a(t){return"string"==typeof t&&-1!==t.indexOf("@")}var e=t.aoColumns[e],r=t.oClasses,o=P(e.nTh),i=(!e.sWidthOrig&&(e.sWidthOrig=o.attr("width")||null,u=(o.attr("style")||"").match(/width:\s*(\d+[pxem%]+)/))&&(e.sWidthOrig=u[1]),n!==H&&null!==n&&(Q(n),C(w.defaults.column,n,!0),n.mDataProp===H||n.mData||(n.mData=n.mDataProp),n.sType&&(e._sManualType=n.sType),n.className&&!n.sClass&&(n.sClass=n.className),n.sClass&&o.addClass(n.sClass),u=e.sClass,P.extend(e,n),F(e,n,"sWidth","sWidthOrig"),u!==e.sClass&&(e.sClass=u+" "+e.sClass),n.iDataSort!==H&&(e.aDataSort=[n.iDataSort]),F(e,n,"aDataSort"),e.ariaTitle||(e.ariaTitle=o.attr("aria-label"))),e.mData),l=A(i),s=e.mRender?A(e.mRender):null,u=(e._bAttrSrc=P.isPlainObject(i)&&(a(i.sort)||a(i.type)||a(i.filter)),e._setter=null,e.fnGetData=function(t,e,n){var a=l(t,e,H,n);return s&&e?s(a,e,t,n):a},e.fnSetData=function(t,e,n){return b(i)(t,e,n)},"number"==typeof i||e._isArrayHost||(t._rowReadObject=!0),t.oFeatures.bSort||(e.bSortable=!1,o.addClass(r.sSortableNone)),-1!==P.inArray("asc",e.asSorting)),n=-1!==P.inArray("desc",e.asSorting);e.bSortable&&(u||n)?u&&!n?(e.sSortingClass=r.sSortableAsc,e.sSortingClassJUI=r.sSortJUIAscAllowed):!u&&n?(e.sSortingClass=r.sSortableDesc,e.sSortingClassJUI=r.sSortJUIDescAllowed):(e.sSortingClass=r.sSortable,e.sSortingClassJUI=r.sSortJUI):(e.sSortingClass=r.sSortableNone,e.sSortingClassJUI="")}function O(t){if(!1!==t.oFeatures.bAutoWidth){var e=t.aoColumns;ee(t);for(var n=0,a=e.length;n<a;n++)e[n].nTh.style.width=e[n].sWidth}var r=t.oScroll;""===r.sY&&""===r.sX||Qt(t),R(t,null,"column-sizing",[t])}function rt(t,e){t=it(t,"bVisible");return"number"==typeof t[e]?t[e]:null}function ot(t,e){t=it(t,"bVisible"),e=P.inArray(e,t);return-1!==e?e:null}function T(t){var n=0;return P.each(t.aoColumns,function(t,e){e.bVisible&&"none"!==P(e.nTh).css("display")&&n++}),n}function it(t,n){var a=[];return P.map(t.aoColumns,function(t,e){t[n]&&a.push(e)}),a}function lt(t){for(var e,n,a,r,o,i,l,s=t.aoColumns,u=t.aoData,c=w.ext.type.detect,f=0,d=s.length;f<d;f++)if(l=[],!(o=s[f]).sType&&o._sManualType)o.sType=o._sManualType;else if(!o.sType){for(e=0,n=c.length;e<n;e++){for(a=0,r=u.length;a<r&&(l[a]===H&&(l[a]=S(t,a,f,"type")),(i=c[e](l[a],t))||e===c.length-1)&&("html"!==i||h(l[a]));a++);if(i){o.sType=i;break}}o.sType||(o.sType="string")}}function st(t,e,n,a){var r,o,i,l,s=t.aoColumns;if(e)for(r=e.length-1;0<=r;r--)for(var 
u,c=(u=e[r]).target!==H?u.target:u.targets!==H?u.targets:u.aTargets,f=0,d=(c=Array.isArray(c)?c:[c]).length;f<d;f++)if("number"==typeof c[f]&&0<=c[f]){for(;s.length<=c[f];)nt(t);a(c[f],u)}else if("number"==typeof c[f]&&c[f]<0)a(s.length+c[f],u);else if("string"==typeof c[f])for(i=0,l=s.length;i<l;i++)"_all"!=c[f]&&!P(s[i].nTh).hasClass(c[f])||a(i,u);if(n)for(r=0,o=n.length;r<o;r++)a(r,n[r])}function x(t,e,n,a){for(var r=t.aoData.length,o=P.extend(!0,{},w.models.oRow,{src:n?"dom":"data",idx:r}),i=(o._aData=e,t.aoData.push(o),t.aoColumns),l=0,s=i.length;l<s;l++)i[l].sType=null;t.aiDisplayMaster.push(r);e=t.rowIdFn(e);return e!==H&&(t.aIds[e]=o),!n&&t.oFeatures.bDeferRender||St(t,r,n,a),r}function ut(n,t){var a;return(t=t instanceof P?t:P(t)).map(function(t,e){return a=mt(n,e),x(n,a.data,e,a.cells)})}function S(t,e,n,a){"search"===a?a="filter":"order"===a&&(a="sort");var r=t.iDraw,o=t.aoColumns[n],i=t.aoData[e]._aData,l=o.sDefaultContent,s=o.fnGetData(i,a,{settings:t,row:e,col:n});if(s===H)return t.iDrawError!=r&&null===l&&(W(t,0,"Requested unknown parameter "+("function"==typeof o.mData?"{function}":"'"+o.mData+"'")+" for row "+e+", column "+n,4),t.iDrawError=r),l;if(s!==i&&null!==s||null===l||a===H){if("function"==typeof s)return s.call(i)}else s=l;return null===s&&"display"===a?"":"filter"===a&&(e=w.ext.type.search)[o.sType]?e[o.sType](s):s}function ct(t,e,n,a){var r=t.aoColumns[n],o=t.aoData[e]._aData;r.fnSetData(o,a,{settings:t,row:e,col:n})}var ft=/\[.*?\]$/,g=/\(\)$/;function dt(t){return P.map(t.match(/(\\.|[^\.])+/g)||[""],function(t){return t.replace(/\\\./g,".")})}var A=w.util.get,b=w.util.set;function ht(t){return N(t.aoData,"_aData")}function pt(t){t.aoData.length=0,t.aiDisplayMaster.length=0,t.aiDisplay.length=0,t.aIds={}}function gt(t,e,n){for(var a=-1,r=0,o=t.length;r<o;r++)t[r]==e?a=r:t[r]>e&&t[r]--;-1!=a&&n===H&&t.splice(a,1)}function bt(n,a,t,e){function r(t,e){for(;t.childNodes.length;)t.removeChild(t.firstChild);t.innerHTML=S(n,a,e,"display")}var o,i,l=n.aoData[a];if("dom"!==t&&(t&&"auto"!==t||"dom"!==l.src)){var s=l.anCells;if(s)if(e!==H)r(s[e],e);else for(o=0,i=s.length;o<i;o++)r(s[o],o)}else l._aData=mt(n,l,e,e===H?H:l._aData).data;l._aSortData=null,l._aFilterData=null;var u=n.aoColumns;if(e!==H)u[e].sType=null;else{for(o=0,i=u.length;o<i;o++)u[o].sType=null;vt(n,l)}}function mt(t,e,n,a){function r(t,e){var n;"string"==typeof t&&-1!==(n=t.indexOf("@"))&&(n=t.substring(n+1),b(t)(a,e.getAttribute(n)))}function o(t){n!==H&&n!==f||(l=d[f],s=t.innerHTML.trim(),l&&l._bAttrSrc?(b(l.mData._)(a,s),r(l.mData.sort,t),r(l.mData.type,t),r(l.mData.filter,t)):h?(l._setter||(l._setter=b(l.mData)),l._setter(a,s)):a[f]=s),f++}var i,l,s,u=[],c=e.firstChild,f=0,d=t.aoColumns,h=t._rowReadObject;a=a!==H?a:h?{}:[];if(c)for(;c;)"TD"!=(i=c.nodeName.toUpperCase())&&"TH"!=i||(o(c),u.push(c)),c=c.nextSibling;else for(var p=0,g=(u=e.anCells).length;p<g;p++)o(u[p]);var e=e.firstChild?e:e.nTr;return e&&(e=e.getAttribute("id"))&&b(t.rowId)(a,e),{data:a,cells:u}}function St(t,e,n,a){var r,o,i,l,s,u,c=t.aoData[e],f=c._aData,d=[];if(null===c.nTr){for(r=n||v.createElement("tr"),c.nTr=r,c.anCells=d,r._DT_RowIndex=e,vt(t,c),l=0,s=t.aoColumns.length;l<s;l++)i=t.aoColumns[l],(o=(u=!n)?v.createElement(i.sCellType):a[l])||W(t,0,"Incorrect column count",18),o._DT_CellIndex={row:e,column:l},d.push(o),!u&&(!i.mRender&&i.mData===l||P.isPlainObject(i.mData)&&i.mData._===l+".display")||(o.innerHTML=S(t,e,l,"display")),i.sClass&&(o.className+=" 
"+i.sClass),i.bVisible&&!n?r.appendChild(o):!i.bVisible&&n&&o.parentNode.removeChild(o),i.fnCreatedCell&&i.fnCreatedCell.call(t.oInstance,o,S(t,e,l),f,e,l);R(t,"aoRowCreatedCallback",null,[r,f,e,d])}}function vt(t,e){var n=e.nTr,a=e._aData;n&&((t=t.rowIdFn(a))&&(n.id=t),a.DT_RowClass&&(t=a.DT_RowClass.split(" "),e.__rowc=e.__rowc?z(e.__rowc.concat(t)):t,P(n).removeClass(e.__rowc.join(" ")).addClass(a.DT_RowClass)),a.DT_RowAttr&&P(n).attr(a.DT_RowAttr),a.DT_RowData)&&P(n).data(a.DT_RowData)}function yt(t){var e,n,a,r=t.nTHead,o=t.nTFoot,i=0===P("th, td",r).length,l=t.oClasses,s=t.aoColumns;for(i&&(n=P("<tr/>").appendTo(r)),c=0,f=s.length;c<f;c++)a=s[c],e=P(a.nTh).addClass(a.sClass),i&&e.appendTo(n),t.oFeatures.bSort&&(e.addClass(a.sSortingClass),!1!==a.bSortable)&&(e.attr("tabindex",t.iTabIndex).attr("aria-controls",t.sTableId),ue(t,a.nTh,c)),a.sTitle!=e[0].innerHTML&&e.html(a.sTitle),ve(t,"header")(t,e,a,l);if(i&&wt(t.aoHeader,r),P(r).children("tr").children("th, td").addClass(l.sHeaderTH),P(o).children("tr").children("th, td").addClass(l.sFooterTH),null!==o)for(var u=t.aoFooter[0],c=0,f=u.length;c<f;c++)(a=s[c])?(a.nTf=u[c].cell,a.sClass&&P(a.nTf).addClass(a.sClass)):W(t,0,"Incorrect column count",18)}function Dt(t,e,n){var a,r,o,i,l,s,u,c,f,d=[],h=[],p=t.aoColumns.length;if(e){for(n===H&&(n=!1),a=0,r=e.length;a<r;a++){for(d[a]=e[a].slice(),d[a].nTr=e[a].nTr,o=p-1;0<=o;o--)t.aoColumns[o].bVisible||n||d[a].splice(o,1);h.push([])}for(a=0,r=d.length;a<r;a++){if(u=d[a].nTr)for(;s=u.firstChild;)u.removeChild(s);for(o=0,i=d[a].length;o<i;o++)if(f=c=1,h[a][o]===H){for(u.appendChild(d[a][o].cell),h[a][o]=1;d[a+c]!==H&&d[a][o].cell==d[a+c][o].cell;)h[a+c][o]=1,c++;for(;d[a][o+f]!==H&&d[a][o].cell==d[a][o+f].cell;){for(l=0;l<c;l++)h[a+l][o+f]=1;f++}P(d[a][o].cell).attr("rowspan",c).attr("colspan",f)}}}}function y(t,e){n="ssp"==E(s=t),(l=s.iInitDisplayStart)!==H&&-1!==l&&(s._iDisplayStart=!n&&l>=s.fnRecordsDisplay()?0:l,s.iInitDisplayStart=-1);var n=R(t,"aoPreDrawCallback","preDraw",[t]);if(-1!==P.inArray(!1,n))D(t,!1);else{var a=[],r=0,o=t.asStripeClasses,i=o.length,l=t.oLanguage,s="ssp"==E(t),u=t.aiDisplay,n=t._iDisplayStart,c=t.fnDisplayEnd();if(t.bDrawing=!0,t.bDeferLoading)t.bDeferLoading=!1,t.iDraw++,D(t,!1);else if(s){if(!t.bDestroying&&!e)return void xt(t)}else t.iDraw++;if(0!==u.length)for(var f=s?t.aoData.length:c,d=s?0:n;d<f;d++){var h,p=u[d],g=t.aoData[p],b=(null===g.nTr&&St(t,p),g.nTr);0!==i&&(h=o[r%i],g._sRowStripe!=h)&&(P(b).removeClass(g._sRowStripe).addClass(h),g._sRowStripe=h),R(t,"aoRowCallback",null,[b,g._aData,r,d,p]),a.push(b),r++}else{e=l.sZeroRecords;1==t.iDraw&&"ajax"==E(t)?e=l.sLoadingRecords:l.sEmptyTable&&0===t.fnRecordsTotal()&&(e=l.sEmptyTable),a[0]=P("<tr/>",{class:i?o[0]:""}).append(P("<td />",{valign:"top",colSpan:T(t),class:t.oClasses.sRowEmpty}).html(e))[0]}R(t,"aoHeaderCallback","header",[P(t.nTHead).children("tr")[0],ht(t),n,c,u]),R(t,"aoFooterCallback","footer",[P(t.nTFoot).children("tr")[0],ht(t),n,c,u]);s=P(t.nTBody);s.children().detach(),s.append(P(a)),R(t,"aoDrawCallback","draw",[t]),t.bSorted=!1,t.bFiltered=!1,t.bDrawing=!1}}function u(t,e){var n=t.oFeatures,a=n.bSort,n=n.bFilter;a&&ie(t),n?Rt(t,t.oPreviousSearch):t.aiDisplay=t.aiDisplayMaster.slice(),!0!==e&&(t._iDisplayStart=0),t._drawHold=e,y(t),t._drawHold=!1}function _t(t){for(var e,n,a,r,o,i,l,s=t.oClasses,u=P(t.nTable),u=P("<div/>").insertBefore(u),c=t.oFeatures,f=P("<div/>",{id:t.sTableId+"_wrapper",class:s.sWrapper+(t.nTFoot?"":" 
"+s.sNoFooter)}),d=(t.nHolding=u[0],t.nTableWrapper=f[0],t.nTableReinsertBefore=t.nTable.nextSibling,t.sDom.split("")),h=0;h<d.length;h++){if(e=null,"<"==(n=d[h])){if(a=P("<div/>")[0],"'"==(r=d[h+1])||'"'==r){for(o="",i=2;d[h+i]!=r;)o+=d[h+i],i++;"H"==o?o=s.sJUIHeader:"F"==o&&(o=s.sJUIFooter),-1!=o.indexOf(".")?(l=o.split("."),a.id=l[0].substr(1,l[0].length-1),a.className=l[1]):"#"==o.charAt(0)?a.id=o.substr(1,o.length-1):a.className=o,h+=i}f.append(a),f=P(a)}else if(">"==n)f=f.parent();else if("l"==n&&c.bPaginate&&c.bLengthChange)e=Gt(t);else if("f"==n&&c.bFilter)e=Lt(t);else if("r"==n&&c.bProcessing)e=Zt(t);else if("t"==n)e=Kt(t);else if("i"==n&&c.bInfo)e=Ut(t);else if("p"==n&&c.bPaginate)e=zt(t);else if(0!==w.ext.feature.length)for(var p=w.ext.feature,g=0,b=p.length;g<b;g++)if(n==p[g].cFeature){e=p[g].fnInit(t);break}e&&((l=t.aanFeatures)[n]||(l[n]=[]),l[n].push(e),f.append(e))}u.replaceWith(f),t.nHolding=null}function wt(t,e){var n,a,r,o,i,l,s,u,c,f,d=P(e).children("tr");for(t.splice(0,t.length),r=0,l=d.length;r<l;r++)t.push([]);for(r=0,l=d.length;r<l;r++)for(a=(n=d[r]).firstChild;a;){if("TD"==a.nodeName.toUpperCase()||"TH"==a.nodeName.toUpperCase())for(u=(u=+a.getAttribute("colspan"))&&0!=u&&1!=u?u:1,c=(c=+a.getAttribute("rowspan"))&&0!=c&&1!=c?c:1,s=function(t,e,n){for(var a=t[e];a[n];)n++;return n}(t,r,0),f=1==u,i=0;i<u;i++)for(o=0;o<c;o++)t[r+o][s+i]={cell:a,unique:f},t[r+o].nTr=n;a=a.nextSibling}}function Ct(t,e,n){var a=[];n||(n=t.aoHeader,e&&wt(n=[],e));for(var r=0,o=n.length;r<o;r++)for(var i=0,l=n[r].length;i<l;i++)!n[r][i].unique||a[i]&&t.bSortCellsTop||(a[i]=n[r][i].cell);return a}function Tt(r,t,n){function e(t){var e=r.jqXHR?r.jqXHR.status:null;(null===t||"number"==typeof e&&204==e)&&Ft(r,t={},[]),(e=t.error||t.sError)&&W(r,0,e),r.json=t,R(r,null,"xhr",[r,t,r.jqXHR]),n(t)}R(r,"aoServerParams","serverParams",[t]),t&&Array.isArray(t)&&(a={},o=/(.*?)\[\]$/,P.each(t,function(t,e){var n=e.name.match(o);n?(n=n[0],a[n]||(a[n]=[]),a[n].push(e.value)):a[e.name]=e.value}),t=a);var a,o,i,l=r.ajax,s=r.oInstance,u=(P.isPlainObject(l)&&l.data&&(u="function"==typeof(i=l.data)?i(t,r):i,t="function"==typeof i&&u?u:P.extend(!0,t,u),delete l.data),{data:t,success:e,dataType:"json",cache:!1,type:r.sServerMethod,error:function(t,e,n){var a=R(r,null,"xhr",[r,null,r.jqXHR]);-1===P.inArray(!0,a)&&("parsererror"==e?W(r,0,"Invalid JSON response",1):4===t.readyState&&W(r,0,"Ajax error",7)),D(r,!1)}});r.oAjaxData=t,R(r,null,"preXhr",[r,t]),r.fnServerData?r.fnServerData.call(s,r.sAjaxSource,P.map(t,function(t,e){return{name:e,value:t}}),e,r):r.sAjaxSource||"string"==typeof l?r.jqXHR=P.ajax(P.extend(u,{url:l||r.sAjaxSource})):"function"==typeof l?r.jqXHR=l.call(s,t,e,r):(r.jqXHR=P.ajax(P.extend(u,l)),l.data=i)}function xt(e){e.iDraw++,D(e,!0);var n=e._drawHold;Tt(e,At(e),function(t){e._drawHold=n,It(e,t),e._drawHold=!1})}function At(t){for(var e,n,a,r=t.aoColumns,o=r.length,i=t.oFeatures,l=t.oPreviousSearch,s=t.aoPreSearchCols,u=[],c=I(t),f=t._iDisplayStart,d=!1!==i.bPaginate?t._iDisplayLength:-1,h=function(t,e){u.push({name:t,value:e})},p=(h("sEcho",t.iDraw),h("iColumns",o),h("sColumns",N(r,"sName").join(",")),h("iDisplayStart",f),h("iDisplayLength",d),{draw:t.iDraw,columns:[],order:[],start:f,length:d,search:{value:l.sSearch,regex:l.bRegex}}),g=0;g<o;g++)n=r[g],a=s[g],e="function"==typeof 
n.mData?"function":n.mData,p.columns.push({data:e,name:n.sName,searchable:n.bSearchable,orderable:n.bSortable,search:{value:a.sSearch,regex:a.bRegex}}),h("mDataProp_"+g,e),i.bFilter&&(h("sSearch_"+g,a.sSearch),h("bRegex_"+g,a.bRegex),h("bSearchable_"+g,n.bSearchable)),i.bSort&&h("bSortable_"+g,n.bSortable);i.bFilter&&(h("sSearch",l.sSearch),h("bRegex",l.bRegex)),i.bSort&&(P.each(c,function(t,e){p.order.push({column:e.col,dir:e.dir}),h("iSortCol_"+t,e.col),h("sSortDir_"+t,e.dir)}),h("iSortingCols",c.length));f=w.ext.legacy.ajax;return null===f?t.sAjaxSource?u:p:f?u:p}function It(t,n){function e(t,e){return n[t]!==H?n[t]:n[e]}var a=Ft(t,n),r=e("sEcho","draw"),o=e("iTotalRecords","recordsTotal"),i=e("iTotalDisplayRecords","recordsFiltered");if(r!==H){if(+r<t.iDraw)return;t.iDraw=+r}a=a||[],pt(t),t._iRecordsTotal=parseInt(o,10),t._iRecordsDisplay=parseInt(i,10);for(var l=0,s=a.length;l<s;l++)x(t,a[l]);t.aiDisplay=t.aiDisplayMaster.slice(),y(t,!0),t._bInitComplete||qt(t,n),D(t,!1)}function Ft(t,e,n){t=P.isPlainObject(t.ajax)&&t.ajax.dataSrc!==H?t.ajax.dataSrc:t.sAjaxDataProp;if(!n)return"data"===t?e.aaData||e[t]:""!==t?A(t)(e):e;b(t)(e,n)}function Lt(n){function e(t){i.f;var e=this.value||"";o.return&&"Enter"!==t.key||e!=o.sSearch&&(Rt(n,{sSearch:e,bRegex:o.bRegex,bSmart:o.bSmart,bCaseInsensitive:o.bCaseInsensitive,return:o.return}),n._iDisplayStart=0,y(n))}var t=n.oClasses,a=n.sTableId,r=n.oLanguage,o=n.oPreviousSearch,i=n.aanFeatures,l='<input type="search" class="'+t.sFilterInput+'"/>',s=(s=r.sSearch).match(/_INPUT_/)?s.replace("_INPUT_",l):s+l,l=P("<div/>",{id:i.f?null:a+"_filter",class:t.sFilter}).append(P("<label/>").append(s)),t=null!==n.searchDelay?n.searchDelay:"ssp"===E(n)?400:0,u=P("input",l).val(o.sSearch).attr("placeholder",r.sSearchPlaceholder).on("keyup.DT search.DT input.DT paste.DT cut.DT",t?ne(e,t):e).on("mouseup.DT",function(t){setTimeout(function(){e.call(u[0],t)},10)}).on("keypress.DT",function(t){if(13==t.keyCode)return!1}).attr("aria-controls",a);return P(n.nTable).on("search.dt.DT",function(t,e){if(n===e)try{u[0]!==v.activeElement&&u.val(o.sSearch)}catch(t){}}),l[0]}function Rt(t,e,n){function a(t){o.sSearch=t.sSearch,o.bRegex=t.bRegex,o.bSmart=t.bSmart,o.bCaseInsensitive=t.bCaseInsensitive,o.return=t.return}function r(t){return t.bEscapeRegex!==H?!t.bEscapeRegex:t.bRegex}var o=t.oPreviousSearch,i=t.aoPreSearchCols;if(lt(t),"ssp"!=E(t)){Ht(t,e.sSearch,n,r(e),e.bSmart,e.bCaseInsensitive),a(e);for(var l=0;l<i.length;l++)jt(t,i[l].sSearch,l,r(i[l]),i[l].bSmart,i[l].bCaseInsensitive);Pt(t)}else a(e);t.bFiltered=!0,R(t,null,"search",[t])}function Pt(t){for(var e,n,a=w.ext.search,r=t.aiDisplay,o=0,i=a.length;o<i;o++){for(var l=[],s=0,u=r.length;s<u;s++)n=r[s],e=t.aoData[n],a[o](t,e._aFilterData,n,e._aData,s)&&l.push(n);r.length=0,P.merge(r,l)}}function jt(t,e,n,a,r,o){if(""!==e){for(var i,l=[],s=t.aiDisplay,u=Nt(e,a,r,o),c=0;c<s.length;c++)i=t.aoData[s[c]]._aFilterData[n],u.test(i)&&l.push(s[c]);t.aiDisplay=l}}function Ht(t,e,n,a,r,o){var i,l,s,u=Nt(e,a,r,o),r=t.oPreviousSearch.sSearch,o=t.aiDisplayMaster,c=[];if(0!==w.ext.search.length&&(n=!0),l=Wt(t),e.length<=0)t.aiDisplay=o.slice();else{for((l||n||a||r.length>e.length||0!==e.indexOf(r)||t.bSorted)&&(t.aiDisplay=o.slice()),i=t.aiDisplay,s=0;s<i.length;s++)u.test(t.aoData[i[s]]._sFilterRow)&&c.push(i[s]);t.aiDisplay=c}}function Nt(t,e,n,a){return t=e?t:Ot(t),n&&(t="^(?=.*?"+P.map(t.match(/["\u201C][^"\u201D]+["\u201D]|[^ ]+/g)||[""],function(t){var 
e;return'"'===t.charAt(0)?t=(e=t.match(/^"(.*)"$/))?e[1]:t:"“"===t.charAt(0)&&(t=(e=t.match(/^\u201C(.*)\u201D$/))?e[1]:t),t.replace('"',"")}).join(")(?=.*?")+").*$"),new RegExp(t,a?"i":"")}var Ot=w.util.escapeRegex,kt=P("<div>")[0],Mt=kt.textContent!==H;function Wt(t){for(var e,n,a,r,o,i=t.aoColumns,l=!1,s=0,u=t.aoData.length;s<u;s++)if(!(o=t.aoData[s])._aFilterData){for(a=[],e=0,n=i.length;e<n;e++)i[e].bSearchable?"string"!=typeof(r=null===(r=S(t,s,e,"filter"))?"":r)&&r.toString&&(r=r.toString()):r="",r.indexOf&&-1!==r.indexOf("&")&&(kt.innerHTML=r,r=Mt?kt.textContent:kt.innerText),r.replace&&(r=r.replace(/[\r\n\u2028]/g,"")),a.push(r);o._aFilterData=a,o._sFilterRow=a.join(" "),l=!0}return l}function Et(t){return{search:t.sSearch,smart:t.bSmart,regex:t.bRegex,caseInsensitive:t.bCaseInsensitive}}function Bt(t){return{sSearch:t.search,bSmart:t.smart,bRegex:t.regex,bCaseInsensitive:t.caseInsensitive}}function Ut(t){var e=t.sTableId,n=t.aanFeatures.i,a=P("<div/>",{class:t.oClasses.sInfo,id:n?null:e+"_info"});return n||(t.aoDrawCallback.push({fn:Vt,sName:"information"}),a.attr("role","status").attr("aria-live","polite"),P(t.nTable).attr("aria-describedby",e+"_info")),a[0]}function Vt(t){var e,n,a,r,o,i,l=t.aanFeatures.i;0!==l.length&&(i=t.oLanguage,e=t._iDisplayStart+1,n=t.fnDisplayEnd(),a=t.fnRecordsTotal(),o=(r=t.fnRecordsDisplay())?i.sInfo:i.sInfoEmpty,r!==a&&(o+=" "+i.sInfoFiltered),o=Xt(t,o+=i.sInfoPostFix),null!==(i=i.fnInfoCallback)&&(o=i.call(t.oInstance,t,e,n,a,r,o)),P(l).html(o))}function Xt(t,e){var n=t.fnFormatNumber,a=t._iDisplayStart+1,r=t._iDisplayLength,o=t.fnRecordsDisplay(),i=-1===r;return e.replace(/_START_/g,n.call(t,a)).replace(/_END_/g,n.call(t,t.fnDisplayEnd())).replace(/_MAX_/g,n.call(t,t.fnRecordsTotal())).replace(/_TOTAL_/g,n.call(t,o)).replace(/_PAGE_/g,n.call(t,i?1:Math.ceil(a/r))).replace(/_PAGES_/g,n.call(t,i?1:Math.ceil(o/r)))}function Jt(n){var a,t,e,r=n.iInitDisplayStart,o=n.aoColumns,i=n.oFeatures,l=n.bDeferLoading;if(n.bInitialised){for(_t(n),yt(n),Dt(n,n.aoHeader),Dt(n,n.aoFooter),D(n,!0),i.bAutoWidth&&ee(n),a=0,t=o.length;a<t;a++)(e=o[a]).sWidth&&(e.nTh.style.width=M(e.sWidth));R(n,null,"preInit",[n]),u(n);i=E(n);"ssp"==i&&!l||("ajax"==i?Tt(n,[],function(t){var e=Ft(n,t);for(a=0;a<e.length;a++)x(n,e[a]);n.iInitDisplayStart=r,u(n),D(n,!1),qt(n,t)}):(D(n,!1),qt(n)))}else setTimeout(function(){Jt(n)},200)}function qt(t,e){t._bInitComplete=!0,(e||t.oInit.aaData)&&O(t),R(t,null,"plugin-init",[t,e]),R(t,"aoInitComplete","init",[t,e])}function $t(t,e){e=parseInt(e,10);t._iDisplayLength=e,Se(t),R(t,null,"length",[t,e])}function Gt(a){for(var t=a.oClasses,e=a.sTableId,n=a.aLengthMenu,r=Array.isArray(n[0]),o=r?n[0]:n,i=r?n[1]:n,l=P("<select/>",{name:e+"_length","aria-controls":e,class:t.sLengthSelect}),s=0,u=o.length;s<u;s++)l[0][s]=new Option("number"==typeof i[s]?a.fnFormatNumber(i[s]):i[s],o[s]);var c=P("<div><label/></div>").addClass(t.sLength);return a.aanFeatures.l||(c[0].id=e+"_length"),c.children().append(a.oLanguage.sLengthMenu.replace("_MENU_",l[0].outerHTML)),P("select",c).val(a._iDisplayLength).on("change.DT",function(t){$t(a,P(this).val()),y(a)}),P(a.nTable).on("length.dt.DT",function(t,e,n){a===e&&P("select",c).val(n)}),c[0]}function zt(t){function c(t){y(t)}var e=t.sPaginationType,f=w.ext.pager[e],d="function"==typeof f,e=P("<div/>").addClass(t.oClasses.sPaging+e)[0],h=t.aanFeatures;return d||f.fnInit(t,e,c),h.p||(e.id=t.sTableId+"_paginate",t.aoDrawCallback.push({fn:function(t){if(d)for(var 
e=t._iDisplayStart,n=t._iDisplayLength,a=t.fnRecordsDisplay(),r=-1===n,o=r?0:Math.ceil(e/n),i=r?1:Math.ceil(a/n),l=f(o,i),s=0,u=h.p.length;s<u;s++)ve(t,"pageButton")(t,h.p[s],s,l,o,i);else f.fnUpdate(t,c)},sName:"pagination"})),e}function Yt(t,e,n){var a=t._iDisplayStart,r=t._iDisplayLength,o=t.fnRecordsDisplay(),o=(0===o||-1===r?a=0:"number"==typeof e?o<(a=e*r)&&(a=0):"first"==e?a=0:"previous"==e?(a=0<=r?a-r:0)<0&&(a=0):"next"==e?a+r<o&&(a+=r):"last"==e?a=Math.floor((o-1)/r)*r:W(t,0,"Unknown paging action: "+e,5),t._iDisplayStart!==a);return t._iDisplayStart=a,o?(R(t,null,"page",[t]),n&&y(t)):R(t,null,"page-nc",[t]),o}function Zt(t){return P("<div/>",{id:t.aanFeatures.r?null:t.sTableId+"_processing",class:t.oClasses.sProcessing,role:"status"}).html(t.oLanguage.sProcessing).append("<div><div></div><div></div><div></div><div></div></div>").insertBefore(t.nTable)[0]}function D(t,e){t.oFeatures.bProcessing&&P(t.aanFeatures.r).css("display",e?"block":"none"),R(t,null,"processing",[t,e])}function Kt(t){var e,n,a,r,o,i,l,s,u,c,f,d,h=P(t.nTable),p=t.oScroll;return""===p.sX&&""===p.sY?t.nTable:(e=p.sX,n=p.sY,a=t.oClasses,o=(r=h.children("caption")).length?r[0]._captionSide:null,s=P(h[0].cloneNode(!1)),i=P(h[0].cloneNode(!1)),u=function(t){return t?M(t):null},(l=h.children("tfoot")).length||(l=null),s=P(f="<div/>",{class:a.sScrollWrapper}).append(P(f,{class:a.sScrollHead}).css({overflow:"hidden",position:"relative",border:0,width:e?u(e):"100%"}).append(P(f,{class:a.sScrollHeadInner}).css({"box-sizing":"content-box",width:p.sXInner||"100%"}).append(s.removeAttr("id").css("margin-left",0).append("top"===o?r:null).append(h.children("thead"))))).append(P(f,{class:a.sScrollBody}).css({position:"relative",overflow:"auto",width:u(e)}).append(h)),l&&s.append(P(f,{class:a.sScrollFoot}).css({overflow:"hidden",border:0,width:e?u(e):"100%"}).append(P(f,{class:a.sScrollFootInner}).append(i.removeAttr("id").css("margin-left",0).append("bottom"===o?r:null).append(h.children("tfoot"))))),u=s.children(),c=u[0],f=u[1],d=l?u[2]:null,e&&P(f).on("scroll.DT",function(t){var e=this.scrollLeft;c.scrollLeft=e,l&&(d.scrollLeft=e)}),P(f).css("max-height",n),p.bCollapse||P(f).css("height",n),t.nScrollHead=c,t.nScrollBody=f,t.nScrollFoot=d,t.aoDrawCallback.push({fn:Qt,sName:"scrolling"}),s[0])}function Qt(n){function t(t){(t=t.style).paddingTop="0",t.paddingBottom="0",t.borderTopWidth="0",t.borderBottomWidth="0",t.height=0}var e,a,r,o,i,l=n.oScroll,s=l.sX,u=l.sXInner,c=l.sY,l=l.iBarWidth,f=P(n.nScrollHead),d=f[0].style,h=f.children("div"),p=h[0].style,h=h.children("table"),g=n.nScrollBody,b=P(g),m=g.style,S=P(n.nScrollFoot).children("div"),v=S.children("table"),y=P(n.nTHead),D=P(n.nTable),_=D[0],w=_.style,C=n.nTFoot?P(n.nTFoot):null,T=n.oBrowser,x=T.bScrollOversize,A=(N(n.aoColumns,"nTh"),[]),I=[],F=[],L=[],R=g.scrollHeight>g.clientHeight;n.scrollBarVis!==R&&n.scrollBarVis!==H?(n.scrollBarVis=R,O(n)):(n.scrollBarVis=R,D.children("thead, tfoot").remove(),C&&(R=C.clone().prependTo(D),i=C.find("tr"),a=R.find("tr"),R.find("[id]").removeAttr("id")),R=y.clone().prependTo(D),y=y.find("tr"),e=R.find("tr"),R.find("th, 
td").removeAttr("tabindex"),R.find("[id]").removeAttr("id"),s||(m.width="100%",f[0].style.width="100%"),P.each(Ct(n,R),function(t,e){r=rt(n,t),e.style.width=n.aoColumns[r].sWidth}),C&&k(function(t){t.style.width=""},a),f=D.outerWidth(),""===s?(w.width="100%",x&&(D.find("tbody").height()>g.offsetHeight||"scroll"==b.css("overflow-y"))&&(w.width=M(D.outerWidth()-l)),f=D.outerWidth()):""!==u&&(w.width=M(u),f=D.outerWidth()),k(t,e),k(function(t){var e=j.getComputedStyle?j.getComputedStyle(t).width:M(P(t).width());F.push(t.innerHTML),A.push(e)},e),k(function(t,e){t.style.width=A[e]},y),P(e).css("height",0),C&&(k(t,a),k(function(t){L.push(t.innerHTML),I.push(M(P(t).css("width")))},a),k(function(t,e){t.style.width=I[e]},i),P(a).height(0)),k(function(t,e){t.innerHTML='<div class="dataTables_sizing">'+F[e]+"</div>",t.childNodes[0].style.height="0",t.childNodes[0].style.overflow="hidden",t.style.width=A[e]},e),C&&k(function(t,e){t.innerHTML='<div class="dataTables_sizing">'+L[e]+"</div>",t.childNodes[0].style.height="0",t.childNodes[0].style.overflow="hidden",t.style.width=I[e]},a),Math.round(D.outerWidth())<Math.round(f)?(o=g.scrollHeight>g.offsetHeight||"scroll"==b.css("overflow-y")?f+l:f,x&&(g.scrollHeight>g.offsetHeight||"scroll"==b.css("overflow-y"))&&(w.width=M(o-l)),""!==s&&""===u||W(n,1,"Possible column misalignment",6)):o="100%",m.width=M(o),d.width=M(o),C&&(n.nScrollFoot.style.width=M(o)),c||x&&(m.height=M(_.offsetHeight+l)),R=D.outerWidth(),h[0].style.width=M(R),p.width=M(R),y=D.height()>g.clientHeight||"scroll"==b.css("overflow-y"),p[i="padding"+(T.bScrollbarLeft?"Left":"Right")]=y?l+"px":"0px",C&&(v[0].style.width=M(R),S[0].style.width=M(R),S[0].style[i]=y?l+"px":"0px"),D.children("colgroup").insertBefore(D.children("thead")),b.trigger("scroll"),!n.bSorted&&!n.bFiltered||n._drawHold||(g.scrollTop=0))}function k(t,e,n){for(var a,r,o=0,i=0,l=e.length;i<l;){for(a=e[i].firstChild,r=n?n[i].firstChild:null;a;)1===a.nodeType&&(n?t(a,r,o):t(a,o),o++),a=a.nextSibling,r=n?r.nextSibling:null;i++}}var te=/<.*?>/g;function ee(t){var e,n,a=t.nTable,r=t.aoColumns,o=t.oScroll,i=o.sY,l=o.sX,o=o.sXInner,s=r.length,u=it(t,"bVisible"),c=P("th",t.nTHead),f=a.getAttribute("width"),d=a.parentNode,h=!1,p=t.oBrowser,g=p.bScrollOversize,b=a.style.width,m=(b&&-1!==b.indexOf("%")&&(f=b),ae(N(r,"sWidthOrig"),d));for(_=0;_<u.length;_++)null!==(e=r[u[_]]).sWidth&&(e.sWidth=m[_],h=!0);if(g||!h&&!l&&!i&&s==T(t)&&s==c.length)for(_=0;_<s;_++){var S=rt(t,_);null!==S&&(r[S].sWidth=M(c.eq(_).width()))}else{var b=P(a).clone().css("visibility","hidden").removeAttr("id"),v=(b.find("tbody tr").remove(),P("<tr/>").appendTo(b.find("tbody")));for(b.find("thead, tfoot").remove(),b.append(P(t.nTHead).clone()).append(P(t.nTFoot).clone()),b.find("tfoot th, tfoot td").css("width",""),c=Ct(t,b.find("thead")[0]),_=0;_<u.length;_++)e=r[u[_]],c[_].style.width=null!==e.sWidthOrig&&""!==e.sWidthOrig?M(e.sWidthOrig):"",e.sWidthOrig&&l&&P(c[_]).append(P("<div/>").css({width:e.sWidthOrig,margin:0,padding:0,border:0,height:1}));if(t.aoData.length)for(_=0;_<u.length;_++)e=r[n=u[_]],P(re(t,n)).clone(!1).append(e.sContentPadding).appendTo(v);P("[name]",b).removeAttr("name");for(var y=P("<div/>").css(l||i?{position:"absolute",top:0,left:0,height:1,right:0,overflow:"hidden"}:{}).append(b).appendTo(d),D=(l&&o?b.width(o):l?(b.css("width","auto"),b.removeAttr("width"),b.width()<d.clientWidth&&f&&b.width(d.clientWidth)):i?b.width(d.clientWidth):f&&b.width(f),0),_=0;_<u.length;_++){var 
w=P(c[_]),C=w.outerWidth()-w.width(),w=p.bBounding?Math.ceil(c[_].getBoundingClientRect().width):w.outerWidth();D+=w,r[u[_]].sWidth=M(w-C)}a.style.width=M(D),y.remove()}f&&(a.style.width=M(f)),!f&&!l||t._reszEvt||(o=function(){P(j).on("resize.DT-"+t.sInstance,ne(function(){O(t)}))},g?setTimeout(o,1e3):o(),t._reszEvt=!0)}var ne=w.util.throttle;function ae(t,e){for(var n=[],a=[],r=0;r<t.length;r++)t[r]?n.push(P("<div/>").css("width",M(t[r])).appendTo(e||v.body)):n.push(null);for(r=0;r<t.length;r++)a.push(n[r]?n[r][0].offsetWidth:null);return P(n).remove(),a}function re(t,e){var n,a=oe(t,e);return a<0?null:(n=t.aoData[a]).nTr?n.anCells[e]:P("<td/>").html(S(t,a,e,"display"))[0]}function oe(t,e){for(var n,a=-1,r=-1,o=0,i=t.aoData.length;o<i;o++)(n=(n=(n=S(t,o,e,"display")+"").replace(te,"")).replace(/&nbsp;/g," ")).length>a&&(a=n.length,r=o);return r}function M(t){return null===t?"0px":"number"==typeof t?t<0?"0px":t+"px":t.match(/\d$/)?t+"px":t}function I(t){function e(t){t.length&&!Array.isArray(t[0])?h.push(t):P.merge(h,t)}var n,a,r,o,i,l,s,u=[],c=t.aoColumns,f=t.aaSortingFixed,d=P.isPlainObject(f),h=[];for(Array.isArray(f)&&e(f),d&&f.pre&&e(f.pre),e(t.aaSorting),d&&f.post&&e(f.post),n=0;n<h.length;n++)for(r=(o=c[s=h[n][a=0]].aDataSort).length;a<r;a++)l=c[i=o[a]].sType||"string",h[n]._idx===H&&(h[n]._idx=P.inArray(h[n][1],c[i].asSorting)),u.push({src:s,col:i,dir:h[n][1],index:h[n]._idx,type:l,formatter:w.ext.type.order[l+"-pre"]});return u}function ie(t){var e,n,a,r,c,f=[],u=w.ext.type.order,d=t.aoData,o=(t.aoColumns,0),i=t.aiDisplayMaster;for(lt(t),e=0,n=(c=I(t)).length;e<n;e++)(r=c[e]).formatter&&o++,fe(t,r.col);if("ssp"!=E(t)&&0!==c.length){for(e=0,a=i.length;e<a;e++)f[i[e]]=e;o===c.length?i.sort(function(t,e){for(var n,a,r,o,i=c.length,l=d[t]._aSortData,s=d[e]._aSortData,u=0;u<i;u++)if(0!=(r=(n=l[(o=c[u]).col])<(a=s[o.col])?-1:a<n?1:0))return"asc"===o.dir?r:-r;return(n=f[t])<(a=f[e])?-1:a<n?1:0}):i.sort(function(t,e){for(var n,a,r,o=c.length,i=d[t]._aSortData,l=d[e]._aSortData,s=0;s<o;s++)if(n=i[(r=c[s]).col],a=l[r.col],0!==(r=(u[r.type+"-"+r.dir]||u["string-"+r.dir])(n,a)))return r;return(n=f[t])<(a=f[e])?-1:a<n?1:0})}t.bSorted=!0}function le(t){for(var e=t.aoColumns,n=I(t),a=t.oLanguage.oAria,r=0,o=e.length;r<o;r++){var i=e[r],l=i.asSorting,s=i.ariaTitle||i.sTitle.replace(/<.*?>/g,""),u=i.nTh;u.removeAttribute("aria-sort"),i=i.bSortable?s+("asc"===(0<n.length&&n[0].col==r&&(u.setAttribute("aria-sort","asc"==n[0].dir?"ascending":"descending"),l[n[0].index+1])||l[0])?a.sSortAscending:a.sSortDescending):s,u.setAttribute("aria-label",i)}}function se(t,e,n,a){function r(t,e){var n=t._idx;return(n=n===H?P.inArray(t[1],s):n)+1<s.length?n+1:e?null:0}var o,i=t.aoColumns[e],l=t.aaSorting,s=i.asSorting;"number"==typeof l[0]&&(l=t.aaSorting=[l]),n&&t.oFeatures.bSortMulti?-1!==(i=P.inArray(e,N(l,"0")))?null===(o=null===(o=r(l[i],!0))&&1===l.length?0:o)?l.splice(i,1):(l[i][1]=s[o],l[i]._idx=o):(l.push([e,s[0],0]),l[l.length-1]._idx=0):l.length&&l[0][0]==e?(o=r(l[0]),l.length=1,l[0][1]=s[o],l[0]._idx=o):(l.length=0,l.push([e,s[0]]),l[0]._idx=0),u(t),"function"==typeof a&&a(t)}function ue(e,t,n,a){var r=e.aoColumns[n];me(t,{},function(t){!1!==r.bSortable&&(e.oFeatures.bProcessing?(D(e,!0),setTimeout(function(){se(e,n,t.shiftKey,a),"ssp"!==E(e)&&D(e,!1)},0)):se(e,n,t.shiftKey,a))})}function ce(t){var 
e,n,a,r=t.aLastSort,o=t.oClasses.sSortColumn,i=I(t),l=t.oFeatures;if(l.bSort&&l.bSortClasses){for(e=0,n=r.length;e<n;e++)a=r[e].src,P(N(t.aoData,"anCells",a)).removeClass(o+(e<2?e+1:3));for(e=0,n=i.length;e<n;e++)a=i[e].src,P(N(t.aoData,"anCells",a)).addClass(o+(e<2?e+1:3))}t.aLastSort=i}function fe(t,e){for(var n,a,r,o=t.aoColumns[e],i=w.ext.order[o.sSortDataType],l=(i&&(n=i.call(t.oInstance,t,e,ot(t,e))),w.ext.type.order[o.sType+"-pre"]),s=0,u=t.aoData.length;s<u;s++)(a=t.aoData[s])._aSortData||(a._aSortData=[]),a._aSortData[e]&&!i||(r=i?n[s]:S(t,s,e,"sort"),a._aSortData[e]=l?l(r):r)}function de(n){var t;n._bLoadingState||(t={time:+new Date,start:n._iDisplayStart,length:n._iDisplayLength,order:P.extend(!0,[],n.aaSorting),search:Et(n.oPreviousSearch),columns:P.map(n.aoColumns,function(t,e){return{visible:t.bVisible,search:Et(n.aoPreSearchCols[e])}})},n.oSavedState=t,R(n,"aoStateSaveParams","stateSaveParams",[n,t]),n.oFeatures.bStateSave&&!n.bDestroying&&n.fnStateSaveCallback.call(n.oInstance,n,t))}function he(e,t,n){var a;if(e.oFeatures.bStateSave)return(a=e.fnStateLoadCallback.call(e.oInstance,e,function(t){pe(e,t,n)}))!==H&&pe(e,a,n),!0;n()}function pe(n,t,e){var a,r,o=n.aoColumns,i=(n._bLoadingState=!0,n._bInitComplete?new w.Api(n):null);if(t&&t.time){var l=R(n,"aoStateLoadParams","stateLoadParams",[n,t]);if(-1!==P.inArray(!1,l))n._bLoadingState=!1;else{l=n.iStateDuration;if(0<l&&t.time<+new Date-1e3*l)n._bLoadingState=!1;else if(t.columns&&o.length!==t.columns.length)n._bLoadingState=!1;else{if(n.oLoadedState=P.extend(!0,{},t),t.length!==H&&(i?i.page.len(t.length):n._iDisplayLength=t.length),t.start!==H&&(null===i?(n._iDisplayStart=t.start,n.iInitDisplayStart=t.start):Yt(n,t.start/n._iDisplayLength)),t.order!==H&&(n.aaSorting=[],P.each(t.order,function(t,e){n.aaSorting.push(e[0]>=o.length?[0,e[1]]:e)})),t.search!==H&&P.extend(n.oPreviousSearch,Bt(t.search)),t.columns){for(a=0,r=t.columns.length;a<r;a++){var s=t.columns[a];s.visible!==H&&(i?i.column(a).visible(s.visible,!1):o[a].bVisible=s.visible),s.search!==H&&P.extend(n.aoPreSearchCols[a],Bt(s.search))}i&&i.columns.adjust()}n._bLoadingState=!1,R(n,"aoStateLoaded","stateLoaded",[n,t])}}}else n._bLoadingState=!1;e()}function ge(t){var e=w.settings,t=P.inArray(t,N(e,"nTable"));return-1!==t?e[t]:null}function W(t,e,n,a){if(n="DataTables warning: "+(t?"table id="+t.sTableId+" - ":"")+n,a&&(n+=". 
For more information about this error, please see https://datatables.net/tn/"+a),e)j.console&&console.log&&console.log(n);else{e=w.ext,e=e.sErrMode||e.errMode;if(t&&R(t,null,"error",[t,a,n]),"alert"==e)alert(n);else{if("throw"==e)throw new Error(n);"function"==typeof e&&e(t,a,n)}}}function F(n,a,t,e){Array.isArray(t)?P.each(t,function(t,e){Array.isArray(e)?F(n,a,e[0],e[1]):F(n,a,e)}):(e===H&&(e=t),a[t]!==H&&(n[e]=a[t]))}function be(t,e,n){var a,r;for(r in e)e.hasOwnProperty(r)&&(a=e[r],P.isPlainObject(a)?(P.isPlainObject(t[r])||(t[r]={}),P.extend(!0,t[r],a)):n&&"data"!==r&&"aaData"!==r&&Array.isArray(a)?t[r]=a.slice():t[r]=a);return t}function me(e,t,n){P(e).on("click.DT",t,function(t){P(e).trigger("blur"),n(t)}).on("keypress.DT",t,function(t){13===t.which&&(t.preventDefault(),n(t))}).on("selectstart.DT",function(){return!1})}function L(t,e,n,a){n&&t[e].push({fn:n,sName:a})}function R(n,t,e,a){var r=[];return t&&(r=P.map(n[t].slice().reverse(),function(t,e){return t.fn.apply(n.oInstance,a)})),null!==e&&(t=P.Event(e+".dt"),(e=P(n.nTable)).trigger(t,a),0===e.parents("body").length&&P("body").trigger(t,a),r.push(t.result)),r}function Se(t){var e=t._iDisplayStart,n=t.fnDisplayEnd(),a=t._iDisplayLength;n<=e&&(e=n-a),e-=e%a,t._iDisplayStart=e=-1===a||e<0?0:e}function ve(t,e){var t=t.renderer,n=w.ext.renderer[e];return P.isPlainObject(t)&&t[e]?n[t[e]]||n._:"string"==typeof t&&n[t]||n._}function E(t){return t.oFeatures.bServerSide?"ssp":t.ajax||t.sAjaxSource?"ajax":"dom"}function ye(t,n){var a;return Array.isArray(t)?P.map(t,function(t){return ye(t,n)}):"number"==typeof t?[n[t]]:(a=P.map(n,function(t,e){return t.nTable}),P(a).filter(t).map(function(t){var e=P.inArray(this,a);return n[e]}).toArray())}function De(r,o,t){var e,n;t&&(e=new B(r)).one("draw",function(){t(e.ajax.json())}),"ssp"==E(r)?u(r,o):(D(r,!0),(n=r.jqXHR)&&4!==n.readyState&&n.abort(),Tt(r,[],function(t){pt(r);for(var e=Ft(r,t),n=0,a=e.length;n<a;n++)x(r,e[n]);u(r,o),D(r,!1)}))}function _e(t,e,n,a,r){for(var o,i,l,s,u=[],c=typeof e,f=0,d=(e=e&&"string"!=c&&"function"!=c&&e.length!==H?e:[e]).length;f<d;f++)for(l=0,s=(i=e[f]&&e[f].split&&!e[f].match(/[\[\(:]/)?e[f].split(","):[e[f]]).length;l<s;l++)(o=n("string"==typeof i[l]?i[l].trim():i[l]))&&o.length&&(u=u.concat(o));var h=p.selector[t];if(h.length)for(f=0,d=h.length;f<d;f++)u=h[f](a,r,u);return z(u)}function we(t){return(t=t||{}).filter&&t.search===H&&(t.search=t.filter),P.extend({search:"none",order:"current",page:"all"},t)}function Ce(t){for(var e=0,n=t.length;e<n;e++)if(0<t[e].length)return t[0]=t[e],t[0].length=1,t.length=1,t.context=[t.context[e]],t;return t.length=0,t}function Te(o,t,e,n){function i(t,e){var n;if(Array.isArray(t)||t instanceof P)for(var a=0,r=t.length;a<r;a++)i(t[a],e);else t.nodeName&&"tr"===t.nodeName.toLowerCase()?l.push(t):(n=P("<tr><td></td></tr>").addClass(e),P("td",n).addClass(e).html(t)[0].colSpan=T(o),l.push(n[0]))}var l=[];i(e,n),t._details&&t._details.detach(),t._details=P(l),t._detailsShow&&t._details.insertAfter(t.nTr)}function xe(t,e){var n=t.context;if(n.length&&t.length){var a=n[0].aoData[t[0]];if(a._details){(a._detailsShow=e)?(a._details.insertAfter(a.nTr),P(a.nTr).addClass("dt-hasChild")):(a._details.detach(),P(a.nTr).removeClass("dt-hasChild")),R(n[0],null,"childRow",[e,t.row(t[0])]);var s=n[0],r=new B(s),a=".dt.DT_details",e="draw"+a,t="column-sizing"+a,a="destroy"+a,u=s.aoData;if(r.off(e+" "+t+" "+a),N(u,"_details").length>0){r.on(e,function(t,e){if(s!==e)return;r.rows({page:"current"}).eq(0).each(function(t){var 
e=u[t];if(e._detailsShow)e._details.insertAfter(e.nTr)})});r.on(t,function(t,e,n,a){if(s!==e)return;var r,o=T(e);for(var i=0,l=u.length;i<l;i++){r=u[i];if(r._details)r._details.each(function(){var t=P(this).children("td");if(t.length==1)t.attr("colspan",o)})}});r.on(a,function(t,e){if(s!==e)return;for(var n=0,a=u.length;n<a;n++)if(u[n]._details)Re(r,n)})}Le(n)}}}function Ae(t,e,n,a,r){for(var o=[],i=0,l=r.length;i<l;i++)o.push(S(t,r[i],e));return o}var Ie=[],o=Array.prototype,B=function(t,e){if(!(this instanceof B))return new B(t,e);function n(t){var e,n,a,r;t=t,a=w.settings,r=P.map(a,function(t,e){return t.nTable}),(t=t?t.nTable&&t.oApi?[t]:t.nodeName&&"table"===t.nodeName.toLowerCase()?-1!==(e=P.inArray(t,r))?[a[e]]:null:t&&"function"==typeof t.settings?t.settings().toArray():("string"==typeof t?n=P(t):t instanceof P&&(n=t),n?n.map(function(t){return-1!==(e=P.inArray(this,r))?a[e]:null}).toArray():void 0):[])&&o.push.apply(o,t)}var o=[];if(Array.isArray(t))for(var a=0,r=t.length;a<r;a++)n(t[a]);else n(t);this.context=z(o),e&&P.merge(this,e),this.selector={rows:null,cols:null,opts:null},B.extend(this,this,Ie)},Fe=(w.Api=B,P.extend(B.prototype,{any:function(){return 0!==this.count()},concat:o.concat,context:[],count:function(){return this.flatten().length},each:function(t){for(var e=0,n=this.length;e<n;e++)t.call(this,this[e],e,this);return this},eq:function(t){var e=this.context;return e.length>t?new B(e[t],this[t]):null},filter:function(t){var e=[];if(o.filter)e=o.filter.call(this,t,this);else for(var n=0,a=this.length;n<a;n++)t.call(this,this[n],n,this)&&e.push(this[n]);return new B(this.context,e)},flatten:function(){var t=[];return new B(this.context,t.concat.apply(t,this.toArray()))},join:o.join,indexOf:o.indexOf||function(t,e){for(var n=e||0,a=this.length;n<a;n++)if(this[n]===t)return n;return-1},iterator:function(t,e,n,a){var r,o,i,l,s,u,c,f,d=[],h=this.context,p=this.selector;for("string"==typeof t&&(a=n,n=e,e=t,t=!1),o=0,i=h.length;o<i;o++){var g=new B(h[o]);if("table"===e)(r=n.call(g,h[o],o))!==H&&d.push(r);else if("columns"===e||"rows"===e)(r=n.call(g,h[o],this[o],o))!==H&&d.push(r);else if("column"===e||"column-rows"===e||"row"===e||"cell"===e)for(c=this[o],"column-rows"===e&&(u=Fe(h[o],p.opts)),l=0,s=c.length;l<s;l++)f=c[l],(r="cell"===e?n.call(g,h[o],f.row,f.column,o,l):n.call(g,h[o],f,o,l,u))!==H&&d.push(r)}return d.length||a?((t=(a=new B(h,t?d.concat.apply([],d):d)).selector).rows=p.rows,t.cols=p.cols,t.opts=p.opts,a):this},lastIndexOf:o.lastIndexOf||function(t,e){return this.indexOf.apply(this.toArray.reverse(),arguments)},length:0,map:function(t){var e=[];if(o.map)e=o.map.call(this,t,this);else for(var n=0,a=this.length;n<a;n++)e.push(t.call(this,this[n],n));return new B(this.context,e)},pluck:function(t){var e=w.util.get(t);return this.map(function(t){return e(t)})},pop:o.pop,push:o.push,reduce:o.reduce||function(t,e){return et(this,t,e,0,this.length,1)},reduceRight:o.reduceRight||function(t,e){return et(this,t,e,this.length-1,-1,-1)},reverse:o.reverse,selector:null,shift:o.shift,slice:function(){return new B(this.context,this)},sort:o.sort,splice:o.splice,toArray:function(){return o.slice.call(this)},to$:function(){return P(this)},toJQuery:function(){return P(this)},unique:function(){return new B(this.context,z(this))},unshift:o.unshift}),B.extend=function(t,e,n){if(n.length&&e&&(e instanceof B||e.__dt_wrapper))for(var a,r=0,o=n.length;r<o;r++)e[(a=n[r]).name]="function"===a.type?function(e,n,a){return function(){var t=n.apply(e,arguments);return 
B.extend(t,t,a.methodExt),t}}(t,a.val,a):"object"===a.type?{}:a.val,e[a.name].__dt_wrapper=!0,B.extend(t,e[a.name],a.propExt)},B.register=e=function(t,e){if(Array.isArray(t))for(var n=0,a=t.length;n<a;n++)B.register(t[n],e);else for(var r=t.split("."),o=Ie,i=0,l=r.length;i<l;i++){var s,u,c=function(t,e){for(var n=0,a=t.length;n<a;n++)if(t[n].name===e)return t[n];return null}(o,u=(s=-1!==r[i].indexOf("()"))?r[i].replace("()",""):r[i]);c||o.push(c={name:u,val:{},methodExt:[],propExt:[],type:"object"}),i===l-1?(c.val=e,c.type="function"==typeof e?"function":P.isPlainObject(e)?"object":"other"):o=s?c.methodExt:c.propExt}},B.registerPlural=t=function(t,e,n){B.register(t,n),B.register(e,function(){var t=n.apply(this,arguments);return t===this?this:t instanceof B?t.length?Array.isArray(t[0])?new B(t.context,t[0]):t[0]:H:t})},e("tables()",function(t){return t!==H&&null!==t?new B(ye(t,this.context)):this}),e("table()",function(t){var t=this.tables(t),e=t.context;return e.length?new B(e[0]):t}),t("tables().nodes()","table().node()",function(){return this.iterator("table",function(t){return t.nTable},1)}),t("tables().body()","table().body()",function(){return this.iterator("table",function(t){return t.nTBody},1)}),t("tables().header()","table().header()",function(){return this.iterator("table",function(t){return t.nTHead},1)}),t("tables().footer()","table().footer()",function(){return this.iterator("table",function(t){return t.nTFoot},1)}),t("tables().containers()","table().container()",function(){return this.iterator("table",function(t){return t.nTableWrapper},1)}),e("draw()",function(e){return this.iterator("table",function(t){"page"===e?y(t):u(t,!1===(e="string"==typeof e?"full-hold"!==e:e))})}),e("page()",function(e){return e===H?this.page.info().page:this.iterator("table",function(t){Yt(t,e)})}),e("page.info()",function(t){var e,n,a,r,o;return 0===this.context.length?H:(n=(e=this.context[0])._iDisplayStart,a=e.oFeatures.bPaginate?e._iDisplayLength:-1,r=e.fnRecordsDisplay(),{page:(o=-1===a)?0:Math.floor(n/a),pages:o?1:Math.ceil(r/a),start:n,end:e.fnDisplayEnd(),length:a,recordsTotal:e.fnRecordsTotal(),recordsDisplay:r,serverSide:"ssp"===E(e)})}),e("page.len()",function(e){return e===H?0!==this.context.length?this.context[0]._iDisplayLength:H:this.iterator("table",function(t){$t(t,e)})}),e("ajax.json()",function(){var t=this.context;if(0<t.length)return t[0].json}),e("ajax.params()",function(){var t=this.context;if(0<t.length)return t[0].oAjaxData}),e("ajax.reload()",function(e,n){return this.iterator("table",function(t){De(t,!1===n,e)})}),e("ajax.url()",function(e){var t=this.context;return e===H?0===t.length?H:(t=t[0]).ajax?P.isPlainObject(t.ajax)?t.ajax.url:t.ajax:t.sAjaxSource:this.iterator("table",function(t){P.isPlainObject(t.ajax)?t.ajax.url=e:t.ajax=e})}),e("ajax.url().load()",function(e,n){return this.iterator("table",function(t){De(t,!1===n,e)})}),function(t,e){var n,a=[],r=t.aiDisplay,o=t.aiDisplayMaster,i=e.search,l=e.order,e=e.page;if("ssp"==E(t))return"removed"===i?[]:f(0,o.length);if("current"==e)for(u=t._iDisplayStart,c=t.fnDisplayEnd();u<c;u++)a.push(r[u]);else if("current"==l||"applied"==l){if("none"==i)a=o.slice();else if("applied"==i)a=r.slice();else if("removed"==i){for(var s={},u=0,c=r.length;u<c;u++)s[r[u]]=null;a=P.map(o,function(t){return s.hasOwnProperty(t)?null:t})}}else if("index"==l||"original"==l)for(u=0,c=t.aoData.length;u<c;u++)("none"==i||-1===(n=P.inArray(u,r))&&"removed"==i||0<=n&&"applied"==i)&&a.push(u);return 
a}),Le=(e("rows()",function(e,n){e===H?e="":P.isPlainObject(e)&&(n=e,e=""),n=we(n);var t=this.iterator("table",function(t){return _e("row",e,function(n){var t=d(n),a=r.aoData;if(null!==t&&!o)return[t];if(i=i||Fe(r,o),null!==t&&-1!==P.inArray(t,i))return[t];if(null===n||n===H||""===n)return i;if("function"==typeof n)return P.map(i,function(t){var e=a[t];return n(t,e._aData,e.nTr)?t:null});if(n.nodeName)return t=n._DT_RowIndex,e=n._DT_CellIndex,t!==H?a[t]&&a[t].nTr===n?[t]:[]:e?a[e.row]&&a[e.row].nTr===n.parentNode?[e.row]:[]:(t=P(n).closest("*[data-dt-row]")).length?[t.data("dt-row")]:[];if("string"==typeof n&&"#"===n.charAt(0)){var e=r.aIds[n.replace(/^#/,"")];if(e!==H)return[e.idx]}t=_(m(r.aoData,i,"nTr"));return P(t).filter(n).map(function(){return this._DT_RowIndex}).toArray()},r=t,o=n);var r,o,i},1);return t.selector.rows=e,t.selector.opts=n,t}),e("rows().nodes()",function(){return this.iterator("row",function(t,e){return t.aoData[e].nTr||H},1)}),e("rows().data()",function(){return this.iterator(!0,"rows",function(t,e){return m(t.aoData,e,"_aData")},1)}),t("rows().cache()","row().cache()",function(n){return this.iterator("row",function(t,e){t=t.aoData[e];return"search"===n?t._aFilterData:t._aSortData},1)}),t("rows().invalidate()","row().invalidate()",function(n){return this.iterator("row",function(t,e){bt(t,e,n)})}),t("rows().indexes()","row().index()",function(){return this.iterator("row",function(t,e){return e},1)}),t("rows().ids()","row().id()",function(t){for(var e=[],n=this.context,a=0,r=n.length;a<r;a++)for(var o=0,i=this[a].length;o<i;o++){var l=n[a].rowIdFn(n[a].aoData[this[a][o]]._aData);e.push((!0===t?"#":"")+l)}return new B(n,e)}),t("rows().remove()","row().remove()",function(){var f=this;return this.iterator("row",function(t,e,n){var a,r,o,i,l,s,u=t.aoData,c=u[e];for(u.splice(e,1),a=0,r=u.length;a<r;a++)if(s=(l=u[a]).anCells,null!==l.nTr&&(l.nTr._DT_RowIndex=a),null!==s)for(o=0,i=s.length;o<i;o++)s[o]._DT_CellIndex.row=a;gt(t.aiDisplayMaster,e),gt(t.aiDisplay,e),gt(f[n],e,!1),0<t._iRecordsDisplay&&t._iRecordsDisplay--,Se(t);n=t.rowIdFn(c._aData);n!==H&&delete t.aIds[n]}),this.iterator("table",function(t){for(var e=0,n=t.aoData.length;e<n;e++)t.aoData[e].idx=e}),this}),e("rows.add()",function(o){var t=this.iterator("table",function(t){for(var e,n=[],a=0,r=o.length;a<r;a++)(e=o[a]).nodeName&&"TR"===e.nodeName.toUpperCase()?n.push(ut(t,e)[0]):n.push(x(t,e));return n},1),e=this.rows(-1);return e.pop(),P.merge(e,t),e}),e("row()",function(t,e){return Ce(this.rows(t,e))}),e("row().data()",function(t){var e,n=this.context;return t===H?n.length&&this.length?n[0].aoData[this[0]]._aData:H:((e=n[0].aoData[this[0]])._aData=t,Array.isArray(t)&&e.nTr&&e.nTr.id&&b(n[0].rowId)(t,e.nTr.id),bt(n[0],this[0],"data"),this)}),e("row().node()",function(){var t=this.context;return t.length&&this.length&&t[0].aoData[this[0]].nTr||null}),e("row.add()",function(e){e instanceof P&&e.length&&(e=e[0]);var t=this.iterator("table",function(t){return e.nodeName&&"TR"===e.nodeName.toUpperCase()?ut(t,e)[0]:x(t,e)});return this.row(t[0])}),P(v).on("plugin-init.dt",function(t,e){var n=new B(e),a="on-plugin-init",r="stateSaveParams."+a,o="destroy. 
"+a,a=(n.on(r,function(t,e,n){for(var a=e.rowIdFn,r=e.aoData,o=[],i=0;i<r.length;i++)r[i]._detailsShow&&o.push("#"+a(r[i]._aData));n.childRows=o}),n.on(o,function(){n.off(r+" "+o)}),n.state.loaded());a&&a.childRows&&n.rows(P.map(a.childRows,function(t){return t.replace(/:/g,"\\:")})).every(function(){R(e,null,"requestChild",[this])})}),w.util.throttle(function(t){de(t[0])},500)),Re=function(t,e){var n=t.context;n.length&&(e=n[0].aoData[e!==H?e:t[0]])&&e._details&&(e._details.remove(),e._detailsShow=H,e._details=H,P(e.nTr).removeClass("dt-hasChild"),Le(n))},Pe="row().child",je=Pe+"()",He=(e(je,function(t,e){var n=this.context;return t===H?n.length&&this.length?n[0].aoData[this[0]]._details:H:(!0===t?this.child.show():!1===t?Re(this):n.length&&this.length&&Te(n[0],n[0].aoData[this[0]],t,e),this)}),e([Pe+".show()",je+".show()"],function(t){return xe(this,!0),this}),e([Pe+".hide()",je+".hide()"],function(){return xe(this,!1),this}),e([Pe+".remove()",je+".remove()"],function(){return Re(this),this}),e(Pe+".isShown()",function(){var t=this.context;return t.length&&this.length&&t[0].aoData[this[0]]._detailsShow||!1}),/^([^:]+):(name|visIdx|visible)$/),Ne=(e("columns()",function(n,a){n===H?n="":P.isPlainObject(n)&&(a=n,n=""),a=we(a);var t=this.iterator("table",function(t){return e=n,l=a,s=(i=t).aoColumns,u=N(s,"sName"),c=N(s,"nTh"),_e("column",e,function(n){var a,t=d(n);if(""===n)return f(s.length);if(null!==t)return[0<=t?t:s.length+t];if("function"==typeof n)return a=Fe(i,l),P.map(s,function(t,e){return n(e,Ae(i,e,0,0,a),c[e])?e:null});var r="string"==typeof n?n.match(He):"";if(r)switch(r[2]){case"visIdx":case"visible":var e,o=parseInt(r[1],10);return o<0?[(e=P.map(s,function(t,e){return t.bVisible?e:null}))[e.length+o]]:[rt(i,o)];case"name":return P.map(u,function(t,e){return t===r[1]?e:null});default:return[]}return n.nodeName&&n._DT_CellIndex?[n._DT_CellIndex.column]:(t=P(c).filter(n).map(function(){return P.inArray(this,c)}).toArray()).length||!n.nodeName?t:(t=P(n).closest("*[data-dt-column]")).length?[t.data("dt-column")]:[]},i,l);var i,e,l,s,u,c},1);return t.selector.cols=n,t.selector.opts=a,t}),t("columns().header()","column().header()",function(t,e){return this.iterator("column",function(t,e){return t.aoColumns[e].nTh},1)}),t("columns().footer()","column().footer()",function(t,e){return this.iterator("column",function(t,e){return t.aoColumns[e].nTf},1)}),t("columns().data()","column().data()",function(){return this.iterator("column-rows",Ae,1)}),t("columns().dataSrc()","column().dataSrc()",function(){return this.iterator("column",function(t,e){return t.aoColumns[e].mData},1)}),t("columns().cache()","column().cache()",function(o){return this.iterator("column-rows",function(t,e,n,a,r){return m(t.aoData,r,"search"===o?"_aFilterData":"_aSortData",e)},1)}),t("columns().nodes()","column().nodes()",function(){return this.iterator("column-rows",function(t,e,n,a,r){return m(t.aoData,r,"anCells",e)},1)}),t("columns().visible()","column().visible()",function(f,n){var e=this,t=this.iterator("column",function(t,e){if(f===H)return t.aoColumns[e].bVisible;var n,a,r=e,e=f,o=t.aoColumns,i=o[r],l=t.aoData;if(e===H)i.bVisible;else if(i.bVisible!==e){if(e)for(var s=P.inArray(!0,N(o,"bVisible"),r+1),u=0,c=l.length;u<c;u++)a=l[u].nTr,n=l[u].anCells,a&&a.insertBefore(n[r],n[s]||null);else P(N(t.aoData,"anCells",r)).detach();i.bVisible=e}});return 
f!==H&&this.iterator("table",function(t){Dt(t,t.aoHeader),Dt(t,t.aoFooter),t.aiDisplay.length||P(t.nTBody).find("td[colspan]").attr("colspan",T(t)),de(t),e.iterator("column",function(t,e){R(t,null,"column-visibility",[t,e,f,n])}),n!==H&&!n||e.columns.adjust()}),t}),t("columns().indexes()","column().index()",function(n){return this.iterator("column",function(t,e){return"visible"===n?ot(t,e):e},1)}),e("columns.adjust()",function(){return this.iterator("table",function(t){O(t)},1)}),e("column.index()",function(t,e){var n;if(0!==this.context.length)return n=this.context[0],"fromVisible"===t||"toData"===t?rt(n,e):"fromData"===t||"toVisible"===t?ot(n,e):void 0}),e("column()",function(t,e){return Ce(this.columns(t,e))}),e("cells()",function(g,t,b){var a,r,o,i,l,s,e;return P.isPlainObject(g)&&(g.row===H?(b=g,g=null):(b=t,t=null)),P.isPlainObject(t)&&(b=t,t=null),null===t||t===H?this.iterator("table",function(t){return a=t,t=g,e=we(b),f=a.aoData,d=Fe(a,e),n=_(m(f,d,"anCells")),h=P(Y([],n)),p=a.aoColumns.length,_e("cell",t,function(t){var e,n="function"==typeof t;if(null===t||t===H||n){for(o=[],i=0,l=d.length;i<l;i++)for(r=d[i],s=0;s<p;s++)u={row:r,column:s},(!n||(c=f[r],t(u,S(a,r,s),c.anCells?c.anCells[s]:null)))&&o.push(u);return o}return P.isPlainObject(t)?t.column!==H&&t.row!==H&&-1!==P.inArray(t.row,d)?[t]:[]:(e=h.filter(t).map(function(t,e){return{row:e._DT_CellIndex.row,column:e._DT_CellIndex.column}}).toArray()).length||!t.nodeName?e:(c=P(t).closest("*[data-dt-row]")).length?[{row:c.data("dt-row"),column:c.data("dt-column")}]:[]},a,e);var a,e,r,o,i,l,s,u,c,f,d,n,h,p}):(e=b?{page:b.page,order:b.order,search:b.search}:{},a=this.columns(t,e),r=this.rows(g,e),e=this.iterator("table",function(t,e){var n=[];for(o=0,i=r[e].length;o<i;o++)for(l=0,s=a[e].length;l<s;l++)n.push({row:r[e][o],column:a[e][l]});return n},1),e=b&&b.selected?this.cells(e,b):e,P.extend(e.selector,{cols:t,rows:g,opts:b}),e)}),t("cells().nodes()","cell().node()",function(){return this.iterator("cell",function(t,e,n){t=t.aoData[e];return t&&t.anCells?t.anCells[n]:H},1)}),e("cells().data()",function(){return this.iterator("cell",function(t,e,n){return S(t,e,n)},1)}),t("cells().cache()","cell().cache()",function(a){return a="search"===a?"_aFilterData":"_aSortData",this.iterator("cell",function(t,e,n){return t.aoData[e][a][n]},1)}),t("cells().render()","cell().render()",function(a){return this.iterator("cell",function(t,e,n){return S(t,e,n,a)},1)}),t("cells().indexes()","cell().index()",function(){return this.iterator("cell",function(t,e,n){return{row:e,column:n,columnVisible:ot(t,n)}},1)}),t("cells().invalidate()","cell().invalidate()",function(a){return this.iterator("cell",function(t,e,n){bt(t,e,a,n)})}),e("cell()",function(t,e,n){return Ce(this.cells(t,e,n))}),e("cell().data()",function(t){var e=this.context,n=this[0];return t===H?e.length&&n.length?S(e[0],n[0].row,n[0].column):H:(ct(e[0],n[0].row,n[0].column,t),bt(e[0],n[0].row,"data",n[0].column),this)}),e("order()",function(e,t){var n=this.context;return e===H?0!==n.length?n[0].aaSorting:H:("number"==typeof e?e=[[e,t]]:e.length&&!Array.isArray(e[0])&&(e=Array.prototype.slice.call(arguments)),this.iterator("table",function(t){t.aaSorting=e.slice()}))}),e("order.listener()",function(e,n,a){return this.iterator("table",function(t){ue(t,e,n,a)})}),e("order.fixed()",function(e){var t;return 
e?this.iterator("table",function(t){t.aaSortingFixed=P.extend(!0,{},e)}):(t=(t=this.context).length?t[0].aaSortingFixed:H,Array.isArray(t)?{pre:t}:t)}),e(["columns().order()","column().order()"],function(a){var r=this;return this.iterator("table",function(t,e){var n=[];P.each(r[e],function(t,e){n.push([e,a])}),t.aaSorting=n})}),e("search()",function(e,n,a,r){var t=this.context;return e===H?0!==t.length?t[0].oPreviousSearch.sSearch:H:this.iterator("table",function(t){t.oFeatures.bFilter&&Rt(t,P.extend({},t.oPreviousSearch,{sSearch:e+"",bRegex:null!==n&&n,bSmart:null===a||a,bCaseInsensitive:null===r||r}),1)})}),t("columns().search()","column().search()",function(a,r,o,i){return this.iterator("column",function(t,e){var n=t.aoPreSearchCols;if(a===H)return n[e].sSearch;t.oFeatures.bFilter&&(P.extend(n[e],{sSearch:a+"",bRegex:null!==r&&r,bSmart:null===o||o,bCaseInsensitive:null===i||i}),Rt(t,t.oPreviousSearch,1))})}),e("state()",function(){return this.context.length?this.context[0].oSavedState:null}),e("state.clear()",function(){return this.iterator("table",function(t){t.fnStateSaveCallback.call(t.oInstance,t,{})})}),e("state.loaded()",function(){return this.context.length?this.context[0].oLoadedState:null}),e("state.save()",function(){return this.iterator("table",function(t){de(t)})}),w.use=function(t,e){"lib"===e||t.fn?P=t:"win"==e||t.document?v=(j=t).document:"datetime"!==e&&"DateTime"!==t.type||(w.DateTime=t)},w.factory=function(t,e){var n=!1;return t&&t.document&&(v=(j=t).document),e&&e.fn&&e.fn.jquery&&(P=e,n=!0),n},w.versionCheck=w.fnVersionCheck=function(t){for(var e,n,a=w.version.split("."),r=t.split("."),o=0,i=r.length;o<i;o++)if((e=parseInt(a[o],10)||0)!==(n=parseInt(r[o],10)||0))return n<e;return!0},w.isDataTable=w.fnIsDataTable=function(t){var r=P(t).get(0),o=!1;return t instanceof w.Api||(P.each(w.settings,function(t,e){var n=e.nScrollHead?P("table",e.nScrollHead)[0]:null,a=e.nScrollFoot?P("table",e.nScrollFoot)[0]:null;e.nTable!==r&&n!==r&&a!==r||(o=!0)}),o)},w.tables=w.fnTables=function(e){var t=!1,n=(P.isPlainObject(e)&&(t=e.api,e=e.visible),P.map(w.settings,function(t){if(!e||P(t.nTable).is(":visible"))return t.nTable}));return t?new B(n):n},w.camelToHungarian=C,e("$()",function(t,e){e=this.rows(e).nodes(),e=P(e);return P([].concat(e.filter(t).toArray(),e.find(t).toArray()))}),P.each(["on","one","off"],function(t,n){e(n+"()",function(){var t=Array.prototype.slice.call(arguments),e=(t[0]=P.map(t[0].split(/\s/),function(t){return t.match(/\.dt\b/)?t:t+".dt"}).join(" "),P(this.tables().nodes()));return e[n].apply(e,t),this})}),e("clear()",function(){return this.iterator("table",function(t){pt(t)})}),e("settings()",function(){return new B(this.context,this.context)}),e("init()",function(){var t=this.context;return t.length?t[0].oInit:null}),e("data()",function(){return this.iterator("table",function(t){return N(t.aoData,"_aData")}).flatten()}),e("destroy()",function(c){return c=c||!1,this.iterator("table",function(e){var n,t=e.oClasses,a=e.nTable,r=e.nTBody,o=e.nTHead,i=e.nTFoot,l=P(a),r=P(r),s=P(e.nTableWrapper),u=P.map(e.aoData,function(t){return t.nTr}),i=(e.bDestroying=!0,R(e,"aoDestroyCallback","destroy",[e]),c||new B(e).columns().visible(!0),s.off(".DT").find(":not(tbody *)").off(".DT"),P(j).off(".DT-"+e.sInstance),a!=o.parentNode&&(l.children("thead").detach(),l.append(o)),i&&a!=i.parentNode&&(l.children("tfoot").detach(),l.append(i)),e.aaSorting=[],e.aaSortingFixed=[],ce(e),P(u).removeClass(e.asStripeClasses.join(" ")),P("th, td",o).removeClass(t.sSortable+" 
"+t.sSortableAsc+" "+t.sSortableDesc+" "+t.sSortableNone),r.children().detach(),r.append(u),e.nTableWrapper.parentNode),o=c?"remove":"detach",u=(l[o](),s[o](),!c&&i&&(i.insertBefore(a,e.nTableReinsertBefore),l.css("width",e.sDestroyWidth).removeClass(t.sTable),n=e.asDestroyStripes.length)&&r.children().each(function(t){P(this).addClass(e.asDestroyStripes[t%n])}),P.inArray(e,w.settings));-1!==u&&w.settings.splice(u,1)})}),P.each(["column","row","cell"],function(t,s){e(s+"s().every()",function(o){var i=this.selector.opts,l=this;return this.iterator(s,function(t,e,n,a,r){o.call(l[s](e,"cell"===s?n:i,"cell"===s?i:H),e,n,a,r)})})}),e("i18n()",function(t,e,n){var a=this.context[0],t=A(t)(a.oLanguage);return t===H&&(t=e),"string"==typeof(t=n!==H&&P.isPlainObject(t)?t[n]!==H?t[n]:t._:t)?t.replace("%d",n):t}),w.version="1.13.8",w.settings=[],w.models={},w.models.oSearch={bCaseInsensitive:!0,sSearch:"",bRegex:!1,bSmart:!0,return:!1},w.models.oRow={nTr:null,anCells:null,_aData:[],_aSortData:null,_aFilterData:null,_sFilterRow:null,_sRowStripe:"",src:null,idx:-1},w.models.oColumn={idx:null,aDataSort:null,asSorting:null,bSearchable:null,bSortable:null,bVisible:null,_sManualType:null,_bAttrSrc:!1,fnCreatedCell:null,fnGetData:null,fnSetData:null,mData:null,mRender:null,nTh:null,nTf:null,sClass:null,sContentPadding:null,sDefaultContent:null,sName:null,sSortDataType:"std",sSortingClass:null,sSortingClassJUI:null,sTitle:null,sType:null,sWidth:null,sWidthOrig:null},w.defaults={aaData:null,aaSorting:[[0,"asc"]],aaSortingFixed:[],ajax:null,aLengthMenu:[10,25,50,100],aoColumns:null,aoColumnDefs:null,aoSearchCols:[],asStripeClasses:null,bAutoWidth:!0,bDeferRender:!1,bDestroy:!1,bFilter:!0,bInfo:!0,bLengthChange:!0,bPaginate:!0,bProcessing:!1,bRetrieve:!1,bScrollCollapse:!1,bServerSide:!1,bSort:!0,bSortMulti:!0,bSortCellsTop:!1,bSortClasses:!0,bStateSave:!1,fnCreatedRow:null,fnDrawCallback:null,fnFooterCallback:null,fnFormatNumber:function(t){return t.toString().replace(/\B(?=(\d{3})+(?!\d))/g,this.oLanguage.sThousands)},fnHeaderCallback:null,fnInfoCallback:null,fnInitComplete:null,fnPreDrawCallback:null,fnRowCallback:null,fnServerData:null,fnServerParams:null,fnStateLoadCallback:function(t){try{return JSON.parse((-1===t.iStateDuration?sessionStorage:localStorage).getItem("DataTables_"+t.sInstance+"_"+location.pathname))}catch(t){return{}}},fnStateLoadParams:null,fnStateLoaded:null,fnStateSaveCallback:function(t,e){try{(-1===t.iStateDuration?sessionStorage:localStorage).setItem("DataTables_"+t.sInstance+"_"+location.pathname,JSON.stringify(e))}catch(t){}},fnStateSaveParams:null,iStateDuration:7200,iDeferLoading:null,iDisplayLength:10,iDisplayStart:0,iTabIndex:0,oClasses:{},oLanguage:{oAria:{sSortAscending:": activate to sort column ascending",sSortDescending:": activate to sort column descending"},oPaginate:{sFirst:"First",sLast:"Last",sNext:"Next",sPrevious:"Previous"},sEmptyTable:"No data available in table",sInfo:"Showing _START_ to _END_ of _TOTAL_ entries",sInfoEmpty:"Showing 0 to 0 of 0 entries",sInfoFiltered:"(filtered from _MAX_ total entries)",sInfoPostFix:"",sDecimal:"",sThousands:",",sLengthMenu:"Show _MENU_ entries",sLoadingRecords:"Loading...",sProcessing:"",sSearch:"Search:",sSearchPlaceholder:"",sUrl:"",sZeroRecords:"No matching records 
found"},oSearch:P.extend({},w.models.oSearch),sAjaxDataProp:"data",sAjaxSource:null,sDom:"lfrtip",searchDelay:null,sPaginationType:"simple_numbers",sScrollX:"",sScrollXInner:"",sScrollY:"",sServerMethod:"GET",renderer:null,rowId:"DT_RowId"},i(w.defaults),w.defaults.column={aDataSort:null,iDataSort:-1,asSorting:["asc","desc"],bSearchable:!0,bSortable:!0,bVisible:!0,fnCreatedCell:null,mData:null,mRender:null,sCellType:"td",sClass:"",sContentPadding:"",sDefaultContent:null,sName:"",sSortDataType:"std",sTitle:null,sType:null,sWidth:null},i(w.defaults.column),w.models.oSettings={oFeatures:{bAutoWidth:null,bDeferRender:null,bFilter:null,bInfo:null,bLengthChange:null,bPaginate:null,bProcessing:null,bServerSide:null,bSort:null,bSortMulti:null,bSortClasses:null,bStateSave:null},oScroll:{bCollapse:null,iBarWidth:0,sX:null,sXInner:null,sY:null},oLanguage:{fnInfoCallback:null},oBrowser:{bScrollOversize:!1,bScrollbarLeft:!1,bBounding:!1,barWidth:0},ajax:null,aanFeatures:[],aoData:[],aiDisplay:[],aiDisplayMaster:[],aIds:{},aoColumns:[],aoHeader:[],aoFooter:[],oPreviousSearch:{},aoPreSearchCols:[],aaSorting:null,aaSortingFixed:[],asStripeClasses:null,asDestroyStripes:[],sDestroyWidth:0,aoRowCallback:[],aoHeaderCallback:[],aoFooterCallback:[],aoDrawCallback:[],aoRowCreatedCallback:[],aoPreDrawCallback:[],aoInitComplete:[],aoStateSaveParams:[],aoStateLoadParams:[],aoStateLoaded:[],sTableId:"",nTable:null,nTHead:null,nTFoot:null,nTBody:null,nTableWrapper:null,bDeferLoading:!1,bInitialised:!1,aoOpenRows:[],sDom:null,searchDelay:null,sPaginationType:"two_button",iStateDuration:0,aoStateSave:[],aoStateLoad:[],oSavedState:null,oLoadedState:null,sAjaxSource:null,sAjaxDataProp:null,jqXHR:null,json:H,oAjaxData:H,fnServerData:null,aoServerParams:[],sServerMethod:null,fnFormatNumber:null,aLengthMenu:null,iDraw:0,bDrawing:!1,iDrawError:-1,_iDisplayLength:10,_iDisplayStart:0,_iRecordsTotal:0,_iRecordsDisplay:0,oClasses:{},bFiltered:!1,bSorted:!1,bSortCellsTop:null,oInit:null,aoDestroyCallback:[],fnRecordsTotal:function(){return"ssp"==E(this)?+this._iRecordsTotal:this.aiDisplayMaster.length},fnRecordsDisplay:function(){return"ssp"==E(this)?+this._iRecordsDisplay:this.aiDisplay.length},fnDisplayEnd:function(){var t=this._iDisplayLength,e=this._iDisplayStart,n=e+t,a=this.aiDisplay.length,r=this.oFeatures,o=r.bPaginate;return r.bServerSide?!1===o||-1===t?e+a:Math.min(e+t,this._iRecordsDisplay):!o||a<n||-1===t?a:n},oInstance:null,sInstance:null,iTabIndex:0,nScrollHead:null,nScrollFoot:null,aLastSort:[],oPlugins:{},rowIdFn:null,rowId:null},w.ext=p={buttons:{},classes:{},builder:"-source-",errMode:"alert",feature:[],search:[],selector:{cell:[],column:[],row:[]},internal:{},legacy:{ajax:null},pager:{},renderer:{pageButton:{},header:{}},order:{},type:{detect:[],search:{},order:{}},_unique:0,fnVersionCheck:w.fnVersionCheck,iApiIndex:0,oJUIClasses:{},sVersion:w.version},P.extend(p,{afnFiltering:p.search,aTypes:p.type.detect,ofnSearch:p.type.search,oSort:p.type.order,afnSortData:p.order,aoFeatures:p.feature,oApi:p.internal,oStdClasses:p.classes,oPagination:p.pager}),P.extend(w.ext.classes,{sTable:"dataTable",sNoFooter:"no-footer",sPageButton:"paginate_button",sPageButtonActive:"current",sPageButtonDisabled:"disabled",sStripeOdd:"odd",sStripeEven:"even",sRowEmpty:"dataTables_empty",sWrapper:"dataTables_wrapper",sFilter:"dataTables_filter",sInfo:"dataTables_info",sPaging:"dataTables_paginate 
paging_",sLength:"dataTables_length",sProcessing:"dataTables_processing",sSortAsc:"sorting_asc",sSortDesc:"sorting_desc",sSortable:"sorting",sSortableAsc:"sorting_desc_disabled",sSortableDesc:"sorting_asc_disabled",sSortableNone:"sorting_disabled",sSortColumn:"sorting_",sFilterInput:"",sLengthSelect:"",sScrollWrapper:"dataTables_scroll",sScrollHead:"dataTables_scrollHead",sScrollHeadInner:"dataTables_scrollHeadInner",sScrollBody:"dataTables_scrollBody",sScrollFoot:"dataTables_scrollFoot",sScrollFootInner:"dataTables_scrollFootInner",sHeaderTH:"",sFooterTH:"",sSortJUIAsc:"",sSortJUIDesc:"",sSortJUI:"",sSortJUIAscAllowed:"",sSortJUIDescAllowed:"",sSortJUIWrapper:"",sSortIcon:"",sJUIHeader:"",sJUIFooter:""}),w.ext.pager);function Oe(t,e){var n=[],a=Ne.numbers_length,r=Math.floor(a/2);return e<=a?n=f(0,e):t<=r?((n=f(0,a-2)).push("ellipsis"),n.push(e-1)):((e-1-r<=t?n=f(e-(a-2),e):((n=f(t-r+2,t+r-1)).push("ellipsis"),n.push(e-1),n)).splice(0,0,"ellipsis"),n.splice(0,0,0)),n.DT_el="span",n}P.extend(Ne,{simple:function(t,e){return["previous","next"]},full:function(t,e){return["first","previous","next","last"]},numbers:function(t,e){return[Oe(t,e)]},simple_numbers:function(t,e){return["previous",Oe(t,e),"next"]},full_numbers:function(t,e){return["first","previous",Oe(t,e),"next","last"]},first_last_numbers:function(t,e){return["first",Oe(t,e),"last"]},_numbers:Oe,numbers_length:7}),P.extend(!0,w.ext.renderer,{pageButton:{_:function(u,t,c,e,f,d){function h(t,e){for(var n,a=b.sPageButtonDisabled,r=function(t){Yt(u,t.data.action,!0)},o=0,i=e.length;o<i;o++)if(n=e[o],Array.isArray(n)){var l=P("<"+(n.DT_el||"div")+"/>").appendTo(t);h(l,n)}else{var s=!1;switch(p=null,g=n){case"ellipsis":t.append('<span class="ellipsis">&#x2026;</span>');break;case"first":p=m.sFirst,0===f&&(s=!0);break;case"previous":p=m.sPrevious,0===f&&(s=!0);break;case"next":p=m.sNext,0!==d&&f!==d-1||(s=!0);break;case"last":p=m.sLast,0!==d&&f!==d-1||(s=!0);break;default:p=u.fnFormatNumber(n+1),g=f===n?b.sPageButtonActive:""}null!==p&&(l=u.oInit.pagingTag||"a",s&&(g+=" "+a),me(P("<"+l+">",{class:b.sPageButton+" "+g,"aria-controls":u.sTableId,"aria-disabled":s?"true":null,"aria-label":S[n],role:"link","aria-current":g===b.sPageButtonActive?"page":null,"data-dt-idx":n,tabindex:s?-1:u.iTabIndex,id:0===c&&"string"==typeof n?u.sTableId+"_"+n:null}).html(p).appendTo(t),{action:n},r))}}var p,g,n,b=u.oClasses,m=u.oLanguage.oPaginate,S=u.oLanguage.oAria.paginate||{};try{n=P(t).find(v.activeElement).data("dt-idx")}catch(t){}h(P(t).empty(),e),n!==H&&P(t).find("[data-dt-idx="+n+"]").trigger("focus")}}}),P.extend(w.ext.type.detect,[function(t,e){e=e.oLanguage.sDecimal;return l(t,e)?"num"+e:null},function(t,e){var n;return(!t||t instanceof Date||X.test(t))&&(null!==(n=Date.parse(t))&&!isNaN(n)||h(t))?"date":null},function(t,e){e=e.oLanguage.sDecimal;return l(t,e,!0)?"num-fmt"+e:null},function(t,e){e=e.oLanguage.sDecimal;return a(t,e)?"html-num"+e:null},function(t,e){e=e.oLanguage.sDecimal;return a(t,e,!0)?"html-num-fmt"+e:null},function(t,e){return h(t)||"string"==typeof t&&-1!==t.indexOf("<")?"html":null}]),P.extend(w.ext.type.search,{html:function(t){return h(t)?t:"string"==typeof t?t.replace(U," ").replace(V,""):""},string:function(t){return!h(t)&&"string"==typeof t?t.replace(U," "):t}});function ke(t,e,n,a){var r;return 0===t||t&&"-"!==t?"number"==(r=typeof t)||"bigint"==r?t:+(t=(t=e?$(t,e):t).replace&&(n&&(t=t.replace(n,"")),a)?t.replace(a,""):t):-1/0}function Me(n){P.each({num:function(t){return ke(t,n)},"num-fmt":function(t){return 
ke(t,n,q)},"html-num":function(t){return ke(t,n,V)},"html-num-fmt":function(t){return ke(t,n,V,q)}},function(t,e){p.type.order[t+n+"-pre"]=e,t.match(/^html\-/)&&(p.type.search[t+n]=p.type.search.html)})}P.extend(p.type.order,{"date-pre":function(t){t=Date.parse(t);return isNaN(t)?-1/0:t},"html-pre":function(t){return h(t)?"":t.replace?t.replace(/<.*?>/g,"").toLowerCase():t+""},"string-pre":function(t){return h(t)?"":"string"==typeof t?t.toLowerCase():t.toString?t.toString():""},"string-asc":function(t,e){return t<e?-1:e<t?1:0},"string-desc":function(t,e){return t<e?1:e<t?-1:0}}),Me(""),P.extend(!0,w.ext.renderer,{header:{_:function(r,o,i,l){P(r.nTable).on("order.dt.DT",function(t,e,n,a){r===e&&(e=i.idx,o.removeClass(l.sSortAsc+" "+l.sSortDesc).addClass("asc"==a[e]?l.sSortAsc:"desc"==a[e]?l.sSortDesc:i.sSortingClass))})},jqueryui:function(r,o,i,l){P("<div/>").addClass(l.sSortJUIWrapper).append(o.contents()).append(P("<span/>").addClass(l.sSortIcon+" "+i.sSortingClassJUI)).appendTo(o),P(r.nTable).on("order.dt.DT",function(t,e,n,a){r===e&&(e=i.idx,o.removeClass(l.sSortAsc+" "+l.sSortDesc).addClass("asc"==a[e]?l.sSortAsc:"desc"==a[e]?l.sSortDesc:i.sSortingClass),o.find("span."+l.sSortIcon).removeClass(l.sSortJUIAsc+" "+l.sSortJUIDesc+" "+l.sSortJUI+" "+l.sSortJUIAscAllowed+" "+l.sSortJUIDescAllowed).addClass("asc"==a[e]?l.sSortJUIAsc:"desc"==a[e]?l.sSortJUIDesc:i.sSortingClassJUI))})}}});function We(t){return"string"==typeof(t=Array.isArray(t)?t.join(","):t)?t.replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;"):t}function Ee(t,e,n,a,r){return j.moment?t[e](r):j.luxon?t[n](r):a?t[a](r):t}var Be=!1;function Ue(t,e,n){var a;if(j.moment){if(!(a=j.moment.utc(t,e,n,!0)).isValid())return null}else if(j.luxon){if(!(a=e&&"string"==typeof t?j.luxon.DateTime.fromFormat(t,e):j.luxon.DateTime.fromISO(t)).isValid)return null;a.setLocale(n)}else e?(Be||alert("DataTables warning: Formatted date without Moment.js or Luxon - https://datatables.net/tn/17"),Be=!0):a=new Date(t);return a}function Ve(s){return function(a,r,o,i){0===arguments.length?(o="en",a=r=null):1===arguments.length?(o="en",r=a,a=null):2===arguments.length&&(o=r,r=a,a=null);var l="datetime-"+r;return w.ext.type.order[l]||(w.ext.type.detect.unshift(function(t){return t===l&&l}),w.ext.type.order[l+"-asc"]=function(t,e){t=t.valueOf(),e=e.valueOf();return t===e?0:t<e?-1:1},w.ext.type.order[l+"-desc"]=function(t,e){t=t.valueOf(),e=e.valueOf();return t===e?0:e<t?-1:1}),function(t,e){var n;return null!==t&&t!==H||(t="--now"===i?(n=new Date,new Date(Date.UTC(n.getFullYear(),n.getMonth(),n.getDate(),n.getHours(),n.getMinutes(),n.getSeconds()))):""),"type"===e?l:""===t?"sort"!==e?"":Ue("0000-01-01 00:00:00",null,o):!(null===r||a!==r||"sort"===e||"type"===e||t instanceof Date)||null===(n=Ue(t,a,o))?t:"sort"===e?n:(t=null===r?Ee(n,"toDate","toJSDate","")[s]():Ee(n,"format","toFormat","toISOString",r),"display"===e?We(t):t)}}}var Xe=",",Je=".";if(j.Intl!==H)try{for(var qe=(new Intl.NumberFormat).formatToParts(100000.1),n=0;n<qe.length;n++)"group"===qe[n].type?Xe=qe[n].value:"decimal"===qe[n].type&&(Je=qe[n].value)}catch(t){}function $e(e){return function(){var t=[ge(this[w.ext.iApiIndex])].concat(Array.prototype.slice.call(arguments));return w.ext.internal[e].apply(this,t)}}return w.datetime=function(n,a){var r="datetime-detect-"+n;a=a||"en",w.ext.type.order[r]||(w.ext.type.detect.unshift(function(t){var e=Ue(t,n,a);return!(""!==t&&!e)&&r}),w.ext.type.order[r+"-pre"]=function(t){return 
Ue(t,n,a)||0})},w.render={date:Ve("toLocaleDateString"),datetime:Ve("toLocaleString"),time:Ve("toLocaleTimeString"),number:function(a,r,o,i,l){return null!==a&&a!==H||(a=Xe),null!==r&&r!==H||(r=Je),{display:function(t){if("number"!=typeof t&&"string"!=typeof t)return t;if(""===t||null===t)return t;var e=t<0?"-":"",n=parseFloat(t);if(isNaN(n))return We(t);n=n.toFixed(o),t=Math.abs(n);n=parseInt(t,10),t=o?r+(t-n).toFixed(o).substring(2):"";return(e=0===n&&0===parseFloat(t)?"":e)+(i||"")+n.toString().replace(/\B(?=(\d{3})+(?!\d))/g,a)+t+(l||"")}}},text:function(){return{display:We,filter:We}}},P.extend(w.ext.internal,{_fnExternApiFunc:$e,_fnBuildAjax:Tt,_fnAjaxUpdate:xt,_fnAjaxParameters:At,_fnAjaxUpdateDraw:It,_fnAjaxDataSrc:Ft,_fnAddColumn:nt,_fnColumnOptions:at,_fnAdjustColumnSizing:O,_fnVisibleToColumnIndex:rt,_fnColumnIndexToVisible:ot,_fnVisbleColumns:T,_fnGetColumns:it,_fnColumnTypes:lt,_fnApplyColumnDefs:st,_fnHungarianMap:i,_fnCamelToHungarian:C,_fnLanguageCompat:Z,_fnBrowserDetect:tt,_fnAddData:x,_fnAddTr:ut,_fnNodeToDataIndex:function(t,e){return e._DT_RowIndex!==H?e._DT_RowIndex:null},_fnNodeToColumnIndex:function(t,e,n){return P.inArray(n,t.aoData[e].anCells)},_fnGetCellData:S,_fnSetCellData:ct,_fnSplitObjNotation:dt,_fnGetObjectDataFn:A,_fnSetObjectDataFn:b,_fnGetDataMaster:ht,_fnClearTable:pt,_fnDeleteIndex:gt,_fnInvalidate:bt,_fnGetRowElements:mt,_fnCreateTr:St,_fnBuildHead:yt,_fnDrawHead:Dt,_fnDraw:y,_fnReDraw:u,_fnAddOptionsHtml:_t,_fnDetectHeader:wt,_fnGetUniqueThs:Ct,_fnFeatureHtmlFilter:Lt,_fnFilterComplete:Rt,_fnFilterCustom:Pt,_fnFilterColumn:jt,_fnFilter:Ht,_fnFilterCreateSearch:Nt,_fnEscapeRegex:Ot,_fnFilterData:Wt,_fnFeatureHtmlInfo:Ut,_fnUpdateInfo:Vt,_fnInfoMacros:Xt,_fnInitialise:Jt,_fnInitComplete:qt,_fnLengthChange:$t,_fnFeatureHtmlLength:Gt,_fnFeatureHtmlPaginate:zt,_fnPageChange:Yt,_fnFeatureHtmlProcessing:Zt,_fnProcessingDisplay:D,_fnFeatureHtmlTable:Kt,_fnScrollDraw:Qt,_fnApplyToChildren:k,_fnCalculateColumnWidths:ee,_fnThrottle:ne,_fnConvertToWidth:ae,_fnGetWidestNode:re,_fnGetMaxLenString:oe,_fnStringToCss:M,_fnSortFlatten:I,_fnSort:ie,_fnSortAria:le,_fnSortListener:se,_fnSortAttachListener:ue,_fnSortingClasses:ce,_fnSortData:fe,_fnSaveState:de,_fnLoadState:he,_fnImplementState:pe,_fnSettingsFromNode:ge,_fnLog:W,_fnMap:F,_fnBindAction:me,_fnCallbackReg:L,_fnCallbackFire:R,_fnLengthOverflow:Se,_fnRenderer:ve,_fnDataSource:E,_fnRowAttributes:vt,_fnExtend:be,_fnCalculateEnd:function(){}}),((P.fn.dataTable=w).$=P).fn.dataTableSettings=w.settings,P.fn.dataTableExt=w.ext,P.fn.DataTable=function(t){return P(this).dataTable(t).api()},P.each(w,function(t,e){P.fn.DataTable[t]=e}),w}); \ No newline at end of file
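
The block above is the vendored DataTables 1.13.8 distribution (jquery.dataTables-1.13.8.min.js), added so the new command-line-builds page can sort and paginate its event-log table; it is machine-minified output and not meant to be read line by line. For orientation, a minimal sketch of how the template later in this series drives it (the selector and options are the ones used there):

    // Minimal DataTables initialisation, assuming jQuery and the minified
    // build above are loaded first.
    $(document).ready(function () {
      $('#eventlog-table').DataTable({
        order: [[0, 'desc']],  // sort by the first column (Name), descending
        pageLength: 50         // show 50 event logs per page
      });
    });
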
diff --git a/bitbake/lib/toaster/toastergui/static/js/libtoaster.js b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
index f2c45c833e..d4ac31234c 100644
--- a/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
+++ b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
@@ -657,7 +657,7 @@ $(document).ready(function() {
657 hljs.initHighlightingOnLoad(); 657 hljs.initHighlightingOnLoad();
658 658
659 // Prevent invalid links from jumping page scroll 659 // Prevent invalid links from jumping page scroll
660 $('a[href=#]').click(function() { 660 $('a[href="#"]').click(function() {
661 return false; 661 return false;
662 }); 662 });
663 663
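
The libtoaster.js fix above is required by the jQuery upgrade later in this series: Sizzle in jQuery 3.x rejects attribute values that are unquoted and not valid CSS identifiers, so the bare # in a[href=#] throws a selector syntax error instead of matching. A small sketch of the behaviour difference (jQuery 3.x assumed):

    // Unquoted '#' is an invalid attribute selector under jQuery 3.x.
    try {
      $('a[href=#]');   // throws: Syntax error, unrecognized expression
    } catch (e) {
      console.log('old selector fails:', e.message);
    }
    // The quoted form matches placeholder links and cancels the click,
    // which keeps the page from jumping back to the top.
    $('a[href="#"]').on('click', function () {
      return false;
    });
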
diff --git a/bitbake/lib/toaster/toastergui/static/js/projectpage.js b/bitbake/lib/toaster/toastergui/static/js/projectpage.js
index 506471e091..a3c95810a7 100644
--- a/bitbake/lib/toaster/toastergui/static/js/projectpage.js
+++ b/bitbake/lib/toaster/toastergui/static/js/projectpage.js
@@ -61,7 +61,7 @@ function projectPageInit(ctx) {
61 distroChangeInput.val(urlParams.setDistro); 61 distroChangeInput.val(urlParams.setDistro);
62 distroChangeBtn.click(); 62 distroChangeBtn.click();
63 } else { 63 } else {
64 updateDistroName(prjInfo.distro.name); 64 updateDistroName(prjInfo.distro?.name);
65 } 65 }
66 66
67 /* Now we're really ready show the page */ 67 /* Now we're really ready show the page */
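
The projectpage.js change switches to optional chaining so a project without distro information no longer breaks the page: prjInfo.distro?.name evaluates to undefined instead of throwing when prjInfo.distro is null or undefined. A sketch of the difference (the prjInfo shapes here are assumed for illustration):

    // Hypothetical payloads as projectPageInit() might receive them.
    var withDistro = { distro: { name: 'poky' } };
    var withoutDistro = {};                    // distro not set on the project

    console.log(withDistro.distro?.name);      // "poky"
    console.log(withoutDistro.distro?.name);   // undefined, no exception
    // The old form would throw here:
    // TypeError: Cannot read properties of undefined (reading 'name')
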
diff --git a/bitbake/lib/toaster/toastergui/templates/base.html b/bitbake/lib/toaster/toastergui/templates/base.html
index 9e19cc33ca..e90be69620 100644
--- a/bitbake/lib/toaster/toastergui/templates/base.html
+++ b/bitbake/lib/toaster/toastergui/templates/base.html
@@ -14,11 +14,11 @@
14 14
15 <meta name="viewport" content="width=device-width, initial-scale=1.0" /> 15 <meta name="viewport" content="width=device-width, initial-scale=1.0" />
16 <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" /> 16 <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" />
17 <script src="{% static 'js/jquery-2.0.3.min.js' %}"> 17 <script src="{% static 'js/jquery-3.7.1.min.js' %}">
18 </script> 18 </script>
19 <script src="{% static 'js/jquery.cookie.js' %}"> 19 <script src="{% static 'js/jquery.cookie.js' %}">
20 </script> 20 </script>
21 <script src="{% static 'js/bootstrap.min.js' %}"> 21 <script src="{% static 'js/bootstrap-3.4.1.min.js' %}">
22 </script> 22 </script>
23 <script src="{% static 'js/typeahead.jquery.js' %}"> 23 <script src="{% static 'js/typeahead.jquery.js' %}">
24 </script> 24 </script>
@@ -94,7 +94,7 @@
94 </a> 94 </a>
95 <a class="brand" href="/">Toaster</a> 95 <a class="brand" href="/">Toaster</a>
96 {% if DEBUG %} 96 {% if DEBUG %}
97 <span class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i> 97 <span id="toaster-version-info-sign" class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i>
98 {% endif %} 98 {% endif %}
99 </div> 99 </div>
100 </div> 100 </div>
@@ -123,7 +123,7 @@
123 {% endif %} 123 {% endif %}
124 {% endif %} 124 {% endif %}
125 <li id="navbar-docs"> 125 <li id="navbar-docs">
126 <a target="_blank" href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html"> 126 <a target="_blank" href="http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual">
127 <i class="glyphicon glyphicon-book"></i> 127 <i class="glyphicon glyphicon-book"></i>
128 Documentation 128 Documentation
129 </a> 129 </a>
@@ -132,7 +132,8 @@
132 {% if project_enable %} 132 {% if project_enable %}
133 <a class="btn btn-default navbar-btn navbar-right" id="new-project-button" href="{% url 'newproject' %}">New project</a> 133 <a class="btn btn-default navbar-btn navbar-right" id="new-project-button" href="{% url 'newproject' %}">New project</a>
134 {% endif %} 134 {% endif %}
135 </div> 135 <a class="btn btn-default navbar-btn navbar-right" id="import_page" style="margin-right: 5px !important" id="import-cmdline-button" href="{% url 'cmdlines' %}">Import command line builds</a>
136 </div>
136 </div> 137 </div>
137 </nav> 138 </nav>
138 139
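
base.html moves the UI onto jQuery 3.7.1 and the patched Bootstrap 3.4.1 bundle, repoints the documentation links from www.yoctoproject.org/docs to docs.yoctoproject.org, and adds a navbar button for the new command-line-builds page (base_specific.html below receives the same script swaps). A quick runtime sanity check after such an upgrade, using the libraries' public version properties:

    // Both properties are standard APIs of the respective libraries.
    console.log($.fn.jquery);                      // expected: "3.7.1"
    console.log($.fn.tooltip.Constructor.VERSION); // expected: "3.4.1" (Bootstrap 3 plugin)

Note that the added anchor carries two id attributes (import_page and import-cmdline-button); HTML parsers drop repeated attributes after the first, so import_page is the effective id and is the one the new template's script targets.
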
diff --git a/bitbake/lib/toaster/toastergui/templates/base_specific.html b/bitbake/lib/toaster/toastergui/templates/base_specific.html
index e377cadd73..425f7ed73d 100644
--- a/bitbake/lib/toaster/toastergui/templates/base_specific.html
+++ b/bitbake/lib/toaster/toastergui/templates/base_specific.html
@@ -14,11 +14,11 @@
14 14
15 <meta name="viewport" content="width=device-width, initial-scale=1.0" /> 15 <meta name="viewport" content="width=device-width, initial-scale=1.0" />
16 <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" /> 16 <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" />
17 <script src="{% static 'js/jquery-2.0.3.min.js' %}"> 17 <script src="{% static 'js/jquery-3.7.1.min.js' %}">
18 </script> 18 </script>
19 <script src="{% static 'js/jquery.cookie.js' %}"> 19 <script src="{% static 'js/jquery.cookie.js' %}">
20 </script> 20 </script>
21 <script src="{% static 'js/bootstrap.min.js' %}"> 21 <script src="{% static 'js/bootstrap-3.4.1.min.js' %}">
22 </script> 22 </script>
23 <script src="{% static 'js/typeahead.jquery.js' %}"> 23 <script src="{% static 'js/typeahead.jquery.js' %}">
24 </script> 24 </script>
diff --git a/bitbake/lib/toaster/toastergui/templates/command_line_builds.html b/bitbake/lib/toaster/toastergui/templates/command_line_builds.html
new file mode 100644
index 0000000000..05db6727e7
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/command_line_builds.html
@@ -0,0 +1,209 @@
1{% extends "base.html" %}
2{% load projecttags %}
3{% load humanize %}
4{% load static %}
5
6{% block title %} Import Builds from eventlogs - Toaster {% endblock %}
7
8{% block pagecontent %}
9
10<div class="container-fluid">
11 <div id="overlay" class="hide">
12 <div class="spinner">
13 <div class="fa-spin">
14 </div>
15 </div>
16 </div>
17 <div class="row">
18 <div class="col-md-12">
19 <div class="page-header">
20 <div class="row">
21 <div class="col-md-6">
22 <h1>Import command line builds</h1>
23 </div>
24 {% if import_all %}
25 <div class="col-md-6">
26 <button id="import_all" type="button" class="btn btn-primary navbar-btn navbar-right">
27 <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import All
28 </button>
29 </div>
30 {% endif %}
31 </div>
32 </div>
33 {% if messages %}
34 <div class="row-fluid" id="empty-state-{{table_name}}">
35 {% for message in messages %}
36 <div class="alert alert-danger">{{message}}</div>
37 {%endfor%}
38 </div>
39 {% endif %}
40 <div class="row">
41 <h4 style="margin-left: 15px;"><strong>Import eventlog file</strong></h4>
42 <form method="POST" enctype="multipart/form-data" action="{% url 'cmdlines' %}" id="form_file">
43 {% csrf_token %}
44 <div class="col-md-6" style="padding-left: 20px;">
45 <div class="row">
46 <input type="hidden" value="{{dir}}" name="dir">
47 <div class="col-md-3"> {{ form.eventlog_file}} </div>
48 </div>
49 <div class="row" style="padding-top: 10px;">
50 <div class="col-md-6">
51 <button id="file_import" type="submit" disabled="disabled" class="btn btn-default navbar-btn" >
52 <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import
53 </button>
54 </div>
55 </div>
56 </div>
57 </form>
58 </div>
59
60 <div class="row" style="padding-top: 20px;">
61 <div class="col-md-8 ">
62 <h4><strong>Eventlogs from existing build directory: </strong>
63 <a href="#" data-toggle="tooltip" title="{{dir}}">
64 <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-info-circle" viewBox="0 0 16 16" data-toggle="tooltip">
65 <path d="M8 15A7 7 0 1 1 8 1a7 7 0 0 1 0 14m0 1A8 8 0 1 0 8 0a8 8 0 0 0 0 16"/>
66 <path d="m8.93 6.588-2.29.287-.082.38.45.083c.294.07.352.176.288.469l-.738 3.468c-.194.897.105 1.319.808 1.319.545 0 1.178-.252 1.465-.598l.088-.416c-.2.176-.492.246-.686.246-.275 0-.375-.193-.304-.533zM9 4.5a1 1 0 1 1-2 0 1 1 0 0 1 2 0"/>
67 </svg>
68 </a>
69 </h4>
70 {% if files %}
71 <div class="table-responsive">
72 <table class="table col-md-6 table-bordered table-hover" id="eventlog-table" style="border-collapse: collapse;">
73 <thead>
74 <tr class="row">
75 <th scope="col">Name</th>
76 <th scope="col">Size</th>
77 <th scope="col">Action</th>
78 </tr>
79 </thead>
80 <tbody>
81 {% for file in files %}
82 <tr class="row" style="height: 48px;">
83 <th scope="row" class="col-md-4" style="vertical-align: middle;">
84 <input type="hidden" value="{{file.name}}" name="{{file.name}}">{{file.name}}
85 </th>
86 <td class="col-md-4 align-middle" style="vertical-align: middle;">{{file.size|filesizeformat}}</td>
87 <td class="col-md-4 align-middle" style="vertical-align: middle;">
88 {% if file.imported == True and file.build_id is not None %}
89 <a href="{% url 'builddashboard' file.build_id %}">Build Details</a>
90 {% elif request.session.file == file.name or request.session.all_builds %}
91 <a data-toggle="tooltip" title="Build in progress">
92 <span class="glyphicon glyphicon-upload" style="font-size: 18px; color:grey"></span>
93 </a>
94 {%else%}
95 <a onclick="_ajax_update('{{file.name}}', false, '{{dir}}')" data-toggle="tooltip" title="Import File">
96 <span class="glyphicon glyphicon-upload" style="font-size: 18px;"></span>
97 </a>
98 {%endif%}
99 </td>
100 </tr>
101 {% endfor%}
102 </tbody>
103 </table>
104 </div>
105 {% else %}
106 <div class="row-fluid" id="empty-state-{{table_name}}">
107 <div class="alert alert-info">Sorry - no files found</div>
108 </div>
109 {%endif%}
110 </div>
111 </div>
112 </div>
113 </div>
114</div>
115
116<link rel="stylesheet" href="{% static 'css/jquery.dataTables-1.13.8.min.css' %}" type='text/css'/>
117<script src="{% static 'js/jquery.dataTables-1.13.8.min.js' %}"> </script>
118<script>
119
120function _ajax_update(file, all, dir){
121 function getCookie(name) {
122 var cookieValue = null;
123 if (document.cookie && document.cookie !== '') {
124 var cookies = document.cookie.split(';');
125 for (var i = 0; i < cookies.length; i++) {
126 var cookie = jQuery.trim(cookies[i]);
127 // Does this cookie string begin with the name we want?
128 if (cookie.substring(0, name.length + 1) === (name + '=')) {
129 cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
130 break;
131 }
132 }
133 }
134 return cookieValue;
135 }
136 var csrftoken = getCookie('csrftoken');
137
138 function csrfSafeMethod(method) {
139 // these HTTP methods do not require CSRF protection
140 return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method));
141 }
142 $.ajaxSetup({
143 beforeSend: function (xhr, settings) {
144 if (!csrfSafeMethod(settings.type) && !this.crossDomain) {
145 xhr.setRequestHeader("X-CSRFToken", csrftoken);
146 }
147 }
148 });
149
150 $.ajax({
151 url:'/toastergui/cmdline/',
152 type: "POST",
153 data: {file: file, all: all, dir: dir},
154 success:function(data){
155 if (data['response']=='building'){
156 location.reload()
157 } else {
158 window.location = '/toastergui/builds/'
159 }
160 },
161 complete:function(data){
162 },
163 error:function (xhr, textStatus, thrownError){
164 console.log('fail');
165 }
166 });
167}
168
169$('#import_all').on('click', function(){
170 _ajax_update("{{files | safe}}", true, "{{dir | safe}}");
171});
172
173
174$('#import_page').hide();
175
176$(function () {
177 $('[data-toggle="tooltip"]').tooltip()
178})
179
180
181$("#id_eventlog_file").change(function(){
182 $('#file_import').prop("disabled", false);
183 $('#file_import').addClass('btn-primary')
184 $('#file_import').removeClass('btn-default')
185})
186
187$(document).ajaxStart(function(){
188 $('#overlay').removeClass('hide');
189 window.setTimeout(
190 function() {
191 window.location = '/toastergui/builds/'
192 }, 10000)
193});
194
195$( "#form_file").on( "submit", function( event ) {
196 $('#overlay').removeClass('hide');
197 window.setTimeout(
198 function() {
199 window.location = '/toastergui/builds/'
200 }, 10000)
201});
202
203$(document).ready( function () {
204 $('#eventlog-table').DataTable({order: [[0, 'desc']], "pageLength": 50});
205});
206
207</script>
208
209{% endblock %}
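
The inline script in the new template follows the Django documentation's recipe for CSRF-protected AJAX: read the csrftoken cookie, then attach it as an X-CSRFToken header on every request whose method is not one of the safe ones. A condensed sketch of the same mechanism (cookie and header names are Django's defaults):

    // Condensed CSRF-for-AJAX helper mirroring the template's inline script.
    function getCookie(name) {
      // document.cookie looks like "a=1; b=2"; pull out `name`'s value.
      var match = document.cookie.match('(^|;)\\s*' + name + '=([^;]*)');
      return match ? decodeURIComponent(match[2]) : null;
    }
    $.ajaxSetup({
      beforeSend: function (xhr, settings) {
        // GET/HEAD/OPTIONS/TRACE are exempt from CSRF checks.
        if (!/^(GET|HEAD|OPTIONS|TRACE)$/.test(settings.type)) {
          xhr.setRequestHeader('X-CSRFToken', getCookie('csrftoken'));
        }
      }
    });
    // After this, the template's POST to /toastergui/cmdline/ carries the token.

The script also hides the navbar's own "Import command line builds" entry point while this page is open ($('#import_page').hide()), and both the form submit and any AJAX start arm a 10-second fallback redirect to /toastergui/builds/.
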
diff --git a/bitbake/lib/toaster/toastergui/templates/configvars.html b/bitbake/lib/toaster/toastergui/templates/configvars.html
index 33fef9316d..691dace3a2 100644
--- a/bitbake/lib/toaster/toastergui/templates/configvars.html
+++ b/bitbake/lib/toaster/toastergui/templates/configvars.html
@@ -66,7 +66,7 @@
66 <td class="description"> 66 <td class="description">
67 {% if variable.description %} 67 {% if variable.description %}
68 {{variable.description}} 68 {{variable.description}}
69 <a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-{{variable.variable_name|variable_parent_name}}" target="_blank"> 69 <a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-{{variable.variable_name|variable_parent_name}}" target="_blank">
70 <span class="glyphicon glyphicon-new-window get-info"></span></a> 70 <span class="glyphicon glyphicon-new-window get-info"></span></a>
71 {% endif %} 71 {% endif %}
72 </td> 72 </td>
diff --git a/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html b/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
index ca248962f0..41553c4f9d 100644
--- a/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
+++ b/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
@@ -11,7 +11,7 @@
11<script src="{% static 'js/layerDepsModal.js' %}"></script> 11<script src="{% static 'js/layerDepsModal.js' %}"></script>
12<script src="{% static 'js/projectpage.js' %}"></script> 12<script src="{% static 'js/projectpage.js' %}"></script>
13 13
14<script src="{% static 'js/bootstrap.min.js' %}"></script> 14<script src="{% static 'js/bootstrap-3.4.1.min.js' %}"></script>
15<script src="{% static 'js/filtersnippet.js' %}"></script> 15<script src="{% static 'js/filtersnippet.js' %}"></script>
16<script src="{% static 'js/importlayer.js' %}"></script> 16<script src="{% static 'js/importlayer.js' %}"></script>
17<script src="{% static 'js/highlight.pack.js' %}"></script> 17<script src="{% static 'js/highlight.pack.js' %}"></script>
diff --git a/bitbake/lib/toaster/toastergui/templates/landing.html b/bitbake/lib/toaster/toastergui/templates/landing.html
index bfaaf6fc83..589ee22634 100644
--- a/bitbake/lib/toaster/toastergui/templates/landing.html
+++ b/bitbake/lib/toaster/toastergui/templates/landing.html
@@ -12,10 +12,10 @@
12 <div class="col-md-6"> 12 <div class="col-md-6">
13 <h1>This is Toaster</h1> 13 <h1>This is Toaster</h1>
14 14
15 <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://www.yoctoproject.org/tools-resources/projects/bitbake">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p> 15 <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://docs.yoctoproject.org/bitbake.html">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p>
16 16
17 <p class="top-air"> 17 <p class="top-air">
18 <a class="btn btn-info btn-lg" href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#toaster-manual-setup-and-use"> 18 <a class="btn btn-info btn-lg" href="http://docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" style="min-width: 460px;">
19 Toaster is ready to capture your command line builds 19 Toaster is ready to capture your command line builds
20 </a> 20 </a>
21 </p> 21 </p>
@@ -23,7 +23,7 @@
23 {% if lvs_nos %} 23 {% if lvs_nos %}
24 {% if project_enable %} 24 {% if project_enable %}
25 <p class="top-air"> 25 <p class="top-air">
26 <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}"> 26 <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}" style="min-width: 460px;">
27 Create your first Toaster project to run manage builds 27 Create your first Toaster project to run manage builds
28 </a> 28 </a>
29 </p> 29 </p>
@@ -33,7 +33,7 @@
33 Toaster has no layer information. Without layer information, you cannot run builds. To generate layer information you can: 33 Toaster has no layer information. Without layer information, you cannot run builds. To generate layer information you can:
34 <ul> 34 <ul>
35 <li> 35 <li>
36 <a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#layer-source">Configure a layer source</a> 36 <a href="http://docs.yoctoproject.org/toaster-manual/reference.html#layer-source">Configure a layer source</a>
37 </li> 37 </li>
38 <li> 38 <li>
39 <a href="{% url 'newproject' %}">Create a project</a>, then import layers 39 <a href="{% url 'newproject' %}">Create a project</a>, then import layers
@@ -42,9 +42,15 @@
42 </div> 42 </div>
43 {% endif %} 43 {% endif %}
44 44
45 <p class="top-air">
46 <a class="btn btn-info btn-lg" href="{% url 'cmdlines' %}" style="min-width: 460px;">
47 Import command line event logs from build directory
48 </a>
49 </p>
50
45 <ul class="list-unstyled lead"> 51 <ul class="list-unstyled lead">
46 <li> 52 <li>
47 <a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html"> 53 <a href="http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual">
48 Read the Toaster manual 54 Read the Toaster manual
49 </a> 55 </a>
50 </li> 56 </li>
diff --git a/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html b/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html
deleted file mode 100644
index e7200b8412..0000000000
--- a/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html
+++ /dev/null
@@ -1,34 +0,0 @@
1{% extends "base.html" %}
2
3{% load static %}
4{% load projecttags %}
5{% load humanize %}
6
7{% block title %} Welcome to Toaster {% endblock %}
8
9{% block pagecontent %}
10
11 <div class="container">
12 <div class="row">
13 <!-- Empty - no build module -->
14 <div class="page-header top-air">
15 <h1>
16 This page only works with Toaster in 'Build' mode
17 </h1>
18 </div>
19 <div class="alert alert-info lead">
20 <p">
21 The 'Build' mode allows you to configure and run your Yocto Project builds from Toaster.
22 <ul>
23 <li><a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#intro-modes">
24 Read about the 'Build' mode
25 </a></li>
26 <li><a href="/">
27 View your builds
28 </a></li>
29 </ul>
30 </p>
31 </div>
32 </div>
33
34{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/layerdetails.html b/bitbake/lib/toaster/toastergui/templates/layerdetails.html
index 1e26e31c8b..923ca3bfe4 100644
--- a/bitbake/lib/toaster/toastergui/templates/layerdetails.html
+++ b/bitbake/lib/toaster/toastergui/templates/layerdetails.html
@@ -355,7 +355,7 @@
355 {% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX %} 355 {% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX %}
356 <dt>Layer index</dt> 356 <dt>Layer index</dt>
357 <dd> 357 <dd>
358 <a href="http://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a> 358 <a href="https://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
359 </dd> 359 </dd>
360 {% endif %} 360 {% endif %}
361 </dl> 361 </dl>
diff --git a/bitbake/lib/toaster/toastergui/templates/mrb_section.html b/bitbake/lib/toaster/toastergui/templates/mrb_section.html
index 98d9fac822..9fc7dfaee4 100644
--- a/bitbake/lib/toaster/toastergui/templates/mrb_section.html
+++ b/bitbake/lib/toaster/toastergui/templates/mrb_section.html
@@ -63,7 +63,7 @@
63 <%/if%> 63 <%/if%>
64 </div> 64 </div>
65 65
66 <div data-build-state="<%:state%>"> 66 <div class="build-state" data-build-state="<%:state%>">
67 <%if state == 'Cloning'%> 67 <%if state == 'Cloning'%>
68 <%include tmpl='#cloning-repos-build-template'/%> 68 <%include tmpl='#cloning-repos-build-template'/%>
69 <%else state == 'Parsing'%> 69 <%else state == 'Parsing'%>
diff --git a/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html
index a5d5893571..2493954deb 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html
@@ -18,7 +18,7 @@
18 </ul> 18 </ul>
19 <div class="tab-content"> 19 <div class="tab-content">
20 <div class="tab-pane active" id="dependencies"> 20 <div class="tab-pane active" id="dependencies">
21 {% ifequal runtime_deps|length 0 %} 21 {% if runtime_deps|length == 0 %}
22 <div class="alert alert-info"> 22 <div class="alert alert-info">
23 <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies. 23 <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies.
24 </div> 24 </div>
@@ -54,8 +54,8 @@
54 {% endfor %} 54 {% endfor %}
55 </tbody> 55 </tbody>
56 </table> 56 </table>
57 {% endifequal %} 57 {% endif %}
58 {% ifnotequal other_deps|length 0 %} 58 {% if other_deps|length != 0 %}
59 <h3>Other runtime relationships</h3> 59 <h3>Other runtime relationships</h3>
60 <table class="table table-bordered table-hover"> 60 <table class="table table-bordered table-hover">
61 <thead> 61 <thead>
@@ -93,7 +93,7 @@
93 {% endfor %} 93 {% endfor %}
94 </tbody> 94 </tbody>
95 </table> 95 </table>
96 {% endifnotequal %} 96 {% endif %}
97 </div> <!-- tab-pane --> 97 </div> <!-- tab-pane -->
98 </div> <!-- tab-content --> 98 </div> <!-- tab-content -->
99{% endblock tabcontent %} 99{% endblock tabcontent %}
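These template changes replace {% ifequal %}/{% ifnotequal %}, which Django deprecated in 3.1 and removed in 4.0, with plain {% if %} comparisons. A standalone sketch of the new form, using Django's template Engine directly (no project settings required):

    from django.template import Context, Engine

    tpl = Engine().from_string(
        "{% if runtime_deps|length == 0 %}no runtime dependencies{% endif %}")
    # renders the alert branch when the dependency list is empty
    print(tpl.render(Context({"runtime_deps": []})))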
diff --git a/bitbake/lib/toaster/toastergui/templates/package_detail_base.html b/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
index 66f8e7f069..a4fcd2aa42 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
@@ -127,7 +127,7 @@
127 {% comment %} 127 {% comment %}
128 # Removed per team meeting of 1/29/2014 until 128 # Removed per team meeting of 1/29/2014 until
129 # decision on index search algorithm 129 # decision on index search algorithm
130 <a href="http://layers.openembedded.org" target="_blank"> 130 <a href="https://layers.openembedded.org" target="_blank">
131 <i class="glyphicon glyphicon-share get-info"></i> 131 <i class="glyphicon glyphicon-share get-info"></i>
132 </a> 132 </a>
133 {% endcomment %} 133 {% endcomment %}
diff --git a/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
index 95e56ded26..1f5ed6d913 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
@@ -14,7 +14,7 @@
14 {% include "package_included_tabs.html" with active_tab="dependencies" %} 14 {% include "package_included_tabs.html" with active_tab="dependencies" %}
15 <div class="tab-content"> 15 <div class="tab-content">
16 <div class="tab-pane active" id="dependencies"> 16 <div class="tab-pane active" id="dependencies">
17 {% ifnotequal runtime_deps|length 0 %} 17 {% if runtime_deps|length != 0 %}
18 <table class="table table-bordered table-hover"> 18 <table class="table table-bordered table-hover">
19 <thead> 19 <thead>
20 <tr> 20 <tr>
@@ -48,9 +48,9 @@
48 <div class="alert alert-info"> 48 <div class="alert alert-info">
49 <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies. 49 <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies.
50 </div> 50 </div>
51 {% endifnotequal %} 51 {% endif %}
52 52
53 {% ifnotequal other_deps|length 0 %} 53 {% if other_deps|length != 0 %}
54 <h3>Other runtime relationships</h3> 54 <h3>Other runtime relationships</h3>
55 <table class="table table-bordered table-hover"> 55 <table class="table table-bordered table-hover">
56 <thead> 56 <thead>
@@ -103,7 +103,7 @@
103 {% endfor %} 103 {% endfor %}
104 </tbody> 104 </tbody>
105 </table> 105 </table>
106 {% endifnotequal %} 106 {% endif %}
107 </div> <!-- end tab-pane --> 107 </div> <!-- end tab-pane -->
108 </div> <!-- end tab content --> 108 </div> <!-- end tab content -->
109 {% endwith %} 109 {% endwith %}
diff --git a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
index fb310c7fc7..dae4549e21 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
@@ -15,7 +15,7 @@
15 <div class="tab-content"> 15 <div class="tab-content">
16 <div class="tab-pane active" id="brought-in-by"> 16 <div class="tab-pane active" id="brought-in-by">
17 17
18 {% ifequal reverse_count 0 %} 18 {% if reverse_count == 0 %}
19 <div class="alert alert-info"> 19 <div class="alert alert-info">
20 <strong>{{package.fullpackagespec}}</strong> has no reverse runtime dependencies. 20 <strong>{{package.fullpackagespec}}</strong> has no reverse runtime dependencies.
21 </div> 21 </div>
@@ -43,7 +43,7 @@
43 {% endfor %} 43 {% endfor %}
44 </tbody> 44 </tbody>
45 </table> 45 </table>
46 {% endifequal %} 46 {% endif %}
47 </div> <!-- end tab-pane --> 47 </div> <!-- end tab-pane -->
48 </div> <!-- end tab content --> 48 </div> <!-- end tab content -->
49 {% endwith %} 49 {% endwith %}
diff --git a/bitbake/lib/toaster/toastergui/templates/project.html b/bitbake/lib/toaster/toastergui/templates/project.html
index d8ad2c79dc..22239a82fd 100644
--- a/bitbake/lib/toaster/toastergui/templates/project.html
+++ b/bitbake/lib/toaster/toastergui/templates/project.html
@@ -139,7 +139,7 @@
139 <ul> 139 <ul>
140 <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li> 140 <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li>
141 <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li> 141 <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li>
142 <li><a href="https://www.yoctoproject.org/docs/current/dev-manual/dev-manual.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li> 142 <li><a href="http://docs.yoctoproject.org/dev-manual/common-tasks.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li>
143 <li>Or type a layer name below</li> 143 <li>Or type a layer name below</li>
144 </ul> 144 </ul>
145 </div> 145 </div>
diff --git a/bitbake/lib/toaster/toastergui/templates/project_specific.html b/bitbake/lib/toaster/toastergui/templates/project_specific.html
index 42725c0dba..76d45b1b39 100644
--- a/bitbake/lib/toaster/toastergui/templates/project_specific.html
+++ b/bitbake/lib/toaster/toastergui/templates/project_specific.html
@@ -137,7 +137,7 @@
137 <ul> 137 <ul>
138 <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li> 138 <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li>
139 <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li> 139 <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li>
140 <li><a href="https://www.yoctoproject.org/docs/current/dev-manual/dev-manual.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li> 140 <li><a href="http://docs.yoctoproject.org/dev-manual/common-tasks.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li>
141 <li>Or type a layer name below</li> 141 <li>Or type a layer name below</li>
142 </ul> 142 </ul>
143 </div> 143 </div>
diff --git a/bitbake/lib/toaster/toastergui/templates/projectconf.html b/bitbake/lib/toaster/toastergui/templates/projectconf.html
index bd49f1f585..c306835832 100644
--- a/bitbake/lib/toaster/toastergui/templates/projectconf.html
+++ b/bitbake/lib/toaster/toastergui/templates/projectconf.html
@@ -73,7 +73,7 @@
73 73
74 {% if image_install_append_defined %} 74 {% if image_install_append_defined %}
75 <dt> 75 <dt>
76 <span class="js-config-var-name js-config-var-managed-name">IMAGE_INSTALL_append</span> 76 <span class="js-config-var-name js-config-var-managed-name">IMAGE_INSTALL:append</span>
77 <span class="glyphicon glyphicon-question-sign get-help" title="Specifies additional packages to install into an image. If your build creates more than one image, the packages will be installed in all of them"></span> 77 <span class="glyphicon glyphicon-question-sign get-help" title="Specifies additional packages to install into an image. If your build creates more than one image, the packages will be installed in all of them"></span>
78 </dt> 78 </dt>
79 <dd class="variable-list"> 79 <dd class="variable-list">
@@ -83,7 +83,7 @@
83 <form id="change-image_install-form" class="form-inline" style="display:none;"> 83 <form id="change-image_install-form" class="form-inline" style="display:none;">
84 <div class="row"> 84 <div class="row">
85 <div class="col-md-4"> 85 <div class="col-md-4">
86 <span class="help-block">To set IMAGE_INSTALL_append to more than one package, type the package names separated by a space.</span> 86 <span class="help-block">To set IMAGE_INSTALL:append to more than one package, type the package names separated by a space.</span>
87 </div> 87 </div>
88 </div> 88 </div>
89 <div class="form-group"> 89 <div class="form-group">
@@ -167,8 +167,8 @@
167 {% for fstype in vars_fstypes %} 167 {% for fstype in vars_fstypes %}
168 <input type="hidden" class="js-checkbox-fstypes-list" value="{{fstype}}"> 168 <input type="hidden" class="js-checkbox-fstypes-list" value="{{fstype}}">
169 {% endfor %} 169 {% endfor %}
170 {% for b in vars_blacklist %} 170 {% for b in vars_disallowed %}
171 <input type="hidden" class="js-config-blacklist-name" value="{{b}}"> 171 <input type="hidden" class="js-config-disallowed-name" value="{{b}}">
172 {% endfor %} 172 {% endfor %}
173 {% for b in vars_managed %} 173 {% for b in vars_managed %}
174 <input type="hidden" class="js-config-managed-name" value="{{b}}"> 174 <input type="hidden" class="js-config-managed-name" value="{{b}}">
@@ -201,12 +201,12 @@
201 <p>Toaster cannot set any variables that impact 1) the configuration of the build servers, 201 <p>Toaster cannot set any variables that impact 1) the configuration of the build servers,
202 or 2) where artifacts produced by the build are stored. Such variables include: </p> 202 or 2) where artifacts produced by the build are stored. Such variables include: </p>
203 <p> 203 <p>
204 <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-BB_DISKMON_DIRS" target="_blank">BB_DISKMON_DIRS</a></code> 204 <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-BB_DISKMON_DIRS" target="_blank">BB_DISKMON_DIRS</a></code>
205 <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-BB_NUMBER_THREADS" target="_blank">BB_NUMBER_THREADS</a></code> 205 <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-BB_NUMBER_THREADS" target="_blank">BB_NUMBER_THREADS</a></code>
206 <code>CVS_PROXY_HOST</code> 206 <code>CVS_PROXY_HOST</code>
207 <code>CVS_PROXY_PORT</code> 207 <code>CVS_PROXY_PORT</code>
208 <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PARALLEL_MAKE" target="_blank">PARALLEL_MAKE</a></code> 208 <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-PARALLEL_MAKE" target="_blank">PARALLEL_MAKE</a></code>
209 <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-TMPDIR" target="_blank">TMPDIR</a></code></p> 209 <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-TMPDIR" target="_blank">TMPDIR</a></code></p>
210 <p>Plus the following standard shell environment variables:</p> 210 <p>Plus the following standard shell environment variables:</p>
211 <p><code>http_proxy</code> <code>ftp_proxy</code> <code>https_proxy</code> <code>all_proxy</code></p> 211 <p><code>http_proxy</code> <code>ftp_proxy</code> <code>https_proxy</code> <code>all_proxy</code></p>
212 </div> 212 </div>
@@ -238,9 +238,9 @@ function validate_new_variable() {
238 } 238 }
239 } 239 }
240 240
241 var blacklist_configvars = document.getElementsByClassName('js-config-blacklist-name'); 241 var disallowed_configvars = document.getElementsByClassName('js-config-disallowed-name');
242 for (var i = 0, length = blacklist_configvars.length; i < length; i++) { 242 for (var i = 0, length = disallowed_configvars.length; i < length; i++) {
243 if (blacklist_configvars[i].value.toUpperCase() == variable.toUpperCase()) { 243 if (disallowed_configvars[i].value.toUpperCase() == variable.toUpperCase()) {
244 error_msg = "You cannot edit this variable in Toaster because it is set by the build servers"; 244 error_msg = "You cannot edit this variable in Toaster because it is set by the build servers";
245 } 245 }
246 } 246 }
@@ -771,10 +771,10 @@ $(document).ready(function() {
771 771
772 {% if image_install_append_defined %} 772 {% if image_install_append_defined %}
773 773
774 // init IMAGE_INSTALL_append trash icon 774 // init IMAGE_INSTALL:append trash icon
775 setDeleteTooltip($('#delete-image_install-icon')); 775 setDeleteTooltip($('#delete-image_install-icon'));
776 776
777 // change IMAGE_INSTALL_append variable 777 // change IMAGE_INSTALL:append variable
778 $('#change-image_install-icon').click(function() { 778 $('#change-image_install-icon').click(function() {
779 // preset the edit value 779 // preset the edit value
780 var current_val = $("span#image_install").text().trim(); 780 var current_val = $("span#image_install").text().trim();
@@ -814,7 +814,7 @@ $(document).ready(function() {
814 $('#apply-change-image_install').click(function(){ 814 $('#apply-change-image_install').click(function(){
 815 // ensure these non-empty values have a single space prefix 815 // ensure these non-empty values have a single space prefix
816 var value = " " + $('#new-image_install').val().trim(); 816 var value = " " + $('#new-image_install').val().trim();
817 postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL_append:'+value}); 817 postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL:append:'+value});
818 $('#image_install').text(value); 818 $('#image_install').text(value);
819 $('#image_install').removeClass('text-muted'); 819 $('#image_install').removeClass('text-muted');
820 $("#change-image_install-form").slideUp(function () { 820 $("#change-image_install-form").slideUp(function () {
@@ -826,10 +826,10 @@ $(document).ready(function() {
826 }); 826 });
827 }); 827 });
828 828
829 // delete IMAGE_INSTALL_append variable value 829 // delete IMAGE_INSTALL:append variable value
830 $('#delete-image_install-icon').click(function(){ 830 $('#delete-image_install-icon').click(function(){
831 $(this).tooltip('hide'); 831 $(this).tooltip('hide');
832 postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL_append:'+''}); 832 postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL:append:'+''});
833 $('#image_install').parent().fadeOut(1000, function(){ 833 $('#image_install').parent().fadeOut(1000, function(){
834 $('#image_install').addClass('text-muted'); 834 $('#image_install').addClass('text-muted');
835 $('#image_install').text('Not set'); 835 $('#image_install').text('Not set');
@@ -1011,7 +1011,7 @@ $(document).ready(function() {
1011 $(".save").attr("disabled","disabled"); 1011 $(".save").attr("disabled","disabled");
1012 1012
1013 // Reload page if admin-removed core managed value is manually added back in 1013 // Reload page if admin-removed core managed value is manually added back in
1014 if (0 <= " DISTRO DL_DIR IMAGE_FSTYPES IMAGE_INSTALL_append PACKAGE_CLASSES SSTATE_DIR ".indexOf( " "+variable+" " )) { 1014 if (0 <= " DISTRO DL_DIR IMAGE_FSTYPES IMAGE_INSTALL:append PACKAGE_CLASSES SSTATE_DIR ".indexOf( " "+variable+" " )) {
1015 // delayed reload to avoid race condition with postEditAjaxRequest 1015 // delayed reload to avoid race condition with postEditAjaxRequest
1016 do_reload=true; 1016 do_reload=true;
1017 } 1017 }
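The IMAGE_INSTALL_append to IMAGE_INSTALL:append renames throughout this template track BitBake's switch, in the 1.52/Honister release, from "_" to ":" as the override separator. A sketch of the mapping for the simple suffix cases seen here; the real conversion tooling in OE-core covers many more patterns:

    import re

    def modernize(name):
        # legacy '_append'/'_prepend'/'_remove' suffixes become ':append' etc.
        return re.sub(r'_(append|prepend|remove)$', r':\1', name)

    assert modernize('IMAGE_INSTALL_append') == 'IMAGE_INSTALL:append'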
diff --git a/bitbake/lib/toaster/toastergui/templates/recipe.html b/bitbake/lib/toaster/toastergui/templates/recipe.html
index 3f76e656fe..4b5301b548 100644
--- a/bitbake/lib/toaster/toastergui/templates/recipe.html
+++ b/bitbake/lib/toaster/toastergui/templates/recipe.html
@@ -186,9 +186,9 @@
186 <i class="icon-question-sign get-help hover-help" title="{{task.get_outcome_help}}"></i> 186 <i class="icon-question-sign get-help hover-help" title="{{task.get_outcome_help}}"></i>
187 </td> 187 </td>
188 <td> 188 <td>
189 {% ifnotequal task.sstate_result task.SSTATE_NA %} 189 {% if task.sstate_result != task.SSTATE_NA %}
190 {{task.get_sstate_result_display}} 190 {{task.get_sstate_result_display}}
191 {% endifnotequal %} 191 {% endif %}
192 </td> 192 </td>
193 193
194 </tr> 194 </tr>
diff --git a/bitbake/lib/toaster/toastergui/templates/target.html b/bitbake/lib/toaster/toastergui/templates/target.html
index 1924a0dad7..d5f60e77a8 100644
--- a/bitbake/lib/toaster/toastergui/templates/target.html
+++ b/bitbake/lib/toaster/toastergui/templates/target.html
@@ -8,11 +8,11 @@
8 8
9{% block nav-target %} 9{% block nav-target %}
10 {% for t in build.get_sorted_target_list %} 10 {% for t in build.get_sorted_target_list %}
11 {% ifequal target.pk t.pk %} 11 {% if target.pk == t.pk %}
12 <li class="active"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li> 12 <li class="active"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
13 {% else %} 13 {% else %}
14 <li><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li> 14 <li><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
15 {% endifequal %} 15 {% endif %}
16 {% endfor %} 16 {% endfor %}
17{% endblock %} 17{% endblock %}
18 18
diff --git a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
index c432f59a78..aee9bbcd14 100644
--- a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
+++ b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
@@ -167,8 +167,8 @@ def check_filter_status(options, filter):
167def variable_parent_name(value): 167def variable_parent_name(value):
168 """ filter extended variable names to the parent name 168 """ filter extended variable names to the parent name
169 """ 169 """
170 value=re.sub('_\$.*', '', value) 170 value = re.sub(r'_\$.*', '', value)
171 return re.sub('_[a-z].*', '', value) 171 return re.sub(r'_[a-z].*', '', value)
172 172
173@register.filter 173@register.filter
174def filter_setin_files(file_list, matchstr): 174def filter_setin_files(file_list, matchstr):
@@ -233,7 +233,6 @@ def filter_sizeovertotal(package_object, total_size):
233 233
234 return '{:.1%}'.format(float(size)/float(total_size)) 234 return '{:.1%}'.format(float(size)/float(total_size))
235 235
236from django.utils.safestring import mark_safe
237@register.filter 236@register.filter
238def format_vpackage_rowclass(size): 237def format_vpackage_rowclass(size):
239 if size == -1: 238 if size == -1:
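Prefixing the patterns with r makes them raw strings: '_\$.*' contains the escape sequence \$, which is invalid in an ordinary string literal and draws a warning from newer CPython releases. The behaviour is unchanged, as a quick check shows:

    import re

    # same result either way; the raw string just avoids the escape warning
    assert re.sub(r'_\$.*', '', 'FOO_${PN}') == re.sub('_\\$.*', '', 'FOO_${PN}') == 'FOO'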
diff --git a/bitbake/lib/toaster/toastergui/urls.py b/bitbake/lib/toaster/toastergui/urls.py
index d2df4e6048..7f8489d3aa 100644
--- a/bitbake/lib/toaster/toastergui/urls.py
+++ b/bitbake/lib/toaster/toastergui/urls.py
@@ -6,7 +6,7 @@
6# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
7# 7#
8 8
9from django.conf.urls import url 9from django.urls import re_path as url
10from django.views.generic import RedirectView 10from django.views.generic import RedirectView
11 11
12from toastergui import tables 12from toastergui import tables
@@ -95,6 +95,7 @@ urlpatterns = [
95 # project URLs 95 # project URLs
96 url(r'^newproject/$', views.newproject, name='newproject'), 96 url(r'^newproject/$', views.newproject, name='newproject'),
97 97
98 url(r'^cmdline/$', views.CommandLineBuilds.as_view(), name='cmdlines'),
98 url(r'^projects/$', 99 url(r'^projects/$',
99 tables.ProjectsTable.as_view(template_name="projects-toastertable.html"), 100 tables.ProjectsTable.as_view(template_name="projects-toastertable.html"),
100 name='all-projects'), 101 name='all-projects'),
@@ -206,8 +207,7 @@ urlpatterns = [
206 url(r'^js-unit-tests/$', views.jsunittests, name='js-unit-tests'), 207 url(r'^js-unit-tests/$', views.jsunittests, name='js-unit-tests'),
207 208
208 # image customisation functionality 209 # image customisation functionality
209 url(r'^xhr_customrecipe/(?P<recipe_id>\d+)' 210 url(r'^xhr_customrecipe/(?P<recipe_id>\d+)/packages/(?P<package_id>\d+|)$',
210 '/packages/(?P<package_id>\d+|)$',
211 api.XhrCustomRecipePackages.as_view(), 211 api.XhrCustomRecipePackages.as_view(),
212 name='xhr_customrecipe_packages'), 212 name='xhr_customrecipe_packages'),
213 213
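django.conf.urls.url() was deprecated in Django 3.1 and removed in 4.0; aliasing django.urls.re_path keeps every existing regex pattern working without rewrites. A sketch with a hypothetical view:

    from django.urls import re_path as url
    from django.views.generic import TemplateView

    urlpatterns = [
        # identical call shape to the removed django.conf.urls.url()
        url(r'^cmdline/$',
            TemplateView.as_view(template_name='command_line_builds.html'),
            name='cmdlines'),
    ]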
diff --git a/bitbake/lib/toaster/toastergui/views.py b/bitbake/lib/toaster/toastergui/views.py
index 9a5e48e3bb..061e6436c8 100644
--- a/bitbake/lib/toaster/toastergui/views.py
+++ b/bitbake/lib/toaster/toastergui/views.py
@@ -6,24 +6,36 @@
6# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
7# 7#
8 8
9import ast
9import re 10import re
11import subprocess
12import sys
13
14import bb.cooker
15from bb.ui import toasterui
16from bb.ui import eventreplay
10 17
11from django.db.models import F, Q, Sum 18from django.db.models import F, Q, Sum
12from django.db import IntegrityError 19from django.db import IntegrityError
13from django.shortcuts import render, redirect, get_object_or_404 20from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect
14from django.utils.http import urlencode 21from django.utils.http import urlencode
15from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe 22from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe
16from orm.models import LogMessage, Variable, Package_Dependency, Package 23from orm.models import LogMessage, Variable, Package_Dependency, Package
17from orm.models import Task_Dependency, Package_File 24from orm.models import Task_Dependency, Package_File
18from orm.models import Target_Installed_Package, Target_File 25from orm.models import Target_Installed_Package, Target_File
19from orm.models import TargetKernelFile, TargetSDKFile, Target_Image_File 26from orm.models import TargetKernelFile, TargetSDKFile, Target_Image_File
20from orm.models import BitbakeVersion, CustomImageRecipe 27from orm.models import BitbakeVersion, CustomImageRecipe, EventLogsImports
21 28
22from django.urls import reverse, resolve 29from django.urls import reverse, resolve
30from django.contrib import messages
31
23from django.core.exceptions import ObjectDoesNotExist 32from django.core.exceptions import ObjectDoesNotExist
33from django.core.files.storage import FileSystemStorage
34from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
24from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger 35from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
25from django.http import HttpResponseNotFound, JsonResponse 36from django.http import HttpResponseNotFound, JsonResponse
26from django.utils import timezone 37from django.utils import timezone
38from django.views.generic import TemplateView
27from datetime import timedelta, datetime 39from datetime import timedelta, datetime
28from toastergui.templatetags.projecttags import json as jsonfilter 40from toastergui.templatetags.projecttags import json as jsonfilter
29from decimal import Decimal 41from decimal import Decimal
@@ -32,13 +44,20 @@ import os
32from os.path import dirname 44from os.path import dirname
33import mimetypes 45import mimetypes
34 46
47from toastergui.forms import LoadFileForm
48
49from collections import namedtuple
50
35import logging 51import logging
36 52
53from toastermain.logs import log_view_mixin
54
37logger = logging.getLogger("toaster") 55logger = logging.getLogger("toaster")
38 56
39# Project creation and managed build enable 57# Project creation and managed build enable
40project_enable = ('1' == os.environ.get('TOASTER_BUILDSERVER')) 58project_enable = ('1' == os.environ.get('TOASTER_BUILDSERVER'))
41is_project_specific = ('1' == os.environ.get('TOASTER_PROJECTSPECIFIC')) 59is_project_specific = ('1' == os.environ.get('TOASTER_PROJECTSPECIFIC'))
60import_page = False
42 61
43class MimeTypeFinder(object): 62class MimeTypeFinder(object):
44 # setting this to False enables additional non-standard mimetypes 63 # setting this to False enables additional non-standard mimetypes
@@ -56,6 +75,7 @@ class MimeTypeFinder(object):
56 return guessed_type 75 return guessed_type
57 76
58# single point to add global values into the context before rendering 77# single point to add global values into the context before rendering
78@log_view_mixin
59def toaster_render(request, page, context): 79def toaster_render(request, page, context):
60 context['project_enable'] = project_enable 80 context['project_enable'] = project_enable
61 context['project_specific'] = is_project_specific 81 context['project_specific'] = is_project_specific
@@ -352,7 +372,6 @@ def _get_parameters_values(request, default_count, default_order):
352# set cookies for parameters. this is useful in case parameters are set 372# set cookies for parameters. this is useful in case parameters are set
353# manually from the GET values of the link 373# manually from the GET values of the link
354def _set_parameters_values(pagesize, orderby, request): 374def _set_parameters_values(pagesize, orderby, request):
355 from django.urls import resolve
356 current_url = resolve(request.path_info).url_name 375 current_url = resolve(request.path_info).url_name
357 request.session['%s_count' % current_url] = pagesize 376 request.session['%s_count' % current_url] = pagesize
358 request.session['%s_orderby' % current_url] = orderby 377 request.session['%s_orderby' % current_url] = orderby
@@ -665,20 +684,20 @@ def recipe_packages(request, build_id, recipe_id):
665 return response 684 return response
666 685
667from django.http import HttpResponse 686from django.http import HttpResponse
687@log_view_mixin
668def xhr_dirinfo(request, build_id, target_id): 688def xhr_dirinfo(request, build_id, target_id):
669 top = request.GET.get('start', '/') 689 top = request.GET.get('start', '/')
670 return HttpResponse(_get_dir_entries(build_id, target_id, top), content_type = "application/json") 690 return HttpResponse(_get_dir_entries(build_id, target_id, top), content_type = "application/json")
671 691
672from django.utils.functional import Promise 692from django.utils.functional import Promise
673from django.utils.encoding import force_text 693from django.utils.encoding import force_str
674class LazyEncoder(json.JSONEncoder): 694class LazyEncoder(json.JSONEncoder):
675 def default(self, obj): 695 def default(self, obj):
676 if isinstance(obj, Promise): 696 if isinstance(obj, Promise):
677 return force_text(obj) 697 return force_str(obj)
678 return super(LazyEncoder, self).default(obj) 698 return super(LazyEncoder, self).default(obj)
679 699
680from toastergui.templatetags.projecttags import filtered_filesizeformat 700from toastergui.templatetags.projecttags import filtered_filesizeformat
681import os
682def _get_dir_entries(build_id, target_id, start): 701def _get_dir_entries(build_id, target_id, start):
683 node_str = { 702 node_str = {
684 Target_File.ITYPE_REGULAR : '-', 703 Target_File.ITYPE_REGULAR : '-',
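force_text was a Python 2 compatibility alias removed in Django 4.0; force_str is its one-to-one replacement, so LazyEncoder still coerces lazy Promise objects during JSON serialisation. A usage sketch with the LazyEncoder above, assuming a configured Django app:

    import json
    from django.utils.translation import gettext_lazy as _

    # lazy strings stay Promises until forced; LazyEncoder applies force_str
    json.dumps({"heading": _("Builds")}, cls=LazyEncoder)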
@@ -1404,7 +1423,7 @@ if True:
1404 if not os.path.isdir('%s/conf' % request.POST['importdir']): 1423 if not os.path.isdir('%s/conf' % request.POST['importdir']):
1405 raise BadParameterException("Bad path or missing 'conf' directory (%s)" % request.POST['importdir']) 1424 raise BadParameterException("Bad path or missing 'conf' directory (%s)" % request.POST['importdir'])
1406 from django.core import management 1425 from django.core import management
1407 management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'], interactive=False) 1426 management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'])
1408 prj = Project.objects.get(name = request.POST['projectname']) 1427 prj = Project.objects.get(name = request.POST['projectname'])
1409 prj.merged_attr = True 1428 prj.merged_attr = True
1410 prj.save() 1429 prj.save()
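call_command() has validated its keyword options against the target command's argument parser since Django 2.0, so passing the stale interactive=False to a command that never declared it raises TypeError; dropping the option is the whole fix. A sketch, with placeholder name and path:

    from django.core import management

    # only options declared in buildimport's add_arguments() are accepted
    management.call_command('buildimport', '--command=import',
                            '--name=myproject', '--path=/path/to/builddir')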
@@ -1606,12 +1625,13 @@ if True:
1606 # make sure we have a machine set for this project 1625 # make sure we have a machine set for this project
1607 ProjectVariable.objects.get_or_create(project=new_project, 1626 ProjectVariable.objects.get_or_create(project=new_project,
1608 name="MACHINE", 1627 name="MACHINE",
1609 value="qemux86") 1628 value="qemux86-64")
1610 context = {'project': new_project} 1629 context = {'project': new_project}
1611 return toaster_render(request, "js-unit-tests.html", context) 1630 return toaster_render(request, "js-unit-tests.html", context)
1612 1631
1613 from django.views.decorators.csrf import csrf_exempt 1632 from django.views.decorators.csrf import csrf_exempt
1614 @csrf_exempt 1633 @csrf_exempt
1634 @log_view_mixin
1615 def xhr_testreleasechange(request, pid): 1635 def xhr_testreleasechange(request, pid):
1616 def response(data): 1636 def response(data):
1617 return HttpResponse(jsonfilter(data), 1637 return HttpResponse(jsonfilter(data),
@@ -1648,6 +1668,7 @@ if True:
1648 except Exception as e: 1668 except Exception as e:
1649 return response({"error": str(e) }) 1669 return response({"error": str(e) })
1650 1670
1671 @log_view_mixin
1651 def xhr_configvaredit(request, pid): 1672 def xhr_configvaredit(request, pid):
1652 try: 1673 try:
1653 prj = Project.objects.get(id = pid) 1674 prj = Project.objects.get(id = pid)
@@ -1683,12 +1704,12 @@ if True:
1683 t=request.POST['configvarDel'].strip() 1704 t=request.POST['configvarDel'].strip()
1684 pt = ProjectVariable.objects.get(pk = int(t)).delete() 1705 pt = ProjectVariable.objects.get(pk = int(t)).delete()
1685 1706
1686 # return all project settings, filter out blacklist and elsewhere-managed variables 1707 # return all project settings, filter out disallowed and elsewhere-managed variables
1687 vars_managed,vars_fstypes,vars_blacklist = get_project_configvars_context() 1708 vars_managed,vars_fstypes,vars_disallowed = get_project_configvars_context()
1688 configvars_query = ProjectVariable.objects.filter(project_id = pid).all() 1709 configvars_query = ProjectVariable.objects.filter(project_id = pid).all()
1689 for var in vars_managed: 1710 for var in vars_managed:
1690 configvars_query = configvars_query.exclude(name = var) 1711 configvars_query = configvars_query.exclude(name = var)
1691 for var in vars_blacklist: 1712 for var in vars_disallowed:
1692 configvars_query = configvars_query.exclude(name = var) 1713 configvars_query = configvars_query.exclude(name = var)
1693 1714
1694 return_data = { 1715 return_data = {
@@ -1708,7 +1729,7 @@ if True:
1708 except ProjectVariable.DoesNotExist: 1729 except ProjectVariable.DoesNotExist:
1709 pass 1730 pass
1710 try: 1731 try:
1711 return_data['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value, 1732 return_data['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value,
1712 except ProjectVariable.DoesNotExist: 1733 except ProjectVariable.DoesNotExist:
1713 pass 1734 pass
1714 try: 1735 try:
@@ -1726,6 +1747,7 @@ if True:
1726 return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json") 1747 return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
1727 1748
1728 1749
1750 @log_view_mixin
1729 def customrecipe_download(request, pid, recipe_id): 1751 def customrecipe_download(request, pid, recipe_id):
1730 recipe = get_object_or_404(CustomImageRecipe, pk=recipe_id) 1752 recipe = get_object_or_404(CustomImageRecipe, pk=recipe_id)
1731 1753
@@ -1781,7 +1803,7 @@ if True:
1781 'MACHINE', 'BBLAYERS' 1803 'MACHINE', 'BBLAYERS'
1782 } 1804 }
1783 1805
1784 vars_blacklist = { 1806 vars_disallowed = {
1785 'PARALLEL_MAKE','BB_NUMBER_THREADS', 1807 'PARALLEL_MAKE','BB_NUMBER_THREADS',
1786 'BB_DISKMON_DIRS','BB_NUMBER_THREADS','CVS_PROXY_HOST','CVS_PROXY_PORT', 1808 'BB_DISKMON_DIRS','BB_NUMBER_THREADS','CVS_PROXY_HOST','CVS_PROXY_PORT',
1787 'PARALLEL_MAKE','TMPDIR', 1809 'PARALLEL_MAKE','TMPDIR',
@@ -1790,7 +1812,7 @@ if True:
1790 1812
1791 vars_fstypes = Target_Image_File.SUFFIXES 1813 vars_fstypes = Target_Image_File.SUFFIXES
1792 1814
1793 return(vars_managed,sorted(vars_fstypes),vars_blacklist) 1815 return(vars_managed,sorted(vars_fstypes),vars_disallowed)
1794 1816
1795 def projectconf(request, pid): 1817 def projectconf(request, pid):
1796 1818
@@ -1799,12 +1821,12 @@ if True:
1799 except Project.DoesNotExist: 1821 except Project.DoesNotExist:
1800 return HttpResponseNotFound("<h1>Project id " + pid + " is unavailable</h1>") 1822 return HttpResponseNotFound("<h1>Project id " + pid + " is unavailable</h1>")
1801 1823
1802 # remove blacklist and externally managed variables from this list 1824 # remove disallowed and externally managed variables from this list
1803 vars_managed,vars_fstypes,vars_blacklist = get_project_configvars_context() 1825 vars_managed,vars_fstypes,vars_disallowed = get_project_configvars_context()
1804 configvars = ProjectVariable.objects.filter(project_id = pid).all() 1826 configvars = ProjectVariable.objects.filter(project_id = pid).all()
1805 for var in vars_managed: 1827 for var in vars_managed:
1806 configvars = configvars.exclude(name = var) 1828 configvars = configvars.exclude(name = var)
1807 for var in vars_blacklist: 1829 for var in vars_disallowed:
1808 configvars = configvars.exclude(name = var) 1830 configvars = configvars.exclude(name = var)
1809 1831
1810 context = { 1832 context = {
@@ -1812,7 +1834,7 @@ if True:
1812 'configvars': configvars, 1834 'configvars': configvars,
1813 'vars_managed': vars_managed, 1835 'vars_managed': vars_managed,
1814 'vars_fstypes': vars_fstypes, 1836 'vars_fstypes': vars_fstypes,
1815 'vars_blacklist': vars_blacklist, 1837 'vars_disallowed': vars_disallowed,
1816 } 1838 }
1817 1839
1818 try: 1840 try:
@@ -1839,7 +1861,7 @@ if True:
1839 except ProjectVariable.DoesNotExist: 1861 except ProjectVariable.DoesNotExist:
1840 pass 1862 pass
1841 try: 1863 try:
1842 context['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value 1864 context['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value
1843 context['image_install_append_defined'] = "1" 1865 context['image_install_append_defined'] = "1"
1844 except ProjectVariable.DoesNotExist: 1866 except ProjectVariable.DoesNotExist:
1845 pass 1867 pass
@@ -1933,3 +1955,163 @@ if True:
1933 except (ObjectDoesNotExist, IOError): 1955 except (ObjectDoesNotExist, IOError):
1934 return toaster_render(request, "unavailable_artifact.html") 1956 return toaster_render(request, "unavailable_artifact.html")
1935 1957
1958
1959class CommandLineBuilds(TemplateView):
1960 model = EventLogsImports
1961 template_name = 'command_line_builds.html'
1962
1963 def get_context_data(self, **kwargs):
1964 context = super(CommandLineBuilds, self).get_context_data(**kwargs)
1965 #get value from BB_DEFAULT_EVENTLOG defined in bitbake.conf
1966 eventlog = subprocess.check_output(['bitbake-getvar', 'BB_DEFAULT_EVENTLOG', '--value'])
1967 if eventlog:
1968 logs_dir = os.path.dirname(eventlog.decode().strip('\n'))
1969 files = os.listdir(logs_dir)
1970 imported_files = EventLogsImports.objects.all()
1971 files_list = []
1972
1973 # Filter files that end with ".json"
1974 event_files = []
1975 for file in files:
1976 if file.endswith(".json"):
1977 # because BB_DEFAULT_EVENTLOG is a directory, we need to check if the file is a valid eventlog
1978 with open("{}/{}".format(logs_dir, file)) as efile:
1979 content = efile.read()
1980 if 'allvariables' in content:
1981 event_files.append(file)
1982
1983 #build dict for template using db data
1984 for event_file in event_files:
1985 if imported_files.filter(name=event_file):
1986 files_list.append({
1987 'name': event_file,
1988 'imported': True,
1989 'build_id': imported_files.filter(name=event_file)[0].build_id,
1990 'size': os.path.getsize("{}/{}".format(logs_dir, event_file))
1991 })
1992 else:
1993 files_list.append({
1994 'name': event_file,
1995 'imported': False,
1996 'build_id': None,
1997 'size': os.path.getsize("{}/{}".format(logs_dir, event_file))
1998 })
1999 context['import_all'] = True
2000
2001 context['files'] = files_list
2002 context['dir'] = logs_dir
2003 else:
2004 context['files'] = []
2005 context['dir'] = ''
2006
2007 # enable session variable
2008 if not self.request.session.get('file'):
2009 self.request.session['file'] = ""
2010
2011 context['form'] = LoadFileForm()
2012 context['project_enable'] = project_enable
2013 return context
2014
2015 def post(self, request, **kwargs):
2016 logs_dir = request.POST.get('dir')
2017 all_files = request.POST.get('all')
2018
2019 # check if a build is already in progress
2020 if Build.objects.filter(outcome=Build.IN_PROGRESS):
2021 messages.add_message(
2022 self.request,
2023 messages.ERROR,
2024 "A build is already in progress. Please wait for it to complete before starting a new build."
2025 )
2026 return JsonResponse({'response': 'building'})
2027 imported_files = EventLogsImports.objects.all()
2028 try:
2029 if all_files == 'true':
2030 # use of session variable to deactivate icon for builds in progress
2031 request.session['all_builds'] = True
2032 request.session.modified = True
2033 request.session.save()
2034
2035 files = ast.literal_eval(request.POST.get('file'))
2036 for file in files:
2037 if imported_files.filter(name=file.get('name')).exists():
2038 imported_files.filter(name=file.get('name'))[0].imported = True
2039 else:
2040 with open("{}/{}".format(logs_dir, file.get('name'))) as eventfile:
2041 # load variables from the first line
2042 variables = None
2043 while line := eventfile.readline().strip():
2044 try:
2045 variables = json.loads(line)['allvariables']
2046 break
2047 except (KeyError, json.JSONDecodeError):
2048 continue
2049 if not variables:
2050 raise Exception("File content missing build variables")
2051 eventfile.seek(0)
2052 params = namedtuple('ConfigParams', ['observe_only'])(True)
2053 player = eventreplay.EventPlayer(eventfile, variables)
2054
2055 toasterui.main(player, player, params)
2056 event_log_import = EventLogsImports.objects.create(name=file.get('name'), imported=True)
2057 event_log_import.build_id = Build.objects.last().id
2058 event_log_import.save()
2059 else:
2060 if self.request.FILES.get('eventlog_file'):
2061 file = self.request.FILES['eventlog_file']
2062 else:
2063 file = request.POST.get('file')
2064 # use of session variable to deactivate icon for build in progress
2065 request.session['file'] = file
2066 request.session['all_builds'] = False
2067 request.session.modified = True
2068 request.session.save()
2069
2070 if imported_files.filter(name=file).exists():
2071 imported_files.filter(name=file)[0].imported = True
2072 else:
2073 if isinstance(file, InMemoryUploadedFile) or isinstance(file, TemporaryUploadedFile):
2074 variables = None
2075 while line := file.readline().strip():
2076 try:
2077 variables = json.loads(line)['allvariables']
2078 break
2079 except (KeyError, json.JSONDecodeError):
2080 continue
2081 if not variables:
2082 raise Exception("File content missing build variables")
2083 file.seek(0)
2084 params = namedtuple('ConfigParams', ['observe_only'])(True)
2085 player = eventreplay.EventPlayer(file, variables)
2086 if not os.path.exists('{}/{}'.format(logs_dir, file.name)):
2087 fs = FileSystemStorage(location=logs_dir)
2088 fs.save(file.name, file)
2089 toasterui.main(player, player, params)
2090 else:
2091 with open("{}/{}".format(logs_dir, file)) as eventfile:
2092 # load variables from the first line
2093 variables = None
2094 while line := eventfile.readline().strip():
2095 try:
2096 variables = json.loads(line)['allvariables']
2097 break
2098 except (KeyError, json.JSONDecodeError):
2099 continue
2100 if not variables:
2101 raise Exception("File content missing build variables")
2102 eventfile.seek(0)
2103 params = namedtuple('ConfigParams', ['observe_only'])(True)
2104 player = eventreplay.EventPlayer(eventfile, variables)
2105 toasterui.main(player, player, params)
2106 event_log_import = EventLogsImports.objects.create(name=file, imported=True)
2107 event_log_import.build_id = Build.objects.last().id
2108 event_log_import.save()
2109 request.session['file'] = ""
2110 except Exception:
2111 messages.add_message(
2112 self.request,
2113 messages.ERROR,
2114 "The file content is not in the correct format. Update file content or upload a different file."
2115 )
2116 return HttpResponseRedirect("/toastergui/cmdline/")
2117 return HttpResponseRedirect('/toastergui/builds/')
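CommandLineBuilds replays saved event logs through the regular toasterui pipeline. Every log written under BB_DEFAULT_EVENTLOG begins with a JSON record carrying 'allvariables', and the three loops above share the same scan-then-rewind idiom, sketched standalone here:

    import json

    def read_variables(eventfile):
        # scan leading lines for the 'allvariables' record, then rewind so
        # EventPlayer can re-read the stream from the beginning
        while line := eventfile.readline().strip():
            try:
                variables = json.loads(line)['allvariables']
                eventfile.seek(0)
                return variables
            except (KeyError, json.JSONDecodeError):
                continue
        raise ValueError("file content missing build variables")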
diff --git a/bitbake/lib/toaster/toastergui/widgets.py b/bitbake/lib/toaster/toastergui/widgets.py
index ceff52942e..b32abf40b3 100644
--- a/bitbake/lib/toaster/toastergui/widgets.py
+++ b/bitbake/lib/toaster/toastergui/widgets.py
@@ -7,6 +7,7 @@
7# 7#
8 8
9from django.views.generic import View, TemplateView 9from django.views.generic import View, TemplateView
10from django.utils.decorators import method_decorator
10from django.views.decorators.cache import cache_control 11from django.views.decorators.cache import cache_control
11from django.shortcuts import HttpResponse 12from django.shortcuts import HttpResponse
12from django.core.cache import cache 13from django.core.cache import cache
@@ -31,6 +32,7 @@ import re
31import os 32import os
32 33
33from toastergui.tablefilter import TableFilterMap 34from toastergui.tablefilter import TableFilterMap
35from toastermain.logs import log_view_mixin
34 36
35try: 37try:
36 from urllib import unquote_plus 38 from urllib import unquote_plus
@@ -63,8 +65,8 @@ class ToasterTable(TemplateView):
63 self.default_orderby = "" 65 self.default_orderby = ""
64 66
65 # prevent HTTP caching of table data 67 # prevent HTTP caching of table data
66 @cache_control(must_revalidate=True, 68 @method_decorator(cache_control(must_revalidate=True,
67 max_age=0, no_store=True, no_cache=True) 69 max_age=0, no_store=True, no_cache=True))
68 def dispatch(self, *args, **kwargs): 70 def dispatch(self, *args, **kwargs):
69 return super(ToasterTable, self).dispatch(*args, **kwargs) 71 return super(ToasterTable, self).dispatch(*args, **kwargs)
70 72
@@ -83,6 +85,7 @@ class ToasterTable(TemplateView):
83 85
84 return context 86 return context
85 87
88 @log_view_mixin
86 def get(self, request, *args, **kwargs): 89 def get(self, request, *args, **kwargs):
87 if request.GET.get('format', None) == 'json': 90 if request.GET.get('format', None) == 'json':
88 91
@@ -304,6 +307,7 @@ class ToasterTable(TemplateView):
304 307
305 self.setup_columns(**kwargs) 308 self.setup_columns(**kwargs)
306 309
310 self.apply_orderby('pk')
307 if search: 311 if search:
308 self.apply_search(search) 312 self.apply_search(search)
309 if filters: 313 if filters:
@@ -413,6 +417,7 @@ class ToasterTypeAhead(View):
413 def __init__(self, *args, **kwargs): 417 def __init__(self, *args, **kwargs):
414 super(ToasterTypeAhead, self).__init__() 418 super(ToasterTypeAhead, self).__init__()
415 419
420 @log_view_mixin
416 def get(self, request, *args, **kwargs): 421 def get(self, request, *args, **kwargs):
417 def response(data): 422 def response(data):
418 return HttpResponse(json.dumps(data, 423 return HttpResponse(json.dumps(data,
@@ -468,6 +473,7 @@ class MostRecentBuildsView(View):
468 473
469 return False 474 return False
470 475
476 @log_view_mixin
471 def get(self, request, *args, **kwargs): 477 def get(self, request, *args, **kwargs):
472 """ 478 """
473 Returns a list of builds in JSON format. 479 Returns a list of builds in JSON format.
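Plain view decorators such as cache_control cannot sit directly on a class-based view's methods, because the first positional argument there is self rather than the request; method_decorator adapts the signature. The pattern used on dispatch() above, in isolation:

    from django.utils.decorators import method_decorator
    from django.views.decorators.cache import cache_control
    from django.views.generic import TemplateView

    class UncachedView(TemplateView):
        template_name = "example.html"  # hypothetical template

        @method_decorator(cache_control(must_revalidate=True, max_age=0,
                                        no_store=True, no_cache=True))
        def dispatch(self, *args, **kwargs):
            return super().dispatch(*args, **kwargs)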
diff --git a/bitbake/lib/toaster/toastermain/logs.py b/bitbake/lib/toaster/toastermain/logs.py
new file mode 100644
index 0000000000..62d871963a
--- /dev/null
+++ b/bitbake/lib/toaster/toastermain/logs.py
@@ -0,0 +1,158 @@
1#!/usr/bin/env python3
2# -*- coding: utf-8 -*-
3
4import os
5import logging
6import json
7from pathlib import Path
8from django.http import HttpRequest
9
10BUILDDIR = Path(os.environ.get('BUILDDIR', '/tmp'))
11
12def log_api_request(request, response, view, logger_name='api'):
13 """Helper function for LogAPIMixin"""
14
15 repjson = {
16 'view': view,
17 'path': request.path,
18 'method': request.method,
19 'status': response.status_code
20 }
21
22 logger = logging.getLogger(logger_name)
23 logger.info(
24 json.dumps(repjson, indent=4, separators=(", ", " : "))
25 )
26
27
28def log_view_mixin(view):
29 def log_view_request(*args, **kwargs):
30 # get request from args else kwargs
31 request = None
32 if len(args) > 0:
33 for req in args:
34 if isinstance(req, HttpRequest):
35 request = req
36 break
37 elif request is None:
38 request = kwargs.get('request')
39
40 response = view(*args, **kwargs)
41 view_name = 'unknown'
42 if hasattr(request, 'resolver_match'):
43 if hasattr(request.resolver_match, 'view_name'):
44 view_name = request.resolver_match.view_name
45
46 log_api_request(
47 request, response, view_name, 'toaster')
48 return response
49 return log_view_request
50
51
52
53class LogAPIMixin:
54 """Logs API requests
55
56 tested with:
57 - APIView
58 - ModelViewSet
59 - ReadOnlyModelViewSet
60 - GenericAPIView
61
62 Note: you can set `view_name` attribute in View to override get_view_name()
63 """
64
65 def get_view_name(self):
66 if hasattr(self, 'view_name'):
67 return self.view_name
68 return super().get_view_name()
69
70 def finalize_response(self, request, response, *args, **kwargs):
71 log_api_request(request, response, self.get_view_name())
72 return super().finalize_response(request, response, *args, **kwargs)
73
74
75LOGGING_SETTINGS = {
76 'version': 1,
77 'disable_existing_loggers': False,
78 'filters': {
79 'require_debug_false': {
80 '()': 'django.utils.log.RequireDebugFalse'
81 }
82 },
83 'formatters': {
84 'datetime': {
85 'format': '%(asctime)s %(levelname)s %(message)s'
86 },
87 'verbose': {
88 'format': '{levelname} {asctime} {module} {name}.{funcName} {process:d} {thread:d} {message}',
89 'datefmt': "%d/%b/%Y %H:%M:%S",
90 'style': '{',
91 },
92 'api': {
93 'format': '\n{levelname} {asctime} {name}.{funcName}:\n{message}',
94 'style': '{'
95 }
96 },
97 'handlers': {
98 'mail_admins': {
99 'level': 'ERROR',
100 'filters': ['require_debug_false'],
101 'class': 'django.utils.log.AdminEmailHandler'
102 },
103 'console': {
104 'level': 'DEBUG',
105 'class': 'logging.StreamHandler',
106 'formatter': 'datetime',
107 },
108 'file_django': {
109 'level': 'INFO',
110 'class': 'logging.handlers.TimedRotatingFileHandler',
111 'filename': BUILDDIR / 'toaster_logs/django.log',
112 'when': 'D', # interval type
113 'interval': 1, # defaults to 1
114 'backupCount': 10, # how many files to keep
115 'formatter': 'verbose',
116 },
117 'file_api': {
118 'level': 'INFO',
119 'class': 'logging.handlers.TimedRotatingFileHandler',
120 'filename': BUILDDIR / 'toaster_logs/api.log',
121 'when': 'D',
122 'interval': 1,
123 'backupCount': 10,
124 'formatter': 'verbose',
125 },
126 'file_toaster': {
127 'level': 'INFO',
128 'class': 'logging.handlers.TimedRotatingFileHandler',
129 'filename': BUILDDIR / 'toaster_logs/web.log',
130 'when': 'D',
131 'interval': 1,
132 'backupCount': 10,
133 'formatter': 'verbose',
134 },
135 },
136 'loggers': {
137 'django.request': {
138 'handlers': ['file_django', 'console'],
139 'level': 'WARN',
140 'propagate': True,
141 },
142 'django': {
143 'handlers': ['file_django', 'console'],
144 'level': 'WARNING',
145 'propagate': True,
146 },
147 'toaster': {
148 'handlers': ['file_toaster'],
149 'level': 'INFO',
150 'propagate': False,
151 },
152 'api': {
153 'handlers': ['file_api'],
154 'level': 'INFO',
155 'propagate': False,
156 }
157 }
158}
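A usage sketch for the new module: decorating a function-based view (a hypothetical one here) with log_view_mixin sends a path/method/status summary to the 'toaster' logger, which LOGGING_SETTINGS routes to toaster_logs/web.log under BUILDDIR:

    from django.http import JsonResponse
    from toastermain.logs import log_view_mixin

    @log_view_mixin
    def health(request):
        # the wrapper logs the request/response pair after the view returns
        return JsonResponse({"ok": True})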
diff --git a/bitbake/lib/toaster/toastermain/management/commands/buildimport.py b/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
index 59da6ff7ac..f7139aa041 100644
--- a/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
+++ b/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
@@ -451,7 +451,7 @@ class Command(BaseCommand):
451 # Catch vars relevant to Toaster (in case no Toaster section) 451 # Catch vars relevant to Toaster (in case no Toaster section)
452 self.update_project_vars(project,'DISTRO') 452 self.update_project_vars(project,'DISTRO')
453 self.update_project_vars(project,'MACHINE') 453 self.update_project_vars(project,'MACHINE')
454 self.update_project_vars(project,'IMAGE_INSTALL_append') 454 self.update_project_vars(project,'IMAGE_INSTALL:append')
455 self.update_project_vars(project,'IMAGE_FSTYPES') 455 self.update_project_vars(project,'IMAGE_FSTYPES')
456 self.update_project_vars(project,'PACKAGE_CLASSES') 456 self.update_project_vars(project,'PACKAGE_CLASSES')
457 # These vars are typically only assigned by Toaster 457 # These vars are typically only assigned by Toaster
@@ -545,7 +545,7 @@ class Command(BaseCommand):
545 # Find the directory's release, and promote to default_release if local paths 545 # Find the directory's release, and promote to default_release if local paths
546 release = self.find_import_release(layers_list,lv_dict,default_release) 546 release = self.find_import_release(layers_list,lv_dict,default_release)
547 # create project, SANITY: reuse any project of same name 547 # create project, SANITY: reuse any project of same name
548 project = Project.objects.create_project(project_name,release,project) 548 project = Project.objects.create_project(project_name,release,project, imported=True)
549 # Apply any new layers or variables 549 # Apply any new layers or variables
550 self.apply_conf_variables(project,layers_list,lv_dict,release) 550 self.apply_conf_variables(project,layers_list,lv_dict,release)
551 # WORKAROUND: since we now derive the release, redirect 'newproject_specific' to 'project_specific' 551 # WORKAROUND: since we now derive the release, redirect 'newproject_specific' to 'project_specific'
diff --git a/bitbake/lib/toaster/toastermain/management/commands/checksocket.py b/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
index 811fd5d516..b2c002da7a 100644
--- a/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
+++ b/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
@@ -13,7 +13,7 @@ import errno
13import socket 13import socket
14 14
15from django.core.management.base import BaseCommand, CommandError 15from django.core.management.base import BaseCommand, CommandError
16from django.utils.encoding import force_text 16from django.utils.encoding import force_str
17 17
18DEFAULT_ADDRPORT = "0.0.0.0:8000" 18DEFAULT_ADDRPORT = "0.0.0.0:8000"
19 19
@@ -51,7 +51,7 @@ class Command(BaseCommand):
51 if hasattr(err, 'errno') and err.errno in errors: 51 if hasattr(err, 'errno') and err.errno in errors:
52 errtext = errors[err.errno] 52 errtext = errors[err.errno]
53 else: 53 else:
54 errtext = force_text(err) 54 errtext = force_str(err)
55 raise CommandError(errtext) 55 raise CommandError(errtext)
56 56
57 self.stdout.write("OK") 57 self.stdout.write("OK")
diff --git a/bitbake/lib/toaster/toastermain/settings.py b/bitbake/lib/toaster/toastermain/settings.py
index a4b370c8d4..d2a449627f 100644
--- a/bitbake/lib/toaster/toastermain/settings.py
+++ b/bitbake/lib/toaster/toastermain/settings.py
@@ -9,6 +9,8 @@
9# Django settings for Toaster project. 9# Django settings for Toaster project.
10 10
11import os 11import os
12from pathlib import Path
13from toastermain.logs import LOGGING_SETTINGS
12 14
13DEBUG = True 15DEBUG = True
14 16
@@ -39,6 +41,9 @@ DATABASES = {
39 } 41 }
40} 42}
41 43
44# New in Django 3.2
45DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
46
42# Needed when Using sqlite especially to add a longer timeout for waiting 47# Needed when Using sqlite especially to add a longer timeout for waiting
43# for the database lock to be released 48# for the database lock to be released
44# https://docs.djangoproject.com/en/1.6/ref/databases/#database-is-locked-errors 49# https://docs.djangoproject.com/en/1.6/ref/databases/#database-is-locked-errors
@@ -84,14 +89,17 @@ else:
84 from pytz.exceptions import UnknownTimeZoneError 89 from pytz.exceptions import UnknownTimeZoneError
85 try: 90 try:
86 if pytz.timezone(zonename) is not None: 91 if pytz.timezone(zonename) is not None:
87 zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename 92 with open(filepath, 'rb') as f:
93 zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename
88 except UnknownTimeZoneError as ValueError: 94 except UnknownTimeZoneError as ValueError:
89 # we expect timezone failures here, just move over 95 # we expect timezone failures here, just move over
90 pass 96 pass
91 except ImportError: 97 except ImportError:
92 zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename 98 with open(filepath, 'rb') as f:
99 zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename
93 100
94 TIME_ZONE = zonefilelist[hashlib.md5(open('/etc/localtime', 'rb').read()).hexdigest()] 101 with open('/etc/localtime', 'rb') as f:
102 TIME_ZONE = zonefilelist[hashlib.md5(f.read()).hexdigest()]
95 103
96# Language code for this installation. All choices can be found here: 104# Language code for this installation. All choices can be found here:
97# http://www.i18nguy.com/unicode/language-identifiers.html 105# http://www.i18nguy.com/unicode/language-identifiers.html
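The timezone scan now opens each zoneinfo file through a context manager instead of leaking descriptors via open(...).read(). The hashing step, factored out as a sketch:

    import hashlib

    def file_md5(path):
        # the context manager guarantees the descriptor is closed promptly
        with open(path, 'rb') as f:
            return hashlib.md5(f.read()).hexdigest()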
@@ -103,10 +111,6 @@ SITE_ID = 1
103# to load the internationalization machinery. 111# to load the internationalization machinery.
104USE_I18N = True 112USE_I18N = True
105 113
106# If you set this to False, Django will not format dates, numbers and
107# calendars according to the current locale.
108USE_L10N = True
109
110# If you set this to False, Django will not use timezone-aware datetimes. 114# If you set this to False, Django will not use timezone-aware datetimes.
111USE_TZ = True 115USE_TZ = True
112 116
@@ -147,6 +151,8 @@ STATICFILES_FINDERS = (
147# Make this unique, and don't share it with anybody. 151# Make this unique, and don't share it with anybody.
148SECRET_KEY = 'NOT_SUITABLE_FOR_HOSTED_DEPLOYMENT' 152SECRET_KEY = 'NOT_SUITABLE_FOR_HOSTED_DEPLOYMENT'
149 153
154TMPDIR = os.environ.get('TOASTER_DJANGO_TMPDIR', '/tmp')
155
150class InvalidString(str): 156class InvalidString(str):
151 def __mod__(self, other): 157 def __mod__(self, other):
152 from django.template.base import TemplateSyntaxError 158 from django.template.base import TemplateSyntaxError
@@ -183,7 +189,13 @@ TEMPLATES = [
183 'django.template.loaders.app_directories.Loader', 189 'django.template.loaders.app_directories.Loader',
184 #'django.template.loaders.eggs.Loader', 190 #'django.template.loaders.eggs.Loader',
185 ], 191 ],
186 'string_if_invalid': InvalidString("%s"), 192 # https://docs.djangoproject.com/en/4.2/ref/templates/api/#how-invalid-variables-are-handled
193 # Generally, string_if_invalid should only be enabled in order to debug
194 # a specific template problem, then cleared once debugging is complete.
195 # If you assign a value other than '' to string_if_invalid,
196 # you will experience rendering problems with these templates and sites.
197 # 'string_if_invalid': InvalidString("%s"),
198 'string_if_invalid': "",
187 'debug': DEBUG, 199 'debug': DEBUG,
188 }, 200 },
189 }, 201 },
@@ -207,7 +219,7 @@ CACHES = {
     # },
     'default': {
         'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
-        'LOCATION': '/tmp/toaster_cache_%d' % os.getuid(),
+        'LOCATION': '%s/toaster_cache_%d' % (TMPDIR, os.getuid()),
         'TIMEOUT': 1,
     }
 }
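Taken with the TMPDIR hunk earlier, this replaces scattered hard-coded /tmp paths with a single environment-controlled setting. A minimal sketch of the pattern, assembled from the two hunks:

import os

# One env-controlled base directory instead of '/tmp' literals; override
# with TOASTER_DJANGO_TMPDIR before starting Toaster.
TMPDIR = os.environ.get('TOASTER_DJANGO_TMPDIR', '/tmp')

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        # Per-user cache path derived from TMPDIR, as in the hunk above.
        'LOCATION': '%s/toaster_cache_%d' % (TMPDIR, os.getuid()),
        'TIMEOUT': 1,
    }
}

The same TMPDIR also relocates the test database in settings_test.py below, so one environment variable moves every temporary artifact.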
@@ -239,6 +251,9 @@ INSTALLED_APPS = (
     'django.contrib.humanize',
     'bldcollector',
     'toastermain',
+
+    # 3rd-lib
+    "log_viewer",
 )


@@ -283,7 +298,6 @@ SOUTH_TESTS_MIGRATE = False

 # We automatically detect and install applications here if
 # they have a 'models.py' or 'views.py' file
-import os
 currentdir = os.path.dirname(__file__)
 for t in os.walk(os.path.dirname(currentdir)):
     modulename = os.path.basename(t[0])
@@ -299,43 +313,21 @@ for t in os.walk(os.path.dirname(currentdir)):
 # the site admins on every HTTP 500 error when DEBUG=False.
 # See http://docs.djangoproject.com/en/dev/topics/logging for
 # more details on how to customize your logging configuration.
-LOGGING = {
-    'version': 1,
-    'disable_existing_loggers': False,
-    'filters': {
-        'require_debug_false': {
-            '()': 'django.utils.log.RequireDebugFalse'
-        }
-    },
-    'formatters': {
-        'datetime': {
-            'format': '%(asctime)s %(levelname)s %(message)s'
-        }
-    },
-    'handlers': {
-        'mail_admins': {
-            'level': 'ERROR',
-            'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler'
-        },
-        'console': {
-            'level': 'DEBUG',
-            'class': 'logging.StreamHandler',
-            'formatter': 'datetime',
-        }
-    },
-    'loggers': {
-        'toaster' : {
-            'handlers': ['console'],
-            'level': 'DEBUG',
-        },
-        'django.request': {
-            'handlers': ['console'],
-            'level': 'WARN',
-            'propagate': True,
-        },
-    }
-}
+LOGGING = LOGGING_SETTINGS
+
+# Build paths inside the project like this: BASE_DIR / 'subdir'.
+BUILDDIR = os.environ.get("BUILDDIR", TMPDIR)
+
+# LOG VIEWER
+# https://pypi.org/project/django-log-viewer/
+LOG_VIEWER_FILES_PATTERN = '*.log*'
+LOG_VIEWER_FILES_DIR = os.path.join(BUILDDIR, "toaster_logs/")
+LOG_VIEWER_PAGE_LENGTH = 25  # total log lines per-page
+LOG_VIEWER_MAX_READ_LINES = 100000  # total log lines will be read
+LOG_VIEWER_PATTERNS = ['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL']
+
+# Optionally you can set the next variables in order to customize the admin:
+LOG_VIEWER_FILE_LIST_TITLE = "Logs list"

 if DEBUG and SQL_DEBUG:
     LOGGING['loggers']['django.db.backends'] = {
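The context lines closing the hunk show LOGGING being extended at runtime when both flags are set. A sketch of the full pattern; the handler and level values below are an assumed completion, since the diff cuts off before the dict body:

# DEBUG and SQL_DEBUG are settings flags defined earlier in settings.py.
# Routing django.db.backends to a console handler makes Django emit every
# SQL statement, which is why it is gated behind both flags.
if DEBUG and SQL_DEBUG:
    LOGGING['loggers']['django.db.backends'] = {
        'handlers': ['console'],  # assumed handler name
        'level': 'DEBUG',         # assumed level
    }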
diff --git a/bitbake/lib/toaster/toastermain/settings_test.py b/bitbake/lib/toaster/toastermain/settings_test.py
index 6538d9e453..74def2d240 100644
--- a/bitbake/lib/toaster/toastermain/settings_test.py
+++ b/bitbake/lib/toaster/toastermain/settings_test.py
@@ -19,10 +19,10 @@ TEMPLATE_DEBUG = DEBUG
 DATABASES = {
     'default': {
         'ENGINE': 'django.db.backends.sqlite3',
-        'NAME': '/tmp/toaster-test-db.sqlite',
+        'NAME': '%s/toaster-test-db.sqlite' % TMPDIR,
         'TEST': {
             'ENGINE': 'django.db.backends.sqlite3',
-            'NAME': '/tmp/toaster-test-db.sqlite',
+            'NAME': '%s/toaster-test-db.sqlite' % TMPDIR,
         }
     }
 }
diff --git a/bitbake/lib/toaster/toastermain/urls.py b/bitbake/lib/toaster/toastermain/urls.py
index 5fb520b384..3be46fcf0c 100644
--- a/bitbake/lib/toaster/toastermain/urls.py
+++ b/bitbake/lib/toaster/toastermain/urls.py
@@ -6,7 +6,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #

-from django.conf.urls import include, url
+from django.urls import re_path as url, include
 from django.views.generic import RedirectView, TemplateView
 from django.views.decorators.cache import never_cache
 import bldcollector.views
@@ -28,6 +28,8 @@ urlpatterns = [
     # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),


+    url(r'^logs/', include('log_viewer.urls')),
+
     # This is here to maintain backward compatibility and will be deprecated
     # in the future.
     url(r'^orm/eventfile$', bldcollector.views.eventfile),
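The import rewrite above is the standard Django 4.x migration shim: django.conf.urls.url was removed in Django 4.0, and aliasing re_path to url lets the existing regex-based patterns stand unchanged. A minimal sketch of the shim together with the new log_viewer mount from this diff:

from django.urls import include, re_path as url

# Regex patterns keep working verbatim under the alias; '^logs/' hands
# everything below that prefix to django-log-viewer's own urlconf.
urlpatterns = [
    url(r'^logs/', include('log_viewer.urls')),
]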
diff --git a/bitbake/lib/toaster/tox.ini b/bitbake/lib/toaster/tox.ini
new file mode 100644
index 0000000000..1516a527ae
--- /dev/null
+++ b/bitbake/lib/toaster/tox.ini
@@ -0,0 +1,24 @@
+[tox]
+envlist = py38, py39, py310, py311, py312
+skipsdist = True
+toxworkdir = {env:TOX_WORKDIR:.tox}
+passenv = *
+
+[testenv]
+passenv =
+    SSTATE_DIR
+    DL_DIR
+    TOASTER_DJANGO_TMPDIR
+setenv =
+    DJANGO_SETTINGS_MODULE=toastermain.settings_test
+    TOASTER_BUILDSERVER=1
+    BUILDDIR = {env:BUILDDIR}
+    EVENTREPLAY_DIR = {env:EVENTREPLAY_DIR:BUILDDIR}
+commands =
+    python3 {toxinidir}/manage.py test tests.db tests.commands tests.builds tests.browser tests.functional tests.views
+deps =
+    -r {toxinidir}/../../toaster-requirements.txt
+    -r {toxinidir}/tests/toaster-tests-requirements.txt
+
+[testenv:chrome]
+commands={[testenv]commands} --splinter-webdriver=chrome
\ No newline at end of file
diff --git a/bitbake/toaster-requirements.txt b/bitbake/toaster-requirements.txt
index 735b614546..d8e48b7f3a 100644
--- a/bitbake/toaster-requirements.txt
+++ b/bitbake/toaster-requirements.txt
@@ -1,3 +1,4 @@
-Django>2.2,<2.3
+Django>4.2,<4.3
 beautifulsoup4>=4.4.0
 pytz
+django-log-viewer==1.1.7