-rw-r--r--meta-selftest/conf/multiconfig/muslmc.conf2
-rw-r--r--meta-selftest/recipes-test/images/oe-selftest-image.bb2
-rw-r--r--meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb16
-rw-r--r--meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb12
-rw-r--r--meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb12
-rw-r--r--meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb30
-rw-r--r--meta-selftest/recipes-test/wrapper/files/test.awk2
-rw-r--r--meta-skeleton/recipes-kernel/hello-mod/files/hello.c13
-rw-r--r--meta-skeleton/recipes-skeleton/service/service_0.1.bb2
-rw-r--r--meta/classes-global/base.bbclass789
-rw-r--r--meta/classes-global/buildstats.bbclass302
-rw-r--r--meta/classes-global/debian.bbclass156
-rw-r--r--meta/classes-global/devshell.bbclass166
-rw-r--r--meta/classes-global/insane.bbclass1453
-rw-r--r--meta/classes-global/license.bbclass426
-rw-r--r--meta/classes-global/logging.bbclass107
-rw-r--r--meta/classes-global/mirrors.bbclass95
-rw-r--r--meta/classes-global/package.bbclass2546
-rw-r--r--meta/classes-global/package_deb.bbclass329
-rw-r--r--meta/classes-global/package_ipk.bbclass292
-rw-r--r--meta/classes-global/package_pkgdata.bbclass173
-rw-r--r--meta/classes-global/package_rpm.bbclass761
-rw-r--r--meta/classes-global/package_tar.bbclass77
-rw-r--r--meta/classes-global/packagedata.bbclass40
-rw-r--r--meta/classes-global/patch.bbclass171
-rw-r--r--meta/classes-global/sanity.bbclass1028
-rw-r--r--meta/classes-global/sstate.bbclass1364
-rw-r--r--meta/classes-global/staging.bbclass690
-rw-r--r--meta/classes-global/uninative.bbclass179
-rw-r--r--meta/classes-global/utility-tasks.bbclass60
-rw-r--r--meta/classes-global/utils.bbclass369
-rw-r--r--meta/classes-recipe/allarch.bbclass71
-rw-r--r--meta/classes-recipe/autotools-brokensep.bbclass11
-rw-r--r--meta/classes-recipe/autotools.bbclass260
-rw-r--r--meta/classes-recipe/baremetal-image.bbclass128
-rw-r--r--meta/classes-recipe/bash-completion.bbclass13
-rw-r--r--meta/classes-recipe/bin_package.bbclass42
-rw-r--r--meta/classes-recipe/binconfig-disabled.bbclass36
-rw-r--r--meta/classes-recipe/binconfig.bbclass60
-rw-r--r--meta/classes-recipe/cargo.bbclass97
-rw-r--r--meta/classes-recipe/cargo_common.bbclass139
-rw-r--r--meta/classes-recipe/cmake.bbclass223
-rw-r--r--meta/classes-recipe/cml1.bbclass107
-rw-r--r--meta/classes-recipe/compress_doc.bbclass269
-rw-r--r--meta/classes-recipe/core-image.bbclass82
-rw-r--r--meta/classes-recipe/cpan-base.bbclass33
-rw-r--r--meta/classes-recipe/cpan.bbclass71
-rw-r--r--meta/classes-recipe/cpan_build.bbclass47
-rw-r--r--meta/classes-recipe/cross-canadian.bbclass200
-rw-r--r--meta/classes-recipe/cross.bbclass103
-rw-r--r--meta/classes-recipe/crosssdk.bbclass57
-rw-r--r--meta/classes-recipe/deploy.bbclass18
-rw-r--r--meta/classes-recipe/devicetree.bbclass154
-rw-r--r--meta/classes-recipe/devupstream.bbclass61
-rw-r--r--meta/classes-recipe/distro_features_check.bbclass13
-rw-r--r--meta/classes-recipe/distrooverrides.bbclass38
-rw-r--r--meta/classes-recipe/dos2unix.bbclass20
-rw-r--r--meta/classes-recipe/externalsrc.bbclass269
-rw-r--r--meta/classes-recipe/features_check.bbclass57
-rw-r--r--meta/classes-recipe/fontcache.bbclass63
-rw-r--r--meta/classes-recipe/fs-uuid.bbclass30
-rw-r--r--meta/classes-recipe/gconf.bbclass77
-rw-r--r--meta/classes-recipe/gettext.bbclass28
-rw-r--r--meta/classes-recipe/gi-docgen.bbclass30
-rw-r--r--meta/classes-recipe/gio-module-cache.bbclass44
-rw-r--r--meta/classes-recipe/glide.bbclass15
-rw-r--r--meta/classes-recipe/gnomebase.bbclass37
-rw-r--r--meta/classes-recipe/go-mod.bbclass26
-rw-r--r--meta/classes-recipe/go-ptest.bbclass60
-rw-r--r--meta/classes-recipe/go.bbclass170
-rw-r--r--meta/classes-recipe/goarch.bbclass122
-rw-r--r--meta/classes-recipe/gobject-introspection-data.bbclass18
-rw-r--r--meta/classes-recipe/gobject-introspection.bbclass61
-rw-r--r--meta/classes-recipe/grub-efi-cfg.bbclass122
-rw-r--r--meta/classes-recipe/grub-efi.bbclass14
-rw-r--r--meta/classes-recipe/gsettings.bbclass48
-rw-r--r--meta/classes-recipe/gtk-doc.bbclass89
-rw-r--r--meta/classes-recipe/gtk-icon-cache.bbclass95
-rw-r--r--meta/classes-recipe/gtk-immodules-cache.bbclass82
-rw-r--r--meta/classes-recipe/image-artifact-names.bbclass28
-rw-r--r--meta/classes-recipe/image-combined-dbg.bbclass15
-rw-r--r--meta/classes-recipe/image-container.bbclass27
-rw-r--r--meta/classes-recipe/image-live.bbclass265
-rw-r--r--meta/classes-recipe/image-postinst-intercepts.bbclass29
-rw-r--r--meta/classes-recipe/image.bbclass684
-rw-r--r--meta/classes-recipe/image_types.bbclass355
-rw-r--r--meta/classes-recipe/image_types_wic.bbclass190
-rw-r--r--meta/classes-recipe/kernel-arch.bbclass74
-rw-r--r--meta/classes-recipe/kernel-artifact-names.bbclass37
-rw-r--r--meta/classes-recipe/kernel-devicetree.bbclass119
-rw-r--r--meta/classes-recipe/kernel-fitimage.bbclass803
-rw-r--r--meta/classes-recipe/kernel-grub.bbclass111
-rw-r--r--meta/classes-recipe/kernel-module-split.bbclass197
-rw-r--r--meta/classes-recipe/kernel-uboot.bbclass49
-rw-r--r--meta/classes-recipe/kernel-uimage.bbclass41
-rw-r--r--meta/classes-recipe/kernel-yocto.bbclass732
-rw-r--r--meta/classes-recipe/kernel.bbclass821
-rw-r--r--meta/classes-recipe/kernelsrc.bbclass16
-rw-r--r--meta/classes-recipe/lib_package.bbclass12
-rw-r--r--meta/classes-recipe/libc-package.bbclass390
-rw-r--r--meta/classes-recipe/license_image.bbclass295
-rw-r--r--meta/classes-recipe/linux-dummy.bbclass31
-rw-r--r--meta/classes-recipe/linux-kernel-base.bbclass47
-rw-r--r--meta/classes-recipe/linuxloader.bbclass82
-rw-r--r--meta/classes-recipe/live-vm-common.bbclass100
-rw-r--r--meta/classes-recipe/manpages.bbclass51
-rw-r--r--meta/classes-recipe/meson-routines.bbclass57
-rw-r--r--meta/classes-recipe/meson.bbclass179
-rw-r--r--meta/classes-recipe/mime-xdg.bbclass78
-rw-r--r--meta/classes-recipe/mime.bbclass76
-rw-r--r--meta/classes-recipe/module-base.bbclass27
-rw-r--r--meta/classes-recipe/module.bbclass80
-rw-r--r--meta/classes-recipe/multilib_header.bbclass58
-rw-r--r--meta/classes-recipe/multilib_script.bbclass40
-rw-r--r--meta/classes-recipe/native.bbclass236
-rw-r--r--meta/classes-recipe/nativesdk.bbclass124
-rw-r--r--meta/classes-recipe/nopackages.bbclass19
-rw-r--r--meta/classes-recipe/npm.bbclass340
-rw-r--r--meta/classes-recipe/packagegroup.bbclass67
-rw-r--r--meta/classes-recipe/perl-version.bbclass72
-rw-r--r--meta/classes-recipe/perlnative.bbclass9
-rw-r--r--meta/classes-recipe/pixbufcache.bbclass69
-rw-r--r--meta/classes-recipe/pkgconfig.bbclass8
-rw-r--r--meta/classes-recipe/populate_sdk.bbclass13
-rw-r--r--meta/classes-recipe/populate_sdk_base.bbclass384
-rw-r--r--meta/classes-recipe/populate_sdk_ext.bbclass842
-rw-r--r--meta/classes-recipe/ptest-gnome.bbclass14
-rw-r--r--meta/classes-recipe/ptest-perl.bbclass36
-rw-r--r--meta/classes-recipe/ptest.bbclass142
-rw-r--r--meta/classes-recipe/pypi.bbclass34
-rw-r--r--meta/classes-recipe/python3-dir.bbclass11
-rw-r--r--meta/classes-recipe/python3native.bbclass30
-rw-r--r--meta/classes-recipe/python3targetconfig.bbclass35
-rw-r--r--meta/classes-recipe/python_flit_core.bbclass14
-rw-r--r--meta/classes-recipe/python_hatchling.bbclass9
-rw-r--r--meta/classes-recipe/python_pep517.bbclass60
-rw-r--r--meta/classes-recipe/python_poetry_core.bbclass9
-rw-r--r--meta/classes-recipe/python_pyo3.bbclass36
-rw-r--r--meta/classes-recipe/python_setuptools3_rust.bbclass17
-rw-r--r--meta/classes-recipe/python_setuptools_build_meta.bbclass9
-rw-r--r--meta/classes-recipe/qemu.bbclass77
-rw-r--r--meta/classes-recipe/qemuboot.bbclass171
-rw-r--r--meta/classes-recipe/rootfs-postcommands.bbclass460
-rw-r--r--meta/classes-recipe/rootfs_deb.bbclass41
-rw-r--r--meta/classes-recipe/rootfs_ipk.bbclass44
-rw-r--r--meta/classes-recipe/rootfs_rpm.bbclass45
-rw-r--r--meta/classes-recipe/rootfsdebugfiles.bbclass47
-rw-r--r--meta/classes-recipe/rust-bin.bbclass154
-rw-r--r--meta/classes-recipe/rust-common.bbclass177
-rw-r--r--meta/classes-recipe/rust-target-config.bbclass391
-rw-r--r--meta/classes-recipe/rust.bbclass51
-rw-r--r--meta/classes-recipe/scons.bbclass34
-rw-r--r--meta/classes-recipe/setuptools3-base.bbclass37
-rw-r--r--meta/classes-recipe/setuptools3.bbclass38
-rw-r--r--meta/classes-recipe/setuptools3_legacy.bbclass84
-rw-r--r--meta/classes-recipe/siteinfo.bbclass232
-rw-r--r--meta/classes-recipe/syslinux.bbclass194
-rw-r--r--meta/classes-recipe/systemd-boot-cfg.bbclass77
-rw-r--r--meta/classes-recipe/systemd-boot.bbclass35
-rw-r--r--meta/classes-recipe/systemd.bbclass239
-rw-r--r--meta/classes-recipe/testimage.bbclass508
-rw-r--r--meta/classes-recipe/testsdk.bbclass52
-rw-r--r--meta/classes-recipe/texinfo.bbclass24
-rw-r--r--meta/classes-recipe/toolchain-scripts-base.bbclass17
-rw-r--r--meta/classes-recipe/toolchain-scripts.bbclass236
-rw-r--r--meta/classes-recipe/uboot-config.bbclass137
-rw-r--r--meta/classes-recipe/uboot-extlinux-config.bbclass158
-rw-r--r--meta/classes-recipe/uboot-sign.bbclass505
-rw-r--r--meta/classes-recipe/update-alternatives.bbclass333
-rw-r--r--meta/classes-recipe/update-rc.d.bbclass129
-rw-r--r--meta/classes-recipe/upstream-version-is-even.bbclass11
-rw-r--r--meta/classes-recipe/vala.bbclass30
-rw-r--r--meta/classes-recipe/waf.bbclass81
-rw-r--r--meta/classes-recipe/xmlcatalog.bbclass32
-rw-r--r--meta/classes/allarch.bbclass65
-rw-r--r--meta/classes/archiver.bbclass24
-rw-r--r--meta/classes/autotools-brokensep.bbclass5
-rw-r--r--meta/classes/autotools.bbclass254
-rw-r--r--meta/classes/baremetal-image.bbclass121
-rw-r--r--meta/classes/base.bbclass784
-rw-r--r--meta/classes/bash-completion.bbclass7
-rw-r--r--meta/classes/bin_package.bbclass39
-rw-r--r--meta/classes/binconfig-disabled.bbclass30
-rw-r--r--meta/classes/binconfig.bbclass54
-rw-r--r--meta/classes/buildhistory.bbclass32
-rw-r--r--meta/classes/buildstats-summary.bbclass6
-rw-r--r--meta/classes/buildstats.bbclass295
-rw-r--r--meta/classes/cargo.bbclass90
-rw-r--r--meta/classes/cargo_common.bbclass124
-rw-r--r--meta/classes/ccache.bbclass6
-rw-r--r--meta/classes/ccmake.bbclass6
-rw-r--r--meta/classes/chrpath.bbclass6
-rw-r--r--meta/classes/cmake.bbclass217
-rw-r--r--meta/classes/cml1.bbclass101
-rw-r--r--meta/classes/compress_doc.bbclass263
-rw-r--r--meta/classes/copyleft_compliance.bbclass6
-rw-r--r--meta/classes/copyleft_filter.bbclass8
-rw-r--r--meta/classes/core-image.bbclass79
-rw-r--r--meta/classes/cpan-base.bbclass27
-rw-r--r--meta/classes/cpan.bbclass65
-rw-r--r--meta/classes/cpan_build.bbclass41
-rw-r--r--meta/classes/create-spdx.bbclass33
-rw-r--r--meta/classes/cross-canadian.bbclass196
-rw-r--r--meta/classes/cross.bbclass97
-rw-r--r--meta/classes/crosssdk.bbclass51
-rw-r--r--meta/classes/cve-check.bbclass186
-rw-r--r--meta/classes/debian.bbclass150
-rw-r--r--meta/classes/deploy.bbclass12
-rw-r--r--meta/classes/devicetree.bbclass148
-rw-r--r--meta/classes/devshell.bbclass160
-rw-r--r--meta/classes/devtool-source.bbclass6
-rw-r--r--meta/classes/devupstream.bbclass55
-rw-r--r--meta/classes/distro_features_check.bbclass7
-rw-r--r--meta/classes/distrooverrides.bbclass32
-rw-r--r--meta/classes/dos2unix.bbclass14
-rw-r--r--meta/classes/externalsrc.bbclass268
-rw-r--r--meta/classes/extrausers.bbclass6
-rw-r--r--meta/classes/features_check.bbclass54
-rw-r--r--meta/classes/fontcache.bbclass57
-rw-r--r--meta/classes/fs-uuid.bbclass24
-rw-r--r--meta/classes/gconf.bbclass71
-rw-r--r--meta/classes/gettext.bbclass22
-rw-r--r--meta/classes/gi-docgen.bbclass24
-rw-r--r--meta/classes/gio-module-cache.bbclass38
-rw-r--r--meta/classes/glide.bbclass9
-rw-r--r--meta/classes/gnomebase.bbclass31
-rw-r--r--meta/classes/go-mod.bbclass20
-rw-r--r--meta/classes/go-ptest.bbclass54
-rw-r--r--meta/classes/go.bbclass161
-rw-r--r--meta/classes/goarch.bbclass116
-rw-r--r--meta/classes/gobject-introspection-data.bbclass7
-rw-r--r--meta/classes/gobject-introspection.bbclass55
-rw-r--r--meta/classes/grub-efi-cfg.bbclass123
-rw-r--r--meta/classes/grub-efi.bbclass8
-rw-r--r--meta/classes/gsettings.bbclass42
-rw-r--r--meta/classes/gtk-doc.bbclass83
-rw-r--r--meta/classes/gtk-icon-cache.bbclass89
-rw-r--r--meta/classes/gtk-immodules-cache.bbclass76
-rw-r--r--meta/classes/icecc.bbclass6
-rw-r--r--meta/classes/image-artifact-names.bbclass22
-rw-r--r--meta/classes/image-buildinfo.bbclass54
-rw-r--r--meta/classes/image-combined-dbg.bbclass9
-rw-r--r--meta/classes/image-container.bbclass21
-rw-r--r--meta/classes/image-live.bbclass264
-rw-r--r--meta/classes/image-postinst-intercepts.bbclass23
-rw-r--r--meta/classes/image.bbclass679
-rw-r--r--meta/classes/image_types.bbclass342
-rw-r--r--meta/classes/image_types_wic.bbclass182
-rw-r--r--meta/classes/insane.bbclass1434
-rw-r--r--meta/classes/kernel-arch.bbclass68
-rw-r--r--meta/classes/kernel-artifact-names.bbclass31
-rw-r--r--meta/classes/kernel-devicetree.bbclass113
-rw-r--r--meta/classes/kernel-fitimage.bbclass795
-rw-r--r--meta/classes/kernel-grub.bbclass105
-rw-r--r--meta/classes/kernel-module-split.bbclass191
-rw-r--r--meta/classes/kernel-uboot.bbclass34
-rw-r--r--meta/classes/kernel-uimage.bbclass35
-rw-r--r--meta/classes/kernel-yocto.bbclass718
-rw-r--r--meta/classes/kernel.bbclass815
-rw-r--r--meta/classes/kernelsrc.bbclass10
-rw-r--r--meta/classes/lib_package.bbclass7
-rw-r--r--meta/classes/libc-package.bbclass384
-rw-r--r--meta/classes/license.bbclass420
-rw-r--r--meta/classes/license_image.bbclass289
-rw-r--r--meta/classes/linux-dummy.bbclass26
-rw-r--r--meta/classes/linux-kernel-base.bbclass41
-rw-r--r--meta/classes/linuxloader.bbclass76
-rw-r--r--meta/classes/live-vm-common.bbclass94
-rw-r--r--meta/classes/logging.bbclass101
-rw-r--r--meta/classes/manpages.bbclass45
-rw-r--r--meta/classes/mcextend.bbclass6
-rw-r--r--meta/classes/meson-routines.bbclass51
-rw-r--r--meta/classes/meson.bbclass181
-rw-r--r--meta/classes/metadata_scm.bbclass48
-rw-r--r--meta/classes/migrate_localcount.bbclass6
-rw-r--r--meta/classes/mime-xdg.bbclass74
-rw-r--r--meta/classes/mime.bbclass70
-rw-r--r--meta/classes/mirrors.bbclass89
-rw-r--r--meta/classes/module-base.bbclass21
-rw-r--r--meta/classes/module.bbclass74
-rw-r--r--meta/classes/multilib.bbclass6
-rw-r--r--meta/classes/multilib_global.bbclass6
-rw-r--r--meta/classes/multilib_header.bbclass52
-rw-r--r--meta/classes/multilib_script.bbclass34
-rw-r--r--meta/classes/native.bbclass228
-rw-r--r--meta/classes/nativesdk.bbclass117
-rw-r--r--meta/classes/nopackages.bbclass13
-rw-r--r--meta/classes/npm.bbclass317
-rw-r--r--meta/classes/oelint.bbclass6
-rw-r--r--meta/classes/overlayfs-etc.bbclass6
-rw-r--r--meta/classes/overlayfs.bbclass6
-rw-r--r--meta/classes/own-mirrors.bbclass6
-rw-r--r--meta/classes/package.bbclass2522
-rw-r--r--meta/classes/package_deb.bbclass327
-rw-r--r--meta/classes/package_ipk.bbclass286
-rw-r--r--meta/classes/package_pkgdata.bbclass167
-rw-r--r--meta/classes/package_rpm.bbclass755
-rw-r--r--meta/classes/package_tar.bbclass71
-rw-r--r--meta/classes/packagedata.bbclass34
-rw-r--r--meta/classes/packagegroup.bbclass61
-rw-r--r--meta/classes/patch.bbclass169
-rw-r--r--meta/classes/perl-version.bbclass66
-rw-r--r--meta/classes/perlnative.bbclass3
-rw-r--r--meta/classes/pixbufcache.bbclass63
-rw-r--r--meta/classes/pkgconfig.bbclass2
-rw-r--r--meta/classes/populate_sdk.bbclass7
-rw-r--r--meta/classes/populate_sdk_base.bbclass363
-rw-r--r--meta/classes/populate_sdk_ext.bbclass837
-rw-r--r--meta/classes/prexport.bbclass6
-rw-r--r--meta/classes/primport.bbclass6
-rw-r--r--meta/classes/ptest-gnome.bbclass8
-rw-r--r--meta/classes/ptest-perl.bbclass30
-rw-r--r--meta/classes/ptest.bbclass132
-rw-r--r--meta/classes/pypi.bbclass28
-rw-r--r--meta/classes/python3-dir.bbclass5
-rw-r--r--meta/classes/python3native.bbclass24
-rw-r--r--meta/classes/python3targetconfig.bbclass29
-rw-r--r--meta/classes/python_flit_core.bbclass5
-rw-r--r--meta/classes/python_pep517.bbclass56
-rw-r--r--meta/classes/python_poetry_core.bbclass5
-rw-r--r--meta/classes/python_pyo3.bbclass30
-rw-r--r--meta/classes/python_setuptools3_rust.bbclass11
-rw-r--r--meta/classes/python_setuptools_build_meta.bbclass5
-rw-r--r--meta/classes/qemu.bbclass71
-rw-r--r--meta/classes/qemuboot.bbclass165
-rw-r--r--meta/classes/recipe_sanity.bbclass6
-rw-r--r--meta/classes/relative_symlinks.bbclass6
-rw-r--r--meta/classes/relocatable.bbclass6
-rw-r--r--meta/classes/remove-libtool.bbclass6
-rw-r--r--meta/classes/report-error.bbclass5
-rw-r--r--meta/classes/rm_work.bbclass6
-rw-r--r--meta/classes/rm_work_and_downloads.bbclass3
-rw-r--r--meta/classes/rootfs-postcommands.bbclass424
-rw-r--r--meta/classes/rootfs_deb.bbclass39
-rw-r--r--meta/classes/rootfs_ipk.bbclass38
-rw-r--r--meta/classes/rootfs_rpm.bbclass39
-rw-r--r--meta/classes/rootfsdebugfiles.bbclass41
-rw-r--r--meta/classes/rust-bin.bbclass149
-rw-r--r--meta/classes/rust-common.bbclass186
-rw-r--r--meta/classes/rust.bbclass45
-rw-r--r--meta/classes/sanity.bbclass1046
-rw-r--r--meta/classes/scons.bbclass28
-rw-r--r--meta/classes/setuptools3-base.bbclass31
-rw-r--r--meta/classes/setuptools3.bbclass33
-rw-r--r--meta/classes/setuptools3_legacy.bbclass78
-rw-r--r--meta/classes/sign_ipk.bbclass6
-rw-r--r--meta/classes/sign_package_feed.bbclass6
-rw-r--r--meta/classes/sign_rpm.bbclass6
-rw-r--r--meta/classes/siteconfig.bbclass6
-rw-r--r--meta/classes/siteinfo.bbclass226
-rw-r--r--meta/classes/sstate.bbclass1352
-rw-r--r--meta/classes/staging.bbclass684
-rw-r--r--meta/classes/syslinux.bbclass194
-rw-r--r--meta/classes/systemd-boot-cfg.bbclass71
-rw-r--r--meta/classes/systemd-boot.bbclass35
-rw-r--r--meta/classes/systemd.bbclass233
-rw-r--r--meta/classes/terminal.bbclass6
-rw-r--r--meta/classes/testexport.bbclass10
-rw-r--r--meta/classes/testimage.bbclass505
-rw-r--r--meta/classes/testsdk.bbclass52
-rw-r--r--meta/classes/texinfo.bbclass18
-rw-r--r--meta/classes/toaster.bbclass2
-rw-r--r--meta/classes/toolchain-scripts-base.bbclass11
-rw-r--r--meta/classes/toolchain-scripts.bbclass208
-rw-r--r--meta/classes/typecheck.bbclass6
-rw-r--r--meta/classes/uboot-config.bbclass129
-rw-r--r--meta/classes/uboot-extlinux-config.bbclass158
-rw-r--r--meta/classes/uboot-sign.bbclass494
-rw-r--r--meta/classes/uninative.bbclass171
-rw-r--r--meta/classes/update-alternatives.bbclass327
-rw-r--r--meta/classes/update-rc.d.bbclass123
-rw-r--r--meta/classes/upstream-version-is-even.bbclass5
-rw-r--r--meta/classes/useradd-staticids.bbclass6
-rw-r--r--meta/classes/useradd.bbclass6
-rw-r--r--meta/classes/useradd_base.bbclass6
-rw-r--r--meta/classes/utility-tasks.bbclass54
-rw-r--r--meta/classes/utils.bbclass327
-rw-r--r--meta/classes/vala.bbclass24
-rw-r--r--meta/classes/waf.bbclass75
-rw-r--r--meta/classes/xmlcatalog.bbclass26
-rw-r--r--meta/classes/yocto-check-layer.bbclass6
-rw-r--r--meta/conf/bitbake.conf20
-rw-r--r--meta/conf/distro/include/cve-extra-exclusions.inc63
-rw-r--r--meta/conf/distro/include/default-distrovars.inc5
-rw-r--r--meta/conf/distro/include/maintainers.inc16
-rw-r--r--meta/conf/distro/include/no-static-libs.inc2
-rw-r--r--meta/conf/distro/include/ptest-packagelists.inc2
-rw-r--r--meta/conf/distro/include/security_flags.inc1
-rw-r--r--meta/conf/distro/include/tcmode-default.inc10
-rw-r--r--meta/conf/distro/include/yocto-uninative.inc10
-rw-r--r--meta/conf/layer.conf3
-rw-r--r--meta/conf/local.conf.sample7
-rw-r--r--meta/conf/local.conf.sample.extended2
-rw-r--r--meta/conf/machine/include/x86/x86-base.inc2
-rw-r--r--meta/conf/machine/qemuarmv5.conf2
-rw-r--r--meta/conf/machine/qemux86-64.conf2
-rw-r--r--meta/conf/sanity.conf2
-rw-r--r--meta/conf/testexport.conf3
-rw-r--r--meta/files/overlayfs-create-dirs.service.in3
-rw-r--r--meta/files/overlayfs-create-dirs.sh11
-rw-r--r--meta/lib/bblayers/create.py2
-rw-r--r--meta/lib/buildstats.py82
-rw-r--r--meta/lib/oe/__init__.py2
-rw-r--r--meta/lib/oe/buildcfg.py49
-rw-r--r--meta/lib/oe/cachedpath.py2
-rw-r--r--meta/lib/oe/classextend.py2
-rw-r--r--meta/lib/oe/classutils.py2
-rw-r--r--meta/lib/oe/copy_buildsystem.py2
-rw-r--r--meta/lib/oe/cve_check.py18
-rw-r--r--meta/lib/oe/data.py2
-rw-r--r--meta/lib/oe/distro_check.py2
-rw-r--r--meta/lib/oe/elf.py2
-rw-r--r--meta/lib/oe/gpg_sign.py2
-rw-r--r--meta/lib/oe/license.py2
-rw-r--r--meta/lib/oe/lsb.py2
-rw-r--r--meta/lib/oe/maketype.py2
-rw-r--r--meta/lib/oe/manifest.py2
-rw-r--r--meta/lib/oe/npm_registry.py175
-rw-r--r--meta/lib/oe/overlayfs.py2
-rw-r--r--meta/lib/oe/package.py2
-rw-r--r--meta/lib/oe/package_manager/__init__.py6
-rw-r--r--meta/lib/oe/package_manager/deb/__init__.py12
-rw-r--r--meta/lib/oe/package_manager/deb/manifest.py2
-rw-r--r--meta/lib/oe/package_manager/deb/rootfs.py2
-rw-r--r--meta/lib/oe/package_manager/deb/sdk.py2
-rw-r--r--meta/lib/oe/package_manager/ipk/__init__.py29
-rw-r--r--meta/lib/oe/package_manager/ipk/manifest.py2
-rw-r--r--meta/lib/oe/package_manager/ipk/rootfs.py2
-rw-r--r--meta/lib/oe/package_manager/ipk/sdk.py2
-rw-r--r--meta/lib/oe/package_manager/rpm/__init__.py6
-rw-r--r--meta/lib/oe/package_manager/rpm/manifest.py2
-rw-r--r--meta/lib/oe/package_manager/rpm/rootfs.py2
-rw-r--r--meta/lib/oe/package_manager/rpm/sdk.py2
-rw-r--r--meta/lib/oe/packagedata.py2
-rw-r--r--meta/lib/oe/packagegroup.py2
-rw-r--r--meta/lib/oe/patch.py12
-rw-r--r--meta/lib/oe/path.py2
-rw-r--r--meta/lib/oe/prservice.py2
-rw-r--r--meta/lib/oe/qa.py2
-rw-r--r--meta/lib/oe/recipeutils.py9
-rw-r--r--meta/lib/oe/reproducible.py3
-rw-r--r--meta/lib/oe/rootfs.py8
-rw-r--r--meta/lib/oe/rust.py6
-rw-r--r--meta/lib/oe/sbom.py6
-rw-r--r--meta/lib/oe/sdk.py2
-rw-r--r--meta/lib/oe/spdx.py4
-rw-r--r--meta/lib/oe/sstatesig.py11
-rw-r--r--meta/lib/oe/terminal.py2
-rw-r--r--meta/lib/oe/types.py2
-rw-r--r--meta/lib/oe/useradd.py2
-rw-r--r--meta/lib/oe/utils.py66
-rw-r--r--meta/lib/oeqa/buildperf/base.py2
-rw-r--r--meta/lib/oeqa/controllers/__init__.py2
-rw-r--r--meta/lib/oeqa/controllers/testtargetloader.py2
-rw-r--r--meta/lib/oeqa/core/utils/concurrencytest.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/_qemutiny.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/apt.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/boot.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/buildcpio.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/buildgalculator.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/buildlzip.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/connman.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/date.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/df.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/dnf.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/gcc.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/gi.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/go.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/gstreamer.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/kernelmodule.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ksample.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ldd.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/logrotate.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/multilib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/oe_syslog.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/opkg.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/pam.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/perl.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ping.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ptest.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/python.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/rpm.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/rt.py19
-rw-r--r--meta/lib/oeqa/runtime/cases/rtc.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/runlevel.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/rust.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/scons.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/scp.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/skeletoninit.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ssh.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/stap.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/storage.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/suspend.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/systemd.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/terminal.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/usb_hid.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/weston.py10
-rw-r--r--meta/lib/oeqa/runtime/cases/x32lib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/xorg.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/build.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/gcc.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/https.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/sanity.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-docs-cases/build.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/assimp.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/buildcpio.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/buildepoxy.py5
-rw-r--r--meta/lib/oeqa/sdk/cases/buildgalculator.py5
-rw-r--r--meta/lib/oeqa/sdk/cases/buildlzip.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/gcc.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/perl.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/python.py13
-rw-r--r--meta/lib/oeqa/sdk/cases/rust.py35
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml6
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/src/main.rs3
-rw-r--r--meta/lib/oeqa/sdk/testmetaidesupport.py45
-rw-r--r--meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/archiver.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/bblayers.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/bblogging.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/bbtests.py8
-rw-r--r--meta/lib/oeqa/selftest/cases/binutils.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/buildhistory.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/buildoptions.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/containerimage.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/cve_check.py165
-rw-r--r--meta/lib/oeqa/selftest/cases/devtool.py165
-rw-r--r--meta/lib/oeqa/selftest/cases/distrodata.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/eSDK.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/fetch.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/fitimage.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/gcc.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/gdbserver.py125
-rw-r--r--meta/lib/oeqa/selftest/cases/git.py15
-rw-r--r--meta/lib/oeqa/selftest/cases/glibc.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/gotoolchain.py10
-rw-r--r--meta/lib/oeqa/selftest/cases/image_typedep.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/imagefeatures.py8
-rw-r--r--meta/lib/oeqa/selftest/cases/incompatible_lic.py7
-rw-r--r--meta/lib/oeqa/selftest/cases/intercept.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/kerneldevelopment.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/layerappend.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/liboe.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/lic_checksum.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/manifest.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/meta_ide.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/multiconfig.py15
-rw-r--r--meta/lib/oeqa/selftest/cases/newlib.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/buildhistory.py8
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/elf.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/license.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/path.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/types.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/utils.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oescripts.py7
-rw-r--r--meta/lib/oeqa/selftest/cases/overlayfs.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/package.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/pkgdata.py11
-rw-r--r--meta/lib/oeqa/selftest/cases/prservice.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/pseudo.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/recipetool.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/recipeutils.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/reproducible.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/resulttooltests.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/runcmd.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/runtime_test.py18
-rw-r--r--meta/lib/oeqa/selftest/cases/selftest.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/signing.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/sstate.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/sstatetests.py32
-rw-r--r--meta/lib/oeqa/selftest/cases/sysroot.py49
-rw-r--r--meta/lib/oeqa/selftest/cases/tinfoil.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/wic.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/wrapper.py16
-rw-r--r--meta/lib/oeqa/utils/__init__.py2
-rw-r--r--meta/lib/oeqa/utils/dump.py2
-rw-r--r--meta/lib/oeqa/utils/ftools.py2
-rw-r--r--meta/lib/oeqa/utils/httpserver.py2
-rw-r--r--meta/lib/oeqa/utils/logparser.py2
-rw-r--r--meta/lib/oeqa/utils/network.py2
-rw-r--r--meta/lib/oeqa/utils/nfs.py4
-rw-r--r--meta/lib/oeqa/utils/package_manager.py2
-rw-r--r--meta/lib/oeqa/utils/subprocesstweak.py2
-rw-r--r--meta/lib/rootfspostcommands.py2
-rw-r--r--meta/recipes-bsp/alsa-state/alsa-state.bb7
-rwxr-xr-xmeta/recipes-bsp/alsa-state/alsa-state/alsa-state-init3
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr/0001-remove-extra-decl.patch31
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr/0001-src-make-compatible-with-efivar-38.patch47
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr/97668ae0bce776a36ea2001dea63d376be8274ac.patch83
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr_17.bb39
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr_18.bb35
-rw-r--r--meta/recipes-bsp/efivar/efivar/0001-Fix-glibc-2.36-build-mount.h-conflicts.patch60
-rw-r--r--meta/recipes-bsp/efivar/efivar/0001-Fix-invalid-free-in-main.patch30
-rw-r--r--meta/recipes-bsp/efivar/efivar/efisecdb-fix-build-with-musl-libc.patch184
-rw-r--r--meta/recipes-bsp/efivar/efivar_38.bb5
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi_3.0.14.bb2
-rw-r--r--meta/recipes-bsp/opensbi/opensbi_1.0.bb48
-rw-r--r--meta/recipes-bsp/opensbi/opensbi_1.1.bb47
-rw-r--r--meta/recipes-bsp/pciutils/pciutils_3.8.0.bb5
-rw-r--r--meta/recipes-bsp/setserial/setserial/0001-setserial.c-Add-needed-system-headers-for-ioctl-and-.patch41
-rw-r--r--meta/recipes-bsp/setserial/setserial_2.17.bb3
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-common.inc4
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-tools.inc7
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-tools_2022.01.bb2
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-tools_2022.07.bb3
-rw-r--r--meta/recipes-bsp/u-boot/u-boot_2022.07.bb (renamed from meta/recipes-bsp/u-boot/u-boot_2022.01.bb)0
-rw-r--r--meta/recipes-connectivity/avahi/avahi_0.8.bb4
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/0001-avoid-start-failure-with-bind-user.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/0001-avoid-start-failure-with-bind-user.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/0001-named-lwresd-V-and-start-log-hide-build-options.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/0001-named-lwresd-V-and-start-log-hide-build-options.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/bind-ensure-searching-for-json-headers-searches-sysr.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/bind-ensure-searching-for-json-headers-searches-sysr.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/bind9 (renamed from meta/recipes-connectivity/bind/bind-9.18.2/bind9)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/conf.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/conf.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/generate-rndc-key.sh (renamed from meta/recipes-connectivity/bind/bind-9.18.2/generate-rndc-key.sh)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/init.d-add-support-for-read-only-rootfs.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/init.d-add-support-for-read-only-rootfs.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/make-etc-initd-bind-stop-work.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/make-etc-initd-bind-stop-work.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.18.5/named.service (renamed from meta/recipes-connectivity/bind/bind-9.18.2/named.service)0
-rw-r--r--meta/recipes-connectivity/bind/bind_9.18.2.bb127
-rw-r--r--meta/recipes-connectivity/bind/bind_9.18.5.bb114
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5.inc1
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5/fix_service.patch30
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.64.bb70
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.65.bb70
-rw-r--r--meta/recipes-connectivity/connman/connman/CVE-2022-32292.patch37
-rw-r--r--meta/recipes-connectivity/connman/connman/CVE-2022-32293_p1.patch141
-rw-r--r--meta/recipes-connectivity/connman/connman/CVE-2022-32293_p2.patch174
-rw-r--r--meta/recipes-connectivity/connman/connman_1.41.bb3
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils_2.2.bb211
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils_2.3.bb211
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2/0001-configure-Define-_GNU_SOURCE-when-checking-for-setns.patch28
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2/0001-ip-ipstats.c-add-an-include-where-MIN-is-defined.patch25
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2_5.17.0.bb11
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2_5.19.0.bb13
-rw-r--r--meta/recipes-connectivity/iw/iw_5.16.bb31
-rw-r--r--meta/recipes-connectivity/iw/iw_5.19.bb31
-rw-r--r--meta/recipes-connectivity/kea/kea_2.0.2.bb77
-rw-r--r--meta/recipes-connectivity/kea/kea_2.2.0.bb77
-rw-r--r--meta/recipes-connectivity/libuv/libuv_1.44.1.bb21
-rw-r--r--meta/recipes-connectivity/libuv/libuv_1.44.2.bb21
-rw-r--r--meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb4
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-Configure-do-not-tweak-mips-cflags.patch10
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch20
-rw-r--r--meta/recipes-connectivity/openssl/openssl/afalg.patch10
-rw-r--r--meta/recipes-connectivity/openssl/openssl_3.0.3.bb258
-rw-r--r--meta/recipes-connectivity/openssl/openssl_3.0.5.bb258
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0002-Use-bin-sh-instead-of-bin-bash-for-the-root-user.patch8
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0003-Remove-for-root-since-we-do-not-have-an-etc-shadow.patch8
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0006-Disable-shell-for-default-users.patch54
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0006-Make-it-possible-to-build-without-debconf-support.patch129
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0007-Disable-generation-of-the-documentation.patch32
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0007-Make-it-possible-to-disable-the-generation-of-the-do.patch46
-rw-r--r--meta/recipes-core/base-passwd/base-passwd_3.5.29.bb118
-rw-r--r--meta/recipes-core/base-passwd/base-passwd_3.5.52.bb122
-rw-r--r--meta/recipes-core/busybox/busybox/CVE-2022-30065.patch29
-rw-r--r--meta/recipes-core/busybox/busybox_1.35.0.bb1
-rw-r--r--meta/recipes-core/coreutils/coreutils_9.1.bb3
-rw-r--r--meta/recipes-core/dbus/dbus_1.14.0.bb3
-rw-r--r--meta/recipes-core/dropbear/dropbear.inc126
-rw-r--r--meta/recipes-core/dropbear/dropbear_2022.82.bb129
-rw-r--r--meta/recipes-core/ell/ell_0.50.bb23
-rw-r--r--meta/recipes-core/ell/ell_0.52.bb22
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch2
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0_2.72.1.bb53
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0_2.72.3.bb53
-rw-r--r--meta/recipes-core/glib-networking/glib-networking_2.72.0.bb38
-rw-r--r--meta/recipes-core/glib-networking/glib-networking_2.72.1.bb38
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native_2.35.bb54
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native_2.36.bb54
-rw-r--r--meta/recipes-core/glibc/glibc-common.inc2
-rw-r--r--meta/recipes-core/glibc/glibc-locale_2.36.bb (renamed from meta/recipes-core/glibc/glibc-locale_2.35.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-mtrace_2.36.bb (renamed from meta/recipes-core/glibc/glibc-mtrace_2.35.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-scripts_2.36.bb (renamed from meta/recipes-core/glibc/glibc-scripts_2.35.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-tests_2.35.bb116
-rw-r--r--meta/recipes-core/glibc/glibc-tests_2.36.bb119
-rw-r--r--meta/recipes-core/glibc/glibc-testsuite_2.36.bb (renamed from meta/recipes-core/glibc/glibc-testsuite_2.35.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-version.inc6
-rw-r--r--meta/recipes-core/glibc/glibc/0001-localedef-Add-hardlink-resolver-from-util-linux.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0002-localedef-fix-ups-hardlink-to-make-it-compile.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch8
-rw-r--r--meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch6
-rw-r--r--meta/recipes-core/glibc/glibc/0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch22
-rw-r--r--meta/recipes-core/glibc/glibc/0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch6
-rw-r--r--meta/recipes-core/glibc/glibc/0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch12
-rw-r--r--meta/recipes-core/glibc/glibc/0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch4
-rw-r--r--meta/recipes-core/glibc/glibc/0009-yes-within-the-path-sets-wrong-config-variables.patch30
-rw-r--r--meta/recipes-core/glibc/glibc/0010-eglibc-Cross-building-and-testing-instructions.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0011-eglibc-Help-bootstrap-cross-toolchain.patch8
-rw-r--r--meta/recipes-core/glibc/glibc/0012-eglibc-Resolve-__fpscr_values-on-SH4.patch4
-rw-r--r--meta/recipes-core/glibc/glibc/0013-eglibc-Forward-port-cross-locale-generation-support.patch46
-rw-r--r--meta/recipes-core/glibc/glibc/0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch4
-rw-r--r--meta/recipes-core/glibc/glibc/0015-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch53
-rw-r--r--meta/recipes-core/glibc/glibc/0016-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch53
-rw-r--r--meta/recipes-core/glibc/glibc/0016-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch29
-rw-r--r--meta/recipes-core/glibc/glibc/0017-powerpc-Do-not-ask-compiler-for-finding-arch.patch48
-rw-r--r--meta/recipes-core/glibc/glibc/0017-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch29
-rw-r--r--meta/recipes-core/glibc/glibc/0018-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch19
-rw-r--r--meta/recipes-core/glibc/glibc/0019-Replace-echo-with-printf-builtin-in-nscd-init-script.patch79
-rw-r--r--meta/recipes-core/glibc/glibc/0019-powerpc-Do-not-ask-compiler-for-finding-arch.patch48
-rw-r--r--meta/recipes-core/glibc/glibc/0020-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch40
-rw-r--r--meta/recipes-core/glibc/glibc/0021-Replace-echo-with-printf-builtin-in-nscd-init-script.patch79
-rw-r--r--meta/recipes-core/glibc/glibc/0021-timezone-Make-shell-interpreter-overridable-in-tzsel.patch47
-rw-r--r--meta/recipes-core/glibc/glibc/0022-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch39
-rw-r--r--meta/recipes-core/glibc/glibc/0022-tzselect.ksh-Use-bin-sh-default-shell-interpreter.patch27
-rw-r--r--meta/recipes-core/glibc/glibc/0023-fix-create-thread-failed-in-unprivileged-process-BZ-.patch86
-rw-r--r--meta/recipes-core/glibc/glibc/0023-timezone-Make-shell-interpreter-overridable-in-tzsel.patch47
-rw-r--r--meta/recipes-core/glibc/glibc/0024-Avoid-hardcoded-build-time-paths-in-the-output-binar.patch32
-rw-r--r--meta/recipes-core/glibc/glibc/0024-fix-create-thread-failed-in-unprivileged-process-BZ-.patch88
-rw-r--r--meta/recipes-core/glibc/glibc/0025-Revert-Linux-Implement-a-useful-version-of-_startup_.patch128
-rw-r--r--meta/recipes-core/glibc/glibc_2.35.bb123
-rw-r--r--meta/recipes-core/glibc/glibc_2.36.bb126
-rw-r--r--meta/recipes-core/images/build-appliance-image_15.0.0.bb2
-rw-r--r--meta/recipes-core/initrdscripts/files/init-install-efi-testfs.sh2
-rw-r--r--meta/recipes-core/initrdscripts/files/init-install-efi.sh2
-rw-r--r--meta/recipes-core/initrdscripts/files/init-install-testfs.sh2
-rw-r--r--meta/recipes-core/initrdscripts/files/init-install.sh2
-rwxr-xr-xmeta/recipes-core/initrdscripts/initramfs-framework/finish9
-rw-r--r--meta/recipes-core/initscripts/init-system-helpers_1.62.bb41
-rw-r--r--meta/recipes-core/initscripts/init-system-helpers_1.64.bb41
-rw-r--r--meta/recipes-core/initscripts/initscripts_1.0.bb2
-rw-r--r--meta/recipes-core/kbd/kbd_2.4.0.bb46
-rw-r--r--meta/recipes-core/kbd/kbd_2.5.1.bb46
-rw-r--r--meta/recipes-core/libcgroup/libcgroup_2.0.1.bb33
-rw-r--r--meta/recipes-core/libcgroup/libcgroup_2.0.2.bb33
-rw-r--r--meta/recipes-core/libxml/libxml2_2.9.14.bb4
-rw-r--r--meta/recipes-core/meta/cve-update-db-native.bb6
-rw-r--r--meta/recipes-core/meta/meta-ide-support.bb26
-rw-r--r--meta/recipes-core/meta/signing-keys.bb2
-rw-r--r--meta/recipes-core/meta/testexport-tarball.bb2
-rw-r--r--meta/recipes-core/meta/wic-tools.bb1
-rw-r--r--meta/recipes-core/musl/bsd-headers.bb2
-rw-r--r--meta/recipes-core/musl/libssp-nonshared.bb2
-rw-r--r--meta/recipes-core/musl/musl-locales_git.bb21
-rw-r--r--meta/recipes-core/musl/musl-obstack.bb4
-rw-r--r--meta/recipes-core/musl/musl/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch18
-rw-r--r--meta/recipes-core/musl/musl/0002-ldso-Use-syslibdir-and-libdir-as-default-pathes-to-l.patch20
-rw-r--r--meta/recipes-core/musl/musl_git.bb2
-rw-r--r--meta/recipes-core/ncurses/ncurses.inc4
-rw-r--r--meta/recipes-core/ncurses/ncurses_6.3+20220423.bb15
-rw-r--r--meta/recipes-core/ncurses/ncurses_6.3.bb14
-rw-r--r--meta/recipes-core/newlib/newlib_4.2.0.bb2
-rw-r--r--meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning-1.patch51
-rw-r--r--meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning.patch49
-rw-r--r--meta/recipes-core/ovmf/ovmf_git.bb6
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-ssh-dropbear.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-rust-cross-canadian.bb9
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-self-hosted.bb6
-rw-r--r--meta/recipes-core/seatd/seatd_0.6.4.bb35
-rw-r--r--meta/recipes-core/seatd/seatd_0.7.0.bb35
-rw-r--r--meta/recipes-core/systemd/systemd-boot_251.4.bb (renamed from meta/recipes-core/systemd/systemd-boot_250.5.bb)0
-rwxr-xr-xmeta/recipes-core/systemd/systemd-systemctl/systemctl14
-rw-r--r--meta/recipes-core/systemd/systemd.inc7
-rw-r--r--meta/recipes-core/systemd/systemd/0001-Adjust-for-musl-headers.patch47
-rw-r--r--meta/recipes-core/systemd/systemd/0001-Move-sysusers.d-sysctl.d-binfmt.d-modules-load.d-to-.patch71
-rw-r--r--meta/recipes-core/systemd/systemd/0001-binfmt-Don-t-install-dependency-links-at-install-tim.patch2
-rw-r--r--meta/recipes-core/systemd/systemd/0001-pass-correct-parameters-to-getdents64.patch25
-rw-r--r--meta/recipes-core/systemd/systemd/0001-resolve-Use-sockaddr-pointer-type-for-bind.patch46
-rw-r--r--meta/recipes-core/systemd/systemd/0001-systemd.pc.in-use-ROOTPREFIX-without-suffixed-slash.patch42
-rw-r--r--meta/recipes-core/systemd/systemd/0001-test-parse-argument-Include-signal.h.patch27
-rw-r--r--meta/recipes-core/systemd/systemd/0002-Add-sys-stat.h-for-S_IFDIR.patch6
-rw-r--r--meta/recipes-core/systemd/systemd/0002-don-t-use-glibc-specific-qsort_r.patch163
-rw-r--r--meta/recipes-core/systemd/systemd/0003-implment-systemd-sysv-install-for-OE.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0003-missing_type.h-add-__compare_fn_t-and-comparison_fn_.patch76
-rw-r--r--meta/recipes-core/systemd/systemd/0003-missing_type.h-add-comparison_fn_t.patch61
-rw-r--r--meta/recipes-core/systemd/systemd/0004-add-fallback-parse_printf_format-implementation.patch8
-rw-r--r--meta/recipes-core/systemd/systemd/0005-src-basic-missing.h-check-for-missing-strndupa.patch53
-rw-r--r--meta/recipes-core/systemd/systemd/0007-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch7
-rw-r--r--meta/recipes-core/systemd/systemd/0008-add-missing-FTW_-macros-for-musl.patch43
-rw-r--r--meta/recipes-core/systemd/systemd/0009-fix-missing-of-__register_atfork-for-non-glibc-build.patch43
-rw-r--r--meta/recipes-core/systemd/systemd/0011-test-sizeof.c-Disable-tests-for-missing-typedefs-in-.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0012-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch8
-rw-r--r--meta/recipes-core/systemd/systemd/0014-Do-not-disable-buffering-when-writing-to-oom_score_a.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0016-Hide-__start_BUS_ERROR_MAP-and-__stop_BUS_ERROR_MAP.patch33
-rw-r--r--meta/recipes-core/systemd/systemd/0017-missing_type.h-add-__compar_d_fn_t-definition.patch28
-rw-r--r--meta/recipes-core/systemd/systemd/0019-Handle-missing-LOCK_EX.patch24
-rw-r--r--meta/recipes-core/systemd/systemd/0020-Fix-incompatible-pointer-type-struct-sockaddr_un.patch38
-rw-r--r--meta/recipes-core/systemd/systemd/0021-test-json.c-define-M_PIl.patch31
-rw-r--r--meta/recipes-core/systemd/systemd/0022-do-not-disable-buffer-in-writing-files.patch74
-rw-r--r--meta/recipes-core/systemd/systemd_250.5.bb796
-rw-r--r--meta/recipes-core/systemd/systemd_251.4.bb796
-rw-r--r--meta/recipes-core/sysvinit/sysvinit-inittab/start_getty3
-rw-r--r--meta/recipes-core/sysvinit/sysvinit/sysvinit_remove_linux_fs.patch17
-rw-r--r--meta/recipes-core/sysvinit/sysvinit_3.04.bb1
-rw-r--r--meta/recipes-core/udev/eudev/0001-build-Remove-dead-g-i-r-configuration.patch155
-rw-r--r--meta/recipes-core/udev/eudev/init37
-rw-r--r--meta/recipes-core/udev/eudev_3.2.11.bb44
-rw-r--r--meta/recipes-core/udev/udev-extraconf/mount.ignorelist (renamed from meta/recipes-core/udev/udev-extraconf/mount.blacklist)0
-rw-r--r--meta/recipes-core/udev/udev-extraconf/mount.sh92
-rw-r--r--meta/recipes-core/udev/udev-extraconf_1.1.bb27
-rw-r--r--meta/recipes-core/util-linux/util-linux-libuuid_2.38.1.bb (renamed from meta/recipes-core/util-linux/util-linux-libuuid_2.38.bb)0
-rw-r--r--meta/recipes-core/util-linux/util-linux.inc3
-rw-r--r--meta/recipes-core/util-linux/util-linux/0001-check-for-sys-pidfd.h.patch53
-rw-r--r--meta/recipes-core/util-linux/util-linux_2.38.1.bb (renamed from meta/recipes-core/util-linux/util-linux_2.38.bb)0
-rw-r--r--meta/recipes-core/zlib/zlib/0001-Fix-a-bug-when-getting-a-gzip-header-extra-field-wit.patch38
-rw-r--r--meta/recipes-core/zlib/zlib/0001-Fix-extra-field-processing-bug-that-dereferences-NUL.patch36
-rw-r--r--meta/recipes-core/zlib/zlib_1.2.12.bb14
-rw-r--r--meta/recipes-devtools/apt/apt/0001-Do-not-init-tables-from-dpkg-configuration.patch4
-rw-r--r--meta/recipes-devtools/apt/apt/0001-Remove-using-std-binary_function.patch87
-rw-r--r--meta/recipes-devtools/apt/apt/0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch4
-rw-r--r--meta/recipes-devtools/apt/apt_2.4.5.bb11
-rw-r--r--meta/recipes-devtools/binutils/binutils-2.38.inc3
-rw-r--r--meta/recipes-devtools/binutils/binutils/0014-CVE-2019-1010204.patch49
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2/0001-Do-not-include-linux-fs.h.patch31
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2_0.14.9.bb1
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools/0001-device-utils.c-Use-linux-mount.h-instead-of-sys-moun.patch32
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools_5.16.2.bb71
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools_5.18.1.bb73
-rw-r--r--meta/recipes-devtools/cargo/cargo-cross-canadian.inc74
-rw-r--r--meta/recipes-devtools/cargo/cargo-cross-canadian_1.60.0.bb6
-rw-r--r--meta/recipes-devtools/cargo/cargo.inc13
-rw-r--r--meta/recipes-devtools/cargo/cargo_1.60.0.bb4
-rw-r--r--meta/recipes-devtools/cargo/cargo_1.62.1.bb5
-rw-r--r--meta/recipes-devtools/ccache/ccache_4.6.1.bb28
-rw-r--r--meta/recipes-devtools/ccache/ccache_4.6.bb26
-rw-r--r--meta/recipes-devtools/ccache/files/0001-xxhash.h-Fix-build-with-gcc-12.patch39
-rw-r--r--meta/recipes-devtools/cmake/cmake-native_3.24.0.bb (renamed from meta/recipes-devtools/cmake/cmake-native_3.23.1.bb)0
-rw-r--r--meta/recipes-devtools/cmake/cmake.inc4
-rw-r--r--meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake1
-rw-r--r--meta/recipes-devtools/cmake/cmake_3.24.0.bb (renamed from meta/recipes-devtools/cmake/cmake_3.23.1.bb)0
-rw-r--r--meta/recipes-devtools/createrepo-c/createrepo-c_0.20.0.bb40
-rw-r--r--meta/recipes-devtools/createrepo-c/createrepo-c_0.20.1.bb40
-rw-r--r--meta/recipes-devtools/dmidecode/dmidecode_3.3.bb24
-rw-r--r--meta/recipes-devtools/dmidecode/dmidecode_3.4.bb24
-rw-r--r--meta/recipes-devtools/dnf/dnf_4.12.0.bb90
-rw-r--r--meta/recipes-devtools/dnf/dnf_4.13.0.bb90
-rw-r--r--meta/recipes-devtools/dpkg/dpkg_1.21.7.bb23
-rw-r--r--meta/recipes-devtools/dpkg/dpkg_1.21.9.bb23
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs_1.46.5.bb5
-rw-r--r--meta/recipes-devtools/elfutils/elfutils_0.187.bb4
-rw-r--r--meta/recipes-devtools/erofs-utils/erofs-utils/0001-fsck-main.c-add-missing-include.patch26
-rw-r--r--meta/recipes-devtools/erofs-utils/erofs-utils_1.4.bb27
-rw-r--r--meta/recipes-devtools/erofs-utils/erofs-utils_1.5.bb26
-rw-r--r--meta/recipes-devtools/expect/expect/0001-Add-prototype-to-function-definitions.patch113
-rw-r--r--meta/recipes-devtools/expect/expect_5.45.4.bb3
-rw-r--r--meta/recipes-devtools/file/file_5.41.bb54
-rw-r--r--meta/recipes-devtools/file/file_5.42.bb54
-rw-r--r--meta/recipes-devtools/flex/flex_2.6.4.bb1
-rw-r--r--meta/recipes-devtools/gcc/gcc-12.1.inc6
-rw-r--r--meta/recipes-devtools/gcc/gcc-common.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross-canadian.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross.inc1
-rw-r--r--meta/recipes-devtools/gcc/gcc-runtime.inc23
-rw-r--r--meta/recipes-devtools/gcc/gcc-source.inc1
-rw-r--r--meta/recipes-devtools/gcc/gcc/0001-libsanitizer-cherry-pick-9cf13067cb5088626ba7-from-u.patch45
-rw-r--r--meta/recipes-devtools/gcc/gcc/0026-rust-recursion-limit.patch92
-rw-r--r--meta/recipes-devtools/gcc/gcc/hardcoded-paths.patch19
-rw-r--r--meta/recipes-devtools/gcc/gcc/prefix-map-realpath.patch63
-rw-r--r--meta/recipes-devtools/gcc/libgcc-common.inc15
-rw-r--r--meta/recipes-devtools/git/git_2.36.0.bb168
-rw-r--r--meta/recipes-devtools/git/git_2.37.1.bb168
-rw-r--r--meta/recipes-devtools/gnu-config/gnu-config_git.bb4
-rw-r--r--meta/recipes-devtools/go/go-1.18.1.inc17
-rw-r--r--meta/recipes-devtools/go/go-1.19.inc19
-rw-r--r--meta/recipes-devtools/go/go-binary-native_1.18.1.bb46
-rw-r--r--meta/recipes-devtools/go/go-binary-native_1.19.bb46
-rw-r--r--meta/recipes-devtools/go/go-cross-canadian_1.19.bb (renamed from meta/recipes-devtools/go/go-cross-canadian_1.18.1.bb)0
-rw-r--r--meta/recipes-devtools/go/go-cross_1.19.bb (renamed from meta/recipes-devtools/go/go-cross_1.18.1.bb)0
-rw-r--r--meta/recipes-devtools/go/go-crosssdk_1.19.bb (renamed from meta/recipes-devtools/go/go-crosssdk_1.18.1.bb)0
-rw-r--r--meta/recipes-devtools/go/go-native_1.19.bb (renamed from meta/recipes-devtools/go/go-native_1.18.1.bb)0
-rw-r--r--meta/recipes-devtools/go/go-runtime_1.19.bb (renamed from meta/recipes-devtools/go/go-runtime_1.18.1.bb)0
-rw-r--r--meta/recipes-devtools/go/go/0001-cmd-go-make-content-based-hash-generation-less-pedan.patch39
-rw-r--r--meta/recipes-devtools/go/go/0003-allow-GOTOOLDIR-to-be-overridden-in-the-environment.patch18
-rw-r--r--meta/recipes-devtools/go/go/0004-ld-add-soname-to-shareable-objects.patch17
-rw-r--r--meta/recipes-devtools/go/go/0005-make.bash-override-CC-when-building-dist-and-go_boot.patch12
-rw-r--r--meta/recipes-devtools/go/go/0006-cmd-dist-separate-host-and-target-builds.patch41
-rw-r--r--meta/recipes-devtools/go/go/filter-build-paths.patch57
-rw-r--r--meta/recipes-devtools/go/go/stack-protector.patch32
-rw-r--r--meta/recipes-devtools/go/go_1.19.bb (renamed from meta/recipes-devtools/go/go_1.18.1.bb)0
-rw-r--r--meta/recipes-devtools/json-c/json-c/0001-Fix-build-with-clang-15.patch34
-rw-r--r--meta/recipes-devtools/json-c/json-c/run-ptest20
-rw-r--r--meta/recipes-devtools/json-c/json-c_0.16.bb18
-rw-r--r--meta/recipes-devtools/librepo/librepo_1.14.2.bb29
-rw-r--r--meta/recipes-devtools/librepo/librepo_1.14.3.bb29
-rw-r--r--meta/recipes-devtools/llvm/llvm_git.bb8
-rw-r--r--meta/recipes-devtools/log4cplus/log4cplus_2.0.7.bb19
-rw-r--r--meta/recipes-devtools/log4cplus/log4cplus_2.0.8.bb19
-rw-r--r--meta/recipes-devtools/lua/lua/CVE-2022-33099.patch61
-rw-r--r--meta/recipes-devtools/lua/lua/lua.pc.in5
-rw-r--r--meta/recipes-devtools/lua/lua_5.4.4.bb3
-rw-r--r--meta/recipes-devtools/makedevs/makedevs/COPYING.patch346
-rw-r--r--meta/recipes-devtools/makedevs/makedevs/makedevs.c4
-rw-r--r--meta/recipes-devtools/makedevs/makedevs_1.0.1.bb5
-rwxr-xr-xmeta/recipes-devtools/meson/meson/meson-wrapper2
-rw-r--r--meta/recipes-devtools/meson/meson_0.62.1.bb134
-rw-r--r--meta/recipes-devtools/meson/meson_0.63.0.bb158
-rw-r--r--meta/recipes-devtools/mmc/mmc-utils_git.bb2
-rw-r--r--meta/recipes-devtools/mtd/mtd-utils/0001-tests-Remove-unused-linux-fs.h-header-from-includes.patch31
-rw-r--r--meta/recipes-devtools/mtd/mtd-utils_git.bb1
-rw-r--r--meta/recipes-devtools/mtools/mtools/disable-hardcoded-configs.patch6
-rw-r--r--meta/recipes-devtools/mtools/mtools_4.0.39.bb49
-rw-r--r--meta/recipes-devtools/mtools/mtools_4.0.40.bb49
-rw-r--r--meta/recipes-devtools/ninja/ninja_1.10.2.bb31
-rw-r--r--meta/recipes-devtools/ninja/ninja_1.11.0.bb31
-rw-r--r--meta/recipes-devtools/opkg/opkg_0.5.0.bb72
-rw-r--r--meta/recipes-devtools/opkg/opkg_0.6.0.bb72
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/handle-read-only-files.patch65
-rw-r--r--meta/recipes-devtools/patchelf/patchelf_0.14.5.bb18
-rw-r--r--meta/recipes-devtools/patchelf/patchelf_0.15.0.bb17
-rw-r--r--meta/recipes-devtools/perl-cross/files/0001-Makefile-correctly-list-modules-when-cleaning-them.patch24
-rw-r--r--meta/recipes-devtools/perl-cross/files/0001-Makefile-do-not-clean-config.h-xconfig.h.patch28
-rw-r--r--meta/recipes-devtools/perl-cross/files/0001-configure_func.sh-Add-_GNU_SOURCE-define-and-functio.patch485
-rw-r--r--meta/recipes-devtools/perl-cross/perlcross_1.3.7.bb38
-rw-r--r--meta/recipes-devtools/perl-cross/perlcross_1.4.bb41
-rw-r--r--meta/recipes-devtools/perl/files/0001-Fix-build-with-gcc-12.patch143
-rw-r--r--meta/recipes-devtools/perl/files/perl-configpm-switch.patch2
-rw-r--r--meta/recipes-devtools/perl/files/perl-dynloader.patch2
-rw-r--r--meta/recipes-devtools/perl/files/perl-rdepends.txt49
-rw-r--r--meta/recipes-devtools/perl/libmodule-build-perl_0.4231.bb1
-rw-r--r--meta/recipes-devtools/perl/perl-ptest.inc4
-rw-r--r--meta/recipes-devtools/perl/perl_5.34.1.bb413
-rw-r--r--meta/recipes-devtools/perl/perl_5.36.0.bb412
-rw-r--r--meta/recipes-devtools/python/files/0001-conditionally-do-not-fetch-code-by-easy_install.patch32
-rw-r--r--meta/recipes-devtools/python/python-cython.inc2
-rw-r--r--meta/recipes-devtools/python/python-pyasn1.inc1
-rw-r--r--meta/recipes-devtools/python/python3-atomicwrites_1.4.0.bb25
-rw-r--r--meta/recipes-devtools/python/python3-atomicwrites_1.4.1.bb24
-rw-r--r--meta/recipes-devtools/python/python3-attrs_21.4.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-attrs_22.1.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-babel_2.10.1.bb26
-rw-r--r--meta/recipes-devtools/python/python3-babel_2.10.3.bb26
-rw-r--r--meta/recipes-devtools/python/python3-bcrypt_3.2.0.bb31
-rw-r--r--meta/recipes-devtools/python/python3-bcrypt_3.2.2.bb30
-rw-r--r--meta/recipes-devtools/python/python3-certifi_2021.10.8.bb14
-rw-r--r--meta/recipes-devtools/python/python3-certifi_2022.6.15.bb14
-rw-r--r--meta/recipes-devtools/python/python3-cffi_1.15.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-cffi_1.15.1.bb18
-rw-r--r--meta/recipes-devtools/python/python3-chardet_4.0.0.bb24
-rw-r--r--meta/recipes-devtools/python/python3-chardet_5.0.0.bb24
-rw-r--r--meta/recipes-devtools/python/python3-cryptography-vectors_37.0.1.bb29
-rw-r--r--meta/recipes-devtools/python/python3-cryptography-vectors_37.0.4.bb29
-rw-r--r--meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch40
-rw-r--r--meta/recipes-devtools/python/python3-cryptography_37.0.1.bb118
-rw-r--r--meta/recipes-devtools/python/python3-cryptography_37.0.4.bb115
-rw-r--r--meta/recipes-devtools/python/python3-cython_0.29.28.bb37
-rw-r--r--meta/recipes-devtools/python/python3-cython_0.29.32.bb39
-rw-r--r--meta/recipes-devtools/python/python3-dbusmock_0.27.5.bb18
-rw-r--r--meta/recipes-devtools/python/python3-dbusmock_0.28.4.bb18
-rw-r--r--meta/recipes-devtools/python/python3-docutils_0.18.1.bb11
-rw-r--r--meta/recipes-devtools/python/python3-docutils_0.19.bb11
-rw-r--r--meta/recipes-devtools/python/python3-dtschema_2022.4.bb15
-rw-r--r--meta/recipes-devtools/python/python3-dtschema_2022.8.bb15
-rw-r--r--meta/recipes-devtools/python/python3-editables_0.3.bb11
-rw-r--r--meta/recipes-devtools/python/python3-flit-core_3.7.1.bb6
-rw-r--r--meta/recipes-devtools/python/python3-hatch-vcs_0.2.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-hatchling_1.6.0.bb17
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis_6.46.0.bb38
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis_6.54.1.bb38
-rw-r--r--meta/recipes-devtools/python/python3-imagesize_1.3.0.bb13
-rw-r--r--meta/recipes-devtools/python/python3-imagesize_1.4.1.bb13
-rw-r--r--meta/recipes-devtools/python/python3-importlib-metadata_4.11.3.bb20
-rw-r--r--meta/recipes-devtools/python/python3-importlib-metadata_4.12.0.bb20
-rw-r--r--meta/recipes-devtools/python/python3-installer_0.5.1.bb14
-rw-r--r--meta/recipes-devtools/python/python3-jsonschema_4.4.0.bb48
-rw-r--r--meta/recipes-devtools/python/python3-jsonschema_4.9.1.bb48
-rw-r--r--meta/recipes-devtools/python/python3-magic_0.4.25.bb22
-rw-r--r--meta/recipes-devtools/python/python3-magic_0.4.27.bb22
-rw-r--r--meta/recipes-devtools/python/python3-mako_1.2.0.bb20
-rw-r--r--meta/recipes-devtools/python/python3-mako_1.2.1.bb20
-rw-r--r--meta/recipes-devtools/python/python3-markdown_3.3.6.bb13
-rw-r--r--meta/recipes-devtools/python/python3-markdown_3.4.1.bb13
-rw-r--r--meta/recipes-devtools/python/python3-more-itertools_8.12.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-more-itertools_8.13.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-numpy/0001-generate_umath.py-do-not-write-full-path-to-output-f.patch28
-rw-r--r--meta/recipes-devtools/python/python3-numpy_1.22.3.bb72
-rw-r--r--meta/recipes-devtools/python/python3-numpy_1.23.1.bb73
-rw-r--r--meta/recipes-devtools/python/python3-pathspec_0.9.0.bb11
-rw-r--r--meta/recipes-devtools/python/python3-pbr_5.8.1.bb4
-rw-r--r--meta/recipes-devtools/python/python3-pbr_5.9.0.bb4
-rw-r--r--meta/recipes-devtools/python/python3-picobuild_0.2.bb23
-rw-r--r--meta/recipes-devtools/python/python3-pip/0001-change-shebang-to-python3.patch115
-rw-r--r--meta/recipes-devtools/python/python3-pip/reproducible.patch71
-rw-r--r--meta/recipes-devtools/python/python3-pip_22.0.4.bb64
-rw-r--r--meta/recipes-devtools/python/python3-pip_22.2.2.bb63
-rw-r--r--meta/recipes-devtools/python/python3-pluggy_1.0.0.bb2
-rw-r--r--meta/recipes-devtools/python/python3-psutil/0001-fix-failure-test-cases.patch197
-rw-r--r--meta/recipes-devtools/python/python3-psutil_5.9.0.bb29
-rw-r--r--meta/recipes-devtools/python/python3-psutil_5.9.1.bb27
-rw-r--r--meta/recipes-devtools/python/python3-pyasn1/0001-Fix-DeprecationWarning-__int__-returned-non-int-on-P.patch28
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodome_3.14.1.bb5
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodome_3.15.0.bb5
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodomex_3.14.1.bb9
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodomex_3.15.0.bb9
-rw-r--r--meta/recipes-devtools/python/python3-pygobject_3.42.1.bb34
-rw-r--r--meta/recipes-devtools/python/python3-pygobject_3.42.2.bb34
-rw-r--r--meta/recipes-devtools/python/python3-pyparsing_3.0.8.bb30
-rw-r--r--meta/recipes-devtools/python/python3-pyparsing_3.0.9.bb30
-rw-r--r--meta/recipes-devtools/python/python3-pytest-subtests_0.7.0.bb20
-rw-r--r--meta/recipes-devtools/python/python3-pytest-subtests_0.8.0.bb20
-rw-r--r--meta/recipes-devtools/python/python3-requests_2.27.1.bb25
-rw-r--r--meta/recipes-devtools/python/python3-requests_2.28.1.bb25
-rw-r--r--meta/recipes-devtools/python/python3-scons_4.3.0.bb36
-rw-r--r--meta/recipes-devtools/python/python3-scons_4.4.0.bb36
-rw-r--r--meta/recipes-devtools/python/python3-semantic-version_2.10.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-semantic-version_2.9.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-rust-native_1.1.2.bb26
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-rust/8e9892f08b1248dc03862da86915c2745e0ff7ec.patch221
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-rust_1.4.1.bb29
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-scm_6.4.2.bb26
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-scm_7.0.5.bb30
-rw-r--r--meta/recipes-devtools/python/python3-setuptools/0001-_distutils-sysconfig-append-STAGING_LIBDIR-python-sy.patch35
-rw-r--r--meta/recipes-devtools/python/python3-setuptools/0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch59
-rw-r--r--meta/recipes-devtools/python/python3-setuptools/0001-conditionally-do-not-fetch-code-by-easy_install.patch32
-rw-r--r--meta/recipes-devtools/python/python3-setuptools_59.5.0.bb57
-rw-r--r--meta/recipes-devtools/python/python3-setuptools_63.4.1.bb55
-rw-r--r--meta/recipes-devtools/python/python3-sphinx_4.5.0.bb28
-rw-r--r--meta/recipes-devtools/python/python3-sphinx_5.1.1.bb28
-rw-r--r--meta/recipes-devtools/python/python3-typing-extensions_4.2.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-typing-extensions_4.3.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-urllib3_1.26.11.bb23
-rw-r--r--meta/recipes-devtools/python/python3-urllib3_1.26.9.bb22
-rw-r--r--meta/recipes-devtools/python/python3-webcolors_1.11.1.bb28
-rw-r--r--meta/recipes-devtools/python/python3-webcolors_1.12.bb27
-rw-r--r--meta/recipes-devtools/python/python3-zipp_3.8.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-zipp_3.8.1.bb18
-rw-r--r--meta/recipes-devtools/python/python3/0001-Avoid-shebang-overflow-on-python-config.py.patch33
-rw-r--r--meta/recipes-devtools/python/python3/0001-Don-t-search-system-for-headers-libraries.patch6
-rw-r--r--meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch2
-rw-r--r--meta/recipes-devtools/python/python3/0001-Mitigate-the-race-condition-in-testSockName.patch47
-rw-r--r--meta/recipes-devtools/python/python3/0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch2
-rw-r--r--meta/recipes-devtools/python/python3/0017-setup.py-do-not-report-missing-dependencies-for-disa.patch6
-rw-r--r--meta/recipes-devtools/python/python3/12-distutils-prefix-is-inside-staging-area.patch2
-rw-r--r--meta/recipes-devtools/python/python3/deterministic_imports.patch32
-rw-r--r--meta/recipes-devtools/python/python3/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3_3.10.4.bb417
-rw-r--r--meta/recipes-devtools/python/python3_3.10.6.bb429
-rw-r--r--meta/recipes-devtools/qemu/qemu-system-native_7.0.0.bb2
-rw-r--r--meta/recipes-devtools/qemu/qemu.inc16
-rw-r--r--meta/recipes-devtools/qemu/qemu/CVE-2022-35414.patch53
-rw-r--r--meta/recipes-devtools/qemu/qemu/qemu-7.0.0-glibc-2.36.patch46
-rw-r--r--meta/recipes-devtools/qemu/qemu_7.0.0.bb4
-rw-r--r--meta/recipes-devtools/repo/repo_2.24.1.bb31
-rw-r--r--meta/recipes-devtools/repo/repo_2.28.bb31
-rw-r--r--meta/recipes-devtools/rpm/files/0001-CVE-2021-3521.patch57
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch14
-rw-r--r--meta/recipes-devtools/rpm/files/0001-configure.ac-add-linux-gnux32-variant-to-triplet-han.patch31
-rw-r--r--meta/recipes-devtools/rpm/files/0002-CVE-2021-3521.patch64
-rw-r--r--meta/recipes-devtools/rpm/files/0003-CVE-2021-3521.patch329
-rw-r--r--meta/recipes-devtools/rpm/rpm_4.17.0.bb208
-rw-r--r--meta/recipes-devtools/rpm/rpm_4.17.1.bb206
-rw-r--r--meta/recipes-devtools/rsync/rsync_3.2.4.bb70
-rw-r--r--meta/recipes-devtools/rsync/rsync_3.2.5.bb69
-rw-r--r--meta/recipes-devtools/ruby/ruby_3.1.2.bb1
-rw-r--r--meta/recipes-devtools/rust/README-rust.md26
-rw-r--r--meta/recipes-devtools/rust/libstd-rs.inc4
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0001-Add-400-series-syscalls-to-musl-riscv64-definitions.patch44
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0001-Update-checksums-for-modified-vendored-libc.patch18
-rw-r--r--meta/recipes-devtools/rust/libstd-rs_1.60.0.bb12
-rw-r--r--meta/recipes-devtools/rust/libstd-rs_1.62.1.bb7
-rw-r--r--meta/recipes-devtools/rust/rust-common.inc371
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian-common.inc55
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian.inc97
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian_1.60.0.bb6
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian_1.62.1.bb2
-rw-r--r--meta/recipes-devtools/rust/rust-cross.inc66
-rw-r--r--meta/recipes-devtools/rust/rust-cross_1.60.0.bb8
-rw-r--r--meta/recipes-devtools/rust/rust-crosssdk_1.60.0.bb8
-rw-r--r--meta/recipes-devtools/rust/rust-llvm.inc9
-rw-r--r--meta/recipes-devtools/rust/rust-llvm_1.62.1.bb (renamed from meta/recipes-devtools/rust/rust-llvm_1.60.0.bb)0
-rw-r--r--meta/recipes-devtools/rust/rust-snapshot.inc22
-rw-r--r--meta/recipes-devtools/rust/rust-source.inc5
-rw-r--r--meta/recipes-devtools/rust/rust-target.inc2
-rw-r--r--meta/recipes-devtools/rust/rust-tools-cross-canadian.inc38
-rw-r--r--meta/recipes-devtools/rust/rust-tools-cross-canadian_1.60.0.bb6
-rw-r--r--meta/recipes-devtools/rust/rust.inc84
-rw-r--r--meta/recipes-devtools/rust/rust/hardcodepaths.patch70
-rw-r--r--meta/recipes-devtools/rust/rust_1.60.0.bb21
-rw-r--r--meta/recipes-devtools/rust/rust_1.62.1.bb82
-rw-r--r--meta/recipes-devtools/squashfs-tools/squashfs-tools_git.bb2
-rw-r--r--meta/recipes-devtools/strace/strace/0001-landlock-update-expected-string.patch67
-rw-r--r--meta/recipes-devtools/strace/strace/skip-load.patch40
-rw-r--r--meta/recipes-devtools/strace/strace_5.17.bb55
-rw-r--r--meta/recipes-devtools/strace/strace_5.18.bb57
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0001-install-don-t-install-obsolete-file-com32.ld.patch32
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0001-linux-syslinux-support-ext2-3-4-device.patch15
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0002-linux-syslinux-implement-open_ext2_fs.patch21
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0003-linux-syslinux-implement-install_to_ext2.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0004-linux-syslinux-add-ext_file_read-and-ext_file_write.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0005-linux-syslinux-implement-handle_adv_on_ext.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0006-linux-syslinux-implement-write_to_ext-and-add-syslin.patch21
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0007-linux-syslinux-implement-ext_construct_sectmap_fs.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch11
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0009-linux-syslinux-implement-install_bootblock.patch11
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0010-Workaround-multiple-definition-of-symbol-errors.patch17
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0011-install-don-t-install-obsolete-file-com32.ld.patch29
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0012-libinstaller-Fix-build-with-glibc-2.36.patch56
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0013-remove-clean-script.patch27
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0014-Fix-reproducibility-issues.patch32
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/determinism.patch22
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/syslinux-remove-clean-script.patch17
-rw-r--r--meta/recipes-devtools/syslinux/syslinux_6.04-pre2.bb9
-rw-r--r--meta/recipes-devtools/vala/vala.inc6
-rw-r--r--meta/recipes-devtools/vala/vala_0.56.1.bb3
-rw-r--r--meta/recipes-devtools/vala/vala_0.56.2.bb3
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch2
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/avoid-neon-for-targets-which-don-t-support-it.patch2
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/remove-for-all1
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch2
-rw-r--r--meta/recipes-devtools/valgrind/valgrind_3.19.0.bb6
-rw-r--r--meta/recipes-extended/asciidoc/asciidoc_10.1.4.bb28
-rw-r--r--meta/recipes-extended/asciidoc/asciidoc_10.2.0.bb28
-rw-r--r--meta/recipes-extended/at/at_3.2.5.bb2
-rw-r--r--meta/recipes-extended/bash/bash/makerace2.patch2
-rw-r--r--meta/recipes-extended/cracklib/cracklib/0001-rules-Drop-using-register-keyword.patch278
-rw-r--r--meta/recipes-extended/cracklib/cracklib/0002-rules-Correct-parameter-types-to-Debug-calls.patch40
-rw-r--r--meta/recipes-extended/cracklib/cracklib_2.9.7.bb5
-rw-r--r--meta/recipes-extended/cups/cups.inc2
-rw-r--r--meta/recipes-extended/cups/cups_2.4.1.bb5
-rw-r--r--meta/recipes-extended/cups/cups_2.4.2.bb5
-rw-r--r--meta/recipes-extended/ethtool/ethtool/avoid_parallel_tests.patch6
-rw-r--r--meta/recipes-extended/ethtool/ethtool_5.17.bb37
-rw-r--r--meta/recipes-extended/ethtool/ethtool_5.18.bb37
-rw-r--r--meta/recipes-extended/gperf/gperf/1862c6e57a308a05889c80c048dbc58bdc378dcb.patch181
-rw-r--r--meta/recipes-extended/gperf/gperf_3.1.bb2
-rw-r--r--meta/recipes-extended/hdparm/hdparm/0001-hdparm-Remove-unused-linux-fs.h-header.patch32
-rw-r--r--meta/recipes-extended/hdparm/hdparm_9.63.bb43
-rw-r--r--meta/recipes-extended/hdparm/hdparm_9.64.bb44
-rw-r--r--meta/recipes-extended/iptables/iptables/0001-iptables-xshared.h-add-missing-sys.types.h-include.patch30
-rw-r--r--meta/recipes-extended/iptables/iptables/format-security.patch30
-rw-r--r--meta/recipes-extended/iptables/iptables_1.8.7.bb116
-rw-r--r--meta/recipes-extended/iptables/iptables_1.8.8.bb123
-rw-r--r--meta/recipes-extended/less/less_600.bb1
-rw-r--r--meta/recipes-extended/libarchive/libarchive/0001-libarchive-Do-not-include-sys-mount.h-when-linux-fs..patch47
-rw-r--r--meta/recipes-extended/libarchive/libarchive_3.6.1.bb6
-rw-r--r--meta/recipes-extended/libidn/libidn2_2.3.2.bb32
-rw-r--r--meta/recipes-extended/libidn/libidn2_2.3.3.bb33
-rw-r--r--meta/recipes-extended/libmnl/libmnl_1.0.5.bb4
-rw-r--r--meta/recipes-extended/libtirpc/libtirpc_1.3.2.bb25
-rw-r--r--meta/recipes-extended/libtirpc/libtirpc_1.3.3.bb25
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd_1.4.64.bb79
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd_1.4.65.bb79
-rw-r--r--meta/recipes-extended/logrotate/logrotate_3.19.0.bb91
-rw-r--r--meta/recipes-extended/logrotate/logrotate_3.20.1.bb91
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-kvm-Fix-stack-access-mode-in-KVM-test-ELF-headers.patch40
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-lapi-fsmount-resolve-conflict-in-different-header-fi.patch71
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-lapi-pidfd-adding-pidfd-header-file.patch60
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-memcg-functional-Fix-usage-of-PAGESIZE-from-memcg_li.patch121
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-metadata-parse.sh-sort-filelist-for-reproducibility.patch28
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-mountns0-1-3-wait-for-umount-completed-in-thread_b.patch63
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-net_stress-Fix-usage-of-variables-from-tst_net.sh.patch154
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-netstress-Restore-runtime-to-5m.patch53
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-nfs05_make_tree-Restore-5-min-timeout.patch31
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-syscalls-migrate_pages03-restore-runtime-to-5m.patch44
-rw-r--r--meta/recipes-extended/ltp/ltp/0002-kvm-use-LD-instead-of-hardcoding-ld.patch39
-rw-r--r--meta/recipes-extended/ltp/ltp/0003-Add-KVM_LD-Makefile-variable-for-building-KVM-payloa.patch63
-rw-r--r--meta/recipes-extended/ltp/ltp_20220121.bb138
-rw-r--r--meta/recipes-extended/ltp/ltp_20220527.bb165
-rw-r--r--meta/recipes-extended/man-pages/man-pages_5.13.bb7
-rw-r--r--meta/recipes-extended/msmtp/msmtp_1.8.20.bb27
-rw-r--r--meta/recipes-extended/msmtp/msmtp_1.8.22.bb27
-rw-r--r--meta/recipes-extended/pam/libpam/99_pam2
-rw-r--r--meta/recipes-extended/parted/files/run-ptest6
-rw-r--r--meta/recipes-extended/procps/procps_3.3.17.bb6
-rw-r--r--meta/recipes-extended/psmisc/psmisc_23.4.bb9
-rw-r--r--meta/recipes-extended/psmisc/psmisc_23.5.bb9
-rw-r--r--meta/recipes-extended/screen/screen/0001-configure-Add-needed-system-headers-in-checks.patch151
-rw-r--r--meta/recipes-extended/screen/screen_4.9.0.bb3
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng/0001-Makefile-quote-CC.patch41
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng/0001-core-helper-remove-include-of-sys-mount.h.patch34
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng/0002-core-shim-remove-include-of-sys-mount.h.patch34
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng_0.14.00.bb31
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng_0.14.03.bb32
-rw-r--r--meta/recipes-extended/sudo/sudo_1.9.10.bb62
-rw-r--r--meta/recipes-extended/sudo/sudo_1.9.11p3.bb62
-rw-r--r--meta/recipes-extended/sysklogd/sysklogd_2.3.0.bb56
-rw-r--r--meta/recipes-extended/sysklogd/sysklogd_2.4.2.bb56
-rw-r--r--meta/recipes-extended/sysstat/sysstat_12.4.5.bb7
-rw-r--r--meta/recipes-extended/sysstat/sysstat_12.6.0.bb7
-rw-r--r--meta/recipes-extended/tcp-wrappers/tcp-wrappers-7.6/0001-Fix-implicit-function-declaration-warnings.patch109
-rw-r--r--meta/recipes-extended/tcp-wrappers/tcp-wrappers_7.6.bb1
-rw-r--r--meta/recipes-extended/time/time/0001-include-string.h-for-memset.patch27
-rw-r--r--meta/recipes-extended/time/time_1.9.bb4
-rw-r--r--meta/recipes-extended/unzip/unzip/0001-configure-Add-correct-system-headers-and-prototypes-.patch112
-rw-r--r--meta/recipes-extended/unzip/unzip/0001-configure-Pass-LDFLAGS-to-tests-doing-link-step.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch39
-rw-r--r--meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch33
-rw-r--r--meta/recipes-extended/unzip/unzip/avoid-strip.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/define-ldflags.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/fix-security-format.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/symlink.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip_6.0.bb6
-rw-r--r--meta/recipes-extended/watchdog/watchdog-config.bb7
-rw-r--r--meta/recipes-extended/zip/zip-3.0/0001-configure-Specify-correct-function-signatures-and-de.patch134
-rw-r--r--meta/recipes-extended/zip/zip-3.0/0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch2
-rw-r--r--meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch35
-rw-r--r--meta/recipes-extended/zip/zip-3.0/10-remove-build-date.patch2
-rw-r--r--meta/recipes-extended/zip/zip-3.0/fix-security-format.patch2
-rw-r--r--meta/recipes-extended/zip/zip-3.0/zipnote-crashes-with-segfault.patch2
-rw-r--r--meta/recipes-extended/zip/zip_3.0.bb5
-rw-r--r--meta/recipes-extended/zstd/zstd_1.5.2.bb2
-rw-r--r--meta/recipes-gnome/epiphany/epiphany_42.2.bb43
-rw-r--r--meta/recipes-gnome/epiphany/epiphany_42.3.bb43
-rw-r--r--meta/recipes-gnome/gcr/gcr_3.40.0.bb6
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Don-t-use-AC_CANONICAL_HOST.patch6
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Run-installation-commands-as-shell-jobs.patch84
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme_41.0.bb43
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme_42.0.bb40
-rw-r--r--meta/recipes-gnome/gobject-introspection/gobject-introspection_1.72.0.bb3
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3_3.24.33.bb17
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3_3.24.34.bb17
-rw-r--r--meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb3
-rw-r--r--meta/recipes-gnome/libhandy/libhandy_1.6.2.bb27
-rw-r--r--meta/recipes-gnome/libhandy/libhandy_1.6.3.bb27
-rw-r--r--meta/recipes-gnome/libnotify/libnotify_0.7.11.bb37
-rw-r--r--meta/recipes-gnome/libnotify/libnotify_0.8.1.bb37
-rw-r--r--meta/recipes-gnome/librsvg/librsvg_2.54.1.bb75
-rw-r--r--meta/recipes-gnome/librsvg/librsvg_2.54.4.bb75
-rw-r--r--meta/recipes-graphics/drm/libdrm_2.4.110.bb60
-rw-r--r--meta/recipes-graphics/drm/libdrm_2.4.112.bb59
-rw-r--r--meta/recipes-graphics/glslang/glslang_1.3.211.0.bb32
-rw-r--r--meta/recipes-graphics/glslang/glslang_1.3.216.0.bb32
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz/0001-fix-signedness-of-char-in-tests.patch27
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz_4.2.1.bb48
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz_5.1.0.bb50
-rw-r--r--meta/recipes-graphics/kmscube/kmscube/0001-drm-common.c-do-not-use-invalid-modifier.patch27
-rw-r--r--meta/recipes-graphics/kmscube/kmscube_git.bb6
-rw-r--r--meta/recipes-graphics/libsdl2/libsdl2_2.0.22.bb3
-rw-r--r--meta/recipes-graphics/libva/libva-initial_2.15.0.bb (renamed from meta/recipes-graphics/libva/libva-initial_2.14.0.bb)0
-rw-r--r--meta/recipes-graphics/libva/libva-utils_2.14.0.bb32
-rw-r--r--meta/recipes-graphics/libva/libva-utils_2.15.0.bb32
-rw-r--r--meta/recipes-graphics/libva/libva.inc2
-rw-r--r--meta/recipes-graphics/libva/libva_2.15.0.bb (renamed from meta/recipes-graphics/libva/libva_2.14.0.bb)0
-rw-r--r--meta/recipes-graphics/mesa/files/0001-Revert-egl-wayland-deprecate-drm_handle_format-and-d.patch158
-rw-r--r--meta/recipes-graphics/mesa/files/0001-nir-nir_opt_move-fix-ALWAYS_INLINE-compiler-error.patch65
-rw-r--r--meta/recipes-graphics/mesa/files/0001-swrast_kms-use-swkmsDRI2Extension-instead-of-driDRI2.patch113
-rw-r--r--meta/recipes-graphics/mesa/libglu_9.0.2.bb2
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch382
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0007-Install-few-more-test-programs.patch43
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0008-glsl-perf-Add-few-missing-.glsl-.vert-.frag-files-to.patch99
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0009-glsl-perf-Install-.glsl-.vert-.frag-files.patch71
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0012-mesa-demos-OpenVG-demos-with-single-frame-need-eglSw.patch44
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos_8.4.0.bb57
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos_8.5.0.bb43
-rw-r--r--meta/recipes-graphics/mesa/mesa-gl_22.1.5.bb (renamed from meta/recipes-graphics/mesa/mesa-gl_22.0.2.bb)0
-rw-r--r--meta/recipes-graphics/mesa/mesa.inc10
-rw-r--r--meta/recipes-graphics/mesa/mesa_22.1.5.bb (renamed from meta/recipes-graphics/mesa/mesa_22.0.2.bb)0
-rw-r--r--meta/recipes-graphics/pango/pango_1.50.7.bb52
-rw-r--r--meta/recipes-graphics/pango/pango_1.50.8.bb54
-rw-r--r--meta/recipes-graphics/piglit/piglit_git.bb2
-rw-r--r--meta/recipes-graphics/shaderc/shaderc_2022.1.bb29
-rw-r--r--meta/recipes-graphics/shaderc/shaderc_2022.2.bb29
-rw-r--r--meta/recipes-graphics/spir/spirv-headers_1.3.211.0.bb18
-rw-r--r--meta/recipes-graphics/spir/spirv-headers_1.3.216.0.bb18
-rw-r--r--meta/recipes-graphics/spir/spirv-tools/0001-Remove-default-copy-constructor-in-header.-4879.patch34
-rw-r--r--meta/recipes-graphics/spir/spirv-tools_1.3.211.0.bb41
-rw-r--r--meta/recipes-graphics/spir/spirv-tools_1.3.216.0.bb43
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-headers_1.3.211.0.bb22
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-headers_1.3.216.0.bb22
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-loader_1.3.211.0.bb40
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-loader_1.3.216.0.bb40
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/0001-Qualify-move-as-std-move.patch405
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/0001-framework-core-hpp_vulkan_resource.h-add-header-incl.patch27
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples_git.bb4
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-tools_1.3.211.0.bb32
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-tools_1.3.216.0.bb32
-rw-r--r--meta/recipes-graphics/waffle/waffle/0001-meson.build-request-native-wayland-scanner.patch27
-rw-r--r--meta/recipes-graphics/waffle/waffle_1.7.0.bb1
-rw-r--r--meta/recipes-graphics/wayland/wayland-protocols_1.25.bb26
-rw-r--r--meta/recipes-graphics/wayland/wayland-protocols_1.26.bb26
-rw-r--r--meta/recipes-graphics/wayland/wayland/0002-Do-not-hardcode-the-path-to-wayland-scanner.patch27
-rw-r--r--meta/recipes-graphics/wayland/wayland_1.20.0.bb61
-rw-r--r--meta/recipes-graphics/wayland/wayland_1.21.0.bb61
-rw-r--r--meta/recipes-graphics/wayland/weston-init.bb4
-rw-r--r--meta/recipes-graphics/wayland/weston/dont-use-plane-add-prop.patch32
-rw-r--r--meta/recipes-graphics/wayland/weston/xwayland.weston-start3
-rw-r--r--meta/recipes-graphics/wayland/weston_10.0.0.bb144
-rw-r--r--meta/recipes-graphics/wayland/weston_10.0.2.bb143
-rw-r--r--meta/recipes-graphics/xorg-app/xev_1.2.4.bb17
-rw-r--r--meta/recipes-graphics/xorg-app/xev_1.2.5.bb19
-rw-r--r--meta/recipes-graphics/xorg-app/xmodmap_1.0.10.bb16
-rw-r--r--meta/recipes-graphics/xorg-app/xmodmap_1.0.11.bb17
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-keyboard_1.9.0.bb13
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics/64bit_time_t_support.patch51
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.1.bb18
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.2.bb17
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.3.bb13
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.6.0.bb14
-rw-r--r--meta/recipes-graphics/xorg-font/encodings/nocompiler.patch8
-rw-r--r--meta/recipes-graphics/xorg-font/encodings_1.0.5.bb25
-rw-r--r--meta/recipes-graphics/xorg-font/encodings_1.0.6.bb25
-rw-r--r--meta/recipes-graphics/xorg-font/font-util_1.3.2.bb22
-rw-r--r--meta/recipes-graphics/xorg-font/font-util_1.3.3.bb23
-rw-r--r--meta/recipes-graphics/xorg-font/xorg-font-common.inc3
-rw-r--r--meta/recipes-graphics/xorg-lib/libpthread-stubs_0.4.bb2
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11_1.8.1.bb48
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11_1.8.bb48
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb/disable-check.patch25
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb_1.14.bb39
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb_1.15.bb37
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcvt_0.1.1.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcvt_0.1.2.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbcommon_1.4.0.bb36
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbcommon_1.4.1.bb36
-rw-r--r--meta/recipes-graphics/xorg-lib/xkeyboard-config_2.35.1.bb30
-rw-r--r--meta/recipes-graphics/xorg-lib/xkeyboard-config_2.36.bb30
-rw-r--r--meta/recipes-graphics/xorg-lib/xtrans_1.4.0.bb2
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto/0001-xcb-proto.pc.in-reinstate-libdir.patch29
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto_1.14.1.bb28
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto_1.15.2.bb30
-rw-r--r--meta/recipes-graphics/xorg-proto/xorgproto_2022.1.bb2
-rw-r--r--meta/recipes-graphics/xorg-util/util-macros_1.19.3.bb2
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-render-Fix-build-with-gcc-12.patch90
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.3.bb29
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.4.bb28
-rw-r--r--meta/recipes-graphics/xwayland/xwayland_22.1.1.bb45
-rw-r--r--meta/recipes-graphics/xwayland/xwayland_22.1.3.bb45
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools_2.0.24.bb86
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools_2.0.25.bb86
-rw-r--r--meta/recipes-kernel/kmod/kmod/0001-depmod-Add-support-for-excluding-a-directory.patch172
-rw-r--r--meta/recipes-kernel/kmod/kmod_29.bb90
-rw-r--r--meta/recipes-kernel/kmod/kmod_30.bb89
-rw-r--r--meta/recipes-kernel/linux-firmware/files/0001-Makefile-replace-mkdir-by-install.patch84
-rw-r--r--meta/recipes-kernel/linux-firmware/linux-firmware_20220509.bb1084
-rw-r--r--meta/recipes-kernel/linux-firmware/linux-firmware_20220708.bb1083
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc2
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.16.bb20
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.19.bb20
-rw-r--r--meta/recipes-kernel/linux/kernel-devsrc.bb28
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-dev.bb4
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.10.bb45
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.15.bb6
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.19.bb45
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.10.bb32
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.15.bb6
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.19.bb30
-rw-r--r--meta/recipes-kernel/linux/linux-yocto.inc2
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.10.bb58
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.15.bb26
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.19.bb70
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-Fix-compaction-migratepages-event-name.patch37
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-fix-compaction.patch68
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-fix-mm-page_alloc-fix-tracepoint-mm_page_alloc_zone_.patch106
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-fix-net-skb-introduce-kfree_skb_reason-v5.15.58.v5.1.patch53
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0002-Fix-tracepoint-event-allow-same-provider-and-event-n.patch48
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0002-fix-fs-Remove-flags-parameter-from-aops-write_begin-.patch76
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0003-fix-sched-tracing-Don-t-re-read-p-state-when-emittin.patch183
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0003-fix-workqueue-Fix-type-of-cpu-in-trace-event-v5.19.patch124
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0004-fix-block-remove-genhd.h-v5.18.patch45
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0005-fix-scsi-block-Remove-REQ_OP_WRITE_SAME-support-v5.1.patch79
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0006-fix-random-remove-unused-tracepoints-v5.18.patch47
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0007-fix-kprobes-Use-rethook-for-kretprobe-if-possible-v5.patch72
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0008-fix-scsi-core-Remove-scsi-scsi_request.h-v5.18.patch44
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0010-fix-mm-compaction-cleanup-the-compaction-trace-event.patch106
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules_2.13.3.bb51
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules_2.13.4.bb47
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust_2.13.2.bb53
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust_2.13.3.bb53
-rw-r--r--meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb2
-rw-r--r--meta/recipes-kernel/perf/perf.bb12
-rwxr-xr-xmeta/recipes-kernel/perf/perf/sort-pmuevents.py5
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-PR28778-gcc-warning-tweak-for-sprintf-precision-para.patch45
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-PR28804-tune-default-stap-s-buffer-size-on-small-RAM.patch84
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-gcc12-c-compatibility-re-tweak-for-rhel6-use-functio.patch49
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.bb6
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.inc6
-rw-r--r--meta/recipes-kernel/wireless-regdb/wireless-regdb_2022.04.08.bb43
-rw-r--r--meta/recipes-kernel/wireless-regdb/wireless-regdb_2022.06.06.bb43
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib_1.2.6.1.bb44
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib_1.2.7.2.bb44
-rw-r--r--meta/recipes-multimedia/alsa/alsa-plugins_1.2.6.bb174
-rw-r--r--meta/recipes-multimedia/alsa/alsa-plugins_1.2.7.1.bb174
-rw-r--r--meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.6.3.bb24
-rw-r--r--meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.7.2.bb24
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils-scripts_1.2.6.bb25
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils.inc108
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils_1.2.6.bb2
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils_1.2.7.bb120
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg/0001-libavutil-include-assembly-with-full-path-from-sourc.patch112
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg_5.0.1.bb176
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg_5.1.bb174
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-devtools_1.20.1.bb52
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-devtools_1.20.3.bb52
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-examples/gst-player.desktop2
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.20.1.bb28
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.20.3.bb28
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.20.1.bb47
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.20.3.bb47
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.20.1.bb166
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.20.3.bb166
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-include-required-system-headers-for-isspace-and-ssca.patch35
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.20.1.bb94
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.20.3.bb95
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.20.1.bb81
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.20.3.bb81
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.20.1.bb46
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.20.3.bb46
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.20.1.bb30
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.20.3.bb30
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.20.1.bb31
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.20.3.bb31
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.20.1.bb53
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.20.3.bb53
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_1.20.1.bb73
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_1.20.3.bb73
-rw-r--r--meta/recipes-multimedia/libtiff/files/0001-fix-the-FPE-in-tiffcrop-415-427-and-428.patch184
-rw-r--r--meta/recipes-multimedia/libtiff/files/CVE-2022-34526.patch32
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0001-tif_jbig.c-fix-crash-when-reading-a-file-with-multip.patch38
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0001-tiffset-fix-global-buffer-overflow-for-ASCII-tags-wh.patch43
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0002-tiffcrop-fix-issue-380-and-382-heap-buffer-overflow-.patch219
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0003-add-checks-for-return-value-of-limitMalloc-392.patch93
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0004-TIFFFetchNormalTag-avoid-calling-memcpy-with-a-null-.patch33
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0005-fix-the-FPE-in-tiffcrop-393.patch36
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0006-fix-heap-buffer-overflow-in-tiffcp-278.patch57
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/561599c99f987dc32ae110370cfdd7df7975586b.patch30
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/eecb0712f4c3a5b449f70c57988260a667ddbdef.patch32
-rw-r--r--meta/recipes-multimedia/libtiff/tiff_4.3.0.bb67
-rw-r--r--meta/recipes-multimedia/libtiff/tiff_4.4.0.bb67
-rw-r--r--meta/recipes-multimedia/mpg123/mpg123_1.29.3.bb52
-rw-r--r--meta/recipes-multimedia/mpg123/mpg123_1.30.1.bb52
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio.inc4
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio/0001-meson-Check-for-__get_cpuid.patch82
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio_15.0.bb11
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio_16.1.bb10
-rw-r--r--meta/recipes-multimedia/sbc/sbc/0001-sbc_primitives-Fix-build-on-non-x86.patch45
-rw-r--r--meta/recipes-multimedia/sbc/sbc_1.5.bb24
-rw-r--r--meta/recipes-multimedia/sbc/sbc_2.0.bb22
-rw-r--r--meta/recipes-multimedia/speex/speex/CVE-2020-23903.patch30
-rw-r--r--meta/recipes-multimedia/speex/speex_1.2.0.bb22
-rw-r--r--meta/recipes-multimedia/speex/speex_1.2.1.bb20
-rw-r--r--meta/recipes-multimedia/speex/speexdsp_1.2.0.bb27
-rw-r--r--meta/recipes-multimedia/speex/speexdsp_1.2.1.bb26
-rw-r--r--meta/recipes-multimedia/webp/libwebp_1.2.2.bb55
-rw-r--r--meta/recipes-multimedia/webp/libwebp_1.2.3.bb55
-rw-r--r--meta/recipes-multimedia/x264/x264_git.bb2
-rw-r--r--meta/recipes-rt/rt-tests/files/0001-Makefile-Allow-for-CC-and-AR-to-be-overridden.patch6
-rw-r--r--meta/recipes-rt/rt-tests/rt-tests.inc4
-rw-r--r--meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb2
-rw-r--r--meta/recipes-sato/puzzles/puzzles_git.bb2
-rw-r--r--meta/recipes-sato/sato-icon-theme/icon-naming-utils_0.8.90.bb29
-rw-r--r--meta/recipes-sato/sato-icon-theme/sato-icon-theme_git.bb29
-rw-r--r--meta/recipes-sato/webkit/libwpe/0001-Fix-build-failure-due-to-libc-using-libc-functions.patch42
-rw-r--r--meta/recipes-sato/webkit/libwpe_1.12.0.bb18
-rw-r--r--meta/recipes-sato/webkit/libwpe_1.12.2.bb20
-rw-r--r--meta/recipes-sato/webkit/webkitgtk_2.36.1.bb167
-rw-r--r--meta/recipes-sato/webkit/webkitgtk_2.36.5.bb167
-rw-r--r--meta/recipes-support/argp-standalone/argp-standalone_1.3.bb2
-rw-r--r--meta/recipes-support/boost/boost/0001-The-std-lib-unary-binary_function-base-classes-are-d.patch34
-rw-r--r--meta/recipes-support/boost/boost_1.79.0.bb1
-rw-r--r--meta/recipes-support/curl/curl/disable-tests28
-rw-r--r--meta/recipes-support/curl/curl/run-ptest6
-rw-r--r--meta/recipes-support/curl/curl_7.83.0.bb93
-rw-r--r--meta/recipes-support/curl/curl_7.84.0.bb117
-rw-r--r--meta/recipes-support/curl/files/0001-easy_lock-switch-to-using-atomic_int-instead-of-bool.patch37
-rw-r--r--meta/recipes-support/curl/files/0001-easy_lock.h-include-sched.h-if-available-to-fix-buil.patch33
-rw-r--r--meta/recipes-support/diffoscope/diffoscope_211.bb30
-rw-r--r--meta/recipes-support/diffoscope/diffoscope_220.bb30
-rw-r--r--meta/recipes-support/dos2unix/dos2unix_7.4.2.bb34
-rw-r--r--meta/recipes-support/dos2unix/dos2unix_7.4.3.bb34
-rw-r--r--meta/recipes-support/gnupg/gnupg_2.3.6.bb87
-rw-r--r--meta/recipes-support/gnupg/gnupg_2.3.7.bb87
-rw-r--r--meta/recipes-support/gnutls/gnutls/0001-Creating-.hmac-file-should-be-excuted-in-target-envi.patch28
-rw-r--r--meta/recipes-support/gnutls/gnutls_3.7.4.bb68
-rw-r--r--meta/recipes-support/gnutls/gnutls_3.7.7.bb90
-rw-r--r--meta/recipes-support/iso-codes/iso-codes_4.11.0.bb22
-rw-r--r--meta/recipes-support/iso-codes/iso-codes_4.9.0.bb22
-rw-r--r--meta/recipes-support/libcap/files/0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch2
-rw-r--r--meta/recipes-support/libcap/libcap_2.64.bb80
-rw-r--r--meta/recipes-support/libcap/libcap_2.65.bb80
-rw-r--r--meta/recipes-support/libcheck/libcheck_0.15.2.bb3
-rw-r--r--meta/recipes-support/libevdev/libevdev_1.12.1.bb17
-rw-r--r--meta/recipes-support/libevdev/libevdev_1.13.0.bb17
-rw-r--r--meta/recipes-support/libffi/libffi_3.4.2.bb2
-rw-r--r--meta/recipes-support/libgit2/libgit2_1.4.3.bb22
-rw-r--r--meta/recipes-support/libgit2/libgit2_1.5.0.bb22
-rw-r--r--meta/recipes-support/libmicrohttpd/libmicrohttpd_0.9.75.bb7
-rw-r--r--meta/recipes-support/libnl/files/fa7f97f8982544c4fcb403893bae6701230d5165.patch48
-rw-r--r--meta/recipes-support/libnl/libnl_3.6.0.bb78
-rw-r--r--meta/recipes-support/libnl/libnl_3.7.0.bb76
-rw-r--r--meta/recipes-support/libproxy/libproxy/determinism.patch26
-rw-r--r--meta/recipes-support/libproxy/libproxy_0.4.17.bb42
-rw-r--r--meta/recipes-support/libproxy/libproxy_0.4.18.bb41
-rw-r--r--meta/recipes-support/libseccomp/libseccomp_2.5.4.bb6
-rw-r--r--meta/recipes-support/libsoup/libsoup_3.0.6.bb44
-rw-r--r--meta/recipes-support/libsoup/libsoup_3.0.7.bb44
-rw-r--r--meta/recipes-support/libxslt/libxslt_1.1.35.bb4
-rw-r--r--meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch10
-rw-r--r--meta/recipes-support/lzop/lzop/acinclude.m4390
-rw-r--r--meta/recipes-support/lzop/lzop_1.04.bb27
-rw-r--r--meta/recipes-support/nettle/nettle_3.7.3.bb57
-rw-r--r--meta/recipes-support/nettle/nettle_3.8.1.bb57
-rw-r--r--meta/recipes-support/nghttp2/nghttp2_1.47.0.bb35
-rw-r--r--meta/recipes-support/nghttp2/nghttp2_1.48.0.bb35
-rw-r--r--meta/recipes-support/popt/popt_1.18.bb2
-rw-r--r--meta/recipes-support/rng-tools/rng-tools/rng-tools.service34
-rw-r--r--meta/recipes-support/rng-tools/rng-tools/rngd.service33
-rw-r--r--meta/recipes-support/rng-tools/rng-tools_6.15.bb12
-rw-r--r--meta/recipes-support/sqlite/sqlite3_3.38.3.bb14
-rw-r--r--meta/recipes-support/sqlite/sqlite3_3.39.2.bb14
-rw-r--r--meta/recipes-support/vim/files/no-path-adjust.patch35
-rw-r--r--meta/recipes-support/vim/files/racefix.patch33
-rw-r--r--meta/recipes-support/vim/vim-tiny_9.0.bb (renamed from meta/recipes-support/vim/vim-tiny_8.2.bb)0
-rw-r--r--meta/recipes-support/vim/vim.inc11
-rw-r--r--meta/recipes-support/vim/vim_8.2.bb19
-rw-r--r--meta/recipes-support/vim/vim_9.0.bb23
-rw-r--r--meta/recipes-support/xxhash/xxhash_0.8.1.bb2
-rwxr-xr-xscripts/autobuilder-worker-prereq-tests2
-rwxr-xr-xscripts/bitbake-prserv-tool2
-rwxr-xr-xscripts/bitbake-whatchanged4
-rwxr-xr-xscripts/combo-layer-hook-default.sh2
-rwxr-xr-xscripts/contrib/ddimage2
-rwxr-xr-xscripts/contrib/dialog-power-control2
-rwxr-xr-xscripts/contrib/documentation-audit.sh2
-rwxr-xr-xscripts/contrib/patchreview.py5
-rwxr-xr-xscripts/contrib/test_build_time_worker.sh2
-rwxr-xr-xscripts/contrib/verify-homepage.py2
-rwxr-xr-xscripts/cp-noerror2
-rwxr-xr-xscripts/devtool10
-rwxr-xr-xscripts/gen-lockedsig-cache3
-rwxr-xr-xscripts/git4
-rw-r--r--scripts/lib/argparse_oe.py2
-rw-r--r--scripts/lib/devtool/menuconfig.py2
-rw-r--r--scripts/lib/devtool/standard.py31
-rw-r--r--scripts/lib/devtool/upgrade.py33
-rw-r--r--scripts/lib/recipetool/create_buildsys_python.py15
-rw-r--r--scripts/lib/scriptutils.py12
-rw-r--r--scripts/lib/wic/ksparser.py2
-rw-r--r--scripts/lib/wic/misc.py7
-rw-r--r--scripts/lib/wic/plugins/imager/direct.py2
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-efi.py95
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-partition.py2
-rw-r--r--scripts/lib/wic/plugins/source/empty.py2
-rw-r--r--scripts/lib/wic/plugins/source/isoimage-isohybrid.py2
-rw-r--r--scripts/lib/wic/plugins/source/rawcopy.py6
-rw-r--r--scripts/lib/wic/plugins/source/rootfs.py2
-rwxr-xr-xscripts/oe-debuginfod2
-rwxr-xr-xscripts/oe-depends-dot21
-rwxr-xr-xscripts/oe-gnome-terminal-phonehome2
-rwxr-xr-xscripts/oe-pkgdata-browser5
-rwxr-xr-xscripts/oe-pylint2
-rwxr-xr-xscripts/oe-setup-builddir12
-rwxr-xr-xscripts/oe-time-dd-test.sh4
-rwxr-xr-xscripts/oe-trim-schemas2
-rwxr-xr-xscripts/oepydevshell-internal.py2
-rw-r--r--scripts/pybootchartgui/pybootchartgui/draw.py122
-rw-r--r--scripts/pybootchartgui/pybootchartgui/parsing.py33
-rw-r--r--scripts/pybootchartgui/pybootchartgui/samples.py25
-rwxr-xr-xscripts/pythondeps2
-rwxr-xr-xscripts/relocate_sdk.py10
-rwxr-xr-xscripts/runqemu9
-rwxr-xr-xscripts/sstate-diff-machines.sh2
-rwxr-xr-xscripts/sstate-sysroot-cruft.sh2
-rwxr-xr-xscripts/sysroot-relativelinks.py2
-rwxr-xr-xscripts/task-time2
-rwxr-xr-xscripts/verify-bashisms2
-rwxr-xr-xscripts/wic2
1545 files changed, 53491 insertions, 48210 deletions
diff --git a/meta-selftest/conf/multiconfig/muslmc.conf b/meta-selftest/conf/multiconfig/muslmc.conf
new file mode 100644
index 0000000000..043cd1ccc3
--- /dev/null
+++ b/meta-selftest/conf/multiconfig/muslmc.conf
@@ -0,0 +1,2 @@
+TCLIBC = "musl"
+TMPDIR = "${TOPDIR}/tmp-mc-musl"
diff --git a/meta-selftest/recipes-test/images/oe-selftest-image.bb b/meta-selftest/recipes-test/images/oe-selftest-image.bb
index e295943ae5..317a0712aa 100644
--- a/meta-selftest/recipes-test/images/oe-selftest-image.bb
+++ b/meta-selftest/recipes-test/images/oe-selftest-image.bb
@@ -1,7 +1,7 @@
SUMMARY = "An image used during oe-selftest tests"
# libudev is needed for deploying mdadm via devtool
-IMAGE_INSTALL = "packagegroup-core-boot dropbear libudev"
+IMAGE_INSTALL = "packagegroup-core-boot packagegroup-core-ssh-dropbear libudev"
IMAGE_FEATURES = "debug-tweaks"
IMAGE_LINGUAS = " "
diff --git a/meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb b/meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb
new file mode 100644
index 0000000000..21f06782fb
--- /dev/null
+++ b/meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb
@@ -0,0 +1,16 @@
+SUMMARY = "Produce a broken la file"
+LICENSE = "CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+EXCLUDE_FROM_WORLD = "1"
+
+# remove-libtool.bbclass is inherited by default and removes all
+# .la files, which for this test we specifically do not want.
+REMOVE_LIBTOOL_LA = "0"
+
+do_install() {
+ install -d ${D}${libdir}/test/
+ echo '${WORKDIR}' > ${D}${libdir}/test/la-test.la
+}
+
+BBCLASSEXTEND += "native"
diff --git a/meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb b/meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb
new file mode 100644
index 0000000000..e748310fc4
--- /dev/null
+++ b/meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb
@@ -0,0 +1,12 @@
+SUMMARY = "Produce a broken pc file"
+LICENSE = "CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+EXCLUDE_FROM_WORLD = "1"
+
+do_install() {
+ install -d ${D}${libdir}/test/
+ echo '${WORKDIR}' > ${D}${libdir}/test/test.pc
+}
+
+BBCLASSEXTEND += "native"
diff --git a/meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb b/meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb
new file mode 100644
index 0000000000..6c834be897
--- /dev/null
+++ b/meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb
@@ -0,0 +1,12 @@
+SUMMARY = "Check that shebang does not exceed 128 characters"
+LICENSE = "CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+EXCLUDE_FROM_WORLD = "1"
+do_install() {
+ install -d ${D}${bindir}
+ echo '#!BiM3cnVd1Amtv6PG+FynrQiVMbZnX5ELgF21q3EkuB+44JEGWtq8TvBJ7EGidfVs3eR3wVOUbLnjYDlKUWcm7YC/ute7f+KDHbwxziRUSUBZAUqgjiQdfQ0HnxajI0ozbM863E9JV9k13yZKYfh9/zR77Y6Dl4Dd3zOWS75LSpkAXV' > ${D}${bindir}/max-shebang
+ chmod 755 ${D}${bindir}/max-shebang
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb b/meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb
new file mode 100644
index 0000000000..c3d3548d4a
--- /dev/null
+++ b/meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb
@@ -0,0 +1,30 @@
+SUMMARY = "Check that create_cmdline_shebang works"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+INHIBIT_DEFAULT_DEPS = "1"
+
+SRC_URI += "file://test.awk"
+
+EXCLUDE_FROM_WORLD = "1"
+do_install() {
+ install -d ${D}${bindir}
+ # note: ownership preservation could not be checked here
+ install -m 0400 ${WORKDIR}/test.awk ${D}${bindir}/test
+
+ perm_old="$(stat --format='%a' ${D}${bindir}/test)"
+ sed -i -e 's|@AWK_BIN@|${bindir}/awk|g' ${D}${bindir}/test
+ create_cmdline_shebang_wrapper ${D}${bindir}/test
+ if [ $(${D}${bindir}/test) != "Don't Panic!" ]; then
+ bbfatal "Wrapper is broken"
+ else
+ bbnote "Wrapper is good"
+ fi
+
+ perm_new="$(stat --format='%a' ${D}${bindir}/test.real)"
+
+ if [ "$perm_new" != "$perm_old" ]; then
+ bbfatal "Wrapper permissions for ${D}${bindir}/test.real not preserved. Found $perm_new but expected $perm_old"
+ fi
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta-selftest/recipes-test/wrapper/files/test.awk b/meta-selftest/recipes-test/wrapper/files/test.awk
new file mode 100644
index 0000000000..91429197b1
--- /dev/null
+++ b/meta-selftest/recipes-test/wrapper/files/test.awk
@@ -0,0 +1,2 @@
+#! @AWK_BIN@ -f
+BEGIN { print "Don't Panic!" }
diff --git a/meta-skeleton/recipes-kernel/hello-mod/files/hello.c b/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
index 6b73a79524..4f73455d20 100644
--- a/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
+++ b/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
@@ -2,18 +2,7 @@
*
* Copyright (C) 2011 Intel Corporation. All rights reserved.
*
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
- * the GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ * SPDX-License-Identifier: GPL-2.0-only
*
*****************************************************************************/
diff --git a/meta-skeleton/recipes-skeleton/service/service_0.1.bb b/meta-skeleton/recipes-skeleton/service/service_0.1.bb
index d1d8c5f365..912f6b0f61 100644
--- a/meta-skeleton/recipes-skeleton/service/service_0.1.bb
+++ b/meta-skeleton/recipes-skeleton/service/service_0.1.bb
@@ -9,6 +9,8 @@ SRC_URI = "file://skeleton \
file://COPYRIGHT \
"
+S = "${WORKDIR}"
+
do_compile () {
${CC} ${CFLAGS} ${LDFLAGS} ${WORKDIR}/skeleton_test.c -o ${WORKDIR}/skeleton-test
}
diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass
new file mode 100644
index 0000000000..8203f54519
--- /dev/null
+++ b/meta/classes-global/base.bbclass
@@ -0,0 +1,789 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+BB_DEFAULT_TASK ?= "build"
+CLASSOVERRIDE ?= "class-target"
+
+inherit patch
+inherit staging
+
+inherit mirrors
+inherit utils
+inherit utility-tasks
+inherit logging
+
+OE_EXTRA_IMPORTS ?= ""
+
+OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license oe.qa oe.reproducible oe.rust oe.buildcfg ${OE_EXTRA_IMPORTS}"
+OE_IMPORTS[type] = "list"
+
+PACKAGECONFIG_CONFARGS ??= ""
+
+def oe_import(d):
+ import sys
+
+ bbpath = [os.path.join(dir, "lib") for dir in d.getVar("BBPATH").split(":")]
+ sys.path[0:0] = [dir for dir in bbpath if dir not in sys.path]
+
+ import oe.data
+ for toimport in oe.data.typed_value("OE_IMPORTS", d):
+ try:
+ # Make a python object accessible from the metadata
+ bb.utils._context[toimport.split(".", 1)[0]] = __import__(toimport)
+ except AttributeError as e:
+ bb.error("Error importing OE modules: %s" % str(e))
+ return ""
+
+# We need the oe module name space early (before INHERITs get added)
+OE_IMPORTED := "${@oe_import(d)}"
+
+inherit metadata_scm
+
+def lsb_distro_identifier(d):
+ adjust = d.getVar('LSB_DISTRO_ADJUST')
+ adjust_func = None
+ if adjust:
+ try:
+ adjust_func = globals()[adjust]
+ except KeyError:
+ pass
+ return oe.lsb.distro_identifier(adjust_func)
+
+die() {
+ bbfatal_log "$*"
+}
+
+oe_runmake_call() {
+ bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
+ ${MAKE} ${EXTRA_OEMAKE} "$@"
+}
+
+oe_runmake() {
+ oe_runmake_call "$@" || die "oe_runmake failed"
+}
+
+
+def get_base_dep(d):
+ if d.getVar('INHIBIT_DEFAULT_DEPS', False):
+ return ""
+ return "${BASE_DEFAULT_DEPS}"
+
+BASE_DEFAULT_DEPS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}compilerlibs virtual/libc"
+
+BASEDEPENDS = ""
+BASEDEPENDS:class-target = "${@get_base_dep(d)}"
+BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"
+
+DEPENDS:prepend="${BASEDEPENDS} "
+
+FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
+# THISDIR only works properly with immediate expansion as it has to run
+# in the context of the location it's used (:=)
+THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
+
+def extra_path_elements(d):
+ path = ""
+ elements = (d.getVar('EXTRANATIVEPATH') or "").split()
+ for e in elements:
+ path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
+ return path
+
+PATH:prepend = "${@extra_path_elements(d)}"
+
+def get_lic_checksum_file_list(d):
+ filelist = []
+ lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
+ tmpdir = d.getVar("TMPDIR")
+ s = d.getVar("S")
+ b = d.getVar("B")
+ workdir = d.getVar("WORKDIR")
+
+ urls = lic_files.split()
+ for url in urls:
+ # We only care about items that are absolute paths since
+ # any others should be covered by SRC_URI.
+ try:
+ (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
+ if method != "file" or not path:
+ raise bb.fetch.MalformedUrl(url)
+
+ if path[0] == '/':
+ if path.startswith((tmpdir, s, b, workdir)):
+ continue
+ filelist.append(path + ":" + str(os.path.exists(path)))
+ except bb.fetch.MalformedUrl:
+ bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
+ return " ".join(filelist)
+
+def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
+ tools = d.getVar(toolsvar).split()
+ origbbenv = d.getVar("BB_ORIGENV", False)
+ path = origbbenv.getVar("PATH")
+ # Need to ignore our own scripts directories to avoid circular links
+ for p in path.split(":"):
+ if p.endswith("/scripts"):
+ path = path.replace(p, "/ignoreme")
+ bb.utils.mkdirhier(dest)
+ notfound = []
+ for tool in tools:
+ desttool = os.path.join(dest, tool)
+ if not os.path.exists(desttool):
+ # clean up dead symlink
+ if os.path.islink(desttool):
+ os.unlink(desttool)
+ srctool = bb.utils.which(path, tool, executable=True)
+ # gcc/g++ may link to ccache on some hosts, e.g.,
+ # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
+ # would return /usr/local/bin/ccache/gcc, but what we need is
+ # /usr/bin/gcc; this code checks for that and fixes it.
+ if "ccache" in srctool:
+ srctool = bb.utils.which(path, tool, executable=True, direction=1)
+ if srctool:
+ os.symlink(srctool, desttool)
+ else:
+ notfound.append(tool)
+
+ if notfound and fatal:
+ bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
+
+addtask fetch
+do_fetch[dirs] = "${DL_DIR}"
+do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
+do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
+do_fetch[vardeps] += "SRCREV"
+do_fetch[network] = "1"
+python base_do_fetch() {
+
+ src_uri = (d.getVar('SRC_URI') or "").split()
+ if not src_uri:
+ return
+
+ try:
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+ fetcher.download()
+ except bb.fetch2.BBFetchException as e:
+ bb.fatal("Bitbake Fetcher Error: " + repr(e))
+}
+
+addtask unpack after do_fetch
+do_unpack[dirs] = "${WORKDIR}"
+
+do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"
+
+python base_do_unpack() {
+ src_uri = (d.getVar('SRC_URI') or "").split()
+ if not src_uri:
+ return
+
+ try:
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+ fetcher.unpack(d.getVar('WORKDIR'))
+ except bb.fetch2.BBFetchException as e:
+ bb.fatal("Bitbake Fetcher Error: " + repr(e))
+}
+
+SSTATETASKS += "do_deploy_source_date_epoch"
+
+do_deploy_source_date_epoch () {
+ mkdir -p ${SDE_DEPLOYDIR}
+ if [ -e ${SDE_FILE} ]; then
+ echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
+ cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
+ else
+ echo "${SDE_FILE} not found!"
+ fi
+}
+
+python do_deploy_source_date_epoch_setscene () {
+ sstate_setscene(d)
+ bb.utils.mkdirhier(d.getVar('SDE_DIR'))
+ sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
+ if os.path.exists(sde_file):
+ target = d.getVar('SDE_FILE')
+ bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
+ bb.utils.rename(sde_file, target)
+ else:
+ bb.debug(1, "%s not found!" % sde_file)
+}
+
+do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
+do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
+addtask do_deploy_source_date_epoch_setscene
+addtask do_deploy_source_date_epoch before do_configure after do_patch
+
+python create_source_date_epoch_stamp() {
+ # Version: 1
+ source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
+ oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
+}
+do_unpack[postfuncs] += "create_source_date_epoch_stamp"
+
+def get_source_date_epoch_value(d):
+ return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)
+
+def get_layers_branch_rev(d):
+ revisions = oe.buildcfg.get_layer_revisions(d)
+ layers_branch_rev = ["%-20s = \"%s:%s\"" % (r[1], r[2], r[3]) for r in revisions]
+ i = len(layers_branch_rev)-1
+ p1 = layers_branch_rev[i].find("=")
+ s1 = layers_branch_rev[i][p1:]
+ while i > 0:
+ p2 = layers_branch_rev[i-1].find("=")
+ s2= layers_branch_rev[i-1][p2:]
+ if s1 == s2:
+ layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
+ i -= 1
+ else:
+ i -= 1
+ p1 = layers_branch_rev[i].find("=")
+ s1= layers_branch_rev[i][p1:]
+ return layers_branch_rev
+
+
+BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
+BUILDCFG_FUNCS[type] = "list"
+
+def buildcfg_vars(d):
+ statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
+ for var in statusvars:
+ value = d.getVar(var)
+ if value is not None:
+ yield '%-20s = "%s"' % (var, value)
+
+def buildcfg_neededvars(d):
+ needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
+ pesteruser = []
+ for v in needed_vars:
+ val = d.getVar(v)
+ if not val or val == 'INVALID':
+ pesteruser.append(v)
+
+ if pesteruser:
+ bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
+
+addhandler base_eventhandler
+base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
+python base_eventhandler() {
+ import bb.runqueue
+
+ if isinstance(e, bb.event.ConfigParsed):
+ if not d.getVar("NATIVELSBSTRING", False):
+ d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
+ d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
+ d.setVar('BB_VERSION', bb.__version__)
+
+ # There might be no bb.event.ConfigParsed event if bitbake server is
+ # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
+ # exists.
+ if isinstance(e, bb.event.ConfigParsed) or \
+ (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
+ # Works with the line in layer.conf which changes PATH to point here
+ setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
+ setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)
+
+ if isinstance(e, bb.event.MultiConfigParsed):
+        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores'
+        # own contexts so the variables get expanded correctly for that arch, then inject back into
+        # the main data store.
+ deps = []
+ for config in e.mcdata:
+ deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
+ deps = " ".join(deps)
+ e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)
+
+ if isinstance(e, bb.event.BuildStarted):
+ localdata = bb.data.createCopy(d)
+ statuslines = []
+ for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
+ g = globals()
+ if func not in g:
+ bb.warn("Build configuration function '%s' does not exist" % func)
+ else:
+ flines = g[func](localdata)
+ if flines:
+ statuslines.extend(flines)
+
+ statusheader = d.getVar('BUILDCFG_HEADER')
+ if statusheader:
+ bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
+
+ # This code is to silence warnings where the SDK variables overwrite the
+    # target ones and we'd see duplicate key names overwriting each other
+ # for various PREFERRED_PROVIDERS
+ if isinstance(e, bb.event.RecipePreFinalise):
+ if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
+ d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
+ d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
+ d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
+ d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")
+
+ if isinstance(e, bb.event.RecipeParsed):
+ #
+        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set,
+        # skip parsing for all the other providers, which will mean they get uninstalled from the
+        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
+        # particular.
+ #
+ pn = d.getVar('PN')
+ source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
+ if not source_mirror_fetch:
+ provs = (d.getVar("PROVIDES") or "").split()
+ multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
+ for p in provs:
+ if p.startswith("virtual/") and p not in multiprovidersallowed:
+ profprov = d.getVar("PREFERRED_PROVIDER_" + p)
+ if profprov and pn != profprov:
+ raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
+}
+
+CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
+CLEANBROKEN = "0"
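+# Recipes whose "make clean" is known to be broken can set CLEANBROKEN = "1"
+# (illustrative recipe-level usage) so that base_do_configure below skips the
+# clean step when the configure task hash changes.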
+
+addtask configure after do_patch
+do_configure[dirs] = "${B}"
+base_do_configure() {
+ if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
+ if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
+ cd ${B}
+ if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
+ oe_runmake clean
+ fi
+ # -ignore_readdir_race does not work correctly with -delete;
+ # use xargs to avoid spurious build failures
+ find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
+ fi
+ fi
+ if [ -n "${CONFIGURESTAMPFILE}" ]; then
+ mkdir -p `dirname ${CONFIGURESTAMPFILE}`
+ echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
+ fi
+}
+
+addtask compile after do_configure
+do_compile[dirs] = "${B}"
+base_do_compile() {
+ if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
+ oe_runmake || die "make failed"
+ else
+ bbnote "nothing to compile"
+ fi
+}
+
+addtask install after do_compile
+do_install[dirs] = "${B}"
+# Remove and re-create ${D} so that it is guaranteed to be empty
+do_install[cleandirs] = "${D}"
+
+base_do_install() {
+ :
+}
+
+base_do_package() {
+ :
+}
+
+addtask build after do_populate_sysroot
+do_build[noexec] = "1"
+do_build[recrdeptask] += "do_deploy"
+do_build () {
+ :
+}
+
+def set_packagetriplet(d):
+ archs = []
+ tos = []
+ tvs = []
+
+ archs.append(d.getVar("PACKAGE_ARCHS").split())
+ tos.append(d.getVar("TARGET_OS"))
+ tvs.append(d.getVar("TARGET_VENDOR"))
+
+ def settriplet(d, varname, archs, tos, tvs):
+ triplets = []
+ for i in range(len(archs)):
+ for arch in archs[i]:
+ triplets.append(arch + tvs[i] + "-" + tos[i])
+ triplets.reverse()
+ d.setVar(varname, " ".join(triplets))
+
+ settriplet(d, "PKGTRIPLETS", archs, tos, tvs)
+
+ variants = d.getVar("MULTILIB_VARIANTS") or ""
+ for item in variants.split():
+ localdata = bb.data.createCopy(d)
+ overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
+ localdata.setVar("OVERRIDES", overrides)
+
+ archs.append(localdata.getVar("PACKAGE_ARCHS").split())
+ tos.append(localdata.getVar("TARGET_OS"))
+ tvs.append(localdata.getVar("TARGET_VENDOR"))
+
+ settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
+
+python () {
+ import string, re
+
+ # Handle backfilling
+ oe.utils.features_backfill("DISTRO_FEATURES", d)
+ oe.utils.features_backfill("MACHINE_FEATURES", d)
+
+ if d.getVar("S")[-1] == '/':
+ bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S")))
+ if d.getVar("B")[-1] == '/':
+ bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B")))
+
+ if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
+ d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
+ if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
+ d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")
+
+    # To add a recipe to the skip list, set:
+ # SKIP_RECIPE[pn] = "message"
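+    # e.g. in local.conf (hypothetical recipe name, illustrative only):
+    #   SKIP_RECIPE[hello-world] = "not needed for this build"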
+ pn = d.getVar('PN')
+ skip_msg = d.getVarFlag('SKIP_RECIPE', pn)
+ if skip_msg:
+ bb.debug(1, "Skipping %s %s" % (pn, skip_msg))
+ raise bb.parse.SkipRecipe("Recipe will be skipped because: %s" % (skip_msg))
+
+ # Handle PACKAGECONFIG
+ #
+ # These take the form:
+ #
+ # PACKAGECONFIG ??= "<default options>"
+ # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
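+    #
+    # A hypothetical example (illustrative values only, not defined by this class):
+    #
+    #   PACKAGECONFIG ??= "gnutls"
+    #   PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"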
+ pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
+ if pkgconfigflags:
+ pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
+ pn = d.getVar("PN")
+
+ mlprefix = d.getVar("MLPREFIX")
+
+ def expandFilter(appends, extension, prefix):
+ appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
+ newappends = []
+ for a in appends:
+ if a.endswith("-native") or ("-cross-" in a):
+ newappends.append(a)
+ elif a.startswith("virtual/"):
+ subs = a.split("/", 1)[1]
+ if subs.startswith(prefix):
+ newappends.append(a + extension)
+ else:
+ newappends.append("virtual/" + prefix + subs + extension)
+ else:
+ if a.startswith(prefix):
+ newappends.append(a + extension)
+ else:
+ newappends.append(prefix + a + extension)
+ return newappends
+
+ def appendVar(varname, appends):
+ if not appends:
+ return
+ if varname.find("DEPENDS") != -1:
+ if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
+ appends = expandFilter(appends, "", "nativesdk-")
+ elif bb.data.inherits_class('native', d):
+ appends = expandFilter(appends, "-native", "")
+ elif mlprefix:
+ appends = expandFilter(appends, "", mlprefix)
+ varname = d.expand(varname)
+ d.appendVar(varname, " " + " ".join(appends))
+
+ extradeps = []
+ extrardeps = []
+ extrarrecs = []
+ extraconf = []
+ for flag, flagval in sorted(pkgconfigflags.items()):
+ items = flagval.split(",")
+ num = len(items)
+ if num > 6:
+ bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
+ % (d.getVar('PN'), flag))
+
+ if flag in pkgconfig:
+ if num >= 3 and items[2]:
+ extradeps.append(items[2])
+ if num >= 4 and items[3]:
+ extrardeps.append(items[3])
+ if num >= 5 and items[4]:
+ extrarrecs.append(items[4])
+ if num >= 1 and items[0]:
+ extraconf.append(items[0])
+ elif num >= 2 and items[1]:
+ extraconf.append(items[1])
+
+ if num >= 6 and items[5]:
+ conflicts = set(items[5].split())
+ invalid = conflicts.difference(set(pkgconfigflags.keys()))
+ if invalid:
+ bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
+ % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))
+
+ if flag in pkgconfig:
+ intersec = conflicts.intersection(set(pkgconfig))
+ if intersec:
+ bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
+ % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))
+
+ appendVar('DEPENDS', extradeps)
+ appendVar('RDEPENDS:${PN}', extrardeps)
+ appendVar('RRECOMMENDS:${PN}', extrarrecs)
+ appendVar('PACKAGECONFIG_CONFARGS', extraconf)
+
+ pn = d.getVar('PN')
+ license = d.getVar('LICENSE')
+ if license == "INVALID" and pn != "defaultpkgname":
+ bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
+
+ if bb.data.inherits_class('license', d):
+ check_license_format(d)
+ unmatched_license_flags = check_license_flags(d)
+ if unmatched_license_flags:
+ if len(unmatched_license_flags) == 1:
+ message = "because it has a restricted license '{0}'. Which is not listed in LICENSE_FLAGS_ACCEPTED".format(unmatched_license_flags[0])
+ else:
+ message = "because it has restricted licenses {0}. Which are not listed in LICENSE_FLAGS_ACCEPTED".format(
+ ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
+ bb.debug(1, "Skipping %s %s" % (pn, message))
+ raise bb.parse.SkipRecipe(message)
+
+ # If we're building a target package we need to use fakeroot (pseudo)
+ # in order to capture permissions, owners, groups and special files
+ if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
+ d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
+ d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
+ d.setVarFlag('do_install', 'fakeroot', '1')
+ d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
+ d.setVarFlag('do_package', 'fakeroot', '1')
+ d.setVarFlag('do_package_setscene', 'fakeroot', '1')
+ d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
+ d.setVarFlag('do_devshell', 'fakeroot', '1')
+ d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
+
+ need_machine = d.getVar('COMPATIBLE_MACHINE')
+ if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
+ import re
+ compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
+ for m in compat_machines:
+ if re.match(need_machine, m):
+ break
+ else:
+ raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
+
+ source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
+ if not source_mirror_fetch:
+ need_host = d.getVar('COMPATIBLE_HOST')
+ if need_host:
+ import re
+ this_host = d.getVar('HOST_SYS')
+ if not re.match(need_host, this_host):
+ raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
+
+ bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
+
+ check_license = False if pn.startswith("nativesdk-") else True
+ for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
+ "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
+ "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
+ if pn.endswith(d.expand(t)):
+ check_license = False
+ if pn.startswith("gcc-source-"):
+ check_license = False
+
+ if check_license and bad_licenses:
+ bad_licenses = expand_wildcard_licenses(d, bad_licenses)
+
+ exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()
+
+ for lic_exception in exceptions:
+ if ":" in lic_exception:
+ lic_exception = lic_exception.split(":")[1]
+ if lic_exception in oe.license.obsolete_license_list():
+ bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)
+
+ pkgs = d.getVar('PACKAGES').split()
+ skipped_pkgs = {}
+ unskipped_pkgs = []
+ for pkg in pkgs:
+ remaining_bad_licenses = oe.license.apply_pkg_license_exception(pkg, bad_licenses, exceptions)
+
+ incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg)
+ if incompatible_lic:
+ skipped_pkgs[pkg] = incompatible_lic
+ else:
+ unskipped_pkgs.append(pkg)
+
+ if unskipped_pkgs:
+ for pkg in skipped_pkgs:
+ bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
+ d.setVar('_exclude_incompatible-' + pkg, ' '.join(skipped_pkgs[pkg]))
+ for pkg in unskipped_pkgs:
+ bb.debug(1, "Including the package %s" % pkg)
+ else:
+ incompatible_lic = incompatible_license(d, bad_licenses)
+ for pkg in skipped_pkgs:
+ incompatible_lic += skipped_pkgs[pkg]
+ incompatible_lic = sorted(list(set(incompatible_lic)))
+
+ if incompatible_lic:
+ bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
+ raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))
+
+ needsrcrev = False
+ srcuri = d.getVar('SRC_URI')
+ for uri_string in srcuri.split():
+ uri = bb.fetch.URI(uri_string)
+ # Also check downloadfilename as the URL path might not be useful for sniffing
+ path = uri.params.get("downloadfilename", uri.path)
+
+ # HTTP/FTP use the wget fetcher
+ if uri.scheme in ("http", "https", "ftp"):
+ d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')
+
+ # Svn packages should DEPEND on subversion-native
+ if uri.scheme == "svn":
+ needsrcrev = True
+ d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
+
+ # Git packages should DEPEND on git-native
+ elif uri.scheme in ("git", "gitsm"):
+ needsrcrev = True
+ d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')
+
+ # Mercurial packages should DEPEND on mercurial-native
+ elif uri.scheme == "hg":
+ needsrcrev = True
+ d.appendVar("EXTRANATIVEPATH", ' python3-native ')
+ d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')
+
+ # Perforce packages support SRCREV = "${AUTOREV}"
+ elif uri.scheme == "p4":
+ needsrcrev = True
+
+ # OSC packages should DEPEND on osc-native
+ elif uri.scheme == "osc":
+ d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')
+
+ elif uri.scheme == "npm":
+ d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')
+
+ elif uri.scheme == "repo":
+ needsrcrev = True
+ d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')
+
+ # *.lz4 should DEPEND on lz4-native for unpacking
+ if path.endswith('.lz4'):
+ d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')
+
+ # *.zst should DEPEND on zstd-native for unpacking
+ elif path.endswith('.zst'):
+ d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot')
+
+ # *.lz should DEPEND on lzip-native for unpacking
+ elif path.endswith('.lz'):
+ d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')
+
+ # *.xz should DEPEND on xz-native for unpacking
+ elif path.endswith('.xz') or path.endswith('.txz'):
+ d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
+
+ # .zip should DEPEND on unzip-native for unpacking
+ elif path.endswith('.zip') or path.endswith('.jar'):
+ d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')
+
+ # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
+ elif path.endswith('.rpm'):
+ d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
+
+ # *.deb should DEPEND on xz-native for unpacking
+ elif path.endswith('.deb'):
+ d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
+
+ if needsrcrev:
+ d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")
+
+ # Gather all named SRCREVs to add to the sstate hash calculation
+ # This anonymous python snippet is called multiple times so we
+ # need to be careful to not double up the appends here and cause
+ # the base hash to mismatch the task hash
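+    # e.g. (illustrative) a SRC_URI entry carrying ";name=rt" results in SRCREV_rt
+    # being added to the do_fetch vardeps below.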
+ for uri in srcuri.split():
+ parm = bb.fetch.decodeurl(uri)[5]
+ uri_names = parm.get("name", "").split(",")
+ for uri_name in filter(None, uri_names):
+ srcrev_name = "SRCREV_{}".format(uri_name)
+ if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
+ d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))
+
+ set_packagetriplet(d)
+
+ # 'multimachine' handling
+ mach_arch = d.getVar('MACHINE_ARCH')
+ pkg_arch = d.getVar('PACKAGE_ARCH')
+
+ if (pkg_arch == mach_arch):
+ # Already machine specific - nothing further to do
+ return
+
+ #
+ # We always try to scan SRC_URI for urls with machine overrides
+ # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
+ #
+ override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
+ if override != '0':
+ paths = []
+ fpaths = (d.getVar('FILESPATH') or '').split(':')
+ machine = d.getVar('MACHINE')
+ for p in fpaths:
+ if os.path.basename(p) == machine and os.path.isdir(p):
+ paths.append(p)
+
+ if paths:
+ for s in srcuri.split():
+ if not s.startswith("file://"):
+ continue
+ fetcher = bb.fetch2.Fetch([s], d)
+ local = fetcher.localpath(s)
+ for mp in paths:
+ if local.startswith(mp):
+ #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
+ d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
+ return
+
+ packages = d.getVar('PACKAGES').split()
+ for pkg in packages:
+ pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)
+
+ # We could look for != PACKAGE_ARCH here but how to choose
+ # if multiple differences are present?
+ # Look through PACKAGE_ARCHS for the priority order?
+ if pkgarch and pkgarch == mach_arch:
+ d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
+ bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
+}
+
+addtask cleansstate after do_clean
+python do_cleansstate() {
+ sstate_clean_cachefiles(d)
+}
+addtask cleanall after do_cleansstate
+do_cleansstate[nostamp] = "1"
+
+python do_cleanall() {
+ src_uri = (d.getVar('SRC_URI') or "").split()
+ if not src_uri:
+ return
+
+ try:
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+ fetcher.clean()
+ except bb.fetch2.BBFetchException as e:
+ bb.fatal(str(e))
+}
+do_cleanall[nostamp] = "1"
+
+
+EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package
diff --git a/meta/classes-global/buildstats.bbclass b/meta/classes-global/buildstats.bbclass
new file mode 100644
index 0000000000..f49a67aa4f
--- /dev/null
+++ b/meta/classes-global/buildstats.bbclass
@@ -0,0 +1,302 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+BUILDSTATS_BASE = "${TMPDIR}/buildstats/"
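+# Directory layout produced by this class (derived from the handlers below):
+#   ${BUILDSTATS_BASE}/<BUILDNAME>/build_stats
+#   ${BUILDSTATS_BASE}/<BUILDNAME>/<PF>/<task name>
+#   ${BUILDSTATS_BASE}/<BUILDNAME>/host_stats_* (if host stat logging is enabled)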
+
+################################################################################
+# Build statistics gathering.
+#
+# The CPU and Time gathering/tracking functions and bbevent inspiration
+# were written by Christopher Larson.
+#
+################################################################################
+
+def get_buildprocess_cputime(pid):
+ with open("/proc/%d/stat" % pid, "r") as f:
+ fields = f.readline().rstrip().split()
+ # 13: utime, 14: stime, 15: cutime, 16: cstime
+ return sum(int(field) for field in fields[13:16])
+
+def get_process_cputime(pid):
+ import resource
+ with open("/proc/%d/stat" % pid, "r") as f:
+ fields = f.readline().rstrip().split()
+ stats = {
+ 'utime' : fields[13],
+ 'stime' : fields[14],
+ 'cutime' : fields[15],
+ 'cstime' : fields[16],
+ }
+ iostats = {}
+ if os.path.isfile("/proc/%d/io" % pid):
+ with open("/proc/%d/io" % pid, "r") as f:
+ while True:
+ i = f.readline().strip()
+ if not i:
+ break
+ if not ":" in i:
+ # one more extra line is appended (empty or containing "0")
+ # most probably due to race condition in kernel while
+ # updating IO stats
+ break
+ i = i.split(": ")
+ iostats[i[0]] = i[1]
+ resources = resource.getrusage(resource.RUSAGE_SELF)
+ childres = resource.getrusage(resource.RUSAGE_CHILDREN)
+ return stats, iostats, resources, childres
+
+def get_cputime():
+ with open("/proc/stat", "r") as f:
+ fields = f.readline().rstrip().split()[1:]
+ return sum(int(field) for field in fields)
+
+def set_timedata(var, d, server_time):
+ d.setVar(var, server_time)
+
+def get_timedata(var, d, end_time):
+ oldtime = d.getVar(var, False)
+ if oldtime is None:
+ return
+ return end_time - oldtime
+
+def set_buildtimedata(var, d):
+ import time
+ time = time.time()
+ cputime = get_cputime()
+ proctime = get_buildprocess_cputime(os.getpid())
+ d.setVar(var, (time, cputime, proctime))
+
+def get_buildtimedata(var, d):
+ import time
+ timedata = d.getVar(var, False)
+ if timedata is None:
+ return
+ oldtime, oldcpu, oldproc = timedata
+ procdiff = get_buildprocess_cputime(os.getpid()) - oldproc
+ cpudiff = get_cputime() - oldcpu
+ end_time = time.time()
+ timediff = end_time - oldtime
+ if cpudiff > 0:
+ cpuperc = float(procdiff) * 100 / cpudiff
+ else:
+ cpuperc = None
+ return timediff, cpuperc
+
+def write_task_data(status, logfile, e, d):
+ with open(os.path.join(logfile), "a") as f:
+ elapsedtime = get_timedata("__timedata_task", d, e.time)
+ if elapsedtime:
+ f.write(d.expand("${PF}: %s\n" % e.task))
+ f.write(d.expand("Elapsed time: %0.2f seconds\n" % elapsedtime))
+ cpu, iostats, resources, childres = get_process_cputime(os.getpid())
+ if cpu:
+ f.write("utime: %s\n" % cpu['utime'])
+ f.write("stime: %s\n" % cpu['stime'])
+ f.write("cutime: %s\n" % cpu['cutime'])
+ f.write("cstime: %s\n" % cpu['cstime'])
+ for i in iostats:
+ f.write("IO %s: %s\n" % (i, iostats[i]))
+ rusages = ["ru_utime", "ru_stime", "ru_maxrss", "ru_minflt", "ru_majflt", "ru_inblock", "ru_oublock", "ru_nvcsw", "ru_nivcsw"]
+ for i in rusages:
+ f.write("rusage %s: %s\n" % (i, getattr(resources, i)))
+ for i in rusages:
+ f.write("Child rusage %s: %s\n" % (i, getattr(childres, i)))
+ if status == "passed":
+ f.write("Status: PASSED \n")
+ else:
+ f.write("Status: FAILED \n")
+ f.write("Ended: %0.2f \n" % e.time)
+
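+# write_host_data() collects host information driven by variables that are
+# expected to be set in conf/local.conf; the values below are purely illustrative:
+#   BB_LOG_HOST_STAT_ON_INTERVAL = "1"
+#   BB_HEARTBEAT_EVENT = "60"
+#   BB_LOG_HOST_STAT_CMDS_INTERVAL = "free -m ; uptime"
+#   BB_LOG_HOST_STAT_ON_FAILURE = "1"
+#   BB_LOG_HOST_STAT_CMDS_FAILURE = "df -h"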
+def write_host_data(logfile, e, d, type):
+ import subprocess, os, datetime
+ # minimum time allowed for each command to run, in seconds
+ time_threshold = 0.5
+ limit = 10
+ # the total number of commands
+ num_cmds = 0
+ msg = ""
+ if type == "interval":
+ # interval at which data will be logged
+ interval = d.getVar("BB_HEARTBEAT_EVENT", False)
+ if interval is None:
+ bb.warn("buildstats: Collecting host data at intervals failed. Set BB_HEARTBEAT_EVENT=\"<interval>\" in conf/local.conf for the interval at which host data will be logged.")
+ d.setVar("BB_LOG_HOST_STAT_ON_INTERVAL", "0")
+ return
+ interval = int(interval)
+ cmds = d.getVar('BB_LOG_HOST_STAT_CMDS_INTERVAL')
+ msg = "Host Stats: Collecting data at %d second intervals.\n" % interval
+ if cmds is None:
+ d.setVar("BB_LOG_HOST_STAT_ON_INTERVAL", "0")
+ bb.warn("buildstats: Collecting host data at intervals failed. Set BB_LOG_HOST_STAT_CMDS_INTERVAL=\"command1 ; command2 ; ... \" in conf/local.conf.")
+ return
+ if type == "failure":
+ cmds = d.getVar('BB_LOG_HOST_STAT_CMDS_FAILURE')
+ msg = "Host Stats: Collecting data on failure.\n"
+ msg += "Failed at task: " + e.task + "\n"
+ if cmds is None:
+ d.setVar("BB_LOG_HOST_STAT_ON_FAILURE", "0")
+ bb.warn("buildstats: Collecting host data on failure failed. Set BB_LOG_HOST_STAT_CMDS_FAILURE=\"command1 ; command2 ; ... \" in conf/local.conf.")
+ return
+ c_san = []
+ for cmd in cmds.split(";"):
+ if len(cmd) == 0:
+ continue
+ num_cmds += 1
+ c_san.append(cmd)
+ if num_cmds == 0:
+ if type == "interval":
+ d.setVar("BB_LOG_HOST_STAT_ON_INTERVAL", "0")
+ if type == "failure":
+ d.setVar("BB_LOG_HOST_STAT_ON_FAILURE", "0")
+ return
+
+ # return if the interval is not enough to run all commands within the specified BB_HEARTBEAT_EVENT interval
+ if type == "interval":
+ limit = interval / num_cmds
+ if limit <= time_threshold:
+ d.setVar("BB_LOG_HOST_STAT_ON_INTERVAL", "0")
+ bb.warn("buildstats: Collecting host data failed. BB_HEARTBEAT_EVENT interval not enough to run the specified commands. Increase value of BB_HEARTBEAT_EVENT in conf/local.conf.")
+ return
+
+ # set the environment variables
+ path = d.getVar("PATH")
+ opath = d.getVar("BB_ORIGENV", False).getVar("PATH")
+ ospath = os.environ['PATH']
+ os.environ['PATH'] = path + ":" + opath + ":" + ospath
+ with open(logfile, "a") as f:
+ f.write("Event Time: %f\nDate: %s\n" % (e.time, datetime.datetime.now()))
+ f.write("%s" % msg)
+ for c in c_san:
+ try:
+ output = subprocess.check_output(c.split(), stderr=subprocess.STDOUT, timeout=limit).decode('utf-8')
+ except (subprocess.CalledProcessError, subprocess.TimeoutExpired, FileNotFoundError) as err:
+ output = "Error running command: %s\n%s\n" % (c, err)
+ f.write("%s\n%s\n" % (c, output))
+ # reset the environment
+ os.environ['PATH'] = ospath
+
+python run_buildstats () {
+ import bb.build
+ import bb.event
+ import time, subprocess, platform
+
+ bn = d.getVar('BUILDNAME')
+ ########################################################################
+ # bitbake fires HeartbeatEvent even before a build has been
+ # triggered, causing BUILDNAME to be None
+ ########################################################################
+ if bn is not None:
+ bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
+ taskdir = os.path.join(bsdir, d.getVar('PF'))
+ if isinstance(e, bb.event.HeartbeatEvent) and bb.utils.to_boolean(d.getVar("BB_LOG_HOST_STAT_ON_INTERVAL")):
+ bb.utils.mkdirhier(bsdir)
+ write_host_data(os.path.join(bsdir, "host_stats_interval"), e, d, "interval")
+
+ if isinstance(e, bb.event.BuildStarted):
+ ########################################################################
+ # If the kernel was not configured to provide I/O statistics, issue
+ # a one time warning.
+ ########################################################################
+ if not os.path.isfile("/proc/%d/io" % os.getpid()):
+ bb.warn("The Linux kernel on your build host was not configured to provide process I/O statistics. (CONFIG_TASK_IO_ACCOUNTING is not set)")
+
+ ########################################################################
+ # at first pass make the buildstats hierarchy and then
+ # set the buildname
+ ########################################################################
+ bb.utils.mkdirhier(bsdir)
+ set_buildtimedata("__timedata_build", d)
+ build_time = os.path.join(bsdir, "build_stats")
+ # write start of build into build_time
+ with open(build_time, "a") as f:
+ host_info = platform.uname()
+ f.write("Host Info: ")
+ for x in host_info:
+ if x:
+ f.write(x + " ")
+ f.write("\n")
+ f.write("Build Started: %0.2f \n" % d.getVar('__timedata_build', False)[0])
+
+ elif isinstance(e, bb.event.BuildCompleted):
+ build_time = os.path.join(bsdir, "build_stats")
+ with open(build_time, "a") as f:
+ ########################################################################
+ # Write build statistics for the build
+ ########################################################################
+ timedata = get_buildtimedata("__timedata_build", d)
+ if timedata:
+ time, cpu = timedata
+ # write end of build and cpu used into build_time
+ f.write("Elapsed time: %0.2f seconds \n" % (time))
+ if cpu:
+ f.write("CPU usage: %0.1f%% \n" % cpu)
+
+ if isinstance(e, bb.build.TaskStarted):
+ set_timedata("__timedata_task", d, e.time)
+ bb.utils.mkdirhier(taskdir)
+ # write into the task event file the name and start time
+ with open(os.path.join(taskdir, e.task), "a") as f:
+ f.write("Event: %s \n" % bb.event.getName(e))
+ f.write("Started: %0.2f \n" % e.time)
+
+ elif isinstance(e, bb.build.TaskSucceeded):
+ write_task_data("passed", os.path.join(taskdir, e.task), e, d)
+ if e.task == "do_rootfs":
+ bs = os.path.join(bsdir, "build_stats")
+ with open(bs, "a") as f:
+ rootfs = d.getVar('IMAGE_ROOTFS')
+ if os.path.isdir(rootfs):
+ try:
+ rootfs_size = subprocess.check_output(["du", "-sh", rootfs],
+ stderr=subprocess.STDOUT).decode('utf-8')
+ f.write("Uncompressed Rootfs size: %s" % rootfs_size)
+ except subprocess.CalledProcessError as err:
+ bb.warn("Failed to get rootfs size: %s" % err.output.decode('utf-8'))
+
+ elif isinstance(e, bb.build.TaskFailed):
+ # Can have a failure before TaskStarted so need to mkdir here too
+ bb.utils.mkdirhier(taskdir)
+ write_task_data("failed", os.path.join(taskdir, e.task), e, d)
+ ########################################################################
+            # Let's make things easier and tell people where the build failed in
+ # build_status. We do this here because BuildCompleted triggers no
+ # matter what the status of the build actually is
+ ########################################################################
+ build_status = os.path.join(bsdir, "build_stats")
+ with open(build_status, "a") as f:
+ f.write(d.expand("Failed at: ${PF} at task: %s \n" % e.task))
+ if bb.utils.to_boolean(d.getVar("BB_LOG_HOST_STAT_ON_FAILURE")):
+ write_host_data(os.path.join(bsdir, "host_stats_%s_failure" % e.task), e, d, "failure")
+}
+
+addhandler run_buildstats
+run_buildstats[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted bb.event.HeartbeatEvent bb.build.TaskStarted bb.build.TaskSucceeded bb.build.TaskFailed"
+
+python runqueue_stats () {
+ import buildstats
+ from bb import event, runqueue
+ # We should not record any samples before the first task has started,
+ # because that's the first activity shown in the process chart.
+ # Besides, at that point we are sure that the build variables
+ # are available that we need to find the output directory.
+ # The persistent SystemStats is stored in the datastore and
+ # closed when the build is done.
+ system_stats = d.getVar('_buildstats_system_stats', False)
+ if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)):
+ system_stats = buildstats.SystemStats(d)
+ d.setVar('_buildstats_system_stats', system_stats)
+ if system_stats:
+ # Ensure that we sample at important events.
+ done = isinstance(e, bb.event.BuildCompleted)
+ if system_stats.sample(e, force=done):
+ d.setVar('_buildstats_system_stats', system_stats)
+ if done:
+ system_stats.close()
+ d.delVar('_buildstats_system_stats')
+}
+
+addhandler runqueue_stats
+runqueue_stats[eventmask] = "bb.runqueue.sceneQueueTaskStarted bb.runqueue.runQueueTaskStarted bb.event.HeartbeatEvent bb.event.BuildCompleted bb.event.MonitorDiskEvent"
diff --git a/meta/classes-global/debian.bbclass b/meta/classes-global/debian.bbclass
new file mode 100644
index 0000000000..7135d74837
--- /dev/null
+++ b/meta/classes-global/debian.bbclass
@@ -0,0 +1,156 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Debian package renaming only occurs when a package is built.
+# We therefore have to make sure we build all runtime packages
+# before building the current package, so that its runtime
+# dependencies are correct.
+#
+# Custom library package names can be defined by setting
+# DEBIANNAME:<pkgname> to the desired name.
+#
+# Put another way: ensure all RDEPENDS are packaged before we package this one.
+# This means we can't have circular RDEPENDS/RRECOMMENDS.
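+#
+# A hypothetical example (illustrative only):
+#   DEBIANNAME:${PN} = "libfoo2"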
+
+AUTO_LIBNAME_PKGS = "${PACKAGES}"
+
+inherit package
+
+DEBIANRDEP = "do_packagedata"
+do_package_write_ipk[deptask] = "${DEBIANRDEP}"
+do_package_write_deb[deptask] = "${DEBIANRDEP}"
+do_package_write_tar[deptask] = "${DEBIANRDEP}"
+do_package_write_rpm[deptask] = "${DEBIANRDEP}"
+do_package_write_ipk[rdeptask] = "${DEBIANRDEP}"
+do_package_write_deb[rdeptask] = "${DEBIANRDEP}"
+do_package_write_tar[rdeptask] = "${DEBIANRDEP}"
+do_package_write_rpm[rdeptask] = "${DEBIANRDEP}"
+
+python () {
+ if not d.getVar("PACKAGES"):
+ d.setVar("DEBIANRDEP", "")
+}
+
+python debian_package_name_hook () {
+ import glob, copy, stat, errno, re, pathlib, subprocess
+
+ pkgdest = d.getVar("PKGDEST")
+ packages = d.getVar('PACKAGES')
+ so_re = re.compile(r"lib.*\.so")
+
+ def socrunch(s):
+ s = s.lower().replace('_', '-')
+ m = re.match(r"^(.*)(.)\.so\.(.*)$", s)
+ if m is None:
+ return None
+ if m.group(2) in '0123456789':
+ bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
+ else:
+ bin = m.group(1) + m.group(2) + m.group(3)
+ dev = m.group(1) + m.group(2)
+ return (bin, dev)
+
+ def isexec(path):
+ try:
+ s = os.stat(path)
+ except (os.error, AttributeError):
+ return 0
+ return (s[stat.ST_MODE] & stat.S_IEXEC)
+
+ def add_rprovides(pkg, d):
+ newpkg = d.getVar('PKG:' + pkg)
+ if newpkg and newpkg != pkg:
+ provs = (d.getVar('RPROVIDES:' + pkg) or "").split()
+ if pkg not in provs:
+ d.appendVar('RPROVIDES:' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")")
+
+ def auto_libname(packages, orig_pkg):
+ p = lambda var: pathlib.PurePath(d.getVar(var))
+ libdirs = (p("base_libdir"), p("libdir"))
+ bindirs = (p("base_bindir"), p("base_sbindir"), p("bindir"), p("sbindir"))
+
+ sonames = []
+ has_bins = 0
+ has_libs = 0
+ for f in pkgfiles[orig_pkg]:
+ # This is .../packages-split/orig_pkg/
+ pkgpath = pathlib.PurePath(pkgdest, orig_pkg)
+ # Strip pkgpath off the full path to a file in the package, re-root
+ # so it is absolute, and then get the parent directory of the file.
+ path = pathlib.PurePath("/") / (pathlib.PurePath(f).relative_to(pkgpath).parent)
+ if path in bindirs:
+ has_bins = 1
+ if path in libdirs:
+ has_libs = 1
+ if so_re.match(os.path.basename(f)):
+ try:
+ cmd = [d.expand("${TARGET_PREFIX}objdump"), "-p", f]
+ output = subprocess.check_output(cmd).decode("utf-8")
+ for m in re.finditer(r"\s+SONAME\s+([^\s]+)", output):
+ if m.group(1) not in sonames:
+ sonames.append(m.group(1))
+ except subprocess.CalledProcessError:
+ pass
+ bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
+ soname = None
+ if len(sonames) == 1:
+ soname = sonames[0]
+ elif len(sonames) > 1:
+ lead = d.getVar('LEAD_SONAME')
+ if lead:
+ r = re.compile(lead)
+ filtered = []
+ for s in sonames:
+ if r.match(s):
+ filtered.append(s)
+ if len(filtered) == 1:
+ soname = filtered[0]
+ elif len(filtered) > 1:
+ bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
+ else:
+ bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
+ else:
+ bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))
+
+ if has_libs and not has_bins and soname:
+ soname_result = socrunch(soname)
+ if soname_result:
+ (pkgname, devname) = soname_result
+ for pkg in packages.split():
+ if (d.getVar('PKG:' + pkg, False) or d.getVar('DEBIAN_NOAUTONAME:' + pkg, False)):
+ add_rprovides(pkg, d)
+ continue
+ debian_pn = d.getVar('DEBIANNAME:' + pkg, False)
+ if debian_pn:
+ newpkg = debian_pn
+ elif pkg == orig_pkg:
+ newpkg = pkgname
+ else:
+ newpkg = pkg.replace(orig_pkg, devname, 1)
+ mlpre=d.getVar('MLPREFIX')
+ if mlpre:
+ if not newpkg.find(mlpre) == 0:
+ newpkg = mlpre + newpkg
+ if newpkg != pkg:
+ bb.note("debian: renaming %s to %s" % (pkg, newpkg))
+ d.setVar('PKG:' + pkg, newpkg)
+ add_rprovides(pkg, d)
+ else:
+ add_rprovides(orig_pkg, d)
+
+    # A reversed sort is needed when one package name is a substring of another,
+    # i.e. in ncurses, without the reverse sort we get:
+ # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5
+ # and later
+ # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
+ # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
+ for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS') or "").split(), reverse=True):
+ auto_libname(packages, pkg)
+}
+
+EXPORT_FUNCTIONS package_name_hook
+
+DEBIAN_NAMES = "1"
diff --git a/meta/classes-global/devshell.bbclass b/meta/classes-global/devshell.bbclass
new file mode 100644
index 0000000000..03af56b7a2
--- /dev/null
+++ b/meta/classes-global/devshell.bbclass
@@ -0,0 +1,166 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit terminal
+
+DEVSHELL = "${SHELL}"
+
+PATH:prepend:task-devshell = "${COREBASE}/scripts/git-intercept:"
+
+python do_devshell () {
+ if d.getVarFlag("do_devshell", "manualfakeroot"):
+ d.prependVar("DEVSHELL", "pseudo ")
+ fakeenv = d.getVar("FAKEROOTENV").split()
+ for f in fakeenv:
+ k = f.split("=")
+ d.setVar(k[0], k[1])
+ d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0])
+ d.delVarFlag("do_devshell", "fakeroot")
+
+ oe_terminal(d.getVar('DEVSHELL'), 'OpenEmbedded Developer Shell', d)
+}
+
+addtask devshell after do_patch do_prepare_recipe_sysroot
+
+# The directory that the terminal starts in
+DEVSHELL_STARTDIR ?= "${S}"
+do_devshell[dirs] = "${DEVSHELL_STARTDIR}"
+do_devshell[nostamp] = "1"
+do_devshell[network] = "1"
+
+# devshell and fakeroot/pseudo need careful handling since only the final
+# command should run under fakeroot emulation; any X connection should
+# be done as the normal user. We therefore carefully construct the environment
+# manually
+python () {
+ if d.getVarFlag("do_devshell", "fakeroot"):
+ # We need to signal our code that we want fakeroot however we
+ # can't manipulate the environment and variables here yet (see YOCTO #4795)
+ d.setVarFlag("do_devshell", "manualfakeroot", "1")
+ d.delVarFlag("do_devshell", "fakeroot")
+}
+
+def pydevshell(d):
+
+ import code
+ import select
+ import signal
+ import termios
+
+ m, s = os.openpty()
+ sname = os.ttyname(s)
+
+ def noechoicanon(fd):
+ old = termios.tcgetattr(fd)
+ old[3] = old[3] &~ termios.ECHO &~ termios.ICANON
+ # &~ termios.ISIG
+ termios.tcsetattr(fd, termios.TCSADRAIN, old)
+
+ # No echo or buffering over the pty
+ noechoicanon(s)
+
+ pid = os.fork()
+ if pid:
+ os.close(m)
+ oe_terminal("oepydevshell-internal.py %s %d" % (sname, pid), 'OpenEmbedded Developer PyShell', d)
+ os._exit(0)
+ else:
+ os.close(s)
+
+ os.dup2(m, sys.stdin.fileno())
+ os.dup2(m, sys.stdout.fileno())
+ os.dup2(m, sys.stderr.fileno())
+
+ bb.utils.nonblockingfd(sys.stdout)
+ bb.utils.nonblockingfd(sys.stderr)
+ bb.utils.nonblockingfd(sys.stdin)
+
+ _context = {
+ "os": os,
+ "bb": bb,
+ "time": time,
+ "d": d,
+ }
+
+ ps1 = "pydevshell> "
+ ps2 = "... "
+ buf = []
+ more = False
+
+ i = code.InteractiveInterpreter(locals=_context)
+ print("OE PyShell (PN = %s)\n" % d.getVar("PN"))
+
+ def prompt(more):
+ if more:
+ prompt = ps2
+ else:
+ prompt = ps1
+ sys.stdout.write(prompt)
+ sys.stdout.flush()
+
+ # Restore Ctrl+C since bitbake masks this
+ def signal_handler(signal, frame):
+ raise KeyboardInterrupt
+ signal.signal(signal.SIGINT, signal_handler)
+
+ child = None
+
+ prompt(more)
+ while True:
+ try:
+ try:
+ (r, _, _) = select.select([sys.stdin], [], [], 1)
+ if not r:
+ continue
+ line = sys.stdin.readline().strip()
+ if not line:
+ prompt(more)
+ continue
+ except EOFError as e:
+ sys.stdout.write("\n")
+ sys.stdout.flush()
+ except (OSError, IOError) as e:
+ if e.errno == 11:
+ continue
+ if e.errno == 5:
+ return
+ raise
+ else:
+ if not child:
+ child = int(line)
+ continue
+ buf.append(line)
+ source = "\n".join(buf)
+ more = i.runsource(source, "<pyshell>")
+ if not more:
+ buf = []
+ sys.stderr.flush()
+ prompt(more)
+ except KeyboardInterrupt:
+ i.write("\nKeyboardInterrupt\n")
+ buf = []
+ more = False
+ prompt(more)
+ except SystemExit:
+ # Easiest way to ensure everything exits
+ os.kill(child, signal.SIGTERM)
+ break
+
+python do_pydevshell() {
+ import signal
+
+ try:
+ pydevshell(d)
+ except SystemExit:
+ # Stop the SIGTERM above causing an error exit code
+ return
+ finally:
+ return
+}
+addtask pydevshell after do_patch
+
+do_pydevshell[nostamp] = "1"
+do_pydevshell[network] = "1"
diff --git a/meta/classes-global/insane.bbclass b/meta/classes-global/insane.bbclass
new file mode 100644
index 0000000000..46ea41e271
--- /dev/null
+++ b/meta/classes-global/insane.bbclass
@@ -0,0 +1,1453 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# BB Class inspired by ebuild.sh
+#
+# This class will test files after installation for certain
+# security issues and other kinds of issues.
+#
+# Checks we do:
+# -Check the ownership and permissions
+# -Check the RUNTIME path for the $TMPDIR
+# -Check if .la files wrongly point to workdir
+# -Check if .pc files wrongly point to workdir
+# -Check if packages contain .debug directories or .so files
+# where they should be in -dev or -dbg
+# -Check if config.log contains traces to broken autoconf tests
+# -Check invalid characters (non-utf8) on some package metadata
+# -Ensure that binaries in base_[bindir|sbindir|libdir] do not link
+# into exec_prefix
+# -Check that scripts in base_[bindir|sbindir|libdir] do not reference
+# files under exec_prefix
+# -Check if the package name is upper case
+
+# Elect whether a given type of error is a warning or error, they may
+# have been set by other files.
+WARN_QA ?= " libdir xorg-driver-abi buildpaths \
+ textrel incompatible-license files-invalid \
+ infodir build-deps src-uri-bad symlink-to-sysroot multilib \
+ invalid-packageconfig host-user-contaminated uppercase-pn patch-fuzz \
+ mime mime-xdg unlisted-pkg-lics unhandled-features-check \
+ missing-update-alternatives native-last missing-ptest \
+ license-exists license-no-generic license-syntax license-format \
+ license-incompatible license-file-missing obsolete-license \
+ "
+ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
+ perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
+ split-strip packages-list pkgv-undefined var-undefined \
+ version-going-backwards expanded-d invalid-chars \
+ license-checksum dev-elf file-rdeps configure-unsafe \
+ configure-gettext perllocalpod shebang-size \
+ already-stripped installed-vs-shipped ldflags compile-host-path \
+ install-host-path pn-overrides unknown-configure-option \
+ useless-rpaths rpaths staticdev empty-dirs \
+ "
+# Add usrmerge QA check based on distro feature
+ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
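+# Individual recipes can skip a given check for a package, e.g. (illustrative):
+#   INSANE_SKIP:${PN} += "ldflags"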
+
+FAKEROOT_QA = "host-user-contaminated"
+FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
+enabled tests are listed here, the do_package_qa task will run under fakeroot."
+
+ALL_QA = "${WARN_QA} ${ERROR_QA}"
+
+UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static"
+
+# This is a list of directories that are expected to be empty.
+QA_EMPTY_DIRS ?= " \
+ /dev/pts \
+ /media \
+ /proc \
+ /run \
+ /tmp \
+ ${localstatedir}/run \
+ ${localstatedir}/volatile \
+"
+# It is possible to specify why a directory is expected to be empty by defining
+# QA_EMPTY_DIRS_RECOMMENDATION:<path>, which will then be included in the error
+# message if the directory is not empty. If it is not specified for a directory,
+# then "but it is expected to be empty" will be used.
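+# For example (illustrative only):
+#   QA_EMPTY_DIRS_RECOMMENDATION:/dev/pts = "but it is expected to be empty because it is a mount point"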
+
+def package_qa_clean_path(path, d, pkg=None):
+ """
+ Remove redundant paths from the path for display. If pkg isn't set then
+ TMPDIR is stripped, otherwise PKGDEST/pkg is stripped.
+ """
+ if pkg:
+ path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/")
+ return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
+
+QAPATHTEST[shebang-size] = "package_qa_check_shebang_size"
+def package_qa_check_shebang_size(path, name, d, elf, messages):
+ import stat
+ if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf:
+ return
+
+ try:
+ with open(path, 'rb') as f:
+ stanza = f.readline(130)
+ except IOError:
+ return
+
+ if stanza.startswith(b'#!'):
+        # A shebang was found; check that it decodes and is within the size limit
+ try:
+ stanza = stanza.decode("utf-8")
+ except UnicodeDecodeError:
+ #If it is not a text file, it is not a script
+ return
+
+ if len(stanza) > 129:
+ oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
+ return
+
+QAPATHTEST[libexec] = "package_qa_check_libexec"
+def package_qa_check_libexec(path,name, d, elf, messages):
+
+ # Skip the case where the default is explicitly /usr/libexec
+ libexec = d.getVar('libexecdir')
+ if libexec == "/usr/libexec":
+ return True
+
+ if 'libexec' in path.split(os.path.sep):
+ oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
+ return False
+
+ return True
+
+QAPATHTEST[rpaths] = "package_qa_check_rpath"
+def package_qa_check_rpath(file,name, d, elf, messages):
+ """
+ Check for dangerous RPATHs
+ """
+ if not elf:
+ return
+
+ if os.path.islink(file):
+ return
+
+ bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]
+
+ phdrs = elf.run_objdump("-p", d)
+
+ import re
+ rpath_re = re.compile(r"\s+RPATH\s+(.*)")
+ for line in phdrs.split("\n"):
+ m = rpath_re.match(line)
+ if m:
+ rpath = m.group(1)
+ for dir in bad_dirs:
+ if dir in rpath:
+ oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
+
+QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths"
+def package_qa_check_useless_rpaths(file, name, d, elf, messages):
+ """
+ Check for RPATHs that are useless but not dangerous
+ """
+ def rpath_eq(a, b):
+ return os.path.normpath(a) == os.path.normpath(b)
+
+ if not elf:
+ return
+
+ if os.path.islink(file):
+ return
+
+ libdir = d.getVar("libdir")
+ base_libdir = d.getVar("base_libdir")
+
+ phdrs = elf.run_objdump("-p", d)
+
+ import re
+ rpath_re = re.compile(r"\s+RPATH\s+(.*)")
+ for line in phdrs.split("\n"):
+ m = rpath_re.match(line)
+ if m:
+ rpath = m.group(1)
+ if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir):
+ # The dynamic linker searches both these places anyway. There is no point in
+ # looking there again.
+ oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
+
+QAPATHTEST[dev-so] = "package_qa_check_dev"
+def package_qa_check_dev(path, name, d, elf, messages):
+ """
+ Check for ".so" library symlinks in non-dev packages
+ """
+
+ if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path):
+ oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
+ (name, package_qa_clean_path(path, d, name)))
+
+QAPATHTEST[dev-elf] = "package_qa_check_dev_elf"
+def package_qa_check_dev_elf(path, name, d, elf, messages):
+ """
+ Check that -dev doesn't contain real shared libraries. The test has to
+ check that the file is not a link and is an ELF object as some recipes
+ install link-time .so files that are linker scripts.
+ """
+ if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf:
+ oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
+ (name, package_qa_clean_path(path, d, name)))
+
+QAPATHTEST[staticdev] = "package_qa_check_staticdev"
+def package_qa_check_staticdev(path, name, d, elf, messages):
+ """
+ Check for ".a" library in non-staticdev packages
+    There are a number of exceptions to this rule: -pic packages can contain
+    static libraries, the _nonshared.a files belong with their -dev packages, and
+    libgcc.a and libgcov.a will be skipped in their packages
+ """
+
+ if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path:
+ oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
+ (name, package_qa_clean_path(path,d, name)))
+
+QAPATHTEST[mime] = "package_qa_check_mime"
+def package_qa_check_mime(path, name, d, elf, messages):
+ """
+ Check if package installs mime types to /usr/share/mime/packages
+    without inheriting mime.bbclass
+ """
+
+ if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
+ oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
+ (name, package_qa_clean_path(path,d)))
+
+QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg"
+def package_qa_check_mime_xdg(path, name, d, elf, messages):
+ """
+ Check if package installs desktop file containing MimeType and requires
+    mime-xdg.bbclass to create /usr/share/applications/mimeinfo.cache
+ """
+
+ if d.getVar("datadir") + "/applications" in path and path.endswith('.desktop') and not bb.data.inherits_class("mime-xdg", d):
+ mime_type_found = False
+ try:
+ with open(path, 'r') as f:
+ for line in f.read().split('\n'):
+ if 'MimeType' in line:
+ mime_type_found = True
+ break;
+ except:
+ # At least libreoffice installs symlinks with absolute paths that are dangling here.
+ # We could implement some magic but for few (one) recipes it is not worth the effort so just warn:
+ wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path,d))
+ wstr += "Please check if (linked) file contains key 'MimeType'.\n"
+ pkgname = name
+ if name == d.getVar('PN'):
+ pkgname = '${PN}'
+            wstr += "If yes: add \'inherit mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
+ oe.qa.add_message(messages, "mime-xdg", wstr)
+ if mime_type_found:
+            oe.qa.add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inherit mime-xdg: %s path '%s'" % \
+ (name, package_qa_clean_path(path,d)))
+
+def package_qa_check_libdir(d):
+ """
+ Check for wrong library installation paths. For instance, catch
+ recipes installing /lib/bar.so when ${base_libdir}="lib32" or
+ installing in /usr/lib64 when ${libdir}="/usr/lib"
+ """
+ import re
+
+ pkgdest = d.getVar('PKGDEST')
+ base_libdir = d.getVar("base_libdir") + os.sep
+ libdir = d.getVar("libdir") + os.sep
+ libexecdir = d.getVar("libexecdir") + os.sep
+ exec_prefix = d.getVar("exec_prefix") + os.sep
+
+ messages = []
+
+    # The re's are purposely fuzzy, as there are some .so.x.y.z files
+ # that don't follow the standard naming convention. It checks later
+ # that they are actual ELF files
+ lib_re = re.compile(r"^/lib.+\.so(\..+)?$")
+ exec_re = re.compile(r"^%s.*/lib.+\.so(\..+)?$" % exec_prefix)
+
+ for root, dirs, files in os.walk(pkgdest):
+ if root == pkgdest:
+ # Skip subdirectories for any packages with libdir in INSANE_SKIP
+ skippackages = []
+ for package in dirs:
+ if 'libdir' in (d.getVar('INSANE_SKIP:' + package) or "").split():
+ bb.note("Package %s skipping libdir QA test" % (package))
+ skippackages.append(package)
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
+ bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
+ skippackages.append(package)
+ for package in skippackages:
+ dirs.remove(package)
+ for file in files:
+ full_path = os.path.join(root, file)
+ rel_path = os.path.relpath(full_path, pkgdest)
+ if os.sep in rel_path:
+ package, rel_path = rel_path.split(os.sep, 1)
+ rel_path = os.sep + rel_path
+ if lib_re.match(rel_path):
+ if base_libdir not in rel_path:
+ # make sure it's an actual ELF file
+ elf = oe.qa.ELFFile(full_path)
+ try:
+ elf.open()
+ messages.append("%s: found library in wrong location: %s" % (package, rel_path))
+ except (oe.qa.NotELFFileError):
+ pass
+ if exec_re.match(rel_path):
+ if libdir not in rel_path and libexecdir not in rel_path:
+ # make sure it's an actual ELF file
+ elf = oe.qa.ELFFile(full_path)
+ try:
+ elf.open()
+ messages.append("%s: found library in wrong location: %s" % (package, rel_path))
+ except (oe.qa.NotELFFileError):
+ pass
+
+ if messages:
+ oe.qa.handle_error("libdir", "\n".join(messages), d)
+
+QAPATHTEST[debug-files] = "package_qa_check_dbg"
+def package_qa_check_dbg(path, name, d, elf, messages):
+ """
+ Check for ".debug" files or directories outside of the dbg package
+ """
+
+ if not "-dbg" in name and not "-ptest" in name:
+ if '.debug' in path.split(os.path.sep):
+ oe.qa.add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
+ (name, package_qa_clean_path(path,d)))
+
+QAPATHTEST[arch] = "package_qa_check_arch"
+def package_qa_check_arch(path,name,d, elf, messages):
+ """
+ Check if archs are compatible
+ """
+ import re, oe.elf
+
+ if not elf:
+ return
+
+ target_os = d.getVar('HOST_OS')
+ target_arch = d.getVar('HOST_ARCH')
+ provides = d.getVar('PROVIDES')
+ bpn = d.getVar('BPN')
+
+ if target_arch == "allarch":
+ pn = d.getVar('PN')
+ oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
+ return
+
+ # FIXME: Cross package confuse this check, so just skip them
+ for s in ['cross', 'nativesdk', 'cross-canadian']:
+ if bb.data.inherits_class(s, d):
+ return
+
+ # avoid following links to /usr/bin (e.g. on udev builds)
+ # we will check the files pointed to anyway...
+ if os.path.islink(path):
+ return
+
+ #if this will throw an exception, then fix the dict above
+ (machine, osabi, abiversion, littleendian, bits) \
+ = oe.elf.machine_dict(d)[target_os][target_arch]
+
+    # Check the architecture and endianness of the binary
+ is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
+ (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \
+ target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
+ is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
+ if not ((machine == elf.machine()) or is_32 or is_bpf):
+ oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
+ (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
+ elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
+ oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
+ (elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
+ elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
+        oe.qa.add_message(messages, "arch", "Endianness did not match (%d, expected %d) in %s" % \
+ (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name)))
+
+QAPATHTEST[desktop] = "package_qa_check_desktop"
+def package_qa_check_desktop(path, name, d, elf, messages):
+ """
+ Run all desktop files through desktop-file-validate.
+ """
+ if path.endswith(".desktop"):
+ desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'),'desktop-file-validate')
+ output = os.popen("%s %s" % (desktop_file_validate, path))
+ # This only produces output on errors
+ for l in output:
+ oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip())
+
+QAPATHTEST[textrel] = "package_qa_textrel"
+def package_qa_textrel(path, name, d, elf, messages):
+ """
+ Check if the binary contains relocations in .text
+ """
+
+ if not elf:
+ return
+
+ if os.path.islink(path):
+ return
+
+ phdrs = elf.run_objdump("-p", d)
+ sane = True
+
+ import re
+ textrel_re = re.compile(r"\s+TEXTREL\s+")
+ for line in phdrs.split("\n"):
+ if textrel_re.match(line):
+ sane = False
+ break
+
+ if not sane:
+ path = package_qa_clean_path(path, d, name)
+ oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path))
+
+QAPATHTEST[ldflags] = "package_qa_hash_style"
+def package_qa_hash_style(path, name, d, elf, messages):
+ """
+ Check if the binary has the right hash style...
+ """
+
+ if not elf:
+ return
+
+ if os.path.islink(path):
+ return
+
+ gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS')
+ if not gnu_hash:
+ gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS')
+ if not gnu_hash:
+ return
+
+ sane = False
+ has_syms = False
+
+ phdrs = elf.run_objdump("-p", d)
+
+ # If this binary has symbols, we expect it to have GNU_HASH too.
+ for line in phdrs.split("\n"):
+ if "SYMTAB" in line:
+ has_syms = True
+ if "GNU_HASH" in line or "MIPS_XHASH" in line:
+ sane = True
+ if ("[mips32]" in line or "[mips64]" in line) and d.getVar('TCLIBC') == "musl":
+ sane = True
+ if has_syms and not sane:
+ path = package_qa_clean_path(path, d, name)
+ oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name))
+
+
+QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
+def package_qa_check_buildpaths(path, name, d, elf, messages):
+ """
+ Check for build paths inside target files and error if paths are not
+ explicitly ignored.
+ """
+ import stat
+
+ # Ignore symlinks/devs/fifos
+ mode = os.lstat(path).st_mode
+ if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode):
+ return
+
+ tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
+ with open(path, 'rb') as f:
+ file_content = f.read()
+ if tmpdir in file_content:
+ trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
+ oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
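+# Example (illustrative): a recipe that legitimately embeds build paths can
+# skip this check per package with
+#   INSANE_SKIP:${PN} += "buildpaths"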
+
+
+QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi"
+def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
+ """
+ Check that all packages containing Xorg drivers have ABI dependencies
+ """
+
+ # Skip dev, dbg or nativesdk packages
+ if name.endswith("-dev") or name.endswith("-dbg") or name.startswith("nativesdk-"):
+ return
+
+ driverdir = d.expand("${libdir}/xorg/modules/drivers/")
+ if driverdir in path and path.endswith(".so"):
+ mlprefix = d.getVar('MLPREFIX') or ''
+ for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""):
+ if rdep.startswith("%sxorg-abi-" % mlprefix):
+ return
+ oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
+
+QAPATHTEST[infodir] = "package_qa_check_infodir"
+def package_qa_check_infodir(path, name, d, elf, messages):
+ """
+ Check that /usr/share/info/dir isn't shipped in a particular package
+ """
+ infodir = d.expand("${infodir}/dir")
+
+ if infodir in path:
+ oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.")
+
+QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot"
+def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
+ """
+ Check that the package doesn't contain any absolute symlinks to the sysroot.
+ """
+ if os.path.islink(path):
+ target = os.readlink(path)
+ if os.path.isabs(target):
+ tmpdir = d.getVar('TMPDIR')
+ if target.startswith(tmpdir):
+ trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
+ oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
+
+# Check license variables
+do_populate_lic[postfuncs] += "populate_lic_qa_checksum"
+python populate_lic_qa_checksum() {
+ """
+ Check for changes in the license files.
+ """
+
+ lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
+ lic = d.getVar('LICENSE')
+ pn = d.getVar('PN')
+
+ if lic == "CLOSED":
+ return
+
+ if not lic_files and d.getVar('SRC_URI'):
+ oe.qa.handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
+
+ srcdir = d.getVar('S')
+ corebase_licensefile = d.getVar('COREBASE') + "/LICENSE"
+ for url in lic_files.split():
+ try:
+ (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
+ except bb.fetch.MalformedUrl:
+ oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
+ continue
+ srclicfile = os.path.join(srcdir, path)
+ if not os.path.isfile(srclicfile):
+ oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
+ continue
+
+ if (srclicfile == corebase_licensefile):
+ bb.warn("${COREBASE}/LICENSE is not a valid license file, please use '${COMMON_LICENSE_DIR}/MIT' for a MIT License file in LIC_FILES_CHKSUM. This will become an error in the future")
+
+ recipemd5 = parm.get('md5', '')
+ beginline, endline = 0, 0
+ if 'beginline' in parm:
+ beginline = int(parm['beginline'])
+ if 'endline' in parm:
+ endline = int(parm['endline'])
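+ # Example (illustrative) of a LIC_FILES_CHKSUM entry using these parameters:
+ #   LIC_FILES_CHKSUM = "file://COPYING;beginline=3;endline=21;md5=<expected checksum>"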
+
+ if (not beginline) and (not endline):
+ md5chksum = bb.utils.md5_file(srclicfile)
+ with open(srclicfile, 'r', errors='replace') as f:
+ license = f.read().splitlines()
+ else:
+ with open(srclicfile, 'rb') as f:
+ import hashlib
+ lineno = 0
+ license = []
+ m = hashlib.new('MD5', usedforsecurity=False)
+ for line in f:
+ lineno += 1
+ if (lineno >= beginline):
+ if ((lineno <= endline) or not endline):
+ m.update(line)
+ license.append(line.decode('utf-8', errors='replace').rstrip())
+ else:
+ break
+ md5chksum = m.hexdigest()
+ if recipemd5 == md5chksum:
+ bb.note(pn + ": md5 checksum matched for " + url)
+ else:
+ if recipemd5:
+ msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url
+ msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum
+ max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
+ if not license or license[-1] != '':
+ # Ensure that our license text ends with a line break
+ # (will be added with join() below).
+ license.append('')
+ remove = len(license) - max_lines
+ if remove > 0:
+ start = max_lines // 2
+ end = start + remove - 1
+ del license[start:end]
+ license.insert(start, '...')
+ msg = msg + "\n" + pn + ": Here is the selected license text:" + \
+ "\n" + \
+ "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
+ "\n" + "\n".join(license) + \
+ "{:^^70}".format(" endline=%d " % endline if endline else "")
+ if beginline:
+ if endline:
+ srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
+ else:
+ srcfiledesc = "%s (beginning on line %d)" % (srclicfile, beginline)
+ elif endline:
+ srcfiledesc = "%s (ending on line %d)" % (srclicfile, endline)
+ else:
+ srcfiledesc = srclicfile
+ msg = msg + "\n" + pn + ": Check if the license information has changed in %s to verify that the LICENSE value \"%s\" remains valid" % (srcfiledesc, lic)
+
+ else:
+ msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url
+ msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum
+ oe.qa.handle_error("license-checksum", msg, d)
+
+ oe.qa.exit_if_errors(d)
+}
+
+def qa_check_staged(path,d):
+ """
+ Check staged la and pc files for common problems like references to the work
+ directory.
+
+ As this is run after every stage we should be able to find the one
+ responsible for the errors easily even if we look at every .pc and .la file.
+ """
+
+ tmpdir = d.getVar('TMPDIR')
+ workdir = os.path.join(tmpdir, "work")
+ recipesysroot = d.getVar("RECIPE_SYSROOT")
+
+ if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
+ pkgconfigcheck = workdir
+ else:
+ pkgconfigcheck = tmpdir
+
+ skip = (d.getVar('INSANE_SKIP') or "").split()
+ skip_la = False
+ if 'la' in skip:
+ bb.note("Recipe %s skipping qa checking: la" % d.getVar('PN'))
+ skip_la = True
+
+ skip_pkgconfig = False
+ if 'pkgconfig' in skip:
+ bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))
+ skip_pkgconfig = True
+
+ skip_shebang_size = False
+ if 'shebang-size' in skip:
+ bb.note("Recipe %s skipping qa checking: shebang-size" % d.getVar('PN'))
+ skip_shebang_size = True
+
+ # find all .la and .pc files
+ # read the content
+ # and check for stuff that looks wrong
+ for root, dirs, files in os.walk(path):
+ for file in files:
+ path = os.path.join(root,file)
+ if file.endswith(".la") and not skip_la:
+ with open(path) as f:
+ file_content = f.read()
+ file_content = file_content.replace(recipesysroot, "")
+ if workdir in file_content:
+ error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
+ oe.qa.handle_error("la", error_msg, d)
+ elif file.endswith(".pc") and not skip_pkgconfig:
+ with open(path) as f:
+ file_content = f.read()
+ file_content = file_content.replace(recipesysroot, "")
+ if pkgconfigcheck in file_content:
+ error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
+ oe.qa.handle_error("pkgconfig", error_msg, d)
+
+ if not skip_shebang_size:
+ errors = {}
+ package_qa_check_shebang_size(path, "", d, None, errors)
+ for e in errors:
+ oe.qa.handle_error(e, errors[e], d)
+
+
+# Run all package-wide warnfuncs and errorfuncs
+def package_qa_package(warnfuncs, errorfuncs, package, d):
+ warnings = {}
+ errors = {}
+
+ for func in warnfuncs:
+ func(package, d, warnings)
+ for func in errorfuncs:
+ func(package, d, errors)
+
+ for w in warnings:
+ oe.qa.handle_error(w, warnings[w], d)
+ for e in errors:
+ oe.qa.handle_error(e, errors[e], d)
+
+ return len(errors) == 0
+
+# Run all recipe-wide warnfuncs and errorfuncs
+def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
+ warnings = {}
+ errors = {}
+
+ for func in warnfuncs:
+ func(pn, d, warnings)
+ for func in errorfuncs:
+ func(pn, d, errors)
+
+ for w in warnings:
+ oe.qa.handle_error(w, warnings[w], d)
+ for e in errors:
+ oe.qa.handle_error(e, errors[e], d)
+
+ return len(errors) == 0
+
+def prepopulate_objdump_p(elf, d):
+ output = elf.run_objdump("-p", d)
+ return (elf.name, output)
+
+# Walk over all files in a directory and call func
+def package_qa_walk(warnfuncs, errorfuncs, package, d):
+ #if this will throw an exception, then fix the dict above
+ target_os = d.getVar('HOST_OS')
+ target_arch = d.getVar('HOST_ARCH')
+
+ warnings = {}
+ errors = {}
+ elves = {}
+ for path in pkgfiles[package]:
+ elf = None
+ if os.path.isfile(path):
+ elf = oe.qa.ELFFile(path)
+ try:
+ elf.open()
+ elf.close()
+ except oe.qa.NotELFFileError:
+ elf = None
+ if elf:
+ elves[path] = elf
+
+ results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,))
+ for item in results:
+ elves[item[0]].set_objdump("-p", item[1])
+
+ for path in pkgfiles[package]:
+ if path in elves:
+ elves[path].open()
+ for func in warnfuncs:
+ func(path, package, d, elves.get(path), warnings)
+ for func in errorfuncs:
+ func(path, package, d, elves.get(path), errors)
+ if path in elves:
+ elves[path].close()
+
+ for w in warnings:
+ oe.qa.handle_error(w, warnings[w], d)
+ for e in errors:
+ oe.qa.handle_error(e, errors[e], d)
+
+def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
+ # Don't do this check for kernel/module recipes, there aren't too many debug/development
+ # packages and you can get false positives e.g. on kernel-module-lirc-dev
+ if bb.data.inherits_class("kernel", d) or bb.data.inherits_class("module-base", d):
+ return
+
+ if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
+ localdata = bb.data.createCopy(d)
+ localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES') + ':' + pkg)
+
+ # Now check the RDEPENDS
+ rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")
+
+ # Now do the sanity check!!!
+ if "build-deps" not in skip:
+ for rdepend in rdepends:
+ if "-dbg" in rdepend and "debug-deps" not in skip:
+ error_msg = "%s rdepends on %s" % (pkg,rdepend)
+ oe.qa.handle_error("debug-deps", error_msg, d)
+ if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
+ error_msg = "%s rdepends on %s" % (pkg, rdepend)
+ oe.qa.handle_error("dev-deps", error_msg, d)
+ if rdepend not in packages:
+ rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
+ if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
+ continue
+ if not rdep_data or not 'PN' in rdep_data:
+ pkgdata_dir = d.getVar("PKGDATA_DIR")
+ try:
+ possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
+ except OSError:
+ possibles = []
+ for p in possibles:
+ rdep_data = oe.packagedata.read_subpkgdata(p, d)
+ if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
+ break
+ if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
+ continue
+ if rdep_data and 'PN' in rdep_data:
+ error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN'])
+ else:
+ error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
+ oe.qa.handle_error("build-deps", error_msg, d)
+
+ if "file-rdeps" not in skip:
+ ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
+ if bb.data.inherits_class('nativesdk', d):
+ ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl'])
+ # For Saving the FILERDEPENDS
+ filerdepends = {}
+ rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
+ for key in rdep_data:
+ if key.startswith("FILERDEPENDS:"):
+ for subkey in bb.utils.explode_deps(rdep_data[key]):
+ if subkey not in ignored_file_rdeps and \
+ not subkey.startswith('perl('):
+ # We already know it starts with FILERDEPENDS:
+ filerdepends[subkey] = key[13:]
+
+ if filerdepends:
+ done = rdepends[:]
+ # Add the rprovides of itself
+ if pkg not in done:
+ done.insert(0, pkg)
+
+ # python itself is not a package, but python-core provides it, so
+ # skip checking /usr/bin/python if python is in the rdeps, in
+ # case there is a RDEPENDS:pkg = "python" in the recipe.
+ for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
+ if py in done:
+ filerdepends.pop("/usr/bin/python",None)
+ done.remove(py)
+ for rdep in done:
+ # The file dependencies may contain package names, e.g.,
+ # perl
+ filerdepends.pop(rdep,None)
+
+ # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
+ rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
+ for key in rdep_data:
+ if key.startswith("FILERPROVIDES:") or key.startswith("RPROVIDES:"):
+ for subkey in bb.utils.explode_deps(rdep_data[key]):
+ filerdepends.pop(subkey,None)
+ # Add the files list to the rprovides
+ if key.startswith("FILES_INFO:"):
+ # Use eval() to make it as a dict
+ for subkey in eval(rdep_data[key]):
+ filerdepends.pop(subkey,None)
+ if not filerdepends:
+ # Break if all the file rdepends are met
+ break
+ if filerdepends:
+ for key in filerdepends:
+ error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \
+ (filerdepends[key].replace(":%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
+ oe.qa.handle_error("file-rdeps", error_msg, d)
+package_qa_check_rdepends[vardepsexclude] = "OVERRIDES"
+
+def package_qa_check_deps(pkg, pkgdest, d):
+
+ localdata = bb.data.createCopy(d)
+ localdata.setVar('OVERRIDES', pkg)
+
+ def check_valid_deps(var):
+ try:
+ rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
+ except ValueError as e:
+ bb.fatal("%s:%s: %s" % (var, pkg, e))
+ for dep in rvar:
+ for v in rvar[dep]:
+ if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')):
+ error_msg = "%s:%s is invalid: %s (%s); only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
+ oe.qa.handle_error("dep-cmp", error_msg, d)
+
+ check_valid_deps('RDEPENDS')
+ check_valid_deps('RRECOMMENDS')
+ check_valid_deps('RSUGGESTS')
+ check_valid_deps('RPROVIDES')
+ check_valid_deps('RREPLACES')
+ check_valid_deps('RCONFLICTS')
+
+QAPKGTEST[usrmerge] = "package_qa_check_usrmerge"
+def package_qa_check_usrmerge(pkg, d, messages):
+
+ pkgdest = d.getVar('PKGDEST')
+ pkg_dir = pkgdest + os.sep + pkg + os.sep
+ merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
+ for f in merged_dirs:
+ if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f):
+ msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f)
+ oe.qa.add_message(messages, "usrmerge", msg)
+ return False
+ return True
+
+QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod"
+def package_qa_check_perllocalpod(pkg, d, messages):
+ """
+ Check that the recipe didn't ship a perllocal.pod file, which shouldn't be
+ installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to
+ handle this for most recipes.
+ """
+ import glob
+ pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
+ podpath = oe.path.join(pkgd, d.getVar("libdir"), "perl*", "*", "*", "perllocal.pod")
+
+ matches = glob.glob(podpath)
+ if matches:
+ matches = [package_qa_clean_path(path, d, pkg) for path in matches]
+ msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches))
+ oe.qa.add_message(messages, "perllocalpod", msg)
+
+QAPKGTEST[expanded-d] = "package_qa_check_expanded_d"
+def package_qa_check_expanded_d(package, d, messages):
+ """
+ Check for the expanded D (${D}) value in pkg_* and FILES
+ variables, warn the user to use it correctly.
+ """
+ sane = True
+ expanded_d = d.getVar('D')
+
+ for var in 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
+ bbvar = d.getVar(var + ":" + package) or ""
+ if expanded_d in bbvar:
+ if var == 'FILES':
+ oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory, not the target filesystem; the best solution is to remove the ${D} reference" % package)
+ sane = False
+ else:
+ oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
+ sane = False
+ return sane
+
+QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics"
+def package_qa_check_unlisted_pkg_lics(package, d, messages):
+ """
+ Check that all licenses for a package are among the licenses for the recipe.
+ """
+ pkg_lics = d.getVar('LICENSE:' + package)
+ if not pkg_lics:
+ return True
+
+ recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE'))
+ package_lics = oe.license.list_licenses(pkg_lics)
+ unlisted = package_lics - recipe_lics_set
+ if unlisted:
+ oe.qa.add_message(messages, "unlisted-pkg-lics",
+ "LICENSE:%s includes licenses (%s) that are not "
+ "listed in LICENSE" % (package, ' '.join(unlisted)))
+ return False
+ obsolete = set(oe.license.obsolete_license_list()) & package_lics - recipe_lics_set
+ if obsolete:
+ oe.qa.add_message(messages, "obsolete-license",
+ "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)))
+ return False
+ return True
+
+QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs"
+def package_qa_check_empty_dirs(pkg, d, messages):
+ """
+ Check for the existence of files in directories that are expected to be
+ empty.
+ """
+
+ pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
+ for dir in (d.getVar('QA_EMPTY_DIRS') or "").split():
+ empty_dir = oe.path.join(pkgd, dir)
+ if os.path.exists(empty_dir) and os.listdir(empty_dir):
+ recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dir) or
+ "but it is expected to be empty")
+ msg = "%s installs files in %s, %s" % (pkg, dir, recommendation)
+ oe.qa.add_message(messages, "empty-dirs", msg)
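+# Example (illustrative): with a distro setting such as
+#   QA_EMPTY_DIRS = "/proc /tmp"
+#   QA_EMPTY_DIRS_RECOMMENDATION:/tmp = "but volatile directories should be empty"
+# a package shipping files under /tmp would be reported as
+# "<pkg> installs files in /tmp, but volatile directories should be empty".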
+
+def package_qa_check_encoding(keys, encode, d):
+ def check_encoding(key, enc):
+ sane = True
+ value = d.getVar(key)
+ if value:
+ try:
+ s = value.encode(enc)
+ except UnicodeDecodeError as e:
+ error_msg = "%s has non-%s characters" % (key, enc)
+ sane = False
+ oe.qa.handle_error("invalid-chars", error_msg, d)
+ return sane
+
+ for key in keys:
+ sane = check_encoding(key, encode)
+ if not sane:
+ break
+
+HOST_USER_UID := "${@os.getuid()}"
+HOST_USER_GID := "${@os.getgid()}"
+
+QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user"
+def package_qa_check_host_user(path, name, d, elf, messages):
+ """Check for paths outside of /home which are owned by the user running bitbake."""
+
+ if not os.path.lexists(path):
+ return
+
+ dest = d.getVar('PKGDEST')
+ pn = d.getVar('PN')
+ home = os.path.join(dest, name, 'home')
+ if path == home or path.startswith(home + os.sep):
+ return
+
+ try:
+ stat = os.lstat(path)
+ except OSError as exc:
+ import errno
+ if exc.errno != errno.ENOENT:
+ raise
+ else:
+ check_uid = int(d.getVar('HOST_USER_UID'))
+ if stat.st_uid == check_uid:
+ oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
+ return False
+
+ check_gid = int(d.getVar('HOST_USER_GID'))
+ if stat.st_gid == check_gid:
+ oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
+ return False
+ return True
+
+QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check"
+def package_qa_check_unhandled_features_check(pn, d, messages):
+ if not bb.data.inherits_class('features_check', d):
+ var_set = False
+ for kind in ['DISTRO', 'MACHINE', 'COMBINED']:
+ for var in ['ANY_OF_' + kind + '_FEATURES', 'REQUIRED_' + kind + '_FEATURES', 'CONFLICT_' + kind + '_FEATURES']:
+ if d.getVar(var) is not None or d.hasOverrides(var):
+ var_set = True
+ if var_set:
+ oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
+
+QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives"
+def package_qa_check_missing_update_alternatives(pn, d, messages):
+ # Look at all packages and find out if any of those sets ALTERNATIVE variable
+ # without inheriting update-alternatives class
+ for pkg in (d.getVar('PACKAGES') or '').split():
+ if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d):
+ oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
+
+# The PACKAGE FUNC to scan each package
+python do_package_qa () {
+ import subprocess
+ import oe.packagedata
+
+ bb.note("DO PACKAGE QA")
+
+ main_lic = d.getVar('LICENSE')
+
+ # Check for obsolete license references in main LICENSE (packages are checked below for any changes)
+ main_licenses = oe.license.list_licenses(d.getVar('LICENSE'))
+ obsolete = set(oe.license.obsolete_license_list()) & main_licenses
+ if obsolete:
+ oe.qa.handle_error("obsolete-license", "Recipe LICENSE includes obsolete licenses %s" % ' '.join(obsolete), d)
+
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ # Check non UTF-8 characters on recipe's metadata
+ package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)
+
+ logdir = d.getVar('T')
+ pn = d.getVar('PN')
+
+ # Scan the packages...
+ pkgdest = d.getVar('PKGDEST')
+ packages = set((d.getVar('PACKAGES') or '').split())
+
+ global pkgfiles
+ pkgfiles = {}
+ for pkg in packages:
+ pkgfiles[pkg] = []
+ pkgdir = os.path.join(pkgdest, pkg)
+ for walkroot, dirs, files in os.walk(pkgdir):
+ # Don't walk into top-level CONTROL or DEBIAN directories as these
+ # are temporary directories created by do_package.
+ if walkroot == pkgdir:
+ for control in ("CONTROL", "DEBIAN"):
+ if control in dirs:
+ dirs.remove(control)
+ for file in files:
+ pkgfiles[pkg].append(os.path.join(walkroot, file))
+
+ # no packages should be scanned
+ if not packages:
+ return
+
+ import re
+ # The package name matches the [a-z0-9.+-]+ regular expression
+ pkgname_pattern = re.compile(r"^[a-z0-9.+-]+$")
+
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ taskdeps = set()
+ for dep in taskdepdata:
+ taskdeps.add(taskdepdata[dep][0])
+
+ def parse_test_matrix(matrix_name):
+ testmatrix = d.getVarFlags(matrix_name) or {}
+ g = globals()
+ warnchecks = []
+ for w in (d.getVar("WARN_QA") or "").split():
+ if w in skip:
+ continue
+ if w in testmatrix and testmatrix[w] in g:
+ warnchecks.append(g[testmatrix[w]])
+
+ errorchecks = []
+ for e in (d.getVar("ERROR_QA") or "").split():
+ if e in skip:
+ continue
+ if e in testmatrix and testmatrix[e] in g:
+ errorchecks.append(g[testmatrix[e]])
+ return warnchecks, errorchecks
+
+ for package in packages:
+ skip = set((d.getVar('INSANE_SKIP') or "").split() +
+ (d.getVar('INSANE_SKIP:' + package) or "").split())
+ if skip:
+ bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
+
+ bb.note("Checking Package: %s" % package)
+ # Check package name
+ if not pkgname_pattern.match(package):
+ oe.qa.handle_error("pkgname",
+ "%s doesn't match the [a-z0-9.+-]+ regex" % package, d)
+
+ warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
+ package_qa_walk(warn_checks, error_checks, package, d)
+
+ warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
+ package_qa_package(warn_checks, error_checks, package, d)
+
+ package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
+ package_qa_check_deps(package, pkgdest, d)
+
+ warn_checks, error_checks = parse_test_matrix("QARECIPETEST")
+ package_qa_recipe(warn_checks, error_checks, pn, d)
+
+ if 'libdir' in d.getVar("ALL_QA").split():
+ package_qa_check_libdir(d)
+
+ oe.qa.exit_if_errors(d)
+}
+
+# binutils is used for most checks, so need to set as dependency
+# POPULATESYSROOTDEPS is defined in staging class.
+do_package_qa[depends] += "${POPULATESYSROOTDEPS}"
+do_package_qa[vardeps] = "${@bb.utils.contains('ERROR_QA', 'empty-dirs', 'QA_EMPTY_DIRS', '', d)}"
+do_package_qa[vardepsexclude] = "BB_TASKDEPDATA"
+do_package_qa[rdeptask] = "do_packagedata"
+addtask do_package_qa after do_packagedata do_package before do_build
+
+# Add the package specific INSANE_SKIPs to the sstate dependencies
+python() {
+ pkgs = (d.getVar('PACKAGES') or '').split()
+ for pkg in pkgs:
+ d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg))
+}
+
+SSTATETASKS += "do_package_qa"
+do_package_qa[sstate-inputdirs] = ""
+do_package_qa[sstate-outputdirs] = ""
+python do_package_qa_setscene () {
+ sstate_setscene(d)
+}
+addtask do_package_qa_setscene
+
+python do_qa_sysroot() {
+ bb.note("QA checking do_populate_sysroot")
+ sysroot_destdir = d.expand('${SYSROOT_DESTDIR}')
+ for sysroot_dir in d.expand('${SYSROOT_DIRS}').split():
+ qa_check_staged(sysroot_destdir + sysroot_dir, d)
+ oe.qa.exit_with_message_if_errors("do_populate_sysroot for this recipe installed files with QA issues", d)
+}
+do_populate_sysroot[postfuncs] += "do_qa_sysroot"
+
+python do_qa_patch() {
+ import subprocess
+
+ ###########################################################################
+ # Check patch.log for fuzz warnings
+ #
+ # Further information on why we check for patch fuzz warnings:
+ # http://lists.openembedded.org/pipermail/openembedded-core/2018-March/148675.html
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=10450
+ ###########################################################################
+
+ logdir = d.getVar('T')
+ patchlog = os.path.join(logdir,"log.do_patch")
+
+ if os.path.exists(patchlog):
+ fuzzheader = '--- Patch fuzz start ---'
+ fuzzfooter = '--- Patch fuzz end ---'
+ statement = "grep -e '%s' %s > /dev/null" % (fuzzheader, patchlog)
+ if subprocess.call(statement, shell=True) == 0:
+ msg = "Fuzz detected:\n\n"
+ fuzzmsg = ""
+ inFuzzInfo = False
+ f = open(patchlog, "r")
+ for line in f:
+ if fuzzheader in line:
+ inFuzzInfo = True
+ fuzzmsg = ""
+ elif fuzzfooter in line:
+ fuzzmsg = fuzzmsg.replace('\n\n', '\n')
+ msg += fuzzmsg
+ msg += "\n"
+ inFuzzInfo = False
+ elif inFuzzInfo and not 'Now at patch' in line:
+ fuzzmsg += line
+ f.close()
+ msg += "The context lines in the patches can be updated with devtool:\n"
+ msg += "\n"
+ msg += " devtool modify %s\n" % d.getVar('PN')
+ msg += " devtool finish --force-patch-refresh %s <layer_path>\n\n" % d.getVar('PN')
+ msg += "Don't forget to review changes done by devtool!\n"
+ if bb.utils.filter('ERROR_QA', 'patch-fuzz', d):
+ bb.error(msg)
+ elif bb.utils.filter('WARN_QA', 'patch-fuzz', d):
+ bb.warn(msg)
+ msg = "Patch log indicates that patches do not apply cleanly."
+ oe.qa.handle_error("patch-fuzz", msg, d)
+
+ # Check if the patch contains a correctly formatted and spelled Upstream-Status
+ import re
+ from oe import patch
+
+ for url in patch.src_patches(d):
+ (_, _, fullpath, _, _, _) = bb.fetch.decodeurl(url)
+
+ # skip patches not in oe-core
+ if '/meta/' not in fullpath:
+ continue
+
+ kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
+ strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Accepted|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
+ guidelines = "https://www.openembedded.org/wiki/Commit_Patch_Message_Guidelines#Patch_Header_Recommendations:_Upstream-Status"
+
+ with open(fullpath, encoding='utf-8', errors='ignore') as f:
+ file_content = f.read()
+ match_kinda = kinda_status_re.search(file_content)
+ match_strict = strict_status_re.search(file_content)
+
+ if not match_strict:
+ if match_kinda:
+ bb.error("Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0)))
+ else:
+ bb.error("Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines))
+}
+
+python do_qa_configure() {
+ import subprocess
+
+ ###########################################################################
+ # Check config.log for cross compile issues
+ ###########################################################################
+
+ configs = []
+ workdir = d.getVar('WORKDIR')
+
+ skip = (d.getVar('INSANE_SKIP') or "").split()
+ skip_configure_unsafe = False
+ if 'configure-unsafe' in skip:
+ bb.note("Recipe %s skipping qa checking: configure-unsafe" % d.getVar('PN'))
+ skip_configure_unsafe = True
+
+ if bb.data.inherits_class('autotools', d) and not skip_configure_unsafe:
+ bb.note("Checking autotools environment for common misconfiguration")
+ for root, dirs, files in os.walk(workdir):
+ statement = "grep -q -F -e 'is unsafe for cross-compilation' %s" % \
+ os.path.join(root,"config.log")
+ if "config.log" in files:
+ if subprocess.call(statement, shell=True) == 0:
+ error_msg = """This autoconf log indicates errors: it looked at host include and/or library paths while determining system capabilities.
+Rerun the configure task after fixing this."""
+ oe.qa.handle_error("configure-unsafe", error_msg, d)
+
+ if "configure.ac" in files:
+ configs.append(os.path.join(root,"configure.ac"))
+ if "configure.in" in files:
+ configs.append(os.path.join(root, "configure.in"))
+
+ ###########################################################################
+ # Check gettext configuration and dependencies are correct
+ ###########################################################################
+
+ skip_configure_gettext = False
+ if 'configure-gettext' in skip:
+ bb.note("Recipe %s skipping qa checking: configure-gettext" % d.getVar('PN'))
+ skip_configure_gettext = True
+
+ cnf = d.getVar('EXTRA_OECONF') or ""
+ if not ("gettext" in d.getVar('P') or "gcc-runtime" in d.getVar('P') or \
+ "--disable-nls" in cnf or skip_configure_gettext):
+ ml = d.getVar("MLPREFIX") or ""
+ if bb.data.inherits_class('cross-canadian', d):
+ gt = "nativesdk-gettext"
+ else:
+ gt = "gettext-native"
+ deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
+ if gt not in deps:
+ for config in configs:
+ gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
+ if subprocess.call(gnu, shell=True) == 0:
+ error_msg = "AM_GNU_GETTEXT used but no inherit gettext"
+ oe.qa.handle_error("configure-gettext", error_msg, d)
+
+ ###########################################################################
+ # Check unrecognised configure options (with an ignore list)
+ ###########################################################################
+ if bb.data.inherits_class("autotools", d):
+ bb.note("Checking configure output for unrecognised options")
+ try:
+ if bb.data.inherits_class("autotools", d):
+ flag = "WARNING: unrecognized options:"
+ log = os.path.join(d.getVar('B'), 'config.log')
+ output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ').replace('"', '')
+ options = set()
+ for line in output.splitlines():
+ options |= set(line.partition(flag)[2].split())
+ ignore_opts = set(d.getVar("UNKNOWN_CONFIGURE_OPT_IGNORE").split())
+ options -= ignore_opts
+ if options:
+ pn = d.getVar('PN')
+ error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
+ oe.qa.handle_error("unknown-configure-option", error_msg, d)
+ except subprocess.CalledProcessError:
+ pass
+
+ # Check invalid PACKAGECONFIG
+ pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
+ if pkgconfig:
+ pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
+ for pconfig in pkgconfig:
+ if pconfig not in pkgconfigflags:
+ pn = d.getVar('PN')
+ error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
+ oe.qa.handle_error("invalid-packageconfig", error_msg, d)
+
+ oe.qa.exit_if_errors(d)
+}
+
+def unpack_check_src_uri(pn, d):
+ import re
+
+ skip = (d.getVar('INSANE_SKIP') or "").split()
+ if 'src-uri-bad' in skip:
+ bb.note("Recipe %s skipping qa checking: src-uri-bad" % d.getVar('PN'))
+ return
+
+ if "${PN}" in d.getVar("SRC_URI", False):
+ oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
+
+ for url in d.getVar("SRC_URI").split():
+ # Search for github and gitlab URLs that pull unstable archives (comment for future greppers)
+ if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url):
+ oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
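+# Example (illustrative): a SRC_URI entry such as
+#   https://github.com/example/project/archive/v1.0.tar.gz
+# matches the pattern above and triggers the src-uri-bad warning, because such
+# autogenerated archives are not guaranteed to be stable; the recipe should
+# fetch from the git repository instead.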
+
+python do_qa_unpack() {
+ src_uri = d.getVar('SRC_URI')
+ s_dir = d.getVar('S')
+ if src_uri and not os.path.exists(s_dir):
+ bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
+
+ unpack_check_src_uri(d.getVar('PN'), d)
+}
+
+# Check for patch fuzz
+do_patch[postfuncs] += "do_qa_patch "
+
+# Check broken config.log files, for packages requiring Gettext which
+# don't have it in DEPENDS.
+#addtask qa_configure after do_configure before do_compile
+do_configure[postfuncs] += "do_qa_configure "
+
+# Check whether S exists.
+do_unpack[postfuncs] += "do_qa_unpack"
+
+python () {
+ import re
+
+ tests = d.getVar('ALL_QA').split()
+ if "desktop" in tests:
+ d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
+
+ ###########################################################################
+ # Check various variables
+ ###########################################################################
+
+ # Checking ${FILESEXTRAPATHS}
+ extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
+ if '__default' not in extrapaths.split(":"):
+ msg = "The FILESEXTRAPATHS variable must always use the :prepend (or :append)\n"
+ msg += "type of assignment, and don't forget the colon.\n"
+ msg += "Please assign it with the format of:\n"
+ msg += " FILESEXTRAPATHS:append := \":${THISDIR}/Your_Files_Path\" or\n"
+ msg += " FILESEXTRAPATHS:prepend := \"${THISDIR}/Your_Files_Path:\"\n"
+ msg += "in your bbappend file\n\n"
+ msg += "Your incorrect assignment is:\n"
+ msg += "%s\n" % extrapaths
+ bb.warn(msg)
+
+ overrides = d.getVar('OVERRIDES').split(':')
+ pn = d.getVar('PN')
+ if pn in overrides:
+ msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
+ oe.qa.handle_error("pn-overrides", msg, d)
+ prog = re.compile(r'[A-Z]')
+ if prog.search(pn):
+ oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)
+
+ # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder
+ # why it doesn't work.
+ if (d.getVar(d.expand('DEPENDS:${PN}'))):
+ oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d)
+
+ issues = []
+ if (d.getVar('PACKAGES') or "").split():
+ for dep in (d.getVar('QADEPENDS') or "").split():
+ d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
+ for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
+ if d.getVar(var, False):
+ issues.append(var)
+
+ fakeroot_tests = d.getVar('FAKEROOT_QA').split()
+ if set(tests) & set(fakeroot_tests):
+ d.setVarFlag('do_package_qa', 'fakeroot', '1')
+ d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
+ else:
+ d.setVarFlag('do_package_qa', 'rdeptask', '')
+ for i in issues:
+ oe.qa.handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
+
+ if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split():
+ for native_class in ['native', 'nativesdk']:
+ if bb.data.inherits_class(native_class, d):
+
+ inherited_classes = d.getVar('__inherit_cache', False) or []
+ needle = "/" + native_class
+
+ bbclassextend = (d.getVar('BBCLASSEXTEND') or '').split()
+ # BBCLASSEXTEND items are always added in the end
+ skip_classes = bbclassextend
+ if bb.data.inherits_class('native', d) or 'native' in bbclassextend:
+ # native also inherits nopackages and relocatable bbclasses
+ skip_classes.extend(['nopackages', 'relocatable'])
+
+ broken_order = []
+ for class_item in reversed(inherited_classes):
+ if needle not in class_item:
+ for extend_item in skip_classes:
+ if '/%s.bbclass' % extend_item in class_item:
+ break
+ else:
+ pn = d.getVar('PN')
+ broken_order.append(os.path.basename(class_item))
+ else:
+ break
+ if broken_order:
+ oe.qa.handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. "
+ "Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d)
+
+ oe.qa.exit_if_errors(d)
+}
diff --git a/meta/classes-global/license.bbclass b/meta/classes-global/license.bbclass
new file mode 100644
index 0000000000..560acb8b6f
--- /dev/null
+++ b/meta/classes-global/license.bbclass
@@ -0,0 +1,426 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Populates LICENSE_DIRECTORY as set in distro config with the license files as set by
+# LIC_FILES_CHKSUM.
+# TODO:
+# - There is a real issue revolving around license naming standards.
+
+LICENSE_DIRECTORY ??= "${DEPLOY_DIR}/licenses"
+LICSSTATEDIR = "${WORKDIR}/license-destdir/"
+
+# Create extra package with license texts and add it to RRECOMMENDS:${PN}
+LICENSE_CREATE_PACKAGE[type] = "boolean"
+LICENSE_CREATE_PACKAGE ??= "0"
+LICENSE_PACKAGE_SUFFIX ??= "-lic"
+LICENSE_FILES_DIRECTORY ??= "${datadir}/licenses/"
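+# Example (illustrative): a distro or local configuration can enable the extra
+# license package with
+#   LICENSE_CREATE_PACKAGE = "1"
+# which creates ${PN}${LICENSE_PACKAGE_SUFFIX} (e.g. ${PN}-lic) containing the
+# license texts under ${LICENSE_FILES_DIRECTORY}.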
+
+addtask populate_lic after do_patch before do_build
+do_populate_lic[dirs] = "${LICSSTATEDIR}/${PN}"
+do_populate_lic[cleandirs] = "${LICSSTATEDIR}"
+
+python do_populate_lic() {
+ """
+ Populate LICENSE_DIRECTORY with licenses.
+ """
+ lic_files_paths = find_license_files(d)
+
+ # The base directory we wrangle licenses to
+ destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('PN'))
+ copy_license_files(lic_files_paths, destdir)
+ info = get_recipe_info(d)
+ with open(os.path.join(destdir, "recipeinfo"), "w") as f:
+ for key in sorted(info.keys()):
+ f.write("%s: %s\n" % (key, info[key]))
+ oe.qa.exit_if_errors(d)
+}
+
+PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '') + ' ' + d.getVar('COREBASE') + '/meta/COPYING').split())}"
+# It would be better to copy them in do_install:append, but find_license_files is python
+python perform_packagecopy:prepend () {
+ enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
+ if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled:
+ lic_files_paths = find_license_files(d)
+
+ # LICENSE_FILES_DIRECTORY starts with '/' so os.path.join cannot be used to join D and LICENSE_FILES_DIRECTORY
+ destdir = d.getVar('D') + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY'), d.getVar('PN'))
+ copy_license_files(lic_files_paths, destdir)
+ add_package_and_files(d)
+}
+perform_packagecopy[vardeps] += "LICENSE_CREATE_PACKAGE"
+
+def get_recipe_info(d):
+ info = {}
+ info["PV"] = d.getVar("PV")
+ info["PR"] = d.getVar("PR")
+ info["LICENSE"] = d.getVar("LICENSE")
+ return info
+
+def add_package_and_files(d):
+ packages = d.getVar('PACKAGES')
+ files = d.getVar('LICENSE_FILES_DIRECTORY')
+ pn = d.getVar('PN')
+ pn_lic = "%s%s" % (pn, d.getVar('LICENSE_PACKAGE_SUFFIX', False))
+ if pn_lic in packages.split():
+ bb.warn("%s package already exists in %s." % (pn_lic, pn))
+ else:
+ # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY
+ d.setVar('PACKAGES', "%s %s" % (pn_lic, packages))
+ d.setVar('FILES:' + pn_lic, files)
+
+def copy_license_files(lic_files_paths, destdir):
+ import shutil
+ import errno
+
+ bb.utils.mkdirhier(destdir)
+ for (basename, path, beginline, endline) in lic_files_paths:
+ try:
+ src = path
+ dst = os.path.join(destdir, basename)
+ if os.path.exists(dst):
+ os.remove(dst)
+ if os.path.islink(src):
+ src = os.path.realpath(src)
+ canlink = os.access(src, os.W_OK) and (os.stat(src).st_dev == os.stat(destdir).st_dev) and beginline is None and endline is None
+ if canlink:
+ try:
+ os.link(src, dst)
+ except OSError as err:
+ if err.errno == errno.EXDEV:
+ # Copy license files if hardlink is not possible even if st_dev is the
+ # same on source and destination (docker container with device-mapper?)
+ canlink = False
+ else:
+ raise
+ # Only chown if we did hardlink and we're running under pseudo
+ if canlink and os.environ.get('PSEUDO_DISABLED') == '0':
+ os.chown(dst,0,0)
+ if not canlink:
+ begin_idx = max(0, int(beginline) - 1) if beginline is not None else None
+ end_idx = max(0, int(endline)) if endline is not None else None
+ if begin_idx is None and end_idx is None:
+ shutil.copyfile(src, dst)
+ else:
+ with open(src, 'rb') as src_f:
+ with open(dst, 'wb') as dst_f:
+ dst_f.write(b''.join(src_f.readlines()[begin_idx:end_idx]))
+
+ except Exception as e:
+ bb.warn("Could not copy license file %s to %s: %s" % (src, dst, e))
+
+def find_license_files(d):
+ """
+ Creates list of files used in LIC_FILES_CHKSUM and generic LICENSE files.
+ """
+ import shutil
+ import oe.license
+ from collections import defaultdict, OrderedDict
+
+ # All the license files for the package
+ lic_files = d.getVar('LIC_FILES_CHKSUM') or ""
+ pn = d.getVar('PN')
+ # The license files are located at the paths given in LIC_FILES_CHKSUM, relative to S.
+ srcdir = d.getVar('S')
+ # Directory we store the generic licenses as set in the distro configuration
+ generic_directory = d.getVar('COMMON_LICENSE_DIR')
+ # List of basename, path tuples
+ lic_files_paths = []
+ # dict to keep track of non-generic license mappings
+ non_generic_lics = {}
+ # Entries from LIC_FILES_CHKSUM
+ lic_chksums = {}
+ license_source_dirs = []
+ license_source_dirs.append(generic_directory)
+ try:
+ additional_lic_dirs = d.getVar('LICENSE_PATH').split()
+ for lic_dir in additional_lic_dirs:
+ license_source_dirs.append(lic_dir)
+ except:
+ pass
+
+ class FindVisitor(oe.license.LicenseVisitor):
+ def visit_Str(self, node):
+ #
+ # Until we figure out what to do with the two modifiers we
+ # support ("or greater" = '+' and "with exceptions" = '*'),
+ # we'll just strip out the modifier and use the base license.
+ find_license(node.s.replace("+", "").replace("*", ""))
+ self.generic_visit(node)
+
+ def visit_Constant(self, node):
+ find_license(node.value.replace("+", "").replace("*", ""))
+ self.generic_visit(node)
+
+ def find_license(license_type):
+ try:
+ bb.utils.mkdirhier(gen_lic_dest)
+ except:
+ pass
+ spdx_generic = None
+ license_source = None
+ # If the generic does not exist we need to check to see if there is an SPDX mapping to it,
+ # unless NO_GENERIC_LICENSE is set.
+ for lic_dir in license_source_dirs:
+ if not os.path.isfile(os.path.join(lic_dir, license_type)):
+ if d.getVarFlag('SPDXLICENSEMAP', license_type) != None:
+ # Great, there is an SPDXLICENSEMAP. We can copy!
+ bb.debug(1, "We need to use a SPDXLICENSEMAP for %s" % (license_type))
+ spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type)
+ license_source = lic_dir
+ break
+ elif os.path.isfile(os.path.join(lic_dir, license_type)):
+ spdx_generic = license_type
+ license_source = lic_dir
+ break
+
+ non_generic_lic = d.getVarFlag('NO_GENERIC_LICENSE', license_type)
+ if spdx_generic and license_source:
+ # we really should copy to generic_ + spdx_generic, however, that ends up messing up the manifest
+ # audit. This should be fixed in emit_pkgdata (or we actually go and fix all the recipes)
+
+ lic_files_paths.append(("generic_" + license_type, os.path.join(license_source, spdx_generic),
+ None, None))
+
+ # The user may attempt to use NO_GENERIC_LICENSE for a generic license which doesn't make sense
+ # and should not be allowed, warn the user in this case.
+ if d.getVarFlag('NO_GENERIC_LICENSE', license_type):
+ oe.qa.handle_error("license-no-generic",
+ "%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type), d)
+
+ elif non_generic_lic and non_generic_lic in lic_chksums:
+ # if NO_GENERIC_LICENSE is set, we copy the license files from the fetched source
+ # of the package rather than the license_source_dirs.
+ lic_files_paths.append(("generic_" + license_type,
+ os.path.join(srcdir, non_generic_lic), None, None))
+ non_generic_lics[non_generic_lic] = license_type
+ else:
+ # Explicitly avoid the CLOSED license because this isn't generic
+ if license_type != 'CLOSED':
+ # And here is where we warn people that their licenses are lousy
+ oe.qa.handle_error("license-exists",
+ "%s: No generic license file exists for: %s in any provider" % (pn, license_type), d)
+ pass
+
+ if not generic_directory:
+ bb.fatal("COMMON_LICENSE_DIR is unset. Please set this in your distro config")
+
+ for url in lic_files.split():
+ try:
+ (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
+ if method != "file" or not path:
+ raise bb.fetch.MalformedUrl()
+ except bb.fetch.MalformedUrl:
+ bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF'), url))
+ # We want the license filename and path
+ chksum = parm.get('md5', None)
+ beginline = parm.get('beginline')
+ endline = parm.get('endline')
+ lic_chksums[path] = (chksum, beginline, endline)
+
+ v = FindVisitor()
+ try:
+ v.visit_string(d.getVar('LICENSE'))
+ except oe.license.InvalidLicense as exc:
+ bb.fatal('%s: %s' % (d.getVar('PF'), exc))
+ except SyntaxError:
+ oe.qa.handle_error("license-syntax",
+ "%s: Failed to parse its LICENSE field." % (d.getVar('PF')), d)
+ # Add files from LIC_FILES_CHKSUM to list of license files
+ lic_chksum_paths = defaultdict(OrderedDict)
+ for path, data in sorted(lic_chksums.items()):
+ lic_chksum_paths[os.path.basename(path)][data] = (os.path.join(srcdir, path), data[1], data[2])
+ for basename, files in lic_chksum_paths.items():
+ if len(files) == 1:
+ # Don't copy a LICENSE file again if it was already handled as non-generic
+ if basename in non_generic_lics:
+ continue
+ data = list(files.values())[0]
+ lic_files_paths.append(tuple([basename] + list(data)))
+ else:
+ # If there are multiple different license files with identical
+ # basenames we rename them to <file>.0, <file>.1, ...
+ for i, data in enumerate(files.values()):
+ lic_files_paths.append(tuple(["%s.%d" % (basename, i)] + list(data)))
+
+ return lic_files_paths
+
+def return_spdx(d, license):
+ """
+ This function returns the spdx mapping of a license if it exists.
+ """
+ return d.getVarFlag('SPDXLICENSEMAP', license)
+
+def canonical_license(d, license):
+ """
+ Return the canonical (SPDX) form of the license if available (so GPLv3
+ becomes GPL-3.0-only) or the passed license if there is no canonical form.
+ """
+ return d.getVarFlag('SPDXLICENSEMAP', license) or license
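+# Example (illustrative): with SPDXLICENSEMAP[GPLv3] = "GPL-3.0-only" defined
+# in the licenses configuration, canonical_license(d, "GPLv3") returns
+# "GPL-3.0-only"; a name with no mapping is returned unchanged.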
+
+def expand_wildcard_licenses(d, wildcard_licenses):
+ """
+ There are some common wildcard values users may want to use. Support them
+ here.
+ """
+ licenses = set(wildcard_licenses)
+ mapping = {
+ "AGPL-3.0*" : ["AGPL-3.0-only", "AGPL-3.0-or-later"],
+ "GPL-3.0*" : ["GPL-3.0-only", "GPL-3.0-or-later"],
+ "LGPL-3.0*" : ["LGPL-3.0-only", "LGPL-3.0-or-later"],
+ }
+ for k in mapping:
+ if k in wildcard_licenses:
+ licenses.remove(k)
+ for item in mapping[k]:
+ licenses.add(item)
+
+ for l in licenses:
+ if l in oe.license.obsolete_license_list():
+ bb.fatal("Error, %s is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE" % l)
+ if "*" in l:
+ bb.fatal("Error, %s is an invalid license wildcard entry" % l)
+
+ return list(licenses)
+
+def incompatible_license_contains(license, truevalue, falsevalue, d):
+ license = canonical_license(d, license)
+ bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
+ bad_licenses = expand_wildcard_licenses(d, bad_licenses)
+ return truevalue if license in bad_licenses else falsevalue
+
+def incompatible_pkg_license(d, dont_want_licenses, license):
+ # Handles an "or" or two license sets provided by
+ # flattened_licenses(), pick one that works if possible.
+ def choose_lic_set(a, b):
+ return a if all(oe.license.license_ok(canonical_license(d, lic),
+ dont_want_licenses) for lic in a) else b
+
+ try:
+ licenses = oe.license.flattened_licenses(license, choose_lic_set)
+ except oe.license.LicenseError as exc:
+ bb.fatal('%s: %s' % (d.getVar('P'), exc))
+
+ incompatible_lic = []
+ for l in licenses:
+ license = canonical_license(d, l)
+ if not oe.license.license_ok(license, dont_want_licenses):
+ incompatible_lic.append(license)
+
+ return sorted(incompatible_lic)
+
+def incompatible_license(d, dont_want_licenses, package=None):
+ """
+ This function checks if a recipe has only incompatible licenses. It also
+ takes the 'or' operator into consideration. dont_want_licenses should be passed
+ as canonical (SPDX) names.
+ """
+ import oe.license
+ license = d.getVar("LICENSE:%s" % package) if package else None
+ if not license:
+ license = d.getVar('LICENSE')
+
+ return incompatible_pkg_license(d, dont_want_licenses, license)
+
+def check_license_flags(d):
+ """
+ This function checks if a recipe has any LICENSE_FLAGS that
+ aren't acceptable.
+
+ If it does, it returns all of the LICENSE_FLAGS missing from the list
+ of acceptable license flags, or all of the LICENSE_FLAGS if there
+ is no list of acceptable flags.
+
+ If everything is acceptable, it returns None.
+ """
+
+ def license_flag_matches(flag, acceptlist, pn):
+ """
+ Return True if flag matches something in acceptlist, False if not.
+
+ Before we test a flag against the acceptlist, we append _${PN}
+ to it. We then try to match that string against the
+ acceptlist. This covers the normal case, where we expect
+ LICENSE_FLAGS to be a simple string like 'commercial', which
+ the user typically matches exactly in the acceptlist by
+ explicitly appending the package name, e.g. 'commercial_foo'.
+ If we fail the match however, we then split the flag across
+ '_' and append each fragment and test until we either match or
+ run out of fragments.
+ """
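+ # Worked example (illustrative): with flag "commercial_binary" and
+ # PN "foo", "commercial_binary_foo" is tested first, then the
+ # fragments "commercial" and "commercial_binary" are tried in turn.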
+ flag_pn = ("%s_%s" % (flag, pn))
+ for candidate in acceptlist:
+ if flag_pn == candidate:
+ return True
+
+ flag_cur = ""
+ flagments = flag_pn.split("_")
+ flagments.pop() # we've already tested the full string
+ for flagment in flagments:
+ if flag_cur:
+ flag_cur += "_"
+ flag_cur += flagment
+ for candidate in acceptlist:
+ if flag_cur == candidate:
+ return True
+ return False
+
+ def all_license_flags_match(license_flags, acceptlist):
+ """ Return all unmatched flags, None if all flags match """
+ pn = d.getVar('PN')
+ split_acceptlist = acceptlist.split()
+ flags = []
+ for flag in license_flags.split():
+ if not license_flag_matches(flag, split_acceptlist, pn):
+ flags.append(flag)
+ return flags if flags else None
+
+ license_flags = d.getVar('LICENSE_FLAGS')
+ if license_flags:
+ acceptlist = d.getVar('LICENSE_FLAGS_ACCEPTED')
+ if not acceptlist:
+ return license_flags.split()
+ unmatched_flags = all_license_flags_match(license_flags, acceptlist)
+ if unmatched_flags:
+ return unmatched_flags
+ return None
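+# Example (illustrative): a recipe setting LICENSE_FLAGS = "commercial" is only
+# accepted when the distro configuration lists a matching entry, e.g.
+#   LICENSE_FLAGS_ACCEPTED = "commercial"
+# or a package-specific form such as "commercial_foo".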
+
+def check_license_format(d):
+ """
+ This function checks that LICENSE is well formed and validates the
+ operators used in it: license names must be separated by valid
+ operators, not by spaces alone.
+ """
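+ # For example (illustrative): LICENSE = "GPL-2.0-only & MIT" is well
+ # formed, whereas LICENSE = "GPL-2.0-only MIT" (license names separated
+ # only by a space) triggers the license-format QA error below.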
+ pn = d.getVar('PN')
+ licenses = d.getVar('LICENSE')
+ from oe.license import license_operator, license_operator_chars, license_pattern
+
+ elements = list(filter(lambda x: x.strip(), license_operator.split(licenses)))
+ for pos, element in enumerate(elements):
+ if license_pattern.match(element):
+ if pos > 0 and license_pattern.match(elements[pos - 1]):
+ oe.qa.handle_error('license-format',
+ '%s: LICENSE value "%s" has an invalid format - license names ' \
+ 'must be separated by the following characters to indicate ' \
+ 'the license selection: %s' %
+ (pn, licenses, license_operator_chars), d)
+ elif not license_operator.match(element):
+ oe.qa.handle_error('license-format',
+ '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \
+ 'in the valid list of separators (%s)' %
+ (pn, licenses, element, license_operator_chars), d)
+
+SSTATETASKS += "do_populate_lic"
+do_populate_lic[sstate-inputdirs] = "${LICSSTATEDIR}"
+do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/"
+
+IMAGE_CLASSES:append = " license_image"
+
+python do_populate_lic_setscene () {
+ sstate_setscene(d)
+}
+addtask do_populate_lic_setscene
diff --git a/meta/classes-global/logging.bbclass b/meta/classes-global/logging.bbclass
new file mode 100644
index 0000000000..ce03abfe42
--- /dev/null
+++ b/meta/classes-global/logging.bbclass
@@ -0,0 +1,107 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# The following logging mechanisms are to be used in bash functions of recipes.
+# They are intended to map one to one in intention and output format with the
+# python recipe logging functions of a similar naming convention: bb.plain(),
+# bb.note(), etc.
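+#
+# Example (illustrative) usage from a recipe shell task:
+#   do_install:append() {
+#       bbnote "Installing optional documentation"
+#       bbwarn "Feature X is disabled; using the fallback implementation"
+#   }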
+
+LOGFIFO = "${T}/fifo.${@os.getpid()}"
+
+# Print the output exactly as it is passed in. Typically used for output of
+# tasks that should be seen on the console. Use sparingly.
+# Output: logs console
+bbplain() {
+ if [ -p ${LOGFIFO} ] ; then
+ printf "%b\0" "bbplain $*" > ${LOGFIFO}
+ else
+ echo "$*"
+ fi
+}
+
+# Notify the user of a noteworthy condition.
+# Output: logs
+bbnote() {
+ if [ -p ${LOGFIFO} ] ; then
+ printf "%b\0" "bbnote $*" > ${LOGFIFO}
+ else
+ echo "NOTE: $*"
+ fi
+}
+
+# Print a warning to the log. Warnings are non-fatal, and do not
+# indicate a build failure.
+# Output: logs console
+bbwarn() {
+ if [ -p ${LOGFIFO} ] ; then
+ printf "%b\0" "bbwarn $*" > ${LOGFIFO}
+ else
+ echo "WARNING: $*"
+ fi
+}
+
+# Print an error to the log. Errors are non-fatal in that the build can
+# continue, but they do indicate a build failure.
+# Output: logs console
+bberror() {
+ if [ -p ${LOGFIFO} ] ; then
+ printf "%b\0" "bberror $*" > ${LOGFIFO}
+ else
+ echo "ERROR: $*"
+ fi
+}
+
+# Print a fatal error to the log. Fatal errors indicate build failure
+# and halt the build, exiting with an error code.
+# Output: logs console
+bbfatal() {
+ if [ -p ${LOGFIFO} ] ; then
+ printf "%b\0" "bbfatal $*" > ${LOGFIFO}
+ else
+ echo "ERROR: $*"
+ fi
+ exit 1
+}
+
+# Like bbfatal, except prevents the suppression of the error log by
+# bitbake's UI.
+# Output: logs console
+bbfatal_log() {
+ if [ -p ${LOGFIFO} ] ; then
+ printf "%b\0" "bbfatal_log $*" > ${LOGFIFO}
+ else
+ echo "ERROR: $*"
+ fi
+ exit 1
+}
+
+# Print debug messages. These are appropriate for progress checkpoint
+# messages to the logs. Depending on the debug log level, they may also
+# go to the console.
+# Output: logs console
+# Usage: bbdebug 1 "first level debug message"
+# bbdebug 2 "second level debug message"
+bbdebug() {
+ USAGE='Usage: bbdebug [123] "message"'
+ if [ $# -lt 2 ]; then
+ bbfatal "$USAGE"
+ fi
+
+ # Strip off the debug level and ensure it is an integer
+ DBGLVL=$1; shift
+ NONDIGITS=$(echo "$DBGLVL" | tr -d "[:digit:]")
+ if [ "$NONDIGITS" ]; then
+ bbfatal "$USAGE"
+ fi
+
+ # All debug output is printed to the logs
+ if [ -p ${LOGFIFO} ] ; then
+ printf "%b\0" "bbdebug $DBGLVL $*" > ${LOGFIFO}
+ else
+ echo "DEBUG: $*"
+ fi
+}
+
diff --git a/meta/classes-global/mirrors.bbclass b/meta/classes-global/mirrors.bbclass
new file mode 100644
index 0000000000..9643b31a23
--- /dev/null
+++ b/meta/classes-global/mirrors.bbclass
@@ -0,0 +1,95 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+MIRRORS += "\
+${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian/20180310T215105Z/pool \
+${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20120328T092752Z/debian/pool \
+${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20110127T084257Z/debian/pool \
+${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20090802T004153Z/debian/pool \
+${DEBIAN_MIRROR} http://ftp.de.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.au.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.cl.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.hr.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.fi.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.hk.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.hu.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.ie.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.it.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.jp.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.no.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.pl.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.ro.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.si.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.es.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.se.debian.org/debian/pool \
+${DEBIAN_MIRROR} http://ftp.tr.debian.org/debian/pool \
+${GNU_MIRROR} https://mirrors.kernel.org/gnu \
+${KERNELORG_MIRROR} http://www.kernel.org/pub \
+${GNUPG_MIRROR} ftp://ftp.gnupg.org/gcrypt \
+${GNUPG_MIRROR} ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt \
+${GNUPG_MIRROR} ftp://mirrors.dotsrc.org/gcrypt \
+ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN \
+ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/ \
+ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/ \
+ftp://ftp.gnutls.org/gcrypt/gnutls ${GNUPG_MIRROR}/gnutls \
+http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/ \
+http://www.mirrorservice.org/sites/lsof.itap.purdue.edu/pub/tools/unix/lsof/ http://www.mirrorservice.org/sites/lsof.itap.purdue.edu/pub/tools/unix/lsof/OLD/ \
+${APACHE_MIRROR} http://www.us.apache.org/dist \
+${APACHE_MIRROR} http://archive.apache.org/dist \
+http://downloads.sourceforge.net/watchdog/ http://fossies.org/linux/misc/ \
+${SAVANNAH_GNU_MIRROR} http://download-mirror.savannah.gnu.org/releases \
+${SAVANNAH_NONGNU_MIRROR} http://download-mirror.savannah.nongnu.org/releases \
+ftp://sourceware.org/pub http://mirrors.kernel.org/sourceware \
+ftp://sourceware.org/pub http://gd.tuwien.ac.at/gnu/sourceware \
+ftp://sourceware.org/pub http://ftp.gwdg.de/pub/linux/sources.redhat.com/sourceware \
+cvs://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+svn://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+gitsm://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+hg://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+bzr://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+p4://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+osc://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+https?://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+npm://.*/?.* http://downloads.yoctoproject.org/mirror/sources/ \
+cvs://.*/.* http://sources.openembedded.org/ \
+svn://.*/.* http://sources.openembedded.org/ \
+git://.*/.* http://sources.openembedded.org/ \
+gitsm://.*/.* http://sources.openembedded.org/ \
+hg://.*/.* http://sources.openembedded.org/ \
+bzr://.*/.* http://sources.openembedded.org/ \
+p4://.*/.* http://sources.openembedded.org/ \
+osc://.*/.* http://sources.openembedded.org/ \
+https?://.*/.* http://sources.openembedded.org/ \
+ftp://.*/.* http://sources.openembedded.org/ \
+npm://.*/?.* http://sources.openembedded.org/ \
+${CPAN_MIRROR} http://cpan.metacpan.org/ \
+${CPAN_MIRROR} http://search.cpan.org/CPAN/ \
+https?://downloads.yoctoproject.org/releases/uninative/ https://mirrors.kernel.org/yocto/uninative/ \
+https?://downloads.yoctoproject.org/mirror/sources/ https://mirrors.kernel.org/yocto-sources/ \
+"
+
+# Use MIRRORS to provide git repo fallbacks using the https protocol, for cases
+# where git native protocol fetches may fail due to local firewall rules, etc.
+
+MIRRORS += "\
+git://salsa.debian.org/.* git://salsa.debian.org/PATH;protocol=https \
+git://git.gnome.org/.* git://gitlab.gnome.org/GNOME/PATH;protocol=https \
+git://.*/.* git://HOST/PATH;protocol=https \
+git://.*/.* git://HOST/git/PATH;protocol=https \
+"
+
+# Switch glibc and binutils recipes to use shallow clones as they're large and this
+# improves user experience whilst allowing the flexibility of git urls in the recipes
+BB_GIT_SHALLOW:pn-binutils = "1"
+BB_GIT_SHALLOW:pn-binutils-cross-${TARGET_ARCH} = "1"
+BB_GIT_SHALLOW:pn-binutils-cross-canadian-${TRANSLATED_TARGET_ARCH} = "1"
+BB_GIT_SHALLOW:pn-binutils-cross-testsuite = "1"
+BB_GIT_SHALLOW:pn-binutils-crosssdk-${SDK_SYS} = "1"
+BB_GIT_SHALLOW:pn-glibc = "1"
+PREMIRRORS += "git://sourceware.org/git/glibc.git https://downloads.yoctoproject.org/mirror/sources/ \
+ git://sourceware.org/git/binutils-gdb.git https://downloads.yoctoproject.org/mirror/sources/"
diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass
new file mode 100644
index 0000000000..2d985d8aff
--- /dev/null
+++ b/meta/classes-global/package.bbclass
@@ -0,0 +1,2546 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
+# Packaging process
+#
+# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS,
+# taking D and splitting it up into the packages listed in PACKAGES, placing the
+# resulting output in PKGDEST.
+#
+# There are the following default steps but PACKAGEFUNCS can be extended:
+#
+# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
+#
+# b) perform_packagecopy - Copy D into PKGD
+#
+# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
+#
+# d) split_and_strip_files - split the files into runtime and debug and strip them.
+# Debug files include the split-out debug info and associated sources that end up in -dbg packages
+#
+# e) fixup_perms - Fix up permissions in the package before we split it.
+#
+# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
+# Also triggers the binary stripping code to put files in -dbg packages.
+#
+# g) package_do_filedeps - Collect per-file run-time dependency metadata
+# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
+# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
+#
+# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
+# dependencies found. Also stores the package name so anyone else using this library
+# knows which package to depend on.
+#
+# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
+#
+# j) read_shlibdeps - Reads the stored shlibs information into the metadata
+#
+# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
+#
+# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
+# packaging steps
+
+inherit packagedata
+inherit chrpath
+inherit package_pkgdata
+inherit insane
+
+PKGD = "${WORKDIR}/package"
+PKGDEST = "${WORKDIR}/packages-split"
+
+LOCALE_SECTION ?= ''
+
+ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
+
+# rpm is used for the per-file dependency identification
+# dwarfsrcfiles is used to determine the list of debug source files
+PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
+
+
+# If your postinstall can execute at rootfs creation time rather than on
+# target but depends on a native/cross tool in order to execute, you need to
+# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
+# in the package dependencies as normal, this is just for native/cross support
+# tools at rootfs build time.
+PACKAGE_WRITE_DEPS ??= ""
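+
+# Illustrative sketch (hypothetical recipe): a postinstall that runs a native
+# helper during rootfs construction would declare it like
+#
+#   PACKAGE_WRITE_DEPS += "mytool-native"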
+
+def legitimize_package_name(s):
+ """
+ Make sure package names are legitimate strings
+ """
+ import re
+
+ def fixutf(m):
+ cp = m.group(1)
+ if cp:
+ return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
+
+ # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
+ s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
+
+ # Remaining package name validity fixes
+ return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
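+
+# Illustrative example of the transformation above (input value is hypothetical):
+#   legitimize_package_name("Foo_Bar@2,1") -> "foo-bar+2+1"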
+
+def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
+ """
+ Used in .bb files to split up dynamically generated subpackages of a
+ given package, usually plugins or modules.
+
+ Arguments:
+ root -- the path in which to search
+ file_regex -- regular expression to match searched files. Use
+ parentheses () to mark the part of this expression
+ that should be used to derive the module name (to be
+ substituted where %s is used in other function
+ arguments as noted below)
+ output_pattern -- pattern to use for the package names. Must include %s.
+ description -- description to set for each package. Must include %s.
+ postinst -- postinstall script to use for all packages (as a
+ string)
+ recursive -- True to perform a recursive search - default False
+ hook -- a hook function to be called for every match. The
+ function will be called with the following arguments
+ (in the order listed):
+ f: full path to the file/directory match
+ pkg: the package name
+ file_regex: as above
+ output_pattern: as above
+ modulename: the module name derived using file_regex
+ extra_depends -- extra runtime dependencies (RDEPENDS) to be set for
+ all packages. The default value of None causes a
+ dependency on the main package (${PN}) - if you do
+ not want this, pass '' for this parameter.
+ aux_files_pattern -- extra item(s) to be added to FILES for each
+ package. Can be a single string item or a list of
+ strings for multiple items. Must include %s.
+ postrm -- postrm script to use for all packages (as a string)
+ allow_dirs -- True allow directories to be matched - default False
+ prepend -- if True, prepend created packages to PACKAGES instead
+ of the default False which appends them
+ match_path -- match file_regex on the whole relative path to the
+ root rather than just the file name
+ aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
+ each package, using the actual derived module name
+ rather than converting it to something legal for a
+ package name. Can be a single string item or a list
+ of strings for multiple items. Must include %s.
+ allow_links -- True to allow symlinks to be matched - default False
+ summary -- Summary to set for each package. Must include %s;
+ defaults to description if not set.
+
+ """
+
+ dvar = d.getVar('PKGD')
+ root = d.expand(root)
+ output_pattern = d.expand(output_pattern)
+ extra_depends = d.expand(extra_depends)
+
+ # If the root directory doesn't exist, don't error out later but silently do
+ # no splitting.
+ if not os.path.exists(dvar + root):
+ return []
+
+ ml = d.getVar("MLPREFIX")
+ if ml:
+ if not output_pattern.startswith(ml):
+ output_pattern = ml + output_pattern
+
+ newdeps = []
+ for dep in (extra_depends or "").split():
+ if dep.startswith(ml):
+ newdeps.append(dep)
+ else:
+ newdeps.append(ml + dep)
+ if newdeps:
+ extra_depends = " ".join(newdeps)
+
+
+ packages = d.getVar('PACKAGES').split()
+ split_packages = set()
+
+ if postinst:
+ postinst = '#!/bin/sh\n' + postinst + '\n'
+ if postrm:
+ postrm = '#!/bin/sh\n' + postrm + '\n'
+ if not recursive:
+ objs = os.listdir(dvar + root)
+ else:
+ objs = []
+ for walkroot, dirs, files in os.walk(dvar + root):
+ for file in files:
+ relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
+ if relpath:
+ objs.append(relpath)
+
+ if extra_depends == None:
+ extra_depends = d.getVar("PN")
+
+ if not summary:
+ summary = description
+
+ for o in sorted(objs):
+ import re, stat
+ if match_path:
+ m = re.match(file_regex, o)
+ else:
+ m = re.match(file_regex, os.path.basename(o))
+
+ if not m:
+ continue
+ f = os.path.join(dvar + root, o)
+ mode = os.lstat(f).st_mode
+ if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
+ continue
+ on = legitimize_package_name(m.group(1))
+ pkg = output_pattern % on
+ split_packages.add(pkg)
+ if not pkg in packages:
+ if prepend:
+ packages = [pkg] + packages
+ else:
+ packages.append(pkg)
+ oldfiles = d.getVar('FILES:' + pkg)
+ newfile = os.path.join(root, o)
+ # These names will be passed through glob() so if the filename actually
+ # contains * or ? (rare, but possible) we need to handle that specially
+ newfile = newfile.replace('*', '[*]')
+ newfile = newfile.replace('?', '[?]')
+ if not oldfiles:
+ the_files = [newfile]
+ if aux_files_pattern:
+ if type(aux_files_pattern) is list:
+ for fp in aux_files_pattern:
+ the_files.append(fp % on)
+ else:
+ the_files.append(aux_files_pattern % on)
+ if aux_files_pattern_verbatim:
+ if type(aux_files_pattern_verbatim) is list:
+ for fp in aux_files_pattern_verbatim:
+ the_files.append(fp % m.group(1))
+ else:
+ the_files.append(aux_files_pattern_verbatim % m.group(1))
+ d.setVar('FILES:' + pkg, " ".join(the_files))
+ else:
+ d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
+ if extra_depends != '':
+ d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
+ if not d.getVar('DESCRIPTION:' + pkg):
+ d.setVar('DESCRIPTION:' + pkg, description % on)
+ if not d.getVar('SUMMARY:' + pkg):
+ d.setVar('SUMMARY:' + pkg, summary % on)
+ if postinst:
+ d.setVar('pkg_postinst:' + pkg, postinst)
+ if postrm:
+ d.setVar('pkg_postrm:' + pkg, postrm)
+ if callable(hook):
+ hook(f, pkg, file_regex, output_pattern, m.group(1))
+
+ d.setVar('PACKAGES', ' '.join(packages))
+ return list(split_packages)
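+
+# Illustrative sketch (hypothetical recipe; paths and names are examples only):
+# a recipe with a plugin directory typically calls do_split_packages() from a
+# populate_packages prepend, e.g.
+#
+#   python populate_packages:prepend() {
+#       plugindir = d.expand('${libdir}/myapp/plugins')
+#       do_split_packages(d, plugindir, r'^lib(.*)\.so$', 'myapp-plugin-%s',
+#                         'MyApp plugin for %s', extra_depends='')
+#   }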
+
+PACKAGE_DEPENDS += "file-native"
+
+python () {
+ if d.getVar('PACKAGES') != '':
+ deps = ""
+ for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
+ deps += " %s:do_populate_sysroot" % dep
+ if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
+ deps += ' xz-native:do_populate_sysroot'
+ d.appendVarFlag('do_package', 'depends', deps)
+
+ # shlibs requires any DEPENDS to have already been packaged for the *.list files
+ d.appendVarFlag('do_package', 'deptask', " do_packagedata")
+}
+
+# Get a list of files from file vars by searching files under the current working directory.
+# The list contains symlinks, directories and normal files.
+def files_from_filevars(filevars):
+ import os,glob
+ cpath = oe.cachedpath.CachedPath()
+ files = []
+ for f in filevars:
+ if os.path.isabs(f):
+ f = '.' + f
+ if not f.startswith("./"):
+ f = './' + f
+ globbed = glob.glob(f)
+ if globbed:
+ if [ f ] != globbed:
+ files += globbed
+ continue
+ files.append(f)
+
+ symlink_paths = []
+ for ind, f in enumerate(files):
+ # Handle directory symlinks. Truncate path to the lowest level symlink
+ parent = ''
+ for dirname in f.split('/')[:-1]:
+ parent = os.path.join(parent, dirname)
+ if dirname == '.':
+ continue
+ if cpath.islink(parent):
+ bb.warn("FILES contains file '%s' which resides under a "
+ "directory symlink. Please fix the recipe and use the "
+ "real path for the file." % f[1:])
+ symlink_paths.append(f)
+ files[ind] = parent
+ f = parent
+ break
+
+ if not cpath.islink(f):
+ if cpath.isdir(f):
+ newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
+ if newfiles:
+ files += newfiles
+
+ return files, symlink_paths
+
+# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
+def get_conffiles(pkg, d):
+ pkgdest = d.getVar('PKGDEST')
+ root = os.path.join(pkgdest, pkg)
+ cwd = os.getcwd()
+ os.chdir(root)
+
+ conffiles = d.getVar('CONFFILES:%s' % pkg);
+ if conffiles == None:
+ conffiles = d.getVar('CONFFILES')
+ if conffiles == None:
+ conffiles = ""
+ conffiles = conffiles.split()
+ conf_orig_list = files_from_filevars(conffiles)[0]
+
+ # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
+ conf_list = []
+ for f in conf_orig_list:
+ if os.path.isdir(f):
+ continue
+ if os.path.islink(f):
+ continue
+ if not os.path.exists(f):
+ continue
+ conf_list.append(f)
+
+ # Remove the leading './'
+ for i in range(0, len(conf_list)):
+ conf_list[i] = conf_list[i][1:]
+
+ os.chdir(cwd)
+ return conf_list
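+
+# Illustrative sketch (hypothetical recipe): the configuration files resolved
+# above come from the per-package CONFFILES variable, set in a recipe as e.g.
+#
+#   CONFFILES:${PN} = "${sysconfdir}/myapp.conf"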
+
+def checkbuildpath(file, d):
+ tmpdir = d.getVar('TMPDIR')
+ with open(file) as f:
+ file_content = f.read()
+ if tmpdir in file_content:
+ return True
+
+ return False
+
+def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
+ debugfiles = {}
+
+ for line in dwarfsrcfiles_output.splitlines():
+ if line.startswith("\t"):
+ debugfiles[os.path.normpath(line.split()[0])] = ""
+
+ return debugfiles.keys()
+
+def source_info(file, d, fatal=True):
+ import subprocess
+
+ cmd = ["dwarfsrcfiles", file]
+ try:
+ output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
+ retval = 0
+ except subprocess.CalledProcessError as exc:
+ output = exc.output
+ retval = exc.returncode
+
+ # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
+ if retval != 0 and retval != 255:
+ msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
+ if fatal:
+ bb.fatal(msg)
+ bb.note(msg)
+
+ debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
+
+ return list(debugsources)
+
+def splitdebuginfo(file, dvar, dv, d):
+ # Function to split a single file into two components: one is the stripped
+ # target system binary, the other contains any debugging information. The
+ # two files are linked to reference each other.
+ #
+ # return a mapping of files:debugsources
+
+ import stat
+ import subprocess
+
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ debugfile = dvar + dest
+ sources = []
+
+ if file.endswith(".ko") and file.find("/lib/modules/") != -1:
+ if oe.package.is_kernel_module_signed(file):
+ bb.debug(1, "Skip strip on signed module %s" % file)
+ return (file, sources)
+
+ # Split the file...
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+ #bb.note("Split %s -> %s" % (file, debugfile))
+ # Only store off the hard link reference if we successfully split!
+
+ dvar = d.getVar('PKGD')
+ objcopy = d.getVar("OBJCOPY")
+
+ newmode = None
+ if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
+ origmode = os.stat(file)[stat.ST_MODE]
+ newmode = origmode | stat.S_IWRITE | stat.S_IREAD
+ os.chmod(file, newmode)
+
+ # We need to extract the debug src information here...
+ if dv["srcdir"]:
+ sources = source_info(file, d)
+
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+
+ subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
+
+ # Set the debuglink to have the view of the file path on the target
+ subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
+
+ if newmode:
+ os.chmod(file, origmode)
+
+ return (file, sources)
+
+def splitstaticdebuginfo(file, dvar, dv, d):
+ # Unlike the function above, there is no way to split a static library into
+ # two components. So to get similar results we will copy the unmodified
+ # static library (containing the debug symbols) into a new directory.
+ # We will then strip (preserving symbols) the static library in the
+ # typical location.
+ #
+ # return a mapping of files:debugsources
+
+ import stat
+ import shutil
+
+ src = file[len(dvar):]
+ dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
+ debugfile = dvar + dest
+ sources = []
+
+ # Copy the file...
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+ #bb.note("Copy %s -> %s" % (file, debugfile))
+
+ dvar = d.getVar('PKGD')
+
+ newmode = None
+ if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
+ origmode = os.stat(file)[stat.ST_MODE]
+ newmode = origmode | stat.S_IWRITE | stat.S_IREAD
+ os.chmod(file, newmode)
+
+ # We need to extract the debug src information here...
+ if dv["srcdir"]:
+ sources = source_info(file, d)
+
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+
+ # Copy the unmodified item to the debug directory
+ shutil.copy2(file, debugfile)
+
+ if newmode:
+ os.chmod(file, origmode)
+
+ return (file, sources)
+
+def inject_minidebuginfo(file, dvar, dv, d):
+ # Extract just the symbols from debuginfo into minidebuginfo,
+ # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
+ # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
+
+ import subprocess
+
+ readelf = d.getVar('READELF')
+ nm = d.getVar('NM')
+ objcopy = d.getVar('OBJCOPY')
+
+ minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
+
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ debugfile = dvar + dest
+ minidebugfile = minidebuginfodir + src + '.minidebug'
+ bb.utils.mkdirhier(os.path.dirname(minidebugfile))
+
+ # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
+ # so skip it.
+ if not os.path.exists(debugfile):
+ bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
+ return
+
+ # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
+ # We will exclude all of these from minidebuginfo to save space.
+ remove_section_names = []
+ for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
+ fields = line.split()
+ if len(fields) < 8:
+ continue
+ name = fields[0]
+ type = fields[1]
+ flags = fields[7]
+ # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
+ if name.startswith('.debug_'):
+ continue
+ if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
+ remove_section_names.append(name)
+
+ # List dynamic symbols in the binary. We can exclude these from minidebuginfo
+ # because they are always present in the binary.
+ dynsyms = set()
+ for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
+ dynsyms.add(line.split()[0])
+
+ # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
+ # These are the ones we want to keep in minidebuginfo.
+ keep_symbols_file = minidebugfile + '.symlist'
+ found_any_symbols = False
+ with open(keep_symbols_file, 'w') as f:
+ for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
+ fields = line.split('|')
+ if len(fields) < 7:
+ continue
+ name = fields[0].strip()
+ type = fields[3].strip()
+ if type == 'FUNC' and name not in dynsyms:
+ f.write('{}\n'.format(name))
+ found_any_symbols = True
+
+ if not found_any_symbols:
+ bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
+ return
+
+ bb.utils.remove(minidebugfile)
+ bb.utils.remove(minidebugfile + '.xz')
+
+ subprocess.check_call([objcopy, '-S'] +
+ ['--remove-section={}'.format(s) for s in remove_section_names] +
+ ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
+
+ subprocess.check_call(['xz', '--keep', minidebugfile])
+
+ subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
+
+def copydebugsources(debugsrcdir, sources, d):
+ # The debug src information written out to sourcefile is further processed
+ # and copied to the destination here.
+
+ import stat
+ import subprocess
+
+ if debugsrcdir and sources:
+ sourcefile = d.expand("${WORKDIR}/debugsources.list")
+ bb.utils.remove(sourcefile)
+
+ # filenames are null-separated - this is an artefact of the previous use
+ # of rpm's debugedit, which was writing them out that way, and the code elsewhere
+ # is still assuming that.
+ debuglistoutput = '\0'.join(sources) + '\0'
+ with open(sourcefile, 'a') as sf:
+ sf.write(debuglistoutput)
+
+ dvar = d.getVar('PKGD')
+ strip = d.getVar("STRIP")
+ objcopy = d.getVar("OBJCOPY")
+ workdir = d.getVar("WORKDIR")
+ sdir = d.getVar("S")
+ cflags = d.expand("${CFLAGS}")
+
+ prefixmap = {}
+ for flag in cflags.split():
+ if not flag.startswith("-fdebug-prefix-map"):
+ continue
+ if "recipe-sysroot" in flag:
+ continue
+ flag = flag.split("=")
+ prefixmap[flag[1]] = flag[2]
+
+ nosuchdir = []
+ basepath = dvar
+ for p in debugsrcdir.split("/"):
+ basepath = basepath + "/" + p
+ if not cpath.exists(basepath):
+ nosuchdir.append(basepath)
+ bb.utils.mkdirhier(basepath)
+ cpath.updatecache(basepath)
+
+ for pmap in prefixmap:
+ # Ignore files from the recipe sysroots (target and native)
+ cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
+ # We need to ignore files that are not actually ours
+ # we do this by only paying attention to items from this package
+ cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
+ # Remove prefix in the source paths
+ cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
+ cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
+
+ try:
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # Can "fail" if internal headers/transient sources are attempted
+ pass
+ # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
+ # Work around this by manually finding and copying any symbolic links that made it through.
+ cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
+ (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # debugsources.list may be polluted from the host if we used externalsrc,
+ # cpio uses copy-pass and may have just created a directory structure
+ # matching the one from the host; if that's the case, move those files to
+ # debugsrcdir to avoid host contamination.
+ # Empty dir structure will be deleted in the next step.
+
+ # Same check as above for externalsrc
+ if workdir not in sdir:
+ if os.path.exists(dvar + debugsrcdir + sdir):
+ cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # The copy by cpio may have resulted in some empty directories! Remove these
+ cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # Also remove debugsrcdir if it's empty
+ for p in nosuchdir[::-1]:
+ if os.path.exists(p) and not os.listdir(p):
+ os.rmdir(p)
+
+#
+# Package data handling routines
+#
+
+def get_package_mapping (pkg, basepkg, d, depversions=None):
+ import oe.packagedata
+
+ data = oe.packagedata.read_subpkgdata(pkg, d)
+ key = "PKG:%s" % pkg
+
+ if key in data:
+ if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
+ bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
+ # Have to avoid undoing the write_extra_pkgs(global_variants...)
+ if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
+ and data[key] == basepkg:
+ return pkg
+ if depversions == []:
+ # Avoid returning a mapping if the renamed package rprovides its original name
+ rprovkey = "RPROVIDES:%s" % pkg
+ if rprovkey in data:
+ if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
+ bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
+ return pkg
+ # Do map to rewritten package name
+ return data[key]
+
+ return pkg
+
+def get_package_additional_metadata (pkg_type, d):
+ base_key = "PACKAGE_ADD_METADATA"
+ for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
+ if d.getVar(key, False) is None:
+ continue
+ d.setVarFlag(key, "type", "list")
+ if d.getVarFlag(key, "separator") is None:
+ d.setVarFlag(key, "separator", "\\n")
+ metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
+ return "\n".join(metadata_fields).strip()
+
+def runtime_mapping_rename (varname, pkg, d):
+ #bb.note("%s before: %s" % (varname, d.getVar(varname)))
+
+ new_depends = {}
+ deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
+ for depend, depversions in deps.items():
+ new_depend = get_package_mapping(depend, pkg, d, depversions)
+ if depend != new_depend:
+ bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
+ new_depends[new_depend] = deps[depend]
+
+ d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
+
+ #bb.note("%s after: %s" % (varname, d.getVar(varname)))
+
+#
+# Used by do_packagedata (and possibly other routines post do_package)
+#
+
+PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
+PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
+package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
+package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
+python package_get_auto_pr() {
+ import oe.prservice
+
+ def get_do_package_hash(pn):
+ if d.getVar("BB_RUNTASK") != "do_package":
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ for dep in taskdepdata:
+ if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
+ return taskdepdata[dep][6]
+ return None
+
+ # Support per recipe PRSERV_HOST
+ pn = d.getVar('PN')
+ host = d.getVar("PRSERV_HOST_" + pn)
+ if not (host is None):
+ d.setVar("PRSERV_HOST", host)
+
+ pkgv = d.getVar("PKGV")
+
+ # PR Server not active, handle AUTOINC
+ if not d.getVar('PRSERV_HOST'):
+ d.setVar("PRSERV_PV_AUTOINC", "0")
+ return
+
+ auto_pr = None
+ pv = d.getVar("PV")
+ version = d.getVar("PRAUTOINX")
+ pkgarch = d.getVar("PACKAGE_ARCH")
+ checksum = get_do_package_hash(pn)
+
+ # If do_package isn't in the dependencies, we can't get the checksum...
+ if not checksum:
+ bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
+ #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ #for dep in taskdepdata:
+ # bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
+ return
+
+ if d.getVar('PRSERV_LOCKDOWN'):
+ auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
+ if auto_pr is None:
+ bb.fatal("Can NOT get PRAUTO from lockdown exported file")
+ d.setVar('PRAUTO',str(auto_pr))
+ return
+
+ try:
+ conn = oe.prservice.prserv_make_conn(d)
+ if conn is not None:
+ if "AUTOINC" in pkgv:
+ srcpv = bb.fetch2.get_srcrev(d)
+ base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
+ value = conn.getPR(base_ver, pkgarch, srcpv)
+ d.setVar("PRSERV_PV_AUTOINC", str(value))
+
+ auto_pr = conn.getPR(version, pkgarch, checksum)
+ conn.close()
+ except Exception as e:
+ bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
+ if auto_pr is None:
+ bb.fatal("Can NOT get PRAUTO from remote PR service")
+ d.setVar('PRAUTO',str(auto_pr))
+}
+
+#
+# Package functions suitable for inclusion in PACKAGEFUNCS
+#
+
+python package_convert_pr_autoinc() {
+ pkgv = d.getVar("PKGV")
+
+ # Adjust pkgv as necessary...
+ if 'AUTOINC' in pkgv:
+ d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
+
+ # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
+ d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
+ d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
+}
+
+LOCALEBASEPN ??= "${PN}"
+
+python package_do_split_locales() {
+ if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
+ bb.debug(1, "package requested not splitting locales")
+ return
+
+ packages = (d.getVar('PACKAGES') or "").split()
+
+ datadir = d.getVar('datadir')
+ if not datadir:
+ bb.note("datadir not defined")
+ return
+
+ dvar = d.getVar('PKGD')
+ pn = d.getVar('LOCALEBASEPN')
+
+ if pn + '-locale' in packages:
+ packages.remove(pn + '-locale')
+
+ localedir = os.path.join(dvar + datadir, 'locale')
+
+ if not cpath.isdir(localedir):
+ bb.debug(1, "No locale files in this package")
+ return
+
+ locales = os.listdir(localedir)
+
+ summary = d.getVar('SUMMARY') or pn
+ description = d.getVar('DESCRIPTION') or ""
+ locale_section = d.getVar('LOCALE_SECTION')
+ mlprefix = d.getVar('MLPREFIX') or ""
+ for l in sorted(locales):
+ ln = legitimize_package_name(l)
+ pkg = pn + '-locale-' + ln
+ packages.append(pkg)
+ d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
+ d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
+ d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
+ d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
+ d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
+ if locale_section:
+ d.setVar('SECTION:' + pkg, locale_section)
+
+ d.setVar('PACKAGES', ' '.join(packages))
+
+ # Disabled by RP 18/06/07
+ # Wildcards aren't supported in debian
+ # They break with ipkg since glibc-locale* will mean that
+ # glibc-localedata-translit* won't install as a dependency
+ # for some other package which breaks meta-toolchain
+ # Probably breaks since virtual-locale- isn't provided anywhere
+ #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
+ #rdep.append('%s-locale*' % pn)
+ #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
+}
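+
+# Illustrative example (hypothetical recipe): with LOCALEBASEPN = "myapp" and an
+# installed locale directory "de_DE", the function above creates a package
+# named myapp-locale-de-de with FILES set to ${datadir}/locale/de_DE.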
+
+python perform_packagecopy () {
+ import subprocess
+ import shutil
+
+ dest = d.getVar('D')
+ dvar = d.getVar('PKGD')
+
+ # Start package population by taking a copy of the installed
+ # files to operate on
+ # Preserve sparse files and hard links
+ cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # replace RPATHs for the nativesdk binaries, to make them relocatable
+ if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
+ rpath_replace (dvar, d)
+}
+perform_packagecopy[cleandirs] = "${PKGD}"
+perform_packagecopy[dirs] = "${PKGD}"
+
+# We generate a master list of directories to process; we start by
+# seeding this list with reasonable defaults, then load from
+# the fs-perms.txt files
+python fixup_perms () {
+ import pwd, grp
+
+ # init using a string with the same format as a line as documented in
+ # the fs-perms.txt file
+ # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
+ # <path> link <link target>
+ #
+ # __str__ can be used to print out an entry in the input format
+ #
+ # if fs_perms_entry.path is None:
+ # an error occurred
+ # if fs_perms_entry.link, you can retrieve:
+ # fs_perms_entry.path = path
+ # fs_perms_entry.link = target of link
+ # if not fs_perms_entry.link, you can retrieve:
+ # fs_perms_entry.path = path
+ # fs_perms_entry.mode = expected dir mode or None
+ # fs_perms_entry.uid = expected uid or -1
+ # fs_perms_entry.gid = expected gid or -1
+ # fs_perms_entry.walk = 'true' or something else
+ # fs_perms_entry.fmode = expected file mode or None
+ # fs_perms_entry.fuid = expected file uid or -1
+ # fs_perms_entry.fgid = expected file gid or -1
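+ # Illustrative example entries in that format (paths are examples, not taken
+ # from the shipped fs-perms.txt):
+ #   ${localstatedir}/cache 0755 root root true 0644 root root
+ #   ${localstatedir}/run link /run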
+ class fs_perms_entry():
+ def __init__(self, line):
+ lsplit = line.split()
+ if len(lsplit) == 3 and lsplit[1].lower() == "link":
+ self._setlink(lsplit[0], lsplit[2])
+ elif len(lsplit) == 8:
+ self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
+ else:
+ msg = "Fixup Perms: invalid config line %s" % line
+ oe.qa.handle_error("perm-config", msg, d)
+ self.path = None
+ self.link = None
+
+ def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
+ self.path = os.path.normpath(path)
+ self.link = None
+ self.mode = self._procmode(mode)
+ self.uid = self._procuid(uid)
+ self.gid = self._procgid(gid)
+ self.walk = walk.lower()
+ self.fmode = self._procmode(fmode)
+ self.fuid = self._procuid(fuid)
+ self.fgid = self._procgid(fgid)
+
+ def _setlink(self, path, link):
+ self.path = os.path.normpath(path)
+ self.link = link
+
+ def _procmode(self, mode):
+ if not mode or (mode and mode == "-"):
+ return None
+ else:
+ return int(mode,8)
+
+ # Note uid/gid -1 has special significance in os.lchown
+ def _procuid(self, uid):
+ if uid is None or uid == "-":
+ return -1
+ elif uid.isdigit():
+ return int(uid)
+ else:
+ return pwd.getpwnam(uid).pw_uid
+
+ def _procgid(self, gid):
+ if gid is None or gid == "-":
+ return -1
+ elif gid.isdigit():
+ return int(gid)
+ else:
+ return grp.getgrnam(gid).gr_gid
+
+ # Use for debugging the entries
+ def __str__(self):
+ if self.link:
+ return "%s link %s" % (self.path, self.link)
+ else:
+ mode = "-"
+ if self.mode:
+ mode = "0%o" % self.mode
+ fmode = "-"
+ if self.fmode:
+ fmode = "0%o" % self.fmode
+ uid = self._mapugid(self.uid)
+ gid = self._mapugid(self.gid)
+ fuid = self._mapugid(self.fuid)
+ fgid = self._mapugid(self.fgid)
+ return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
+
+ def _mapugid(self, id):
+ if id is None or id == -1:
+ return "-"
+ else:
+ return "%d" % id
+
+ # Fix the permission, owner and group of path
+ def fix_perms(path, mode, uid, gid, dir):
+ if mode and not os.path.islink(path):
+ #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
+ os.chmod(path, mode)
+ # -1 is a special value that means don't change the uid/gid
+ # if they are BOTH -1, don't bother to lchown
+ if not (uid == -1 and gid == -1):
+ #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
+ os.lchown(path, uid, gid)
+
+ # Return a list of configuration files based on either the default
+ # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES;
+ # paths are resolved via BBPATH
+ def get_fs_perms_list(d):
+ str = ""
+ bbpath = d.getVar('BBPATH')
+ fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
+ for conf_file in fs_perms_tables.split():
+ confpath = bb.utils.which(bbpath, conf_file)
+ if confpath:
+ str += " %s" % bb.utils.which(bbpath, conf_file)
+ else:
+ bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
+ return str
+
+
+
+ dvar = d.getVar('PKGD')
+
+ fs_perms_table = {}
+ fs_link_table = {}
+
+ # By default all of the standard directories specified in
+ # bitbake.conf will get 0755 root:root.
+ target_path_vars = [ 'base_prefix',
+ 'prefix',
+ 'exec_prefix',
+ 'base_bindir',
+ 'base_sbindir',
+ 'base_libdir',
+ 'datadir',
+ 'sysconfdir',
+ 'servicedir',
+ 'sharedstatedir',
+ 'localstatedir',
+ 'infodir',
+ 'mandir',
+ 'docdir',
+ 'bindir',
+ 'sbindir',
+ 'libexecdir',
+ 'libdir',
+ 'includedir',
+ 'oldincludedir' ]
+
+ for path in target_path_vars:
+ dir = d.getVar(path) or ""
+ if dir == "":
+ continue
+ fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
+
+ # Now we actually load from the configuration files
+ for conf in get_fs_perms_list(d).split():
+ if not os.path.exists(conf):
+ continue
+ with open(conf) as f:
+ for line in f:
+ if line.startswith('#'):
+ continue
+ lsplit = line.split()
+ if len(lsplit) == 0:
+ continue
+ if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
+ msg = "Fixup perms: %s invalid line: %s" % (conf, line)
+ oe.qa.handle_error("perm-line", msg, d)
+ continue
+ entry = fs_perms_entry(d.expand(line))
+ if entry and entry.path:
+ if entry.link:
+ fs_link_table[entry.path] = entry
+ if entry.path in fs_perms_table:
+ fs_perms_table.pop(entry.path)
+ else:
+ fs_perms_table[entry.path] = entry
+ if entry.path in fs_link_table:
+ fs_link_table.pop(entry.path)
+
+ # Debug -- list out in-memory table
+ #for dir in fs_perms_table:
+ # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
+ #for link in fs_link_table:
+ # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
+
+ # We process links first, so we can go back and fixup directory ownership
+ # for any newly created directories
+ # Process in sorted order so /run gets created before /run/lock, etc.
+ for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
+ link = entry.link
+ dir = entry.path
+ origin = dvar + dir
+ if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
+ continue
+
+ if link[0] == "/":
+ target = dvar + link
+ ptarget = link
+ else:
+ target = os.path.join(os.path.dirname(origin), link)
+ ptarget = os.path.join(os.path.dirname(dir), link)
+ if os.path.exists(target):
+ msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
+ oe.qa.handle_error("perm-link", msg, d)
+ continue
+
+ # Create path to move directory to, move it, and then setup the symlink
+ bb.utils.mkdirhier(os.path.dirname(target))
+ #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
+ bb.utils.rename(origin, target)
+ #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
+ os.symlink(link, origin)
+
+ for dir in fs_perms_table:
+ origin = dvar + dir
+ if not (cpath.exists(origin) and cpath.isdir(origin)):
+ continue
+
+ fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+
+ if fs_perms_table[dir].walk == 'true':
+ for root, dirs, files in os.walk(origin):
+ for dr in dirs:
+ each_dir = os.path.join(root, dr)
+ fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+ for f in files:
+ each_file = os.path.join(root, f)
+ fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
+}
+
+def package_debug_vars(d):
+ # We default to '.debug' style
+ if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
+ # Single debug-file-directory style debug info
+ debug_vars = {
+ "append": ".debug",
+ "staticappend": "",
+ "dir": "",
+ "staticdir": "",
+ "libdir": "/usr/lib/debug",
+ "staticlibdir": "/usr/lib/debug-static",
+ "srcdir": "/usr/src/debug",
+ }
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
+ # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "",
+ }
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "/usr/src/debug",
+ }
+ else:
+ # Original OE-core, a.k.a. ".debug", style debug info
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "/usr/src/debug",
+ }
+
+ return debug_vars
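+
+# Illustrative example (hypothetical binary): for /usr/bin/foo the default
+# ".debug" style stores split debug data in /usr/bin/.debug/foo, while the
+# 'debug-file-directory' style stores it in /usr/lib/debug/usr/bin/foo.debug,
+# with sources (when enabled) copied under /usr/src/debug.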
+
+python split_and_strip_files () {
+ import stat, errno
+ import subprocess
+
+ dvar = d.getVar('PKGD')
+ pn = d.getVar('PN')
+ hostos = d.getVar('HOST_OS')
+
+ oldcwd = os.getcwd()
+ os.chdir(dvar)
+
+ dv = package_debug_vars(d)
+
+ #
+ # First lets figure out all of the files we may have to process ... do this only once!
+ #
+ elffiles = {}
+ symlinks = {}
+ staticlibs = []
+ inodes = {}
+ libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+ baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+ skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
+ if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
+ d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
+ checkelf = {}
+ checkelflinks = {}
+ for root, dirs, files in cpath.walk(dvar):
+ for f in files:
+ file = os.path.join(root, f)
+
+ # Skip debug files
+ if dv["append"] and file.endswith(dv["append"]):
+ continue
+ if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
+ continue
+
+ if file in skipfiles:
+ continue
+
+ if oe.package.is_static_lib(file):
+ staticlibs.append(file)
+ continue
+
+ try:
+ ltarget = cpath.realpath(file, dvar, False)
+ s = cpath.lstat(ltarget)
+ except OSError as e:
+ (err, strerror) = e.args
+ if err != errno.ENOENT:
+ raise
+ # Skip broken symlinks
+ continue
+ if not s:
+ continue
+ # Check it's an executable
+ if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
+ or (s[stat.ST_MODE] & stat.S_IXOTH) \
+ or ((file.startswith(libdir) or file.startswith(baselibdir)) \
+ and (".so" in f or ".node" in f)) \
+ or (f.startswith('vmlinux') or ".ko" in f):