Mirror of https://git.yoctoproject.org/poky (synced 2026-02-21 17:09:42 +01:00)

Compare commits: `kirkstone-` ... `yocto-4.0.` (105 commits)
The compare covers the following 105 commits (short SHA1s):

65dafea220, f4153b1dc4, cbd2312210, 93b4705b76, ca564b9513, 7b199d5083, 82bd71e1cb, b6f577e2e7,
dc62b99bdf, 9fecba6b89, 738cb2a735, 3edd7714c1, f26e5d7500, 692ea97f20, 2a693e5499, 2453dfa90a,
370c87ac60, 74d5d34367, 79a8d5e6b4, 7dce079e09, fd8bc329dc, 35ade176ff, 533b22a811, 79814c6339,
1fcc005666, 067465faa1, c8b7bd3158, 4aad4cc620, 80880c552c, b899f258be, 7aaba58845, 876ff61388,
bba70ce341, db86e51343, 0b4b2c007d, fdcaaeebb6, 94e4412765, d65382908c, 07213601fd, fd36d262b8,
92b150b9f3, da271d70f0, d95be1c7ed, 0118853b5b, cb89546620, 73cbe478a5, 36209ca38f, f8187daf0a,
d369e47a82, 7750385191, 60f1d08fa6, fd2d945820, 80dee2dad3, 9c9bb12c72, d3e3783973, ebfc6bdba4,
cf42cc0ef0, 4641f83d93, e2c6d1436a, ffa4177a45, ea2fc2b81d, 45f3a4fd63, 24cdd5bb63, 3902078452,
99483cff5c, 4f3a354071, f4c7ae358e, dc189cf536, 9cd4e36dd5, 11555b5d2b, 85f60741c5, 1aa3cb0169,
754c0e4cc7, c033fb48e3, 99f1aa4736, 7af48055e3, 3fd177d610, dde151f3ad, 53360000fd, 47edd3bbdd,
89732f25f2, 2d58f993c3, 192cb88c17, b6879ce896, 18da62e637, bbe38cd637, 2f4691f403, 5c7103d6c7,
6e1c131b2f, 3cdbb5cc24, f86718661a, 319ccc97b4, a11a62209e, b627db0656, 445a2069cd, 41dbb14cb9,
13d7119cdf, 2bed21abab, c4bbc6d9c5, f7133e57f8, 531d6fe602, c5cdf68773, 8c4aa01b45, f930572051,
a00404e18e
@@ -11713,12 +11713,13 @@ containing an index of JSON :term:`SPDX` files for individual recipes, together
with an ``IMAGE-MACHINE.spdx.tar.zst`` compressed archive containing all such
files.

The :ref:`create-spdx <ref-classes-create-spdx>` class offers options to include
more information in the output :term:`SPDX` data, such as adding compressed
archives of the files in the generated target packages
(:term:`SPDX_ARCHIVE_PACKAGED`), adding a description of the source files
handled by the target recipes (:term:`SPDX_INCLUDE_SOURCES`) and adding archives
of these source files themselves (:term:`SPDX_ARCHIVE_SOURCES`).
The :ref:`ref-classes-create-spdx` class offers options to include
more information in the output :term:`SPDX` data, such as making the generated
files more human readable (:term:`SPDX_PRETTY`), adding compressed archives of
the files in the generated target packages (:term:`SPDX_ARCHIVE_PACKAGED`),
adding a description of the source files used to generate host tools and target
packages (:term:`SPDX_INCLUDE_SOURCES`) and adding archives of these source
files themselves (:term:`SPDX_ARCHIVE_SOURCES`).

Though the toplevel :term:`SPDX` output is available in
``tmp/deploy/images/MACHINE/`` inside the :term:`Build Directory`, ancillary
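As a rough sketch of how the options described in this hunk are enabled (an illustrative ``local.conf`` fragment; the variable names are the ones listed above, the values are examples):

```
# Enable SBOM generation with the create-spdx class
INHERIT += "create-spdx"

# Optional extras described above -- each one increases the size of the output
SPDX_PRETTY = "1"               # indent the generated JSON for human readers
SPDX_ARCHIVE_PACKAGED = "1"     # archive the files shipped in each package
SPDX_INCLUDE_SOURCES = "1"      # describe the source files handled by recipes
SPDX_ARCHIVE_SOURCES = "1"      # also archive those source files
```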
@@ -36,7 +36,7 @@ How do I install/not-install the kernel image on the root filesystem?
The kernel image (e.g. ``vmlinuz``) is provided by the
``kernel-image`` package. Image recipes depend on ``kernel-base``. To
specify whether or not the kernel image is installed in the generated
root filesystem, override ``RDEPENDS:${KERNEL_PACKAGE_NAME}-base`` to include or not
root filesystem, override ``RRECOMMENDS:${KERNEL_PACKAGE_NAME}-base`` to include or not
include "kernel-image". See the
":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
section in the
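A minimal sketch of the override this answer describes (for example in a kernel recipe ``.bbappend``; ``KERNEL_PACKAGE_NAME`` defaults to ``kernel``, so the package is normally ``kernel-base``):

```
# Keep the kernel image out of the generated root filesystem.
# Since kirkstone, kernel-base only *recommends* kernel-image,
# so clearing the recommendation is enough.
RRECOMMENDS:${KERNEL_PACKAGE_NAME}-base = ""
```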
@@ -265,3 +265,6 @@ Miscellaneous changes
  when parsing recipes. Any code depending on the previous behaviour will no longer
  work - change any such code to explicitly use appropriate path variables instead.

- In order to exclude the kernel image from the image rootfs,
  :term:`RRECOMMENDS`\ ``:${KERNEL_PACKAGE_NAME}-base`` should be set instead of
  :term:`RDEPENDS`\ ``:${KERNEL_PACKAGE_NAME}-base``.
@@ -9,3 +9,5 @@ Release 4.0 (kirkstone)
   release-notes-4.0.2
   release-notes-4.0.3
   release-notes-4.0.4
   release-notes-4.0.5
   release-notes-4.0.6
documentation/migration-guides/release-notes-4.0.5.rst (new file, 196 lines)
@@ -0,0 +1,196 @@
Release notes for Yocto-4.0.5 (Kirkstone)
-----------------------------------------

Security Fixes in Yocto-4.0.5
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

- qemu: fix :cve:`2021-3750`, :cve:`2021-3611` and :cve:`2022-2962`
- binutils: fix :cve:`2022-38126`, :cve:`2022-38127` and :cve:`2022-38128`
- tiff: fix :cve:`2022-2867`, :cve:`2022-2868` and :cve:`2022-2869`
- inetutils: fix :cve:`2022-39028`
- go: fix :cve:`2022-27664`

Fixes in Yocto-4.0.5
~~~~~~~~~~~~~~~~~~~~

- Revert "gcc-cross-canadian: Add symlink to real-ld alongside other symlinks"
- bind: upgrade to 9.18.7
- binutils: stable 2.38 branch updates (dc2474e7)
- bitbake: Fix npm to use https rather than http
- bitbake: asyncrpc/client: Fix unix domain socket chdir race issues
- bitbake: bitbake: Add copyright headers where missing
- bitbake: gitsm: Error out if submodule refers to parent repo
- bitbake: runqueue: Drop deadlock breaking force fail
- bitbake: runqueue: Ensure deferred tasks are sorted by multiconfig
- bitbake: runqueue: Improve deadlock warning messages
- bitbake: siggen: Fix insufficient entropy in sigtask file names
- bitbake: tests/fetch: Allow handling of a file:// url within a submodule
- build-appliance-image: Update to kirkstone head revision (4a88ada)
- busybox: add devmem 128-bit support
- classes: files: Extend overlayfs-etc class
- coreutils: add openssl PACKAGECONFIG
- create-pull-request: don't switch the git remote protocol to git://
- dev-manual: fix reference to BitBake user manual
- expat: upgrade 2.4.8 -> 2.4.9
- files: overlayfs-etc: refactor preinit template
- gcc-cross-canadian: add default plugin linker
- gcc: add arm-v9 support
- git: upgrade 2.35.4 -> 2.35.5
- glibc-locale: explicitly remove empty dirs in ${libdir}
- glibc-tests: use += instead of :append
- glibc: stable 2.35 branch updates (8d125a1f)
- go-native: switch from SRC_URI:append to SRC_URI +=
- image_types_wic.bbclass: fix cross binutils dependency
- kern-tools: allow 'y' or 'm' to avoid config audit warnings
- kern-tools: fix queue processing in relative TOPDIR configurations
- kernel-yocto: allow patch author date to be commit date
- libpng: upgrade to 1.6.38
- linux-firmware: package new Qualcomm firmware
- linux-firmware: upgrade 20220708 -> 20220913
- linux-libc-headers: switch from SRC_URI:append to SRC_URI +=
- linux-yocto-dev: add qemuarm64
- linux-yocto/5.10: update to v5.10.149
- linux-yocto/5.15: cfg: fix ACPI warnings for -tiny
- linux-yocto/5.15: update to v5.15.68
- local.conf.sample: correct the location of public hashserv
- ltp: Fix pread02 case trigger the glibc overflow detection
- lttng-modules: Fix crash on powerpc64
- lttng-tools: Disable on qemuriscv32
- lttng-tools: Disable on riscv32
- migration-guides: add 4.0.4 release notes
- oeqa/runtime/dnf: fix typo
- own-mirrors: add crate
- perf: Fix for recent kernel upgrades
- poky.conf: bump version for 4.0.5
- poky.yaml.in: update version requirements
- python3-rfc3986-validator: switch from SRC_URI:append to SRC_URI +=
- python3: upgrade 3.10.4 -> 3.10.7
- qemu: Backport patches from upstream to support float128 on qemu-ppc64
- rpm: Remove -Wimplicit-function-declaration warnings
- rpm: update to 4.17.1
- rsync: update to 3.2.5
- stress-cpu: disable float128 math on powerpc64 to avoid SIGILL
- tune-neoversen2: support tune-neoversen2 base on armv9a
- tzdata: update to 2022d
- u-boot: switch from append to += in SRC_URI
- uninative: Upgrade to 3.7 to work with glibc 2.36
- vim: Upgrade to 9.0.0598
- webkitgtk: Update to 2.36.7


Known Issues in Yocto-4.0.5
~~~~~~~~~~~~~~~~~~~~~~~~~~~

- There are recent CVEs in key components such as openssl. They are not included in this release as it was built before the issues were known and fixes were available, but these are now available on the kirkstone branch.


Contributors to Yocto-4.0.5
~~~~~~~~~~~~~~~~~~~~~~~~~~~

- Adrian Freihofer
- Alexander Kanavin
- Alexandre Belloni
- Bhabu Bindu
- Bruce Ashfield
- Chen Qi
- Daniel McGregor
- Denys Dmytriyenko
- Dmitry Baryshkov
- Florin Diaconescu
- He Zhe
- Joshua Watt
- Khem Raj
- Martin Jansa
- Michael Halstead
- Michael Opdenacker
- Mikko Rapeli
- Mingli Yu
- Neil Horman
- Pavel Zhukov
- Richard Purdie
- Robert Joslyn
- Ross Burton
- Ruiqiang Hao
- Samuli Piippo
- Steve Sakoman
- Sundeep KOKKONDA
- Teoh Jay Shen
- Tim Orling
- Virendra Thakur
- Vyacheslav Yurkov
- Xiangyu Chen
- Yash Shinde
- pgowda
- Wang Mingyu


Repositories / Downloads for Yocto-4.0.5
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

poky

- Repository Location: :yocto_git:`/poky`
- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.5 </poky/log/?h=yocto-4.0.5>`
- Git Revision: :yocto_git:`2e79b199114b25d81bfaa029ccfb17676946d20d </poky/commit/?id=2e79b199114b25d81bfaa029ccfb17676946d20d>`
- Release Artefact: poky-2e79b199114b25d81bfaa029ccfb17676946d20d
- sha: 7bcf3f901d4c5677fc95944ab096e9e306f4c758a658dde5befd16861ad2b8ea
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/poky-2e79b199114b25d81bfaa029ccfb17676946d20d.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/poky-2e79b199114b25d81bfaa029ccfb17676946d20d.tar.bz2

openembedded-core

- Repository Location: :oe_git:`/openembedded-core`
- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
- Tag: :oe_git:`yocto-4.0.5 </openembedded-core/log/?h=yocto-4.0.5>`
- Git Revision: :oe_git:`fbdf93f43ff4b876487e1f26752598ec8abcb46e </openembedded-core/commit/?id=fbdf93f43ff4b876487e1f26752598ec8abcb46e>`
- Release Artefact: oecore-fbdf93f43ff4b876487e1f26752598ec8abcb46e
- sha: 2d9b5a8e9355b633bb57633cc8c2d319ba13fe4721f79204e61116b3faa6cbf1
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/oecore-fbdf93f43ff4b876487e1f26752598ec8abcb46e.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/oecore-fbdf93f43ff4b876487e1f26752598ec8abcb46e.tar.bz2

meta-mingw

- Repository Location: :yocto_git:`/meta-mingw`
- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.5 </meta-mingw/log/?h=yocto-4.0.5>`
- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2

meta-gplv2

- Repository Location: :yocto_git:`/meta-gplv2`
- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.5 </meta-gplv2/log/?h=yocto-4.0.5>`
- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2

bitbake

- Repository Location: :oe_git:`/bitbake`
- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
- Tag: :oe_git:`yocto-4.0.5 </bitbake/log/?h=yocto-4.0.5>`
- Git Revision: :oe_git:`c90d57497b9bcd237c3ae810ee8edb5b0d2d575a </bitbake/commit/?id=c90d57497b9bcd237c3ae810ee8edb5b0d2d575a>`
- Release Artefact: bitbake-c90d57497b9bcd237c3ae810ee8edb5b0d2d575a
- sha: 5698d548ce179036e46a24f80b213124c8825a4f443fa1d6be7ab0f70b01a9ff
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/bitbake-c90d57497b9bcd237c3ae810ee8edb5b0d2d575a.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/bitbake-c90d57497b9bcd237c3ae810ee8edb5b0d2d575a.tar.bz2

yocto-docs

- Repository Location: :yocto_git:`/yocto-docs`
- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.5 </yocto-docs/log/?h=yocto-4.0.5>`
- Git Revision: :yocto_git:`8c2f9f54e29781f4ee72e81eeaa12ceaa82dc2d3 </yocto-docs/commit/?id=8c2f9f54e29781f4ee72e81eeaa12ceaa82dc2d3>`
documentation/migration-guides/release-notes-4.0.6.rst (new file, 313 lines)
@@ -0,0 +1,313 @@
.. SPDX-License-Identifier: CC-BY-SA-2.0-UK

Release notes for Yocto-4.0.6 (Kirkstone)
-----------------------------------------

Security Fixes in Yocto-4.0.6
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

- bash: Fix :cve:`2022-3715`
- curl: Fix :cve:`2022-32221`, :cve:`2022-42915` and :cve:`2022-42916`
- dbus: Fix :cve:`2022-42010`, :cve:`2022-42011` and :cve:`2022-42012`
- dropbear: Fix :cve:`2021-36369`
- ffmpeg: Fix :cve:`2022-3964`, :cve:`2022-3965`
- go: Fix :cve:`2022-2880`
- grub2: Fix :cve:`2022-2601`, :cve:`2022-3775` and :cve:`2022-28736`
- libarchive: Fix :cve:`2022-36227`
- libpam: Fix :cve:`2022-28321`
- libsndfile1: Fix :cve:`2021-4156`
- lighttpd: Fix :cve:`2022-41556`
- openssl: Fix :cve:`2022-3358`
- pixman: Fix :cve:`2022-44638`
- python3-mako: Fix :cve:`2022-40023`
- python3: Fix :cve:`2022-42919`
- qemu: Fix :cve:`2022-3165`
- sysstat: Fix :cve:`2022-39377`
- systemd: Fix :cve:`2022-3821`
- tiff: Fix :cve:`2022-2953`, :cve:`2022-3599`, :cve:`2022-3597`, :cve:`2022-3626`, :cve:`2022-3627`, :cve:`2022-3570`, :cve:`2022-3598` and :cve:`2022-3970`
- vim: Fix :cve:`2022-3352`, :cve:`2022-3705` and :cve:`2022-4141`
- wayland: Fix :cve:`2021-3782`
- xserver-xorg: Fix :cve:`2022-3550` and :cve:`2022-3551`


Fixes in Yocto-4.0.6
~~~~~~~~~~~~~~~~~~~~

- archiver: avoid using machine variable as it breaks multiconfig
- babeltrace: upgrade to 1.5.11
- bind: upgrade to 9.18.8
- bitbake.conf: Drop export of SOURCE_DATE_EPOCH_FALLBACK
- bitbake: gitsm: Fix regression in gitsm submodule path parsing
- bitbake: runqueue: Fix race issues around hash equivalence and sstate reuse
- bluez5: Point hciattach bcm43xx firmware search path to /lib/firmware
- bluez5: add dbus to RDEPENDS
- build-appliance-image: Update to kirkstone head revision
- buildtools-tarball: export certificates to python and curl
- cargo_common.bbclass: Fix typos
- classes: make TOOLCHAIN more permissive for kernel
- cmake-native: Fix host tool contamination (Bug: 14951)
- common-tasks.rst: fix oeqa runtime test path
- create-spdx.bbclass: remove unused SPDX_INCLUDE_PACKAGED
- create-spdx: Remove ";name=..." for downloadLocation
- create-spdx: default share_src for shared sources
- cve-update-db-native: add timeout to urlopen() calls
- dbus: upgrade to 1.14.4
- dhcpcd: fix to work with systemd
- expat: upgrade to 2.5.0
- externalsrc.bbclass: Remove a trailing slash from ${B}
- externalsrc.bbclass: fix git repo detection
- externalsrc: git submodule--helper list unsupported
- gcc-shared-source: Fix source date epoch handling
- gcc-source: Drop gengtype manipulation
- gcc-source: Ensure deploy_source_date_epoch sstate hash doesn't change
- gcc-source: Fix gengtypes race
- gdk-pixbuf: upgrade to 2.42.10
- get_module_deps3.py: Check attribute '__file__'
- glib-2.0: fix rare GFileInfo test case failure
- glibc-locale: Do not INHIBIT_DEFAULT_DEPS
- gnomebase.bbclass: return the whole version for tarball directory if it is a number
- gnutls: Unified package names to lower-case
- groff: submit patches upstream
- gstreamer1.0-libav: fix errors with ffmpeg 5.x
- gstreamer1.0: upgrade to 1.20.4
- ifupdown: upgrade to 0.8.39
- insane.bbclass: Allow hashlib version that only accepts one parameter
- iso-codes: upgrade to 4.12.0
- kea: submit patch upstream (fix-multilib-conflict.patch)
- kern-tools: fix relative path processing
- kern-tools: integrate ZFS speedup patch
- kernel-yocto: improve fatal error messages of symbol_why.py
- kernel.bbclass: Include randstruct seed assets in STAGING_KERNEL_BUILDDIR
- kernel.bbclass: make KERNEL_DEBUG_TIMESTAMPS work at rebuild
- kernel: Clear SYSROOT_DIRS instead of replacing sysroot_stage_all
- libcap: upgrade to 2.66
- libepoxy: convert to git
- libepoxy: update to 1.5.10
- libffi: submit patch upstream (0001-arm-sysv-reverted-clang-VFP-mitigation.patch)
- libffi: upgrade to 3.4.4
- libical: upgrade to 3.0.16
- libksba: upgrade to 1.6.2
- libuv: fixup SRC_URI
- libxcrypt: upgrade to 4.4.30
- lighttpd: upgrade to 1.4.67
- linux-firmware: add new fw file to ${PN}-qcom-adreno-a530
- linux-firmware: don't put the firmware into the sysroot
- linux-firmware: package amdgpu firmware
- linux-firmware: split rtl8761 firmware
- linux-firmware: upgrade to 20221109
- linux-yocto/5.10: update genericx86* machines to v5.10.149
- linux-yocto/5.15: fix CONFIG_CRYPTO_CCM mismatch warnings
- linux-yocto/5.15: update genericx86* machines to v5.15.72
- linux-yocto/5.15: update to v5.15.78
- ltp: backport clock_gettime04 fix from upstream
- lttng-modules: upgrade to 2.13.7
- lttng-tools: Upgrade to 2.13.8
- lttng-tools: submit determinism.patch upstream
- lttng-ust: upgrade to 2.13.5
- meson: make wrapper options sub-command specific
- meta-selftest/staticids: add render group for systemd
- mirrors.bbclass: update CPAN_MIRROR
- mirrors.bbclass: use shallow tarball for binutils-native
- mobile-broadband-provider-info: upgrade 20220725 -> 20221107
- mtd-utils: upgrade 2.1.4 -> 2.1.5
- numactl: upgrade to 2.0.16
- oe/packagemanager/rpm: don't leak file objects
- oeqa/selftest/lic_checksum: Cleanup changes to emptytest include
- oeqa/selftest/minidebuginfo: Create selftest for minidebuginfo
- oeqa/selftest/tinfoil: Add test for separate config_data with recipe_parse_file()
- openssl: Fix SSL_CERT_FILE to match ca-certs location
- openssl: upgrade to 3.0.7
- openssl: export necessary env vars in SDK
- opkg-utils: use a git clone, not a dynamic snapshot
- opkg: Set correct info_dir and status_file in opkg.conf
- overlayfs: Allow not used mount points
- ovmf: correct patches status
- package: Fix handling of minidebuginfo with newer binutils
- perf: Depend on native setuptools3
- poky.conf: bump version for 4.0.6
- psplash: add psplash-default in rdepends
- psplash: consider the situation of psplash not exist for systemd
- python3: advance to version 3.10.8
- qemu-helper-native: Correctly pass program name as argv[0]
- qemu-helper-native: Re-write bridge helper as C program
- qemu-native: Add PACKAGECONFIG option for jack
- qemu: add io_uring PACKAGECONFIG
- quilt: backport a patch to address grep 3.8 failures
- resolvconf: make it work
- rm_work: exclude the SSTATETASKS from the rm_work tasks signature
- runqemu: Do not perturb script environment
- runqemu: Fix gl-es argument from causing other arguments to be ignored
- sanity: Drop data finalize call
- sanity: check for GNU tar specifically
- scripts/oe-check-sstate: cleanup
- scripts/oe-check-sstate: force build to run for all targets, specifically populate_sysroot
- scripts: convert-overrides: Allow command-line customizations
- socat: upgrade to 1.7.4.4
- SPDX and CVE documentation updates
- sstate: Allow optimisation of do_deploy_archives task dependencies
- sstatesig: emit more helpful error message when not finding sstate manifest
- sstatesig: skip the rm_work task signature
- sudo: upgrade to 1.9.12p1
- systemd: Consider PACKAGECONFIG in RRECOMMENDS
- systemd: add group render to udev package
- tcl: correct patch status
- tiff: refresh with devtool
- tiff: add CVE tag to b258ed69a485a9cfb299d9f060eb2a46c54e5903.patch
- u-boot: Remove duplicate inherit of cml1
- uboot-sign: Fix using wrong KEY_REQ_ARGS
- vala: install vapigen-wrapper into /usr/bin/crosscripts and stage only that
- valgrind: remove most hidden tests for arm64
- vim: Upgrade to 9.0.0947
- vulkan-samples: add lfs=0 to SRC_URI to avoid git smudge errors in do_unpack
- wic: honor the SOURCE_DATE_EPOCH in case of updated fstab
- wic: make ext2/3/4 images reproducible
- wic: swap partitions are not added to fstab
- wpebackend-fdo: upgrade to 1.14.0
- xserver-xorg: move some recommended dependencies in required
- xwayland: upgrade to 22.1.5


Known Issues in Yocto-4.0.6
~~~~~~~~~~~~~~~~~~~~~~~~~~~

- N/A


Contributors to Yocto-4.0.6
~~~~~~~~~~~~~~~~~~~~~~~~~~~

- Alex Kiernan
- Alexander Kanavin
- Alexey Smirnov
- Bartosz Golaszewski
- Bernhard Rosenkränzer
- Bhabu Bindu
- Bruce Ashfield
- Chee Yang Lee
- Chen Qi
- Christian Eggers
- Claus Stovgaard
- Diego Sueiro
- Dmitry Baryshkov
- Ed Tanous
- Enrico Jörns
- Etienne Cordonnier
- Frank de Brabander
- Harald Seiler
- Hitendra Prajapati
- Jan-Simon Moeller
- Jeremy Puhlman
- Joe Slater
- John Edward Broadbent
- Jose Quaresma
- Joshua Watt
- Kai Kang
- Keiya Nobuta
- Khem Raj
- Konrad Weihmann
- Leon Anavi
- Liam Beguin
- Marek Vasut
- Mark Hatle
- Martin Jansa
- Michael Opdenacker
- Mikko Rapeli
- Narpat Mali
- Nathan Rossi
- Niko Mauno
- Pavel Zhukov
- Peter Kjellerstedt
- Peter Marko
- Polampalli, Archana
- Qiu, Zheng
- Ravula Adhitya Siddartha
- Richard Purdie
- Ross Burton
- Sakib Sajal
- Sean Anderson
- Sergei Zhmylev
- Steve Sakoman
- Teoh Jay Shen
- Thomas Perrot
- Tim Orling
- Vincent Davis Jr
- Vivek Kumbhar
- Vyacheslav Yurkov
- Wang Mingyu
- Xiangyu Chen
- Zheng Qiu
- Ciaran Courtney


Repositories / Downloads for Yocto-4.0.6
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

poky

- Repository Location: :yocto_git:`/poky`
- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.6 </poky/log/?h=yocto-4.0.6>`
- Git Revision: :yocto_git:`c4e08719a782fd4119eaf643907b80cebf57f88f </poky/commit/?id=c4e08719a782fd4119eaf643907b80cebf57f88f>`
- Release Artefact: poky-c4e08719a782fd4119eaf643907b80cebf57f88f
- sha: 2eb3b323dd2ccd25f9442bfbcbde82bc081fad5afd146a8e6dde439db24a99d4
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/poky-c4e08719a782fd4119eaf643907b80cebf57f88f.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/poky-c4e08719a782fd4119eaf643907b80cebf57f88f.tar.bz2

openembedded-core

- Repository Location: :oe_git:`/openembedded-core`
- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
- Tag: :oe_git:`yocto-4.0.6 </openembedded-core/log/?h=yocto-4.0.6>`
- Git Revision: :oe_git:`45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2 </openembedded-core/commit/?id=45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2>`
- Release Artefact: oecore-45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2
- sha: de8b443365927befe67cc443b60db57563ff0726377223f836a3f3971cf405ec
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/oecore-45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/oecore-45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2.tar.bz2

meta-mingw

- Repository Location: :yocto_git:`/meta-mingw`
- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.6 </meta-mingw/log/?h=yocto-4.0.6>`
- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2

meta-gplv2

- Repository Location: :yocto_git:`/meta-gplv2`
- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.6 </meta-gplv2/log/?h=yocto-4.0.6>`
- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2

bitbake

- Repository Location: :oe_git:`/bitbake`
- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
- Tag: :oe_git:`yocto-4.0.6 </bitbake/log/?h=yocto-4.0.6>`
- Git Revision: :oe_git:`7e268c107bb0240d583d2c34e24a71e373382509 </bitbake/commit/?id=7e268c107bb0240d583d2c34e24a71e373382509>`
- Release Artefact: bitbake-7e268c107bb0240d583d2c34e24a71e373382509
- sha: c3e2899012358c95962c7a5c85cf98dc30c58eae0861c374124e96d9556bb901
- Download Locations:
  http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/bitbake-7e268c107bb0240d583d2c34e24a71e373382509.tar.bz2
  http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/bitbake-7e268c107bb0240d583d2c34e24a71e373382509.tar.bz2

yocto-docs

- Repository Location: :yocto_git:`/yocto-docs`
- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0.6 </yocto-docs/log/?h=yocto-4.0.6>`
- Git Revision: :yocto_git:`c10d65ef3bbdf4fe3abc03e3aef3d4ca8c2ad87f </yocto-docs/commit/?id=c10d65ef3bbdf4fe3abc03e3aef3d4ca8c2ad87f>`
@@ -380,8 +380,8 @@ The toplevel :term:`SPDX` output file is generated in JSON format as a
as well as in ``tmp/deploy/spdx``.

The exact behaviour of this class, and the amount of output can be controlled
by the :term:`SPDX_ARCHIVE_PACKAGED`, :term:`SPDX_ARCHIVE_SOURCES` and
:term:`SPDX_INCLUDE_SOURCES` variables.
by the :term:`SPDX_PRETTY`, :term:`SPDX_ARCHIVE_PACKAGED`,
:term:`SPDX_ARCHIVE_SOURCES` and :term:`SPDX_INCLUDE_SOURCES` variables.

See the description of these variables and the
":ref:`dev-manual/common-tasks:creating a software bill of materials`"
@@ -7165,6 +7165,17 @@ system and gives an overview of their function and contents.
   image), compared to just using the
   :ref:`create-spdx <ref-classes-create-spdx>` class with no option.

:term:`SPDX_PRETTY`
   This option makes the SPDX output more human-readable, using
   indentation and newlines, instead of the default output in a
   single line::

      SPDX_PRETTY = "1"

   The generated SPDX files are approximately 20% bigger, but
   this option is recommended if you want to inspect the SPDX
   output files with a text editor.

:term:`SPDXLICENSEMAP`
   Maps commonly used license names to their SPDX counterparts found in
   ``meta/files/common-licenses/``. For the default :term:`SPDXLICENSEMAP`
@@ -1,7 +1,7 @@
DISTRO = "poky"
DISTRO_NAME = "Poky (Yocto Project Reference Distro)"
#DISTRO_VERSION = "3.4+snapshot-${METADATA_REVISION}"
DISTRO_VERSION = "4.0.6"
DISTRO_VERSION = "4.0.7"
DISTRO_CODENAME = "kirkstone"
SDK_VENDOR = "-pokysdk"
SDK_VERSION = "${@d.getVar('DISTRO_VERSION').replace('snapshot-${METADATA_REVISION}', 'snapshot')}"
@@ -0,0 +1 @@
The third file.
@@ -7,9 +7,12 @@ SRC_URI = "http://downloads.yoctoproject.org/mirror/sources/syslinux-${PV}.tar.x
file://file1 \
file://file2"

SRC_URI:append:class-native = " file://file3"

SRC_URI[md5sum] = "92a253df9211e9c20172796ecf388f13"
SRC_URI[sha256sum] = "26d3986d2bea109d5dc0e4f8c4822a459276cf021125e8c9f23c3cca5d8c850e"

S = "${WORKDIR}/syslinux-${PV}"

EXCLUDE_FROM_WORLD = "1"
BBCLASSEXTEND = "native"
@@ -4,4 +4,7 @@ INHIBIT_DEFAULT_DEPS = "1"
SRC_URI = "file://file1 \
           file://file2"

SRC_URI:append:class-native = " file://file3"

EXCLUDE_FROM_WORLD = "1"
BBCLASSEXTEND = "native"
@@ -0,0 +1 @@
The third file.
@@ -95,6 +95,17 @@ QB_OPT_APPEND:append:qemuriscv32 = " -bios none"
CFLAGS:append:qemuriscv64 = " -mcmodel=medany"


## Emulate image.bbclass
# Handle inherits of any of the image classes we need
IMAGE_CLASSES ??= ""
IMGCLASSES = " ${IMAGE_CLASSES}"
inherit ${IMGCLASSES}
# Set defaults to satisfy IMAGE_FEATURES check
IMAGE_FEATURES ?= ""
IMAGE_FEATURES[type] = "list"
IMAGE_FEATURES[validitems] += ""


# This next part is necessary to trick the build system into thinking
# its building an image recipe so it generates the qemuboot.conf
addtask do_rootfs before do_image after do_install
@@ -132,7 +132,7 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
            # would return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc, this code can check and fix that.
            if "ccache" in srctool:
            if os.path.islink(srctool) and os.path.basename(os.readlink(srctool)) == 'ccache':
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
@@ -24,6 +24,7 @@ SPDX_ARCHIVE_PACKAGED ??= "0"

SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdoc"
SPDX_PRETTY ??= "0"

SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"

@@ -75,6 +76,11 @@ def recipe_spdx_is_native(d, recipe):
def is_work_shared_spdx(d):
    return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))

def get_json_indent(d):
    if d.getVar("SPDX_PRETTY") == "1":
        return 2
    return None

python() {
    import json
    if d.getVar("SPDX_LICENSE_DATA"):
@@ -514,7 +520,7 @@ python do_create_spdx() {

    dep_recipes = collect_dep_recipes(d, doc, recipe)

    doc_sha1 = oe.sbom.write_doc(d, doc, "recipes")
    doc_sha1 = oe.sbom.write_doc(d, doc, "recipes", indent=get_json_indent(d))
    dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))

    recipe_ref = oe.spdx.SPDXExternalDocumentRef()
@@ -579,7 +585,7 @@ python do_create_spdx() {

        add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)

        oe.sbom.write_doc(d, package_doc, "packages")
        oe.sbom.write_doc(d, package_doc, "packages", indent=get_json_indent(d))
}
# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
addtask do_create_spdx after do_package do_packagedata do_unpack before do_populate_sdk do_build do_rm_work
@@ -743,7 +749,7 @@ python do_create_runtime_spdx() {
                )
                seen_deps.add(dep)

        oe.sbom.write_doc(d, runtime_doc, "runtime", spdx_deploy)
        oe.sbom.write_doc(d, runtime_doc, "runtime", spdx_deploy, indent=get_json_indent(d))
}

addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
@@ -939,7 +945,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages):
    image_spdx_path = rootfs_deploydir / (rootfs_name + ".spdx.json")

    with image_spdx_path.open("wb") as f:
        doc.to_json(f, sort_keys=True)
        doc.to_json(f, sort_keys=True, indent=get_json_indent(d))

    num_threads = int(d.getVar("BB_NUMBER_THREADS"))

@@ -997,7 +1003,11 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages):

        index["documents"].sort(key=lambda x: x["filename"])

        index_str = io.BytesIO(json.dumps(index, sort_keys=True).encode("utf-8"))
        index_str = io.BytesIO(json.dumps(
            index,
            sort_keys=True,
            indent=get_json_indent(d),
        ).encode("utf-8"))

        info = tarfile.TarInfo()
        info.name = "index.json"
@@ -1011,4 +1021,4 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages):

    spdx_index_path = rootfs_deploydir / (rootfs_name + ".spdx.index.json")
    with spdx_index_path.open("w") as f:
        json.dump(index, f, sort_keys=True)
        json.dump(index, f, sort_keys=True, indent=get_json_indent(d))
@@ -42,8 +42,8 @@ CVE_CHECK_LOG_JSON ?= "${T}/cve.json"
CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
CVE_CHECK_MANIFEST ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve"
CVE_CHECK_MANIFEST_JSON ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.json"
CVE_CHECK_MANIFEST ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve"
CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.json"
CVE_CHECK_COPY_FILES ??= "1"
CVE_CHECK_CREATE_MANIFEST ??= "1"

@@ -196,7 +196,7 @@ python cve_check_write_rootfs_manifest () {
            recipies.add(pkg_data["PN"])

    bb.note("Writing rootfs CVE manifest")
    deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
    deploy_dir = d.getVar("IMGDEPLOYDIR")
    link_name = d.getVar("IMAGE_LINK_NAME")

    json_data = {"version":"1", "package": []}
@@ -229,7 +229,7 @@ def srctree_hash_files(d, srcdir=None):
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
            if os.path.exists(".gitmodules"):
            if os.path.exists(os.path.join(s_dir, ".gitmodules")):
                submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
                for line in submodule_helper.splitlines():
                    module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
@@ -3,7 +3,7 @@ FILES:${PN} += "${datadir}/icons/hicolor"
GTKIC_VERSION ??= '3'

GTKPN = "${@ 'gtk4' if d.getVar('GTKIC_VERSION') == '4' else 'gtk+3' }"
GTKIC_CMD = "${@ 'gtk-update-icon-cache-3.0.0' if d.getVar('GTKIC_VERSION') == '4' else 'gtk4-update-icon-cache' }"
GTKIC_CMD = "${@ 'gtk4-update-icon-cache' if d.getVar('GTKIC_VERSION') == '4' else 'gtk-update-icon-cache-3.0' }"

#gtk+3/gtk4 require GTK3DISTROFEATURES, DEPENDS on it make all the
#recipes inherit this class require GTK3DISTROFEATURES
@@ -177,8 +177,7 @@ python () {

IMAGE_POSTPROCESS_COMMAND ?= ""

# some default locales
IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
IMAGE_LINGUAS ??= ""

LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"
@@ -67,6 +67,9 @@ FIT_CONF_PREFIX[doc] = "Prefix to use for FIT configuration node name"

FIT_SUPPORTED_INITRAMFS_FSTYPES ?= "cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.zst cpio.gz ext2.gz cpio"

# Allow user to select the default DTB for FIT image when multiple dtb's exists.
FIT_CONF_DEFAULT_DTB ?= ""

# Keys used to sign individually image nodes.
# The keys to sign image nodes must be different from those used to sign
# configuration nodes, otherwise the "required" property, from
@@ -369,6 +372,7 @@ fitimage_emit_section_config() {
    bootscr_line=""
    setup_line=""
    default_line=""
    default_dtb_image="${FIT_CONF_DEFAULT_DTB}"

    # conf node name is selected based on dtb ID if it is present,
    # otherwise its selected based on kernel ID
@@ -411,7 +415,17 @@ fitimage_emit_section_config() {
    # default node is selected based on dtb ID if it is present,
    # otherwise its selected based on kernel ID
    if [ -n "$dtb_image" ]; then
        default_line="default = \"${FIT_CONF_PREFIX}$dtb_image\";"
        # Select default node as user specified dtb when
        # multiple dtb exists.
        if [ -n "$default_dtb_image" ]; then
            if [ -s "${EXTERNAL_KERNEL_DEVICETREE}/$default_dtb_image" ]; then
                default_line="default = \"${FIT_CONF_PREFIX}$default_dtb_image\";"
            else
                bbwarn "Couldn't find a valid user specified dtb in ${EXTERNAL_KERNEL_DEVICETREE}/$default_dtb_image"
            fi
        else
            default_line="default = \"${FIT_CONF_PREFIX}$dtb_image\";"
        fi
    else
        default_line="default = \"${FIT_CONF_PREFIX}$kernel_id\";"
    fi
@@ -540,10 +554,11 @@ fitimage_assemble() {

    if [ -n "${EXTERNAL_KERNEL_DEVICETREE}" ]; then
        dtbcount=1
        for DTB in $(find "${EXTERNAL_KERNEL_DEVICETREE}" \( -name '*.dtb' -o -name '*.dtbo' \) -printf '%P\n' | sort); do
        for DTB in $(find "${EXTERNAL_KERNEL_DEVICETREE}" -name '*.dtb' -printf '%P\n' | sort) \
            $(find "${EXTERNAL_KERNEL_DEVICETREE}" -name '*.dtbo' -printf '%P\n' | sort); do
            DTB=$(echo "$DTB" | tr '/' '_')

            # Skip DTB if we've picked it up previously
            # Skip DTB/DTBO if we've picked it up previously
            echo "$DTBS" | tr ' ' '\n' | grep -xq "$DTB" && continue

            DTBS="$DTBS $DTB"
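A short sketch of how the new ``FIT_CONF_DEFAULT_DTB`` variable introduced above might be used when several device trees are built (the ``.dtb`` file name below is an example, not taken from this change):

```
# Build a FIT image and pick which of the available DTBs becomes the
# default configuration node
KERNEL_IMAGETYPE = "fitImage"
FIT_CONF_DEFAULT_DTB = "my-board-variant.dtb"
```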
@@ -444,8 +444,8 @@ kernel_do_install() {
        oe_runmake DEPMOD=echo MODLIB=${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION} INSTALL_FW_PATH=${D}${nonarch_base_libdir}/firmware modules_install
        rm "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build"
        rm "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/source"
        # If the kernel/ directory is empty remove it to prevent QA issues
        rmdir --ignore-fail-on-non-empty "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/kernel"
        # Remove empty module directories to prevent QA issues
        find "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/kernel" -type d -empty -delete
    else
        bbnote "no modules to install"
    fi
@@ -7,6 +7,7 @@
# QB_OPT_APPEND: options to append to qemu, e.g., "-device usb-mouse"
#
# QB_DEFAULT_KERNEL: default kernel to boot, e.g., "bzImage"
#                    e.g., "bzImage-initramfs-qemux86-64.bin" if INITRAMFS_IMAGE_BUNDLE is set to 1.
#
# QB_DEFAULT_FSTYPE: default FSTYPE to boot, e.g., "ext4"
#
@@ -87,7 +88,7 @@
QB_MEM ?= "-m 256"
QB_SMP ?= ""
QB_SERIAL_OPT ?= "-serial mon:stdio -serial null"
QB_DEFAULT_KERNEL ?= "${KERNEL_IMAGETYPE}"
QB_DEFAULT_KERNEL ?= "${@bb.utils.contains("INITRAMFS_IMAGE_BUNDLE", "1", "${KERNEL_IMAGETYPE}-${INITRAMFS_LINK_NAME}.bin", "${KERNEL_IMAGETYPE}", d)}"
QB_DEFAULT_FSTYPE ?= "ext4"
QB_RNG ?= "-object rng-random,filename=/dev/urandom,id=rng0 -device virtio-rng-pci,rng=rng0"
QB_OPT_APPEND ?= ""
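For context, a sketch of the configuration that the new ``QB_DEFAULT_KERNEL`` default is meant to handle (the initramfs image recipe name is an example):

```
# Bundle the initramfs into the kernel image; runqemu then boots
# ${KERNEL_IMAGETYPE}-${INITRAMFS_LINK_NAME}.bin by default
INITRAMFS_IMAGE = "core-image-minimal-initramfs"
INITRAMFS_IMAGE_BUNDLE = "1"
```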
@@ -27,6 +27,13 @@ BB_SCHEDULER ?= "completion"
BB_TASK_IONICE_LEVEL:task-rm_work = "3.0"

do_rm_work () {
    # Force using the HOSTTOOLS 'rm' - otherwise the SYSROOT_NATIVE 'rm' can be selected depending on PATH
    # Avoids race-condition accessing 'rm' when deleting WORKDIR folders at the end of this function
    RM_BIN="$(PATH=${HOSTTOOLS_DIR} command -v rm)"
    if [ -z "${RM_BIN}" ]; then
        bbfatal "Binary 'rm' not found in HOSTTOOLS_DIR, cannot remove WORKDIR data."
    fi

    # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe.
    for p in ${RM_WORK_EXCLUDE}; do
        if [ "$p" = "${PN}" ]; then
@@ -73,7 +80,7 @@ do_rm_work () {
                # sstate version since otherwise we'd need to leave 'plaindirs' around
                # such as 'packages' and 'packages-split' and these can be large. No end
                # of chain tasks depend directly on do_package anymore.
                rm -f -- $i;
                "${RM_BIN}" -f -- $i;
                ;;
            *_setscene*)
                # Skip stamps which are already setscene versions
@@ -90,7 +97,7 @@ do_rm_work () {
                        ;;
                    esac
                done
                rm -f -- $i
                "${RM_BIN}" -f -- $i
        esac
    done

@@ -100,9 +107,9 @@ do_rm_work () {
        # Retain only logs and other files in temp, safely ignore
        # failures of removing pseudo folers on NFS2/3 server.
        if [ $dir = 'pseudo' ]; then
            rm -rf -- $dir 2> /dev/null || true
            "${RM_BIN}" -rf -- $dir 2> /dev/null || true
        elif ! echo "$excludes" | grep -q -w "$dir"; then
            rm -rf -- $dir
            "${RM_BIN}" -rf -- $dir
        fi
    done
}
@@ -174,7 +181,7 @@ python inject_rm_work() {
    # other recipes and thus will typically run much later than completion of
    # work in the recipe itself.
    # In practice, addtask() here merely updates the dependencies.
    bb.build.addtask('do_rm_work', 'do_build', ' '.join(deps), d)
    bb.build.addtask('do_rm_work', 'do_rm_work_all do_build', ' '.join(deps), d)

    # Always update do_build_without_rm_work dependencies.
    bb.build.addtask('do_build_without_rm_work', '', ' '.join(deps), d)
@@ -31,7 +31,7 @@ toolchain_create_sdk_env_script () {
    echo '# http://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html#AEN80' >> $script
    echo '# http://xahlee.info/UnixResource_dir/_/ldpath.html' >> $script
    echo '# Only disable this check if you are absolutely know what you are doing!' >> $script
    echo 'if [ ! -z "$LD_LIBRARY_PATH" ]; then' >> $script
    echo 'if [ ! -z "${LD_LIBRARY_PATH:-}" ]; then' >> $script
    echo "    echo \"Your environment is misconfigured, you probably need to 'unset LD_LIBRARY_PATH'\"" >> $script
    echo "    echo \"but please check why this was set in the first place and that it's safe to unset.\"" >> $script
    echo '    echo "The SDK will not operate correctly in most cases when LD_LIBRARY_PATH is set."' >> $script
@@ -113,7 +113,8 @@ def get_source_date_epoch_from_git(d, sourcedir):
        return None

    bb.debug(1, "git repository: %s" % gitpath)
    p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE)
    p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'],
                       check=True, stdout=subprocess.PIPE)
    return int(p.stdout.decode('utf-8'))

def get_source_date_epoch_from_youngest_file(d, sourcedir):
@@ -32,7 +32,7 @@ def get_sdk_spdxid(sdk):
    return "SPDXRef-SDK-%s" % sdk


def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
def write_doc(d, spdx_doc, subdir, spdx_deploy=None, indent=None):
    from pathlib import Path

    if spdx_deploy is None:
@@ -41,7 +41,7 @@ def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
    dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json")
    dest.parent.mkdir(exist_ok=True, parents=True)
    with dest.open("wb") as f:
        doc_sha1 = spdx_doc.to_json(f, sort_keys=True)
        doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)

    l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_")
    l.parent.mkdir(exist_ok=True, parents=True)
@@ -57,6 +57,7 @@ class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
        self.outputbuf = output
        self.finalresult = finalresult
        self.finalresult.buffer = True
        self.target = target

    def _add_result_with_semaphore(self, method, test, *args, **kwargs):
        self.semaphore.acquire()
@@ -65,13 +66,14 @@ class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
            self.result.starttime[test.id()] = self._test_start.timestamp()
            self.result.threadprogress[self.threadnum].append(test.id())
            totalprogress = sum(len(x) for x in self.result.threadprogress.values())
            self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s)" % (
            self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s failed) (%s)" % (
                    self.threadnum,
                    len(self.result.threadprogress[self.threadnum]),
                    self.totalinprocess,
                    totalprogress,
                    self.totaltests,
                    "{0:.2f}".format(time.time()-self._test_start.timestamp()),
                    self.target.failed_tests,
                    test.id())
        finally:
            self.semaphore.release()
@@ -49,21 +49,20 @@ class RpmBasicTest(OERuntimeTestCase):
            msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output)
            self.assertEqual(status, 0, msg=msg)

        def check_no_process_for_user(u):
            _, output = self.target.run(self.tc.target_cmds['ps'])
            if u + ' ' in output:
                return False
            else:
                return True
        def wait_for_no_process_for_user(u, timeout = 120):
            timeout_at = time.time() + timeout
            while time.time() < timeout_at:
                _, output = self.target.run(self.tc.target_cmds['ps'])
                if u + ' ' not in output:
                    return
                time.sleep(1)
            user_pss = [ps for ps in output.split("\n") if u + ' ' in ps]
            msg = "There're %s 's process(es) still running: %s".format(u, "\n".join(user_pss))
            assertTrue(True, msg=msg)

        def unset_up_test_user(u):
            # ensure no test1 process in running
            timeout = time.time() + 30
            while time.time() < timeout:
                if check_no_process_for_user(u):
                    break
                else:
                    time.sleep(1)
            wait_for_no_process_for_user(u)
            status, output = self.target.run('userdel -r %s' % u)
            msg = 'Failed to erase user: %s' % output
            self.assertTrue(status == 0, msg=msg)
meta/lib/oeqa/selftest/cases/externalsrc.py (new file, 44 lines)
@@ -0,0 +1,44 @@
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

import os
import shutil
import tempfile

from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import get_bb_var, runCmd

class ExternalSrc(OESelftestTestCase):
    # test that srctree_hash_files does not crash
    # we should be actually checking do_compile[file-checksums] but oeqa currently does not support it
    # so we check only that a recipe with externalsrc can be parsed
    def test_externalsrc_srctree_hash_files(self):
        test_recipe = "git-submodule-test"
        git_url = "git://git.yoctoproject.org/git-submodule-test"
        externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name

        self.write_config(
            """
INHERIT += "externalsrc"
EXTERNALSRC:pn-%s = "%s"
""" % (test_recipe, externalsrc_dir)
        )

        # test with git without submodules
        runCmd('git clone %s %s' % (git_url, externalsrc_dir))
        os.unlink(externalsrc_dir + "/.gitmodules")
        open(".gitmodules", 'w').close()  # local file .gitmodules in cwd should not affect externalsrc parsing
        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
        os.unlink(".gitmodules")

        # test with git with submodules
        runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
        runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")

        # test without git
        shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
@@ -263,7 +263,7 @@ class TestImage(OESelftestTestCase):
        except FileNotFoundError:
            self.fail("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
        try:
            dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True)
            dripath = subprocess.check_output("PATH=/bin:/usr/bin:$PATH pkg-config --variable=dridriverdir dri", shell=True)
        except subprocess.CalledProcessError as e:
            self.fail("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
        qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
@@ -34,6 +34,4 @@ do_install () {
}

CLEANBROKEN = "1"
# https://github.com/rhboot/efivar/issues/202
COMPATIBLE_HOST:libc-musl = 'null'
@@ -20,7 +20,7 @@ SRC_URI = "https://ftp.isc.org/isc/bind9/${PV}/${BPN}-${PV}.tar.xz \
           file://0001-avoid-start-failure-with-bind-user.patch \
           "

SRC_URI[sha256sum] = "0e3c3ab9378db84ba0f37073d67ba125ae4f2ff8daf366c9db287e3f1b2c35f0"
SRC_URI[sha256sum] = "f415a92feb62568b50854a063cb231e257351f8672186d0ab031a49b3de2cac6"

UPSTREAM_CHECK_URI = "https://ftp.isc.org/isc/bind9/"
# follow the ESV versions divisible by 2
@@ -14,6 +14,8 @@ UPSTREAM_CHECK_URI = "https://roy.marples.name/downloads/dhcpcd/"
SRC_URI = "https://roy.marples.name/downloads/${BPN}/${BPN}-${PV}.tar.xz \
           file://0001-remove-INCLUDEDIR-to-prevent-build-issues.patch \
           file://0001-20-resolv.conf-improve-the-sitation-of-working-with-.patch \
           file://0001-privsep-Allow-getrandom-sysctl-for-newer-glibc.patch \
           file://0002-privsep-Allow-newfstatat-syscall-as-well.patch \
           file://dhcpcd.service \
           file://dhcpcd@.service \
           "
@@ -0,0 +1,30 @@
From c6cdf0aee71ab4126d36b045f02428ee3c6ec50b Mon Sep 17 00:00:00 2001
From: Roy Marples <roy@marples.name>
Date: Fri, 26 Aug 2022 09:08:36 +0100
Subject: [PATCH 1/2] privsep: Allow getrandom sysctl for newer glibc

Fixes #120

Upstream-Status: Backport [c6cdf0aee71ab4126d36b045f02428ee3c6ec50b]
Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
---
 src/privsep-linux.c | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/privsep-linux.c b/src/privsep-linux.c
index b238644b..479a1d82 100644
--- a/src/privsep-linux.c
+++ b/src/privsep-linux.c
@@ -300,6 +300,9 @@ static struct sock_filter ps_seccomp_filter[] = {
 #ifdef __NR_getpid
 	SECCOMP_ALLOW(__NR_getpid),
 #endif
+#ifdef __NR_getrandom
+	SECCOMP_ALLOW(__NR_getrandom),
+#endif
 #ifdef __NR_getsockopt
 	/* For route socket overflow */
 	SECCOMP_ALLOW_ARG(__NR_getsockopt, 1, SOL_SOCKET),
--
2.17.1
@@ -0,0 +1,31 @@
From 7625a555797f587a89dc2447fd9d621024d5165c Mon Sep 17 00:00:00 2001
From: Roy Marples <roy@marples.name>
Date: Fri, 26 Aug 2022 09:24:50 +0100
Subject: [PATCH 2/2] privsep: Allow newfstatat syscall as well

Allows newer glibc variants to work apparently.
As reported in #84 and #89.

Upstream-Status: Backport [7625a555797f587a89dc2447fd9d621024d5165c]
Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
---
 src/privsep-linux.c | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/privsep-linux.c b/src/privsep-linux.c
index 479a1d82..6327b1bc 100644
--- a/src/privsep-linux.c
+++ b/src/privsep-linux.c
@@ -328,6 +328,9 @@ static struct sock_filter ps_seccomp_filter[] = {
 #ifdef __NR_nanosleep
 	SECCOMP_ALLOW(__NR_nanosleep),	/* XXX should use ppoll instead */
 #endif
+#ifdef __NR_newfstatat
+	SECCOMP_ALLOW(__NR_newfstatat),
+#endif
 #ifdef __NR_ppoll
 	SECCOMP_ALLOW(__NR_ppoll),
 #endif
--
2.17.1
@@ -54,15 +54,12 @@ SYSTEMD_SERVICE:${PN}-sshd = "sshd.socket"

inherit autotools-brokensep ptest

PACKAGECONFIG ??= "rng-tools"
PACKAGECONFIG ??= ""
PACKAGECONFIG[kerberos] = "--with-kerberos5,--without-kerberos5,krb5"
PACKAGECONFIG[ldns] = "--with-ldns,--without-ldns,ldns"
PACKAGECONFIG[libedit] = "--with-libedit,--without-libedit,libedit"
PACKAGECONFIG[manpages] = "--with-mantype=man,--with-mantype=cat"

# Add RRECOMMENDS to rng-tools for sshd package
PACKAGECONFIG[rng-tools] = ""

EXTRA_AUTORECONF += "--exclude=aclocal"

# login path is hardcoded in sshd
@@ -162,15 +159,10 @@ FILES:${PN}-keygen = "${bindir}/ssh-keygen"

RDEPENDS:${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen ${PN}-sftp-server"
RDEPENDS:${PN}-sshd += "${PN}-keygen ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-keyinit pam-plugin-loginuid', '', d)}"
RRECOMMENDS:${PN}-sshd:append:class-target = "\
    ${@bb.utils.filter('PACKAGECONFIG', 'rng-tools', d)} \
"

# break dependency on base package for -dev package
# otherwise SDK fails to build as the main openssh and dropbear packages
# conflict with each other
RDEPENDS:${PN}-dev = ""

# gdb would make attach-ptrace test pass rather than skip but not worth the build dependencies
RDEPENDS:${PN}-ptest += "${PN}-sftp ${PN}-misc ${PN}-sftp-server make sed sudo coreutils"
@@ -0,0 +1,43 @@
|
||||
From 7725e7bfe6f2ce8146b6552b44e0d226be7638e7 Mon Sep 17 00:00:00 2001
|
||||
From: Pauli <pauli@openssl.org>
|
||||
Date: Fri, 11 Nov 2022 09:40:19 +1100
|
||||
Subject: [PATCH] x509: fix double locking problem
|
||||
|
||||
This reverts commit 9aa4be691f5c73eb3c68606d824c104550c053f7 and removed the
|
||||
redundant flag setting.
|
||||
|
||||
Fixes #19643
|
||||
|
||||
Fixes LOW CVE-2022-3996
|
||||
|
||||
Reviewed-by: Dmitry Belyavskiy <beldmit@gmail.com>
|
||||
Reviewed-by: Tomas Mraz <tomas@openssl.org>
|
||||
(Merged from https://github.com/openssl/openssl/pull/19652)
|
||||
|
||||
(cherry picked from commit 4d0340a6d2f327700a059f0b8f954d6160f8eef5)
|
||||
|
||||
Upstream-Status: Backport [https://github.com/openssl/openssl/commit/7725e7bfe6f2ce8146b6552b44e0d226be7638e7]
|
||||
CVE: CVE-2022-3996
|
||||
Signed-off-by: Vivek Kumbhar <vkumbhar@mvista.com>
|
||||
---
|
||||
crypto/x509/pcy_map.c | 4 ----
|
||||
1 file changed, 4 deletions(-)
|
||||
|
||||
diff --git a/crypto/x509/pcy_map.c b/crypto/x509/pcy_map.c
|
||||
index 05406c6493..60dfd1e320 100644
|
||||
--- a/crypto/x509/pcy_map.c
|
||||
+++ b/crypto/x509/pcy_map.c
|
||||
@@ -73,10 +73,6 @@ int ossl_policy_cache_set_mapping(X509 *x, POLICY_MAPPINGS *maps)
|
||||
|
||||
ret = 1;
|
||||
bad_mapping:
|
||||
- if (ret == -1 && CRYPTO_THREAD_write_lock(x->lock)) {
|
||||
- x->ex_flags |= EXFLAG_INVALID_POLICY;
|
||||
- CRYPTO_THREAD_unlock(x->lock);
|
||||
- }
|
||||
sk_POLICY_MAPPING_pop_free(maps, POLICY_MAPPING_free);
|
||||
return ret;
|
||||
|
||||
--
|
||||
2.30.2
|
||||
|
||||
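Background on the class of bug fixed by the CVE-2022-3996 backport above: the removed block re-acquired x->lock on a path where the lock can already be held by the caller, and OpenSSL's CRYPTO_THREAD locks are not recursive, so the second acquisition can hang. A standalone illustration of the same failure mode using a POSIX error-checking mutex (illustration only, not OpenSSL code):

/* Double-locking sketch: taking a non-recursive lock twice in one thread
 * either deadlocks or, with PTHREAD_MUTEX_ERRORCHECK, reports EDEADLK. */
#include <errno.h>
#include <pthread.h>
#include <stdio.h>

int main(void)
{
    pthread_mutex_t lock;
    pthread_mutexattr_t attr;

    pthread_mutexattr_init(&attr);
    /* Error-checking type makes the bug visible instead of hanging. */
    pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_ERRORCHECK);
    pthread_mutex_init(&lock, &attr);

    pthread_mutex_lock(&lock);                 /* outer code holds the lock      */
    if (pthread_mutex_lock(&lock) == EDEADLK)  /* inner code locks it again      */
        printf("second lock refused: EDEADLK\n");

    pthread_mutex_unlock(&lock);
    pthread_mutex_destroy(&lock);
    pthread_mutexattr_destroy(&attr);
    return 0;
}
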
@@ -12,6 +12,7 @@ SRC_URI = "http://www.openssl.org/source/openssl-${PV}.tar.gz \
|
||||
file://0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch \
|
||||
file://afalg.patch \
|
||||
file://0001-Configure-do-not-tweak-mips-cflags.patch \
|
||||
file://CVE-2022-3996.patch \
|
||||
"
|
||||
|
||||
SRC_URI:append:class-nativesdk = " \
|
||||
|
||||
@@ -138,19 +138,26 @@ do_configure () {
|
||||
do_prepare_config
|
||||
merge_config.sh -m .config ${@" ".join(find_cfgs(d))}
|
||||
cml1_do_configure
|
||||
|
||||
# Save a copy of .config and autoconf.h.
|
||||
cp .config .config.orig
|
||||
cp include/autoconf.h include/autoconf.h.orig
|
||||
}
|
||||
|
||||
do_compile() {
|
||||
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
|
||||
export KCONFIG_NOTIMESTAMP=1
|
||||
|
||||
# Ensure we start do_compile with the original .config and autoconf.h.
|
||||
# These files should always have matching timestamps.
|
||||
cp .config.orig .config
|
||||
cp include/autoconf.h.orig include/autoconf.h
|
||||
|
||||
if [ "${BUSYBOX_SPLIT_SUID}" = "1" -a x`grep "CONFIG_FEATURE_INDIVIDUAL=y" .config` = x ]; then
|
||||
# Guard againt interrupted do_compile: clean temporary files.
|
||||
rm -f .config.app.suid .config.app.nosuid .config.disable.apps .config.nonapps
|
||||
|
||||
# split the .config into two parts, and make two busybox binaries
|
||||
if [ -e .config.orig ]; then
|
||||
# Need to guard again an interrupted do_compile - restore any backup
|
||||
cp .config.orig .config
|
||||
fi
|
||||
cp .config .config.orig
|
||||
oe_runmake busybox.cfg.suid
|
||||
oe_runmake busybox.cfg.nosuid
|
||||
|
||||
@@ -187,15 +194,18 @@ do_compile() {
|
||||
bbfatal "busybox suid binary incorrectly provides /bin/sh"
|
||||
fi
|
||||
|
||||
# copy .config.orig back to .config, because the install process may check this file
|
||||
cp .config.orig .config
|
||||
# cleanup
|
||||
rm .config.orig .config.app.suid .config.app.nosuid .config.disable.apps .config.nonapps
|
||||
rm .config.app.suid .config.app.nosuid .config.disable.apps .config.nonapps
|
||||
else
|
||||
oe_runmake busybox_unstripped
|
||||
cp busybox_unstripped busybox
|
||||
oe_runmake busybox.links
|
||||
fi
|
||||
|
||||
# restore original .config and autoconf.h, because the install process
|
||||
# may check these files
|
||||
cp .config.orig .config
|
||||
cp include/autoconf.h.orig include/autoconf.h
|
||||
}
|
||||
|
||||
do_install () {
|
||||
|
||||
@@ -182,3 +182,5 @@ do_install:class-nativesdk() {
|
||||
rm -rf ${D}${localstatedir}/run
|
||||
}
|
||||
BBCLASSEXTEND = "native nativesdk"
|
||||
|
||||
CVE_PRODUCT += "d-bus_project:d-bus"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
SRCBRANCH ?= "release/2.35/master"
|
||||
PV = "2.35"
|
||||
SRCREV_glibc ?= "8d125a1f9145ad90c94e438858d6b5b7578686f2"
|
||||
SRCREV_glibc ?= "293211b6fddf60fc407d21fcba0326dd2148f76b"
|
||||
SRCREV_localedef ?= "794da69788cbf9bf57b59a852f9f11307663fa87"
|
||||
|
||||
GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git"
|
||||
|
||||
@@ -10,7 +10,7 @@ LIC_FILES_CHKSUM = "file://LICENSING;md5=c0a30e2b1502c55a7f37e412cd6c6a4b \
|
||||
inherit autotools pkgconfig
|
||||
|
||||
SRC_URI = "git://github.com/besser82/libxcrypt.git;branch=${SRCBRANCH};protocol=https"
|
||||
SRCREV = "fee2687bad66e351a3dcc963a34ae80125923ff8"
|
||||
SRCREV = "d7fe1ac04c326dba7e0440868889d1dccb41a175"
|
||||
SRCBRANCH ?= "develop"
|
||||
|
||||
SRC_URI += "file://fix_cflags_handling.patch"
|
||||
|
||||
624
meta/recipes-core/libxml/libxml2/CVE-2022-40303.patch
Normal file
@@ -0,0 +1,624 @@
|
||||
From 15050f59d2a62b97b34e9cab8b8076a68ef003bd Mon Sep 17 00:00:00 2001
|
||||
From: Nick Wellnhofer <wellnhofer@aevum.de>
|
||||
Date: Thu, 25 Aug 2022 17:43:08 +0200
|
||||
Subject: [PATCH] CVE-2022-40303
|
||||
|
||||
Fix integer overflows with XML_PARSE_HUGE
|
||||
|
||||
Also impose size limits when XML_PARSE_HUGE is set. Limit size of names
|
||||
to XML_MAX_TEXT_LENGTH (10 million bytes) and other content to
|
||||
XML_MAX_HUGE_LENGTH (1 billion bytes).
|
||||
|
||||
Move some the length checks to the end of the respective loop to make
|
||||
them strict.
|
||||
|
||||
xmlParseEntityValue didn't have a length limitation at all. But without
|
||||
XML_PARSE_HUGE, this should eventually trigger an error in xmlGROW.
|
||||
|
||||
Thanks to Maddie Stone working with Google Project Zero for the report!
|
||||
|
||||
Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/libxml2/-/commit/c846986356fc149915a74972bf198abc266bc2c0]
|
||||
CVE: CVE-2022-40303
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
parser.c | 233 +++++++++++++++++++++++++++++--------------------------
|
||||
1 file changed, 121 insertions(+), 112 deletions(-)
|
||||
|
||||
diff --git a/parser.c b/parser.c
|
||||
index 1bc3713..0f76577 100644
|
||||
--- a/parser.c
|
||||
+++ b/parser.c
|
||||
@@ -115,6 +115,8 @@ xmlParseElementEnd(xmlParserCtxtPtr ctxt);
|
||||
* *
|
||||
************************************************************************/
|
||||
|
||||
+#define XML_MAX_HUGE_LENGTH 1000000000
|
||||
+
|
||||
#define XML_PARSER_BIG_ENTITY 1000
|
||||
#define XML_PARSER_LOT_ENTITY 5000
|
||||
|
||||
@@ -565,7 +567,7 @@ xmlFatalErr(xmlParserCtxtPtr ctxt, xmlParserErrors error, const char *info)
|
||||
errmsg = "Malformed declaration expecting version";
|
||||
break;
|
||||
case XML_ERR_NAME_TOO_LONG:
|
||||
- errmsg = "Name too long use XML_PARSE_HUGE option";
|
||||
+ errmsg = "Name too long";
|
||||
break;
|
||||
#if 0
|
||||
case:
|
||||
@@ -3210,6 +3212,9 @@ xmlParseNameComplex(xmlParserCtxtPtr ctxt) {
|
||||
int len = 0, l;
|
||||
int c;
|
||||
int count = 0;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
|
||||
#ifdef DEBUG
|
||||
nbParseNameComplex++;
|
||||
@@ -3275,7 +3280,8 @@ xmlParseNameComplex(xmlParserCtxtPtr ctxt) {
|
||||
if (ctxt->instate == XML_PARSER_EOF)
|
||||
return(NULL);
|
||||
}
|
||||
- len += l;
|
||||
+ if (len <= INT_MAX - l)
|
||||
+ len += l;
|
||||
NEXTL(l);
|
||||
c = CUR_CHAR(l);
|
||||
}
|
||||
@@ -3301,13 +3307,13 @@ xmlParseNameComplex(xmlParserCtxtPtr ctxt) {
|
||||
if (ctxt->instate == XML_PARSER_EOF)
|
||||
return(NULL);
|
||||
}
|
||||
- len += l;
|
||||
+ if (len <= INT_MAX - l)
|
||||
+ len += l;
|
||||
NEXTL(l);
|
||||
c = CUR_CHAR(l);
|
||||
}
|
||||
}
|
||||
- if ((len > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if (len > maxLength) {
|
||||
xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "Name");
|
||||
return(NULL);
|
||||
}
|
||||
@@ -3346,7 +3352,10 @@ const xmlChar *
|
||||
xmlParseName(xmlParserCtxtPtr ctxt) {
|
||||
const xmlChar *in;
|
||||
const xmlChar *ret;
|
||||
- int count = 0;
|
||||
+ size_t count = 0;
|
||||
+ size_t maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
|
||||
GROW;
|
||||
|
||||
@@ -3370,8 +3379,7 @@ xmlParseName(xmlParserCtxtPtr ctxt) {
|
||||
in++;
|
||||
if ((*in > 0) && (*in < 0x80)) {
|
||||
count = in - ctxt->input->cur;
|
||||
- if ((count > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if (count > maxLength) {
|
||||
xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "Name");
|
||||
return(NULL);
|
||||
}
|
||||
@@ -3392,6 +3400,9 @@ xmlParseNCNameComplex(xmlParserCtxtPtr ctxt) {
|
||||
int len = 0, l;
|
||||
int c;
|
||||
int count = 0;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
size_t startPosition = 0;
|
||||
|
||||
#ifdef DEBUG
|
||||
@@ -3412,17 +3423,13 @@ xmlParseNCNameComplex(xmlParserCtxtPtr ctxt) {
|
||||
while ((c != ' ') && (c != '>') && (c != '/') && /* test bigname.xml */
|
||||
(xmlIsNameChar(ctxt, c) && (c != ':'))) {
|
||||
if (count++ > XML_PARSER_CHUNK_SIZE) {
|
||||
- if ((len > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NCName");
|
||||
- return(NULL);
|
||||
- }
|
||||
count = 0;
|
||||
GROW;
|
||||
if (ctxt->instate == XML_PARSER_EOF)
|
||||
return(NULL);
|
||||
}
|
||||
- len += l;
|
||||
+ if (len <= INT_MAX - l)
|
||||
+ len += l;
|
||||
NEXTL(l);
|
||||
c = CUR_CHAR(l);
|
||||
if (c == 0) {
|
||||
@@ -3440,8 +3447,7 @@ xmlParseNCNameComplex(xmlParserCtxtPtr ctxt) {
|
||||
c = CUR_CHAR(l);
|
||||
}
|
||||
}
|
||||
- if ((len > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if (len > maxLength) {
|
||||
xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NCName");
|
||||
return(NULL);
|
||||
}
|
||||
@@ -3467,7 +3473,10 @@ static const xmlChar *
|
||||
xmlParseNCName(xmlParserCtxtPtr ctxt) {
|
||||
const xmlChar *in, *e;
|
||||
const xmlChar *ret;
|
||||
- int count = 0;
|
||||
+ size_t count = 0;
|
||||
+ size_t maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
|
||||
#ifdef DEBUG
|
||||
nbParseNCName++;
|
||||
@@ -3492,8 +3501,7 @@ xmlParseNCName(xmlParserCtxtPtr ctxt) {
|
||||
goto complex;
|
||||
if ((*in > 0) && (*in < 0x80)) {
|
||||
count = in - ctxt->input->cur;
|
||||
- if ((count > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if (count > maxLength) {
|
||||
xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NCName");
|
||||
return(NULL);
|
||||
}
|
||||
@@ -3575,6 +3583,9 @@ xmlParseStringName(xmlParserCtxtPtr ctxt, const xmlChar** str) {
|
||||
const xmlChar *cur = *str;
|
||||
int len = 0, l;
|
||||
int c;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
|
||||
#ifdef DEBUG
|
||||
nbParseStringName++;
|
||||
@@ -3610,12 +3621,6 @@ xmlParseStringName(xmlParserCtxtPtr ctxt, const xmlChar** str) {
|
||||
if (len + 10 > max) {
|
||||
xmlChar *tmp;
|
||||
|
||||
- if ((len > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NCName");
|
||||
- xmlFree(buffer);
|
||||
- return(NULL);
|
||||
- }
|
||||
max *= 2;
|
||||
tmp = (xmlChar *) xmlRealloc(buffer,
|
||||
max * sizeof(xmlChar));
|
||||
@@ -3629,14 +3634,18 @@ xmlParseStringName(xmlParserCtxtPtr ctxt, const xmlChar** str) {
|
||||
COPY_BUF(l,buffer,len,c);
|
||||
cur += l;
|
||||
c = CUR_SCHAR(cur, l);
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NCName");
|
||||
+ xmlFree(buffer);
|
||||
+ return(NULL);
|
||||
+ }
|
||||
}
|
||||
buffer[len] = 0;
|
||||
*str = cur;
|
||||
return(buffer);
|
||||
}
|
||||
}
|
||||
- if ((len > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if (len > maxLength) {
|
||||
xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NCName");
|
||||
return(NULL);
|
||||
}
|
||||
@@ -3663,6 +3672,9 @@ xmlParseNmtoken(xmlParserCtxtPtr ctxt) {
|
||||
int len = 0, l;
|
||||
int c;
|
||||
int count = 0;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
|
||||
#ifdef DEBUG
|
||||
nbParseNmToken++;
|
||||
@@ -3714,12 +3726,6 @@ xmlParseNmtoken(xmlParserCtxtPtr ctxt) {
|
||||
if (len + 10 > max) {
|
||||
xmlChar *tmp;
|
||||
|
||||
- if ((max > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NmToken");
|
||||
- xmlFree(buffer);
|
||||
- return(NULL);
|
||||
- }
|
||||
max *= 2;
|
||||
tmp = (xmlChar *) xmlRealloc(buffer,
|
||||
max * sizeof(xmlChar));
|
||||
@@ -3733,6 +3739,11 @@ xmlParseNmtoken(xmlParserCtxtPtr ctxt) {
|
||||
COPY_BUF(l,buffer,len,c);
|
||||
NEXTL(l);
|
||||
c = CUR_CHAR(l);
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NmToken");
|
||||
+ xmlFree(buffer);
|
||||
+ return(NULL);
|
||||
+ }
|
||||
}
|
||||
buffer[len] = 0;
|
||||
return(buffer);
|
||||
@@ -3740,8 +3751,7 @@ xmlParseNmtoken(xmlParserCtxtPtr ctxt) {
|
||||
}
|
||||
if (len == 0)
|
||||
return(NULL);
|
||||
- if ((len > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if (len > maxLength) {
|
||||
xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "NmToken");
|
||||
return(NULL);
|
||||
}
|
||||
@@ -3767,6 +3777,9 @@ xmlParseEntityValue(xmlParserCtxtPtr ctxt, xmlChar **orig) {
|
||||
int len = 0;
|
||||
int size = XML_PARSER_BUFFER_SIZE;
|
||||
int c, l;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_HUGE_LENGTH :
|
||||
+ XML_MAX_TEXT_LENGTH;
|
||||
xmlChar stop;
|
||||
xmlChar *ret = NULL;
|
||||
const xmlChar *cur = NULL;
|
||||
@@ -3826,6 +3839,12 @@ xmlParseEntityValue(xmlParserCtxtPtr ctxt, xmlChar **orig) {
|
||||
GROW;
|
||||
c = CUR_CHAR(l);
|
||||
}
|
||||
+
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErrMsg(ctxt, XML_ERR_ENTITY_NOT_FINISHED,
|
||||
+ "entity value too long\n");
|
||||
+ goto error;
|
||||
+ }
|
||||
}
|
||||
buf[len] = 0;
|
||||
if (ctxt->instate == XML_PARSER_EOF)
|
||||
@@ -3913,6 +3932,9 @@ xmlParseAttValueComplex(xmlParserCtxtPtr ctxt, int *attlen, int normalize) {
|
||||
xmlChar *rep = NULL;
|
||||
size_t len = 0;
|
||||
size_t buf_size = 0;
|
||||
+ size_t maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_HUGE_LENGTH :
|
||||
+ XML_MAX_TEXT_LENGTH;
|
||||
int c, l, in_space = 0;
|
||||
xmlChar *current = NULL;
|
||||
xmlEntityPtr ent;
|
||||
@@ -3944,16 +3966,6 @@ xmlParseAttValueComplex(xmlParserCtxtPtr ctxt, int *attlen, int normalize) {
|
||||
while (((NXT(0) != limit) && /* checked */
|
||||
(IS_CHAR(c)) && (c != '<')) &&
|
||||
(ctxt->instate != XML_PARSER_EOF)) {
|
||||
- /*
|
||||
- * Impose a reasonable limit on attribute size, unless XML_PARSE_HUGE
|
||||
- * special option is given
|
||||
- */
|
||||
- if ((len > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
- "AttValue length too long\n");
|
||||
- goto mem_error;
|
||||
- }
|
||||
if (c == '&') {
|
||||
in_space = 0;
|
||||
if (NXT(1) == '#') {
|
||||
@@ -4101,6 +4113,11 @@ xmlParseAttValueComplex(xmlParserCtxtPtr ctxt, int *attlen, int normalize) {
|
||||
}
|
||||
GROW;
|
||||
c = CUR_CHAR(l);
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
+ "AttValue length too long\n");
|
||||
+ goto mem_error;
|
||||
+ }
|
||||
}
|
||||
if (ctxt->instate == XML_PARSER_EOF)
|
||||
goto error;
|
||||
@@ -4122,16 +4139,6 @@ xmlParseAttValueComplex(xmlParserCtxtPtr ctxt, int *attlen, int normalize) {
|
||||
} else
|
||||
NEXT;
|
||||
|
||||
- /*
|
||||
- * There we potentially risk an overflow, don't allow attribute value of
|
||||
- * length more than INT_MAX it is a very reasonable assumption !
|
||||
- */
|
||||
- if (len >= INT_MAX) {
|
||||
- xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
- "AttValue length too long\n");
|
||||
- goto mem_error;
|
||||
- }
|
||||
-
|
||||
if (attlen != NULL) *attlen = (int) len;
|
||||
return(buf);
|
||||
|
||||
@@ -4202,6 +4209,9 @@ xmlParseSystemLiteral(xmlParserCtxtPtr ctxt) {
|
||||
int len = 0;
|
||||
int size = XML_PARSER_BUFFER_SIZE;
|
||||
int cur, l;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
xmlChar stop;
|
||||
int state = ctxt->instate;
|
||||
int count = 0;
|
||||
@@ -4229,13 +4239,6 @@ xmlParseSystemLiteral(xmlParserCtxtPtr ctxt) {
|
||||
if (len + 5 >= size) {
|
||||
xmlChar *tmp;
|
||||
|
||||
- if ((size > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "SystemLiteral");
|
||||
- xmlFree(buf);
|
||||
- ctxt->instate = (xmlParserInputState) state;
|
||||
- return(NULL);
|
||||
- }
|
||||
size *= 2;
|
||||
tmp = (xmlChar *) xmlRealloc(buf, size * sizeof(xmlChar));
|
||||
if (tmp == NULL) {
|
||||
@@ -4264,6 +4267,12 @@ xmlParseSystemLiteral(xmlParserCtxtPtr ctxt) {
|
||||
SHRINK;
|
||||
cur = CUR_CHAR(l);
|
||||
}
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "SystemLiteral");
|
||||
+ xmlFree(buf);
|
||||
+ ctxt->instate = (xmlParserInputState) state;
|
||||
+ return(NULL);
|
||||
+ }
|
||||
}
|
||||
buf[len] = 0;
|
||||
ctxt->instate = (xmlParserInputState) state;
|
||||
@@ -4291,6 +4300,9 @@ xmlParsePubidLiteral(xmlParserCtxtPtr ctxt) {
|
||||
xmlChar *buf = NULL;
|
||||
int len = 0;
|
||||
int size = XML_PARSER_BUFFER_SIZE;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_TEXT_LENGTH :
|
||||
+ XML_MAX_NAME_LENGTH;
|
||||
xmlChar cur;
|
||||
xmlChar stop;
|
||||
int count = 0;
|
||||
@@ -4318,12 +4330,6 @@ xmlParsePubidLiteral(xmlParserCtxtPtr ctxt) {
|
||||
if (len + 1 >= size) {
|
||||
xmlChar *tmp;
|
||||
|
||||
- if ((size > XML_MAX_NAME_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "Public ID");
|
||||
- xmlFree(buf);
|
||||
- return(NULL);
|
||||
- }
|
||||
size *= 2;
|
||||
tmp = (xmlChar *) xmlRealloc(buf, size * sizeof(xmlChar));
|
||||
if (tmp == NULL) {
|
||||
@@ -4351,6 +4357,11 @@ xmlParsePubidLiteral(xmlParserCtxtPtr ctxt) {
|
||||
SHRINK;
|
||||
cur = CUR;
|
||||
}
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "Public ID");
|
||||
+ xmlFree(buf);
|
||||
+ return(NULL);
|
||||
+ }
|
||||
}
|
||||
buf[len] = 0;
|
||||
if (cur != stop) {
|
||||
@@ -4750,6 +4761,9 @@ xmlParseCommentComplex(xmlParserCtxtPtr ctxt, xmlChar *buf,
|
||||
int r, rl;
|
||||
int cur, l;
|
||||
size_t count = 0;
|
||||
+ size_t maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_HUGE_LENGTH :
|
||||
+ XML_MAX_TEXT_LENGTH;
|
||||
int inputid;
|
||||
|
||||
inputid = ctxt->input->id;
|
||||
@@ -4795,13 +4809,6 @@ xmlParseCommentComplex(xmlParserCtxtPtr ctxt, xmlChar *buf,
|
||||
if ((r == '-') && (q == '-')) {
|
||||
xmlFatalErr(ctxt, XML_ERR_HYPHEN_IN_COMMENT, NULL);
|
||||
}
|
||||
- if ((len > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErrMsgStr(ctxt, XML_ERR_COMMENT_NOT_FINISHED,
|
||||
- "Comment too big found", NULL);
|
||||
- xmlFree (buf);
|
||||
- return;
|
||||
- }
|
||||
if (len + 5 >= size) {
|
||||
xmlChar *new_buf;
|
||||
size_t new_size;
|
||||
@@ -4839,6 +4846,13 @@ xmlParseCommentComplex(xmlParserCtxtPtr ctxt, xmlChar *buf,
|
||||
GROW;
|
||||
cur = CUR_CHAR(l);
|
||||
}
|
||||
+
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErrMsgStr(ctxt, XML_ERR_COMMENT_NOT_FINISHED,
|
||||
+ "Comment too big found", NULL);
|
||||
+ xmlFree (buf);
|
||||
+ return;
|
||||
+ }
|
||||
}
|
||||
buf[len] = 0;
|
||||
if (cur == 0) {
|
||||
@@ -4883,6 +4897,9 @@ xmlParseComment(xmlParserCtxtPtr ctxt) {
|
||||
xmlChar *buf = NULL;
|
||||
size_t size = XML_PARSER_BUFFER_SIZE;
|
||||
size_t len = 0;
|
||||
+ size_t maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_HUGE_LENGTH :
|
||||
+ XML_MAX_TEXT_LENGTH;
|
||||
xmlParserInputState state;
|
||||
const xmlChar *in;
|
||||
size_t nbchar = 0;
|
||||
@@ -4966,8 +4983,7 @@ get_more:
|
||||
buf[len] = 0;
|
||||
}
|
||||
}
|
||||
- if ((len > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if (len > maxLength) {
|
||||
xmlFatalErrMsgStr(ctxt, XML_ERR_COMMENT_NOT_FINISHED,
|
||||
"Comment too big found", NULL);
|
||||
xmlFree (buf);
|
||||
@@ -5167,6 +5183,9 @@ xmlParsePI(xmlParserCtxtPtr ctxt) {
|
||||
xmlChar *buf = NULL;
|
||||
size_t len = 0;
|
||||
size_t size = XML_PARSER_BUFFER_SIZE;
|
||||
+ size_t maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_HUGE_LENGTH :
|
||||
+ XML_MAX_TEXT_LENGTH;
|
||||
int cur, l;
|
||||
const xmlChar *target;
|
||||
xmlParserInputState state;
|
||||
@@ -5242,14 +5261,6 @@ xmlParsePI(xmlParserCtxtPtr ctxt) {
|
||||
return;
|
||||
}
|
||||
count = 0;
|
||||
- if ((len > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErrMsgStr(ctxt, XML_ERR_PI_NOT_FINISHED,
|
||||
- "PI %s too big found", target);
|
||||
- xmlFree(buf);
|
||||
- ctxt->instate = state;
|
||||
- return;
|
||||
- }
|
||||
}
|
||||
COPY_BUF(l,buf,len,cur);
|
||||
NEXTL(l);
|
||||
@@ -5259,15 +5270,14 @@ xmlParsePI(xmlParserCtxtPtr ctxt) {
|
||||
GROW;
|
||||
cur = CUR_CHAR(l);
|
||||
}
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErrMsgStr(ctxt, XML_ERR_PI_NOT_FINISHED,
|
||||
+ "PI %s too big found", target);
|
||||
+ xmlFree(buf);
|
||||
+ ctxt->instate = state;
|
||||
+ return;
|
||||
+ }
|
||||
}
|
||||
- if ((len > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErrMsgStr(ctxt, XML_ERR_PI_NOT_FINISHED,
|
||||
- "PI %s too big found", target);
|
||||
- xmlFree(buf);
|
||||
- ctxt->instate = state;
|
||||
- return;
|
||||
- }
|
||||
buf[len] = 0;
|
||||
if (cur != '?') {
|
||||
xmlFatalErrMsgStr(ctxt, XML_ERR_PI_NOT_FINISHED,
|
||||
@@ -8959,6 +8969,9 @@ xmlParseAttValueInternal(xmlParserCtxtPtr ctxt, int *len, int *alloc,
|
||||
const xmlChar *in = NULL, *start, *end, *last;
|
||||
xmlChar *ret = NULL;
|
||||
int line, col;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_HUGE_LENGTH :
|
||||
+ XML_MAX_TEXT_LENGTH;
|
||||
|
||||
GROW;
|
||||
in = (xmlChar *) CUR_PTR;
|
||||
@@ -8998,8 +9011,7 @@ xmlParseAttValueInternal(xmlParserCtxtPtr ctxt, int *len, int *alloc,
|
||||
start = in;
|
||||
if (in >= end) {
|
||||
GROW_PARSE_ATT_VALUE_INTERNAL(ctxt, in, start, end)
|
||||
- if (((in - start) > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if ((in - start) > maxLength) {
|
||||
xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
"AttValue length too long\n");
|
||||
return(NULL);
|
||||
@@ -9012,8 +9024,7 @@ xmlParseAttValueInternal(xmlParserCtxtPtr ctxt, int *len, int *alloc,
|
||||
if ((*in++ == 0x20) && (*in == 0x20)) break;
|
||||
if (in >= end) {
|
||||
GROW_PARSE_ATT_VALUE_INTERNAL(ctxt, in, start, end)
|
||||
- if (((in - start) > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if ((in - start) > maxLength) {
|
||||
xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
"AttValue length too long\n");
|
||||
return(NULL);
|
||||
@@ -9046,16 +9057,14 @@ xmlParseAttValueInternal(xmlParserCtxtPtr ctxt, int *len, int *alloc,
|
||||
last = last + delta;
|
||||
}
|
||||
end = ctxt->input->end;
|
||||
- if (((in - start) > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if ((in - start) > maxLength) {
|
||||
xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
"AttValue length too long\n");
|
||||
return(NULL);
|
||||
}
|
||||
}
|
||||
}
|
||||
- if (((in - start) > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if ((in - start) > maxLength) {
|
||||
xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
"AttValue length too long\n");
|
||||
return(NULL);
|
||||
@@ -9068,8 +9077,7 @@ xmlParseAttValueInternal(xmlParserCtxtPtr ctxt, int *len, int *alloc,
|
||||
col++;
|
||||
if (in >= end) {
|
||||
GROW_PARSE_ATT_VALUE_INTERNAL(ctxt, in, start, end)
|
||||
- if (((in - start) > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if ((in - start) > maxLength) {
|
||||
xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
"AttValue length too long\n");
|
||||
return(NULL);
|
||||
@@ -9077,8 +9085,7 @@ xmlParseAttValueInternal(xmlParserCtxtPtr ctxt, int *len, int *alloc,
|
||||
}
|
||||
}
|
||||
last = in;
|
||||
- if (((in - start) > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
+ if ((in - start) > maxLength) {
|
||||
xmlFatalErrMsg(ctxt, XML_ERR_ATTRIBUTE_NOT_FINISHED,
|
||||
"AttValue length too long\n");
|
||||
return(NULL);
|
||||
@@ -9768,6 +9775,9 @@ xmlParseCDSect(xmlParserCtxtPtr ctxt) {
|
||||
int s, sl;
|
||||
int cur, l;
|
||||
int count = 0;
|
||||
+ int maxLength = (ctxt->options & XML_PARSE_HUGE) ?
|
||||
+ XML_MAX_HUGE_LENGTH :
|
||||
+ XML_MAX_TEXT_LENGTH;
|
||||
|
||||
/* Check 2.6.0 was NXT(0) not RAW */
|
||||
if (CMP9(CUR_PTR, '<', '!', '[', 'C', 'D', 'A', 'T', 'A', '[')) {
|
||||
@@ -9801,13 +9811,6 @@ xmlParseCDSect(xmlParserCtxtPtr ctxt) {
|
||||
if (len + 5 >= size) {
|
||||
xmlChar *tmp;
|
||||
|
||||
- if ((size > XML_MAX_TEXT_LENGTH) &&
|
||||
- ((ctxt->options & XML_PARSE_HUGE) == 0)) {
|
||||
- xmlFatalErrMsgStr(ctxt, XML_ERR_CDATA_NOT_FINISHED,
|
||||
- "CData section too big found", NULL);
|
||||
- xmlFree (buf);
|
||||
- return;
|
||||
- }
|
||||
tmp = (xmlChar *) xmlRealloc(buf, size * 2 * sizeof(xmlChar));
|
||||
if (tmp == NULL) {
|
||||
xmlFree(buf);
|
||||
@@ -9834,6 +9837,12 @@ xmlParseCDSect(xmlParserCtxtPtr ctxt) {
|
||||
}
|
||||
NEXTL(l);
|
||||
cur = CUR_CHAR(l);
|
||||
+ if (len > maxLength) {
|
||||
+ xmlFatalErrMsg(ctxt, XML_ERR_CDATA_NOT_FINISHED,
|
||||
+ "CData section too big found\n");
|
||||
+ xmlFree(buf);
|
||||
+ return;
|
||||
+ }
|
||||
}
|
||||
buf[len] = 0;
|
||||
ctxt->instate = XML_PARSER_CONTENT;
|
||||
--
|
||||
2.25.1
|
||||
|
||||
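The CVE-2022-40303 backport above repeats one pattern throughout parser.c: pick the length cap up front from the XML_PARSE_HUGE option, guard the accumulator against signed overflow, and enforce the cap strictly after every append rather than only when the buffer grows. A compact standalone sketch of that pattern follows; the helper name, cap values and chunk size are made up for the example (the real limits are libxml2's XML_MAX_NAME_LENGTH, XML_MAX_TEXT_LENGTH and XML_MAX_HUGE_LENGTH).

/* Length-capping sketch mirroring the backport's approach. */
#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

#define DEMO_NAME_CAP 50000      /* stand-in for XML_MAX_NAME_LENGTH  */
#define DEMO_TEXT_CAP 10000000   /* stand-in for XML_MAX_TEXT_LENGTH  */

/* Append `chunk` bytes to the running length; return false once the
 * applicable cap is exceeded.  The INT_MAX guard prevents a crafted
 * input from wrapping the counter before the cap check ever fires. */
static bool append_checked(int *len, int chunk, bool huge_option)
{
    int cap = huge_option ? DEMO_TEXT_CAP : DEMO_NAME_CAP;

    if (*len <= INT_MAX - chunk)
        *len += chunk;

    return *len <= cap;          /* strict check after every append */
}

int main(void)
{
    int len = 0;
    while (append_checked(&len, 6, false))   /* simulate 6-byte name chunks */
        ;
    printf("rejected once length reached %d\n", len);
    return 0;
}
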
106
meta/recipes-core/libxml/libxml2/CVE-2022-40304.patch
Normal file
@@ -0,0 +1,106 @@
|
||||
From cde95d801abc9405ca821ad814c7730333328d96 Mon Sep 17 00:00:00 2001
|
||||
From: Nick Wellnhofer <wellnhofer@aevum.de>
|
||||
Date: Wed, 31 Aug 2022 22:11:25 +0200
|
||||
Subject: [PATCH] CVE-2022-40304
|
||||
|
||||
Fix dict corruption caused by entity reference cycles
|
||||
|
||||
When an entity reference cycle is detected, the entity content is
|
||||
cleared by setting its first byte to zero. But the entity content might
|
||||
be allocated from a dict. In this case, the dict entry becomes corrupted
|
||||
leading to all kinds of logic errors, including memory errors like
|
||||
double-frees.
|
||||
|
||||
Stop storing entity content, orig, ExternalID and SystemID in a dict.
|
||||
These values are unlikely to occur multiple times in a document, so they
|
||||
shouldn't have been stored in a dict in the first place.
|
||||
|
||||
Thanks to Ned Williamson and Nathan Wachholz working with Google Project
|
||||
Zero for the report!
|
||||
|
||||
Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/libxml2/-/commit/1b41ec4e9433b05bb0376be4725804c54ef1d80b]
|
||||
CVE: CVE-2022-40304
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
entities.c | 55 ++++++++++++++++--------------------------------------
|
||||
1 file changed, 16 insertions(+), 39 deletions(-)
|
||||
|
||||
diff --git a/entities.c b/entities.c
|
||||
index 1a8f86f..ec1b9a7 100644
|
||||
--- a/entities.c
|
||||
+++ b/entities.c
|
||||
@@ -112,36 +112,19 @@ xmlFreeEntity(xmlEntityPtr entity)
|
||||
if ((entity->children) && (entity->owner == 1) &&
|
||||
(entity == (xmlEntityPtr) entity->children->parent))
|
||||
xmlFreeNodeList(entity->children);
|
||||
- if (dict != NULL) {
|
||||
- if ((entity->name != NULL) && (!xmlDictOwns(dict, entity->name)))
|
||||
- xmlFree((char *) entity->name);
|
||||
- if ((entity->ExternalID != NULL) &&
|
||||
- (!xmlDictOwns(dict, entity->ExternalID)))
|
||||
- xmlFree((char *) entity->ExternalID);
|
||||
- if ((entity->SystemID != NULL) &&
|
||||
- (!xmlDictOwns(dict, entity->SystemID)))
|
||||
- xmlFree((char *) entity->SystemID);
|
||||
- if ((entity->URI != NULL) && (!xmlDictOwns(dict, entity->URI)))
|
||||
- xmlFree((char *) entity->URI);
|
||||
- if ((entity->content != NULL)
|
||||
- && (!xmlDictOwns(dict, entity->content)))
|
||||
- xmlFree((char *) entity->content);
|
||||
- if ((entity->orig != NULL) && (!xmlDictOwns(dict, entity->orig)))
|
||||
- xmlFree((char *) entity->orig);
|
||||
- } else {
|
||||
- if (entity->name != NULL)
|
||||
- xmlFree((char *) entity->name);
|
||||
- if (entity->ExternalID != NULL)
|
||||
- xmlFree((char *) entity->ExternalID);
|
||||
- if (entity->SystemID != NULL)
|
||||
- xmlFree((char *) entity->SystemID);
|
||||
- if (entity->URI != NULL)
|
||||
- xmlFree((char *) entity->URI);
|
||||
- if (entity->content != NULL)
|
||||
- xmlFree((char *) entity->content);
|
||||
- if (entity->orig != NULL)
|
||||
- xmlFree((char *) entity->orig);
|
||||
- }
|
||||
+ if ((entity->name != NULL) &&
|
||||
+ ((dict == NULL) || (!xmlDictOwns(dict, entity->name))))
|
||||
+ xmlFree((char *) entity->name);
|
||||
+ if (entity->ExternalID != NULL)
|
||||
+ xmlFree((char *) entity->ExternalID);
|
||||
+ if (entity->SystemID != NULL)
|
||||
+ xmlFree((char *) entity->SystemID);
|
||||
+ if (entity->URI != NULL)
|
||||
+ xmlFree((char *) entity->URI);
|
||||
+ if (entity->content != NULL)
|
||||
+ xmlFree((char *) entity->content);
|
||||
+ if (entity->orig != NULL)
|
||||
+ xmlFree((char *) entity->orig);
|
||||
xmlFree(entity);
|
||||
}
|
||||
|
||||
@@ -177,18 +160,12 @@ xmlCreateEntity(xmlDictPtr dict, const xmlChar *name, int type,
|
||||
ret->SystemID = xmlStrdup(SystemID);
|
||||
} else {
|
||||
ret->name = xmlDictLookup(dict, name, -1);
|
||||
- if (ExternalID != NULL)
|
||||
- ret->ExternalID = xmlDictLookup(dict, ExternalID, -1);
|
||||
- if (SystemID != NULL)
|
||||
- ret->SystemID = xmlDictLookup(dict, SystemID, -1);
|
||||
+ ret->ExternalID = xmlStrdup(ExternalID);
|
||||
+ ret->SystemID = xmlStrdup(SystemID);
|
||||
}
|
||||
if (content != NULL) {
|
||||
ret->length = xmlStrlen(content);
|
||||
- if ((dict != NULL) && (ret->length < 5))
|
||||
- ret->content = (xmlChar *)
|
||||
- xmlDictLookup(dict, content, ret->length);
|
||||
- else
|
||||
- ret->content = xmlStrndup(content, ret->length);
|
||||
+ ret->content = xmlStrndup(content, ret->length);
|
||||
} else {
|
||||
ret->length = 0;
|
||||
ret->content = NULL;
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -13,7 +13,7 @@ DEPENDS = "zlib virtual/libiconv"
|
||||
|
||||
inherit gnomebase
|
||||
|
||||
SRC_URI += "http://www.w3.org/XML/Test/xmlts20080827.tar.gz;subdir=${BP};name=testtar \
|
||||
SRC_URI += "http://www.w3.org/XML/Test/xmlts20080827.tar;subdir=${BP};name=testtar \
|
||||
file://libxml-64bit.patch \
|
||||
file://runtest.patch \
|
||||
file://run-ptest \
|
||||
@@ -23,10 +23,12 @@ SRC_URI += "http://www.w3.org/XML/Test/xmlts20080827.tar.gz;subdir=${BP};name=te
|
||||
file://remove-fuzz-from-ptests.patch \
|
||||
file://libxml-m4-use-pkgconfig.patch \
|
||||
file://0001-Port-gentest.py-to-Python-3.patch \
|
||||
file://CVE-2022-40303.patch \
|
||||
file://CVE-2022-40304.patch \
|
||||
"
|
||||
|
||||
SRC_URI[archive.sha256sum] = "60d74a257d1ccec0475e749cba2f21559e48139efba6ff28224357c7c798dfee"
|
||||
SRC_URI[testtar.sha256sum] = "96151685cec997e1f9f3387e3626d61e6284d4d6e66e0e440c209286c03e9cc7"
|
||||
SRC_URI[testtar.sha256sum] = "9b2c865aba66c6429ca301a7ef048d7eca2cdb7a9106184416710853c7b37d0d"
|
||||
|
||||
BINCONFIG = "${bindir}/xml2-config"
|
||||
|
||||
|
||||
@@ -21,6 +21,8 @@ CVE_DB_UPDATE_INTERVAL ?= "86400"
|
||||
# Timeout for blocking socket operations, such as the connection attempt.
|
||||
CVE_SOCKET_TIMEOUT ?= "60"
|
||||
|
||||
CVE_DB_TEMP_FILE ?= "${CVE_CHECK_DB_DIR}/temp_nvdcve_1.1.db"
|
||||
|
||||
python () {
|
||||
if not bb.data.inherits_class("cve-check", d):
|
||||
raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
|
||||
@@ -32,25 +34,15 @@ python do_fetch() {
|
||||
"""
|
||||
import bb.utils
|
||||
import bb.progress
|
||||
import sqlite3, urllib, urllib.parse, gzip
|
||||
from datetime import date
|
||||
import shutil
|
||||
|
||||
bb.utils.export_proxies(d)
|
||||
|
||||
YEAR_START = 2002
|
||||
|
||||
db_file = d.getVar("CVE_CHECK_DB_FILE")
|
||||
db_dir = os.path.dirname(db_file)
|
||||
db_tmp_file = d.getVar("CVE_DB_TEMP_FILE")
|
||||
|
||||
cve_socket_timeout = int(d.getVar("CVE_SOCKET_TIMEOUT"))
|
||||
|
||||
if os.path.exists("{0}-journal".format(db_file)):
|
||||
# If a journal is present the last update might have been interrupted. In that case,
|
||||
# just wipe any leftovers and force the DB to be recreated.
|
||||
os.remove("{0}-journal".format(db_file))
|
||||
|
||||
if os.path.exists(db_file):
|
||||
os.remove(db_file)
|
||||
cleanup_db_download(db_file, db_tmp_file)
|
||||
|
||||
# The NVD database changes once a day, so no need to update more frequently
|
||||
# Allow the user to force-update
|
||||
@@ -68,9 +60,60 @@ python do_fetch() {
|
||||
pass
|
||||
|
||||
bb.utils.mkdirhier(db_dir)
|
||||
if os.path.exists(db_file):
|
||||
shutil.copy2(db_file, db_tmp_file)
|
||||
|
||||
if update_db_file(db_tmp_file, d) == True:
|
||||
# Update downloaded correctly, can swap files
|
||||
shutil.move(db_tmp_file, db_file)
|
||||
else:
|
||||
# Update failed, do not modify the database
|
||||
bb.note("CVE database update failed")
|
||||
os.remove(db_tmp_file)
|
||||
}
|
||||
|
||||
do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
|
||||
do_fetch[file-checksums] = ""
|
||||
do_fetch[vardeps] = ""
|
||||
|
||||
def cleanup_db_download(db_file, db_tmp_file):
|
||||
"""
|
||||
Cleanup the download space from possible failed downloads
|
||||
"""
|
||||
|
||||
# Clean up the updates done on the main file
|
||||
# Remove it only if a journal file exists - it means a complete re-download
|
||||
if os.path.exists("{0}-journal".format(db_file)):
|
||||
# If a journal is present the last update might have been interrupted. In that case,
|
||||
# just wipe any leftovers and force the DB to be recreated.
|
||||
os.remove("{0}-journal".format(db_file))
|
||||
|
||||
if os.path.exists(db_file):
|
||||
os.remove(db_file)
|
||||
|
||||
# Clean-up the temporary file downloads, we can remove both journal
|
||||
# and the temporary database
|
||||
if os.path.exists("{0}-journal".format(db_tmp_file)):
|
||||
# If a journal is present the last update might have been interrupted. In that case,
|
||||
# just wipe any leftovers and force the DB to be recreated.
|
||||
os.remove("{0}-journal".format(db_tmp_file))
|
||||
|
||||
if os.path.exists(db_tmp_file):
|
||||
os.remove(db_tmp_file)
|
||||
|
||||
def update_db_file(db_tmp_file, d):
|
||||
"""
|
||||
Update the given database file
|
||||
"""
|
||||
import bb.utils, bb.progress
|
||||
from datetime import date
|
||||
import urllib, gzip, sqlite3
|
||||
|
||||
YEAR_START = 2002
|
||||
cve_socket_timeout = int(d.getVar("CVE_SOCKET_TIMEOUT"))
|
||||
|
||||
# Connect to database
|
||||
conn = sqlite3.connect(db_file)
|
||||
conn = sqlite3.connect(db_tmp_file)
|
||||
initialize_db(conn)
|
||||
|
||||
with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f:
|
||||
@@ -87,8 +130,11 @@ python do_fetch() {
|
||||
response = urllib.request.urlopen(meta_url, timeout=cve_socket_timeout)
|
||||
except urllib.error.URLError as e:
|
||||
cve_f.write('Warning: CVE db update error, Unable to fetch CVE data.\n\n')
|
||||
bb.warn("Failed to fetch CVE data (%s)" % e.reason)
|
||||
return
|
||||
bb.warn("Failed to fetch CVE data (%s)" % e)
|
||||
import socket
|
||||
result = socket.getaddrinfo("nvd.nist.gov", 443, proto=socket.IPPROTO_TCP)
|
||||
bb.warn("Host IPs are %s" % (", ".join(t[4][0] for t in result)))
|
||||
return False
|
||||
|
||||
if response:
|
||||
for l in response.read().decode("utf-8").splitlines():
|
||||
@@ -98,7 +144,7 @@ python do_fetch() {
|
||||
break
|
||||
else:
|
||||
bb.warn("Cannot parse CVE metadata, update failed")
|
||||
return
|
||||
return False
|
||||
|
||||
# Compare with current db last modified date
|
||||
cursor = conn.execute("select DATE from META where YEAR = ?", (year,))
|
||||
@@ -119,7 +165,7 @@ python do_fetch() {
|
||||
except urllib.error.URLError as e:
|
||||
cve_f.write('Warning: CVE db update error, CVE data is outdated.\n\n')
|
||||
bb.warn("Cannot parse CVE data (%s), update failed" % e.reason)
|
||||
return
|
||||
return False
|
||||
else:
|
||||
bb.debug(2, "Already up to date (last modified %s)" % last_modified)
|
||||
# Update success, set the date to cve_check file.
|
||||
@@ -128,11 +174,7 @@ python do_fetch() {
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
}
|
||||
|
||||
do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
|
||||
do_fetch[file-checksums] = ""
|
||||
do_fetch[vardeps] = ""
|
||||
return True
|
||||
|
||||
def initialize_db(conn):
|
||||
with conn:
|
||||
|
||||
@@ -0,0 +1,60 @@
|
||||
From 25492154b42f68a48752a7f61eaf1fb61e454e52 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Zbigniew=20J=C4=99drzejewski-Szmek?= <zbyszek@in.waw.pl>
|
||||
Date: Tue, 18 Oct 2022 18:09:06 +0200
|
||||
Subject: [PATCH] shared/json: allow json_variant_dump() to return an error
|
||||
|
||||
Upstream-Status: Backport [https://github.com/systemd/systemd/commit/7922ead507e0d83e4ec72a8cbd2b67194766e58c]
|
||||
|
||||
Needed to fix CVE-2022-45873.patch backported from systemd/main,
|
||||
otherwise it fails to build with:
|
||||
|
||||
| ../git/src/shared/elf-util.c: In function 'parse_elf_object':
|
||||
| ../git/src/shared/elf-util.c:792:27: error: void value not ignored as it ought to be
|
||||
| 792 | r = json_variant_dump(package_metadata, JSON_FORMAT_FLUSH, json_out, NULL);
|
||||
| | ^
|
||||
|
||||
Signed-off-by: Martin Jansa <martin2.jansa@lgepartner.com>
|
||||
---
|
||||
src/shared/json.c | 7 ++++---
|
||||
src/shared/json.h | 2 +-
|
||||
2 files changed, 5 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/src/shared/json.c b/src/shared/json.c
|
||||
index dff95eda26..81c05efe22 100644
|
||||
--- a/src/shared/json.c
|
||||
+++ b/src/shared/json.c
|
||||
@@ -1792,9 +1792,9 @@ int json_variant_format(JsonVariant *v, JsonFormatFlags flags, char **ret) {
|
||||
return (int) sz - 1;
|
||||
}
|
||||
|
||||
-void json_variant_dump(JsonVariant *v, JsonFormatFlags flags, FILE *f, const char *prefix) {
|
||||
+int json_variant_dump(JsonVariant *v, JsonFormatFlags flags, FILE *f, const char *prefix) {
|
||||
if (!v)
|
||||
- return;
|
||||
+ return 0;
|
||||
|
||||
if (!f)
|
||||
f = stdout;
|
||||
@@ -1820,7 +1820,8 @@ void json_variant_dump(JsonVariant *v, JsonFormatFlags flags, FILE *f, const cha
|
||||
fputc('\n', f); /* In case of SSE add a second newline */
|
||||
|
||||
if (flags & JSON_FORMAT_FLUSH)
|
||||
- fflush(f);
|
||||
+ return fflush_and_check(f);
|
||||
+ return 0;
|
||||
}
|
||||
|
||||
int json_variant_filter(JsonVariant **v, char **to_remove) {
|
||||
diff --git a/src/shared/json.h b/src/shared/json.h
|
||||
index 8760354b66..c712700763 100644
|
||||
--- a/src/shared/json.h
|
||||
+++ b/src/shared/json.h
|
||||
@@ -187,7 +187,7 @@ typedef enum JsonFormatFlags {
|
||||
} JsonFormatFlags;
|
||||
|
||||
int json_variant_format(JsonVariant *v, JsonFormatFlags flags, char **ret);
|
||||
-void json_variant_dump(JsonVariant *v, JsonFormatFlags flags, FILE *f, const char *prefix);
|
||||
+int json_variant_dump(JsonVariant *v, JsonFormatFlags flags, FILE *f, const char *prefix);
|
||||
|
||||
int json_variant_filter(JsonVariant **v, char **to_remove);
|
||||
|
||||
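The json_variant_dump() change above exists so that the coredump fix in the next file can observe write failures: a formatter that returns void has no way to report a failed or partial flush. A minimal standalone sketch of the same pattern, a dump helper that flushes and propagates the stream error, follows; the helper and its name are invented for illustration, with fflush_and_check() being systemd's internal equivalent.

/* Dump helper returning an error instead of void. */
#include <errno.h>
#include <stdio.h>

static int dump_string(const char *s, FILE *f)
{
    if (s == NULL)
        return 0;
    if (f == NULL)
        f = stdout;

    if (fputs(s, f) == EOF)
        return -EIO;

    /* Flush and inspect the error indicator so a short or failed write
     * is reported to the caller rather than silently dropped. */
    if (fflush(f) != 0 || ferror(f))
        return errno > 0 ? -errno : -EIO;
    return 0;
}

int main(void)
{
    int r = dump_string("{\"demo\": true}\n", stdout);
    if (r < 0)
        fprintf(stderr, "dump failed: %d\n", r);
    return r < 0;
}
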
124
meta/recipes-core/systemd/systemd/CVE-2022-45873.patch
Normal file
@@ -0,0 +1,124 @@
|
||||
From 076b807be472630692c5348c60d0c2b7b28ad437 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Zbigniew=20J=C4=99drzejewski-Szmek?= <zbyszek@in.waw.pl>
|
||||
Date: Tue, 18 Oct 2022 18:23:53 +0200
|
||||
Subject: [PATCH] coredump: avoid deadlock when passing processed backtrace
|
||||
data
|
||||
|
||||
We would deadlock when passing the data back from the forked-off process that
|
||||
was doing backtrace generation back to the coredump parent. This is because we
|
||||
fork the child and wait for it to exit. The child tries to write too much data
|
||||
to the output pipe, and and after the first 64k blocks on the parent because
|
||||
the pipe is full. The bug surfaced in Fedora because of a combination of four
|
||||
factors:
|
||||
- 87707784c70dc9894ec613df0a6e75e732a362a3 was backported to v251.5, which
|
||||
allowed coredump processing to be successful.
|
||||
- 1a0281a3ebf4f8c16d40aa9e63103f16cd23bb2a was NOT backported, so the output
|
||||
was very verbose.
|
||||
- Fedora has the ELF package metadata available, so a lot of output can be
|
||||
generated. Most other distros just don't have the information.
|
||||
- gnome-calendar crashes and has a bazillion modules and 69596 bytes of output
|
||||
are generated for it.
|
||||
|
||||
Fixes https://bugzilla.redhat.com/show_bug.cgi?id=2135778.
|
||||
|
||||
The code is changed to try to write data opportunistically. If we get partial
|
||||
information, that is still logged. In is generally better to log partial
|
||||
backtrace information than nothing at all.
|
||||
|
||||
Upstream-Status: Backport [https://github.com/systemd/systemd/commit/076b807be472630692c5348c60d0c2b7b28ad437]
|
||||
CVE: CVE-2022-45873
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
src/shared/elf-util.c | 37 +++++++++++++++++++++++++++++++------
|
||||
1 file changed, 31 insertions(+), 6 deletions(-)
|
||||
|
||||
diff --git a/src/shared/elf-util.c b/src/shared/elf-util.c
|
||||
index 6d9fcfbbf2..bd27507346 100644
|
||||
--- a/src/shared/elf-util.c
|
||||
+++ b/src/shared/elf-util.c
|
||||
@@ -30,6 +30,9 @@
|
||||
#define THREADS_MAX 64
|
||||
#define ELF_PACKAGE_METADATA_ID 0xcafe1a7e
|
||||
|
||||
+/* The amount of data we're willing to write to each of the output pipes. */
|
||||
+#define COREDUMP_PIPE_MAX (1024*1024U)
|
||||
+
|
||||
static void *dw_dl = NULL;
|
||||
static void *elf_dl = NULL;
|
||||
|
||||
@@ -700,13 +703,13 @@ int parse_elf_object(int fd, const char *executable, bool fork_disable_dump, cha
|
||||
return r;
|
||||
|
||||
if (ret) {
|
||||
- r = RET_NERRNO(pipe2(return_pipe, O_CLOEXEC));
|
||||
+ r = RET_NERRNO(pipe2(return_pipe, O_CLOEXEC|O_NONBLOCK));
|
||||
if (r < 0)
|
||||
return r;
|
||||
}
|
||||
|
||||
if (ret_package_metadata) {
|
||||
- r = RET_NERRNO(pipe2(json_pipe, O_CLOEXEC));
|
||||
+ r = RET_NERRNO(pipe2(json_pipe, O_CLOEXEC|O_NONBLOCK));
|
||||
if (r < 0)
|
||||
return r;
|
||||
}
|
||||
@@ -750,8 +753,24 @@ int parse_elf_object(int fd, const char *executable, bool fork_disable_dump, cha
|
||||
goto child_fail;
|
||||
|
||||
if (buf) {
|
||||
- r = loop_write(return_pipe[1], buf, strlen(buf), false);
|
||||
- if (r < 0)
|
||||
+ size_t len = strlen(buf);
|
||||
+
|
||||
+ if (len > COREDUMP_PIPE_MAX) {
|
||||
+ /* This is iffy. A backtrace can be a few hundred kilobytes, but too much is
|
||||
+ * too much. Let's log a warning and ignore the rest. */
|
||||
+ log_warning("Generated backtrace is %zu bytes (more than the limit of %u bytes), backtrace will be truncated.",
|
||||
+ len, COREDUMP_PIPE_MAX);
|
||||
+ len = COREDUMP_PIPE_MAX;
|
||||
+ }
|
||||
+
|
||||
+ /* Bump the space for the returned string.
|
||||
+ * Failure is ignored, because partial output is still useful. */
|
||||
+ (void) fcntl(return_pipe[1], F_SETPIPE_SZ, len);
|
||||
+
|
||||
+ r = loop_write(return_pipe[1], buf, len, false);
|
||||
+ if (r == -EAGAIN)
|
||||
+ log_warning("Write failed, backtrace will be truncated.");
|
||||
+ else if (r < 0)
|
||||
goto child_fail;
|
||||
|
||||
return_pipe[1] = safe_close(return_pipe[1]);
|
||||
@@ -760,13 +779,19 @@ int parse_elf_object(int fd, const char *executable, bool fork_disable_dump, cha
|
||||
if (package_metadata) {
|
||||
_cleanup_fclose_ FILE *json_out = NULL;
|
||||
|
||||
+ /* Bump the space for the returned string. We don't know how much space we'll need in
|
||||
+ * advance, so we'll just try to write as much as possible and maybe fail later. */
|
||||
+ (void) fcntl(json_pipe[1], F_SETPIPE_SZ, COREDUMP_PIPE_MAX);
|
||||
+
|
||||
json_out = take_fdopen(&json_pipe[1], "w");
|
||||
if (!json_out) {
|
||||
r = -errno;
|
||||
goto child_fail;
|
||||
}
|
||||
|
||||
- json_variant_dump(package_metadata, JSON_FORMAT_FLUSH, json_out, NULL);
|
||||
+ r = json_variant_dump(package_metadata, JSON_FORMAT_FLUSH, json_out, NULL);
|
||||
+ if (r < 0)
|
||||
+ log_warning_errno(r, "Failed to write JSON package metadata, ignoring: %m");
|
||||
}
|
||||
|
||||
_exit(EXIT_SUCCESS);
|
||||
@@ -801,7 +826,7 @@ int parse_elf_object(int fd, const char *executable, bool fork_disable_dump, cha
|
||||
|
||||
r = json_parse_file(json_in, NULL, 0, &package_metadata, NULL, NULL);
|
||||
if (r < 0 && r != -EINVAL) /* EINVAL: json was empty, so we got nothing, but that's ok */
|
||||
- return r;
|
||||
+ log_warning_errno(r, "Failed to read or parse json metadata, ignoring: %m");
|
||||
}
|
||||
|
||||
if (ret)
|
||||
--
|
||||
2.25.1
|
||||
|
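The CVE-2022-45873 backport above combines two mitigations: the pipes between the coredump parent and the backtrace-generating child are opened O_NONBLOCK, so a full pipe can no longer block the child forever while the parent sits in waitpid(), and F_SETPIPE_SZ is used, best effort, to grow the pipe buffer towards the expected payload. A small standalone sketch of those two calls (buffer sizes here are illustrative, not systemd's values):

/* Non-blocking pipe plus best-effort buffer resize, as in the backport. */
#define _GNU_SOURCE            /* pipe2() and F_SETPIPE_SZ */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

#define PAYLOAD_MAX (1024 * 1024U)

int main(void)
{
    int fds[2];
    if (pipe2(fds, O_CLOEXEC | O_NONBLOCK) < 0)
        return 1;

    /* Default pipe capacity is 64 KiB; a larger backtrace used to make the
     * writer block.  Resizing may fail (e.g. above /proc/sys/fs/pipe-max-size),
     * which is ignored: partial data beats a deadlock. */
    (void) fcntl(fds[1], F_SETPIPE_SZ, PAYLOAD_MAX);

    char buf[128 * 1024];
    memset(buf, 'x', sizeof(buf));
    ssize_t n = write(fds[1], buf, sizeof(buf)); /* EAGAIN/short write, never blocks */
    printf("wrote %zd of %zu bytes\n", n, sizeof(buf));

    close(fds[0]);
    close(fds[1]);
    return 0;
}
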
||||
@@ -26,6 +26,8 @@ SRC_URI += "file://touchscreen.rules \
|
||||
file://0001-Move-sysusers.d-sysctl.d-binfmt.d-modules-load.d-to-.patch \
|
||||
file://0001-resolve-Use-sockaddr-pointer-type-for-bind.patch \
|
||||
file://CVE-2022-3821.patch \
|
||||
file://CVE-2022-45873.patch \
|
||||
file://0001-shared-json-allow-json_variant_dump-to-return-an-err.patch \
|
||||
"
|
||||
|
||||
# patches needed by musl
|
||||
|
||||
@@ -42,5 +42,6 @@ SRC_URI = "\
|
||||
file://0018-CVE-2022-38128-1.patch \
|
||||
file://0018-CVE-2022-38128-2.patch \
|
||||
file://0018-CVE-2022-38128-3.patch \
|
||||
file://0019-CVE-2022-4285.patch \
|
||||
"
|
||||
S = "${WORKDIR}/git"
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
From 5c831a3c7f3ca98d6aba1200353311e1a1f84c70 Mon Sep 17 00:00:00 2001
|
||||
From: Nick Clifton <nickc@redhat.com>
|
||||
Date: Wed, 19 Oct 2022 15:09:12 +0100
|
||||
Subject: [PATCH] Fix an illegal memory access when parsing an ELF file
|
||||
containing corrupt symbol version information.
|
||||
|
||||
PR 29699
|
||||
* elf.c (_bfd_elf_slurp_version_tables): Fail if the sh_info field
|
||||
of the section header is zero.
|
||||
|
||||
Upstream-Status: Backport [https://sourceware.org/git/gitweb.cgi?p=binutils-gdb.git;h=5c831a3c7f3ca98d6aba1200353311e1a1f84c70]
|
||||
CVE: CVE-2022-4285
|
||||
|
||||
Signed-off-by: Yash Shinde <Yash.Shinde@windriver.com>
|
||||
---
|
||||
bfd/ChangeLog | 6 ++++++
|
||||
bfd/elf.c | 4 +++-
|
||||
2 files changed, 9 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/bfd/elf.c b/bfd/elf.c
|
||||
index fe00e0f9189..7cd7febcf95 100644
|
||||
--- a/bfd/elf.c
|
||||
+++ b/bfd/elf.c
|
||||
@@ -8918,7 +8918,9 @@ _bfd_elf_slurp_version_tables (bfd *abfd, bool default_imported_symver)
|
||||
bfd_set_error (bfd_error_file_too_big);
|
||||
goto error_return_verref;
|
||||
}
|
||||
- elf_tdata (abfd)->verref = (Elf_Internal_Verneed *) bfd_alloc (abfd, amt);
|
||||
+ if (amt == 0)
|
||||
+ goto error_return_verref;
|
||||
+ elf_tdata (abfd)->verref = (Elf_Internal_Verneed *) bfd_zalloc (abfd, amt);
|
||||
if (elf_tdata (abfd)->verref == NULL)
|
||||
goto error_return_verref;
|
||||
|
||||
--
|
||||
2.31.1
|
||||
|
||||
@@ -48,7 +48,6 @@ SRC_URI = "\
|
||||
file://0016-If-CXXFLAGS-contains-something-unsupported-by-the-bu.patch \
|
||||
file://0017-handle-sysroot-support-for-nativesdk-gcc.patch \
|
||||
file://0018-Search-target-sysroot-gcc-version-specific-dirs-with.patch \
|
||||
file://0019-nios2-Define-MUSL_DYNAMIC_LINKER.patch \
|
||||
file://0020-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch \
|
||||
file://0021-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch \
|
||||
file://0022-sync-gcc-stddef.h-with-musl.patch \
|
||||
|
||||
@@ -43,10 +43,10 @@ Signed-off-by: Ruiqiang Hao <Ruiqiang.Hao@windriver.com>
|
||||
gcc/testsuite/lib/target-supports.exp | 3 ++-
|
||||
9 files changed, 79 insertions(+), 8 deletions(-)
|
||||
|
||||
diff --git a/gcc/config/arm/arm-cpus.in b/gcc/config/arm/arm-cpus.in
|
||||
index bcc9ebe9f..58d83829c 100644
|
||||
--- a/gcc/config/arm/arm-cpus.in
|
||||
+++ b/gcc/config/arm/arm-cpus.in
|
||||
Index: gcc-11.3.0/gcc/config/arm/arm-cpus.in
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/arm-cpus.in
|
||||
+++ gcc-11.3.0/gcc/config/arm/arm-cpus.in
|
||||
@@ -132,6 +132,9 @@ define feature cmse
|
||||
# Architecture rel 8.1-M.
|
||||
define feature armv8_1m_main
|
||||
@@ -57,7 +57,7 @@ index bcc9ebe9f..58d83829c 100644
|
||||
# Floating point and Neon extensions.
|
||||
# VFPv1 is not supported in GCC.
|
||||
|
||||
@@ -293,6 +296,7 @@ define fgroup ARMv8m_base ARMv6m armv8 cmse tdiv
|
||||
@@ -293,6 +296,7 @@ define fgroup ARMv8m_base ARMv6m armv8 c
|
||||
define fgroup ARMv8m_main ARMv7m armv8 cmse
|
||||
define fgroup ARMv8r ARMv8a
|
||||
define fgroup ARMv8_1m_main ARMv8m_main armv8_1m_main
|
||||
@@ -87,10 +87,10 @@ index bcc9ebe9f..58d83829c 100644
|
||||
begin arch iwmmxt
|
||||
tune for iwmmxt
|
||||
tune flags LDSCHED STRONG XSCALE
|
||||
diff --git a/gcc/config/arm/arm-tables.opt b/gcc/config/arm/arm-tables.opt
|
||||
index 5692d4fb7..ae3dd9414 100644
|
||||
--- a/gcc/config/arm/arm-tables.opt
|
||||
+++ b/gcc/config/arm/arm-tables.opt
|
||||
Index: gcc-11.3.0/gcc/config/arm/arm-tables.opt
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/arm-tables.opt
|
||||
+++ gcc-11.3.0/gcc/config/arm/arm-tables.opt
|
||||
@@ -380,10 +380,13 @@ EnumValue
|
||||
Enum(arm_arch) String(armv8.1-m.main) Value(30)
|
||||
|
||||
@@ -107,10 +107,10 @@ index 5692d4fb7..ae3dd9414 100644
|
||||
|
||||
Enum
|
||||
Name(arm_fpu) Type(enum fpu_type)
|
||||
diff --git a/gcc/config/arm/arm.h b/gcc/config/arm/arm.h
|
||||
index 47c13a9e5..088c7725c 100644
|
||||
--- a/gcc/config/arm/arm.h
|
||||
+++ b/gcc/config/arm/arm.h
|
||||
Index: gcc-11.3.0/gcc/config/arm/arm.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/arm.h
|
||||
+++ gcc-11.3.0/gcc/config/arm/arm.h
|
||||
@@ -456,7 +456,8 @@ enum base_architecture
|
||||
BASE_ARCH_8A = 8,
|
||||
BASE_ARCH_8M_BASE = 8,
|
||||
@@ -121,10 +121,10 @@ index 47c13a9e5..088c7725c 100644
|
||||
};
|
||||
|
||||
/* The major revision number of the ARM Architecture implemented by the target. */
|
||||
diff --git a/gcc/config/arm/t-aprofile b/gcc/config/arm/t-aprofile
|
||||
index 8574ac3e2..68e2251c7 100644
|
||||
--- a/gcc/config/arm/t-aprofile
|
||||
+++ b/gcc/config/arm/t-aprofile
|
||||
Index: gcc-11.3.0/gcc/config/arm/t-aprofile
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/t-aprofile
|
||||
+++ gcc-11.3.0/gcc/config/arm/t-aprofile
|
||||
@@ -26,8 +26,8 @@
|
||||
|
||||
# Arch and FPU variants to build libraries with
|
||||
@@ -136,7 +136,7 @@ index 8574ac3e2..68e2251c7 100644
|
||||
|
||||
# ARMv7-A - build nofp, fp-d16 and SIMD variants
|
||||
|
||||
@@ -46,6 +46,11 @@ MULTILIB_REQUIRED += mthumb/march=armv8-a/mfloat-abi=soft
|
||||
@@ -46,6 +46,11 @@ MULTILIB_REQUIRED += mthumb/march=armv8-
|
||||
MULTILIB_REQUIRED += mthumb/march=armv8-a+simd/mfloat-abi=hard
|
||||
MULTILIB_REQUIRED += mthumb/march=armv8-a+simd/mfloat-abi=softfp
|
||||
|
||||
@@ -148,7 +148,7 @@ index 8574ac3e2..68e2251c7 100644
|
||||
# Matches
|
||||
|
||||
# Arch Matches
|
||||
@@ -129,17 +134,29 @@ MULTILIB_MATCHES += march?armv8-a=march?armv8.6-a
|
||||
@@ -129,17 +134,29 @@ MULTILIB_MATCHES += march?armv8-a=march?
|
||||
MULTILIB_MATCHES += $(foreach ARCH, $(v8_6_a_simd_variants), \
|
||||
march?armv8-a+simd=march?armv8.6-a$(ARCH))
|
||||
|
||||
@@ -180,11 +180,11 @@ index 8574ac3e2..68e2251c7 100644
|
||||
- $(foreach ARCH, armv7-a armv8-a, \
|
||||
+ $(foreach ARCH, armv7-a armv8-a armv9-a, \
|
||||
mthumb/march.$(ARCH)/mfloat-abi.soft=m$(MODE)/march.$(ARCH)/mfloat-abi.softfp))
|
||||
diff --git a/gcc/config/arm/t-arm-elf b/gcc/config/arm/t-arm-elf
|
||||
index d68def308..b3a900e8c 100644
|
||||
--- a/gcc/config/arm/t-arm-elf
|
||||
+++ b/gcc/config/arm/t-arm-elf
|
||||
@@ -38,6 +38,8 @@ v7ve_fps := vfpv3-d16 vfpv3 vfpv3-d16-fp16 vfpv3-fp16 vfpv4 neon \
|
||||
Index: gcc-11.3.0/gcc/config/arm/t-arm-elf
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/t-arm-elf
|
||||
+++ gcc-11.3.0/gcc/config/arm/t-arm-elf
|
||||
@@ -38,6 +38,8 @@ v7ve_fps := vfpv3-d16 vfpv3 vfpv3-d16-fp
|
||||
# it seems to work ok.
|
||||
v8_fps := simd fp16 crypto fp16+crypto dotprod fp16fml
|
||||
|
||||
@@ -202,7 +202,7 @@ index d68def308..b3a900e8c 100644
|
||||
# No floating point variants, require thumb1 softfp
|
||||
all_nofp_t := armv6-m armv6s-m armv8-m.base
|
||||
|
||||
@@ -110,6 +114,11 @@ MULTILIB_MATCHES += $(foreach ARCH, $(all_v8_archs), \
|
||||
@@ -110,6 +114,11 @@ MULTILIB_MATCHES += $(foreach ARCH,
|
||||
$(foreach FPARCH, $(v8_fps), \
|
||||
march?armv7+fp=march?$(ARCH)+$(FPARCH)))
|
||||
|
||||
@@ -214,11 +214,11 @@ index d68def308..b3a900e8c 100644
|
||||
MULTILIB_MATCHES += $(foreach ARCH, armv7e-m armv8-m.mainline, \
|
||||
march?armv7+fp=march?$(ARCH)+fp.dp)
|
||||
|
||||
diff --git a/gcc/config/arm/t-multilib b/gcc/config/arm/t-multilib
|
||||
index ddc5033bf..d789b86ee 100644
|
||||
--- a/gcc/config/arm/t-multilib
|
||||
+++ b/gcc/config/arm/t-multilib
|
||||
@@ -78,6 +78,8 @@ v8_4_a_simd_variants := $(call all_feat_combs, simd fp16 crypto i8mm bf16)
|
||||
Index: gcc-11.3.0/gcc/config/arm/t-multilib
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/t-multilib
|
||||
+++ gcc-11.3.0/gcc/config/arm/t-multilib
|
||||
@@ -78,6 +78,8 @@ v8_4_a_simd_variants := $(call all_feat_
|
||||
v8_5_a_simd_variants := $(call all_feat_combs, simd fp16 crypto i8mm bf16)
|
||||
v8_6_a_simd_variants := $(call all_feat_combs, simd fp16 crypto i8mm bf16)
|
||||
v8_r_nosimd_variants := +crc
|
||||
@@ -227,7 +227,7 @@ index ddc5033bf..d789b86ee 100644
|
||||
|
||||
ifneq (,$(HAS_APROFILE))
|
||||
include $(srcdir)/config/arm/t-aprofile
@@ -202,6 +204,16 @@ MULTILIB_MATCHES += march?armv7=march?armv8.6-a
@@ -202,6 +204,16 @@ MULTILIB_MATCHES += march?armv7=march?ar
MULTILIB_MATCHES += $(foreach ARCH, $(v8_6_a_simd_variants), \
march?armv7+fp=march?armv8.6-a$(ARCH))
@@ -244,10 +244,10 @@ index ddc5033bf..d789b86ee 100644
endif # Not APROFILE.
# Use Thumb libraries for everything.
diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi
index 7184a62d0..9a712c0d6 100644
--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
Index: gcc-11.3.0/gcc/doc/invoke.texi
===================================================================
--- gcc-11.3.0.orig/gcc/doc/invoke.texi
+++ gcc-11.3.0/gcc/doc/invoke.texi
@@ -19701,6 +19701,7 @@ Permissible names are:
@samp{armv7-m}, @samp{armv7e-m},
@samp{armv8-m.base}, @samp{armv8-m.main},
@@ -256,10 +256,10 @@ index 7184a62d0..9a712c0d6 100644
@samp{iwmmxt} and @samp{iwmmxt2}.
Additionally, the following architectures, which lack support for the
diff --git a/gcc/testsuite/gcc.target/arm/multilib.exp b/gcc/testsuite/gcc.target/arm/multilib.exp
index 4b30025db..e3f06c316 100644
--- a/gcc/testsuite/gcc.target/arm/multilib.exp
+++ b/gcc/testsuite/gcc.target/arm/multilib.exp
Index: gcc-11.3.0/gcc/testsuite/gcc.target/arm/multilib.exp
===================================================================
--- gcc-11.3.0.orig/gcc/testsuite/gcc.target/arm/multilib.exp
+++ gcc-11.3.0/gcc/testsuite/gcc.target/arm/multilib.exp
@@ -135,6 +135,14 @@ if {[multilib_config "aprofile"] } {
{-march=armv8.6-a+simd+fp16 -mfloat-abi=softfp} "thumb/v8-a+simd/softfp"
{-march=armv8.6-a+simd+fp16+nofp -mfloat-abi=softfp} "thumb/v8-a/nofp"
@@ -275,10 +275,10 @@ index 4b30025db..e3f06c316 100644
{-mcpu=cortex-a53+crypto -mfloat-abi=hard} "thumb/v8-a+simd/hard"
{-mcpu=cortex-a53+nofp -mfloat-abi=softfp} "thumb/v8-a/nofp"
{-march=armv8-a+crc -mfloat-abi=hard -mfpu=vfp} "thumb/v8-a+simd/hard"
diff --git a/gcc/testsuite/lib/target-supports.exp b/gcc/testsuite/lib/target-supports.exp
index 857e57218..52e043917 100644
--- a/gcc/testsuite/lib/target-supports.exp
+++ b/gcc/testsuite/lib/target-supports.exp
Index: gcc-11.3.0/gcc/testsuite/lib/target-supports.exp
===================================================================
--- gcc-11.3.0.orig/gcc/testsuite/lib/target-supports.exp
+++ gcc-11.3.0/gcc/testsuite/lib/target-supports.exp
@@ -4820,7 +4820,8 @@ foreach { armfunc armflag armdefs } {
v8m_base "-march=armv8-m.base -mthumb -mfloat-abi=soft"
__ARM_ARCH_8M_BASE__
@@ -289,6 +289,3 @@ index 857e57218..52e043917 100644
eval [string map [list FUNC $armfunc FLAG $armflag DEFS $armdefs ] {
proc check_effective_target_arm_arch_FUNC_ok { } {
return [check_no_compiler_messages arm_arch_FUNC_ok assembly {
--
2.34.1
@@ -1,4 +1,4 @@
|
||||
From 84dd8ea4c982fc2c82af642293d29e9c1880de5b Mon Sep 17 00:00:00 2001
|
||||
From 4de00af67b57b5440bdf61ab364ad959ad0aeee7 Mon Sep 17 00:00:00 2001
|
||||
From: Khem Raj <raj.khem@gmail.com>
|
||||
Date: Fri, 29 Mar 2013 09:24:50 +0400
|
||||
Subject: [PATCH] Define GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER
|
||||
@@ -12,28 +12,37 @@ SH, sparc, alpha for possible future support (if any)
|
||||
|
||||
Removes the do_headerfix task in metadata
|
||||
|
||||
Signed-off-by: Khem Raj <raj.khem@gmail.com>
|
||||
|
||||
Upstream-Status: Inappropriate [OE configuration]
|
||||
Signed-off-by: Khem Raj <raj.khem@gmail.com>
|
||||
|
||||
Refresh patch from master to deduplicate patches and fix arm linker
|
||||
Signed-off-by: Pavel Zhukov <pavel@zhukoff.net>
|
||||
---
|
||||
gcc/config/aarch64/aarch64-linux.h | 4 ++--
|
||||
gcc/config/alpha/linux-elf.h | 4 ++--
|
||||
gcc/config/arm/linux-eabi.h | 4 ++--
|
||||
gcc/config/arm/linux-eabi.h | 6 +++---
|
||||
gcc/config/arm/linux-elf.h | 2 +-
|
||||
gcc/config/i386/linux.h | 2 +-
|
||||
gcc/config/i386/linux64.h | 6 +++---
|
||||
gcc/config/i386/linux.h | 4 ++--
|
||||
gcc/config/i386/linux64.h | 12 ++++++------
|
||||
gcc/config/linux.h | 8 ++++----
|
||||
gcc/config/mips/linux.h | 12 ++++++------
|
||||
gcc/config/riscv/linux.h | 2 +-
|
||||
gcc/config/microblaze/linux.h | 4 ++--
|
||||
gcc/config/mips/linux.h | 18 +++++++++---------
|
||||
gcc/config/nios2/linux.h | 4 ++--
|
||||
gcc/config/riscv/linux.h | 4 ++--
|
||||
gcc/config/rs6000/linux64.h | 15 +++++----------
|
||||
gcc/config/sh/linux.h | 2 +-
|
||||
gcc/config/rs6000/sysv4.h | 4 ++--
|
||||
gcc/config/s390/linux.h | 8 ++++----
|
||||
gcc/config/sh/linux.h | 4 ++--
|
||||
gcc/config/sparc/linux.h | 2 +-
|
||||
gcc/config/sparc/linux64.h | 4 ++--
|
||||
13 files changed, 31 insertions(+), 36 deletions(-)
|
||||
17 files changed, 53 insertions(+), 58 deletions(-)
|
||||
|
||||
diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h
|
||||
index 7f2529a2a1d..4bcae7f3110 100644
|
||||
--- a/gcc/config/aarch64/aarch64-linux.h
|
||||
+++ b/gcc/config/aarch64/aarch64-linux.h
|
||||
Index: gcc-11.3.0/gcc/config/aarch64/aarch64-linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/aarch64/aarch64-linux.h
|
||||
+++ gcc-11.3.0/gcc/config/aarch64/aarch64-linux.h
|
||||
@@ -21,10 +21,10 @@
|
||||
#ifndef GCC_AARCH64_LINUX_H
|
||||
#define GCC_AARCH64_LINUX_H
|
||||
@@ -47,11 +56,11 @@ index 7f2529a2a1d..4bcae7f3110 100644
|
||||
|
||||
#undef ASAN_CC1_SPEC
|
||||
#define ASAN_CC1_SPEC "%{%:sanitize(address):-funwind-tables}"
|
||||
diff --git a/gcc/config/alpha/linux-elf.h b/gcc/config/alpha/linux-elf.h
|
||||
index c1dae8ca2cf..3ce2b76c1a4 100644
|
||||
--- a/gcc/config/alpha/linux-elf.h
|
||||
+++ b/gcc/config/alpha/linux-elf.h
|
||||
@@ -23,8 +23,8 @@ along with GCC; see the file COPYING3. If not see
|
||||
Index: gcc-11.3.0/gcc/config/alpha/linux-elf.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/alpha/linux-elf.h
|
||||
+++ gcc-11.3.0/gcc/config/alpha/linux-elf.h
|
||||
@@ -23,8 +23,8 @@ along with GCC; see the file COPYING3.
|
||||
#define EXTRA_SPECS \
|
||||
{ "elf_dynamic_linker", ELF_DYNAMIC_LINKER },
|
||||
|
||||
@@ -62,10 +71,10 @@ index c1dae8ca2cf..3ce2b76c1a4 100644
|
||||
#if DEFAULT_LIBC == LIBC_UCLIBC
|
||||
#define CHOOSE_DYNAMIC_LINKER(G, U) "%{mglibc:" G ";:" U "}"
|
||||
#elif DEFAULT_LIBC == LIBC_GLIBC
|
||||
diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h
|
||||
index 85d0136e76e..6bd95855827 100644
|
||||
--- a/gcc/config/arm/linux-eabi.h
|
||||
+++ b/gcc/config/arm/linux-eabi.h
|
||||
Index: gcc-11.3.0/gcc/config/arm/linux-eabi.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/linux-eabi.h
|
||||
+++ gcc-11.3.0/gcc/config/arm/linux-eabi.h
|
||||
@@ -65,8 +65,8 @@
|
||||
GLIBC_DYNAMIC_LINKER_DEFAULT and TARGET_DEFAULT_FLOAT_ABI. */
|
||||
|
||||
@@ -77,10 +86,19 @@ index 85d0136e76e..6bd95855827 100644
|
||||
#define GLIBC_DYNAMIC_LINKER_DEFAULT GLIBC_DYNAMIC_LINKER_SOFT_FLOAT
|
||||
|
||||
#define GLIBC_DYNAMIC_LINKER \
|
||||
diff --git a/gcc/config/arm/linux-elf.h b/gcc/config/arm/linux-elf.h
|
||||
index 0c1c4e70b6b..6bd643ade11 100644
|
||||
--- a/gcc/config/arm/linux-elf.h
|
||||
+++ b/gcc/config/arm/linux-elf.h
|
||||
@@ -89,7 +89,7 @@
|
||||
#define MUSL_DYNAMIC_LINKER_E "%{mbig-endian:eb}"
|
||||
#endif
|
||||
#define MUSL_DYNAMIC_LINKER \
|
||||
- "/lib/ld-musl-arm" MUSL_DYNAMIC_LINKER_E "%{mfloat-abi=hard:hf}%{mfdpic:-fdpic}.so.1"
|
||||
+ SYSTEMLIBS_DIR "ld-musl-arm" MUSL_DYNAMIC_LINKER_E "%{mfloat-abi=hard:hf}%{mfdpic:-fdpic}.so.1"
|
||||
|
||||
/* At this point, bpabi.h will have clobbered LINK_SPEC. We want to
|
||||
use the GNU/Linux version, not the generic BPABI version. */
|
||||
Index: gcc-11.3.0/gcc/config/arm/linux-elf.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/linux-elf.h
|
||||
+++ gcc-11.3.0/gcc/config/arm/linux-elf.h
|
||||
@@ -60,7 +60,7 @@
|
||||
|
||||
#define LIBGCC_SPEC "%{mfloat-abi=soft*:-lfloat} -lgcc"
|
||||
@@ -90,11 +108,11 @@ index 0c1c4e70b6b..6bd643ade11 100644
|
||||
|
||||
#define LINUX_TARGET_LINK_SPEC "%{h*} \
|
||||
%{static:-Bstatic} \
|
||||
diff --git a/gcc/config/i386/linux.h b/gcc/config/i386/linux.h
|
||||
index 04b274f1654..7aafcf3ac2d 100644
|
||||
--- a/gcc/config/i386/linux.h
|
||||
+++ b/gcc/config/i386/linux.h
|
||||
@@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see
|
||||
Index: gcc-11.3.0/gcc/config/i386/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/i386/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/i386/linux.h
|
||||
@@ -20,7 +20,7 @@ along with GCC; see the file COPYING3.
|
||||
<http://www.gnu.org/licenses/>. */
|
||||
|
||||
#define GNU_USER_LINK_EMULATION "elf_i386"
|
||||
@@ -102,12 +120,13 @@ index 04b274f1654..7aafcf3ac2d 100644
|
||||
+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2"
|
||||
|
||||
#undef MUSL_DYNAMIC_LINKER
|
||||
#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-i386.so.1"
|
||||
diff --git a/gcc/config/i386/linux64.h b/gcc/config/i386/linux64.h
|
||||
index b3822ced528..92d303e80d6 100644
|
||||
--- a/gcc/config/i386/linux64.h
|
||||
+++ b/gcc/config/i386/linux64.h
|
||||
@@ -27,9 +27,9 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
|
||||
-#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-i386.so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-i386.so.1"
|
||||
Index: gcc-11.3.0/gcc/config/i386/linux64.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/i386/linux64.h
|
||||
+++ gcc-11.3.0/gcc/config/i386/linux64.h
|
||||
@@ -27,13 +27,13 @@ see the files COPYING3 and COPYING.RUNTI
|
||||
#define GNU_USER_LINK_EMULATION64 "elf_x86_64"
|
||||
#define GNU_USER_LINK_EMULATIONX32 "elf32_x86_64"
|
||||
|
||||
@@ -119,12 +138,19 @@ index b3822ced528..92d303e80d6 100644
|
||||
+#define GLIBC_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ld-linux-x32.so.2"
|
||||
|
||||
#undef MUSL_DYNAMIC_LINKER32
|
||||
#define MUSL_DYNAMIC_LINKER32 "/lib/ld-musl-i386.so.1"
|
||||
diff --git a/gcc/config/linux.h b/gcc/config/linux.h
|
||||
index 4e1db60fced..87efc5f69fe 100644
|
||||
--- a/gcc/config/linux.h
|
||||
+++ b/gcc/config/linux.h
|
||||
@@ -94,10 +94,10 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
|
||||
-#define MUSL_DYNAMIC_LINKER32 "/lib/ld-musl-i386.so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-musl-i386.so.1"
|
||||
#undef MUSL_DYNAMIC_LINKER64
|
||||
-#define MUSL_DYNAMIC_LINKER64 "/lib/ld-musl-x86_64.so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld-musl-x86_64.so.1"
|
||||
#undef MUSL_DYNAMIC_LINKERX32
|
||||
-#define MUSL_DYNAMIC_LINKERX32 "/lib/ld-musl-x32.so.1"
|
||||
+#define MUSL_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ld-musl-x32.so.1"
|
||||
Index: gcc-11.3.0/gcc/config/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/linux.h
|
||||
@@ -94,10 +94,10 @@ see the files COPYING3 and COPYING.RUNTI
|
||||
GLIBC_DYNAMIC_LINKER must be defined for each target using them, or
|
||||
GLIBC_DYNAMIC_LINKER32 and GLIBC_DYNAMIC_LINKER64 for targets
|
||||
supporting both 32-bit and 64-bit compilation. */
|
||||
@@ -139,11 +165,33 @@ index 4e1db60fced..87efc5f69fe 100644
|
||||
#define BIONIC_DYNAMIC_LINKER "/system/bin/linker"
|
||||
#define BIONIC_DYNAMIC_LINKER32 "/system/bin/linker"
|
||||
#define BIONIC_DYNAMIC_LINKER64 "/system/bin/linker64"
|
||||
diff --git a/gcc/config/mips/linux.h b/gcc/config/mips/linux.h
|
||||
index 44a85e410d9..8d41b5574f6 100644
|
||||
--- a/gcc/config/mips/linux.h
|
||||
+++ b/gcc/config/mips/linux.h
|
||||
@@ -22,20 +22,20 @@ along with GCC; see the file COPYING3. If not see
|
||||
Index: gcc-11.3.0/gcc/config/microblaze/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/microblaze/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/microblaze/linux.h
|
||||
@@ -28,7 +28,7 @@
|
||||
#undef TLS_NEEDS_GOT
|
||||
#define TLS_NEEDS_GOT 1
|
||||
|
||||
-#define GLIBC_DYNAMIC_LINKER "/lib/ld.so.1"
|
||||
+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "/ld.so.1"
|
||||
#define UCLIBC_DYNAMIC_LINKER "/lib/ld-uClibc.so.0"
|
||||
|
||||
#if TARGET_BIG_ENDIAN_DEFAULT == 0 /* LE */
|
||||
@@ -38,7 +38,7 @@
|
||||
#endif
|
||||
|
||||
#undef MUSL_DYNAMIC_LINKER
|
||||
-#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-microblaze" MUSL_DYNAMIC_LINKER_E ".so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-microblaze" MUSL_DYNAMIC_LINKER_E ".so.1"
|
||||
|
||||
#undef SUBTARGET_EXTRA_SPECS
|
||||
#define SUBTARGET_EXTRA_SPECS \
|
||||
Index: gcc-11.3.0/gcc/config/mips/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/mips/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/mips/linux.h
|
||||
@@ -22,29 +22,29 @@ along with GCC; see the file COPYING3.
|
||||
#define GNU_USER_LINK_EMULATIONN32 "elf32%{EB:b}%{EL:l}tsmipn32"
|
||||
|
||||
#define GLIBC_DYNAMIC_LINKER32 \
|
||||
@@ -170,11 +218,36 @@ index 44a85e410d9..8d41b5574f6 100644
|
||||
|
||||
#undef MUSL_DYNAMIC_LINKER32
|
||||
#define MUSL_DYNAMIC_LINKER32 \
|
||||
diff --git a/gcc/config/riscv/linux.h b/gcc/config/riscv/linux.h
|
||||
index fce5b896e6e..03aa55cb5ab 100644
|
||||
--- a/gcc/config/riscv/linux.h
|
||||
+++ b/gcc/config/riscv/linux.h
|
||||
@@ -22,7 +22,7 @@ along with GCC; see the file COPYING3. If not see
|
||||
- "/lib/ld-musl-mips%{mips32r6|mips64r6:r6}%{EL:el}%{msoft-float:-sf}.so.1"
|
||||
+ SYSTEMLIBS_DIR "ld-musl-mips%{mips32r6|mips64r6:r6}%{EL:el}%{msoft-float:-sf}.so.1"
|
||||
#undef MUSL_DYNAMIC_LINKER64
|
||||
#define MUSL_DYNAMIC_LINKER64 \
|
||||
- "/lib/ld-musl-mips64%{mips64r6:r6}%{EL:el}%{msoft-float:-sf}.so.1"
|
||||
+ SYSTEMLIBS_DIR "ld-musl-mips64%{mips64r6:r6}%{EL:el}%{msoft-float:-sf}.so.1"
|
||||
#define MUSL_DYNAMIC_LINKERN32 \
|
||||
- "/lib/ld-musl-mipsn32%{mips64r6:r6}%{EL:el}%{msoft-float:-sf}.so.1"
|
||||
+ SYSTEMLIBS_DIR "ld-musl-mipsn32%{mips64r6:r6}%{EL:el}%{msoft-float:-sf}.so.1"
|
||||
|
||||
#define BIONIC_DYNAMIC_LINKERN32 "/system/bin/linker32"
|
||||
#define GNU_USER_DYNAMIC_LINKERN32 \
|
||||
Index: gcc-11.3.0/gcc/config/nios2/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/nios2/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/nios2/linux.h
|
||||
@@ -29,7 +29,7 @@
|
||||
#undef CPP_SPEC
|
||||
#define CPP_SPEC "%{posix:-D_POSIX_SOURCE} %{pthread:-D_REENTRANT}"
|
||||
|
||||
-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux-nios2.so.1"
|
||||
+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux-nios2.so.1"
|
||||
|
||||
#undef LINK_SPEC
|
||||
#define LINK_SPEC LINK_SPEC_ENDIAN \
|
||||
Index: gcc-11.3.0/gcc/config/riscv/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/riscv/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/riscv/linux.h
|
||||
@@ -22,7 +22,7 @@ along with GCC; see the file COPYING3.
|
||||
GNU_USER_TARGET_OS_CPP_BUILTINS(); \
|
||||
} while (0)
|
||||
|
||||
@@ -183,10 +256,19 @@ index fce5b896e6e..03aa55cb5ab 100644
|
||||
|
||||
#define MUSL_ABI_SUFFIX \
|
||||
"%{mabi=ilp32:-sf}" \
|
||||
diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h
|
||||
index e3f2cd254f6..a11e01faa3d 100644
|
||||
--- a/gcc/config/rs6000/linux64.h
|
||||
+++ b/gcc/config/rs6000/linux64.h
|
||||
@@ -33,7 +33,7 @@ along with GCC; see the file COPYING3.
|
||||
"%{mabi=lp64d:}"
|
||||
|
||||
#undef MUSL_DYNAMIC_LINKER
|
||||
-#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-riscv" XLEN_SPEC MUSL_ABI_SUFFIX ".so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-riscv" XLEN_SPEC MUSL_ABI_SUFFIX ".so.1"
|
||||
|
||||
/* Because RISC-V only has word-sized atomics, it requries libatomic where
|
||||
others do not. So link libatomic by default, as needed. */
|
||||
Index: gcc-11.3.0/gcc/config/rs6000/linux64.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/rs6000/linux64.h
|
||||
+++ gcc-11.3.0/gcc/config/rs6000/linux64.h
|
||||
@@ -336,24 +336,19 @@ extern int dot_symbols;
|
||||
#undef LINK_OS_DEFAULT_SPEC
|
||||
#define LINK_OS_DEFAULT_SPEC "%(link_os_linux)"
|
||||
@@ -217,12 +299,55 @@ index e3f2cd254f6..a11e01faa3d 100644
|
||||
|
||||
#undef DEFAULT_ASM_ENDIAN
|
||||
#if (TARGET_DEFAULT & MASK_LITTLE_ENDIAN)
|
||||
diff --git a/gcc/config/sh/linux.h b/gcc/config/sh/linux.h
|
||||
index 7558d2f7195..3aaa6c3a078 100644
|
||||
--- a/gcc/config/sh/linux.h
|
||||
+++ b/gcc/config/sh/linux.h
|
||||
@@ -64,7 +64,7 @@ along with GCC; see the file COPYING3. If not see
|
||||
"/lib/ld-musl-sh" MUSL_DYNAMIC_LINKER_E MUSL_DYNAMIC_LINKER_FP \
|
||||
Index: gcc-11.3.0/gcc/config/rs6000/sysv4.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/rs6000/sysv4.h
|
||||
+++ gcc-11.3.0/gcc/config/rs6000/sysv4.h
|
||||
@@ -780,10 +780,10 @@ GNU_USER_TARGET_CC1_SPEC
|
||||
|
||||
#define MUSL_DYNAMIC_LINKER_E ENDIAN_SELECT("","le","")
|
||||
|
||||
-#define GLIBC_DYNAMIC_LINKER "/lib/ld.so.1"
|
||||
+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld.so.1"
|
||||
#undef MUSL_DYNAMIC_LINKER
|
||||
#define MUSL_DYNAMIC_LINKER \
|
||||
- "/lib/ld-musl-powerpc" MUSL_DYNAMIC_LINKER_E "%{msoft-float:-sf}.so.1"
|
||||
+ SYSTEMLIBS_DIR "ld-musl-powerpc" MUSL_DYNAMIC_LINKER_E "%{msoft-float:-sf}.so.1"
|
||||
|
||||
#ifndef GNU_USER_DYNAMIC_LINKER
|
||||
#define GNU_USER_DYNAMIC_LINKER GLIBC_DYNAMIC_LINKER
|
||||
Index: gcc-11.3.0/gcc/config/s390/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/s390/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/s390/linux.h
|
||||
@@ -72,13 +72,13 @@ along with GCC; see the file COPYING3.
|
||||
#define MULTILIB_DEFAULTS { "m31" }
|
||||
#endif
|
||||
|
||||
-#define GLIBC_DYNAMIC_LINKER32 "/lib/ld.so.1"
|
||||
-#define GLIBC_DYNAMIC_LINKER64 "/lib/ld64.so.1"
|
||||
+#define GLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld.so.1"
|
||||
+#define GLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld64.so.1"
|
||||
|
||||
#undef MUSL_DYNAMIC_LINKER32
|
||||
-#define MUSL_DYNAMIC_LINKER32 "/lib/ld-musl-s390.so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-musl-s390.so.1"
|
||||
#undef MUSL_DYNAMIC_LINKER64
|
||||
-#define MUSL_DYNAMIC_LINKER64 "/lib/ld-musl-s390x.so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld-musl-s390x.so.1"
|
||||
|
||||
#undef LINK_SPEC
|
||||
#define LINK_SPEC \
|
||||
Index: gcc-11.3.0/gcc/config/sh/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/sh/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/sh/linux.h
|
||||
@@ -61,10 +61,10 @@ along with GCC; see the file COPYING3.
|
||||
|
||||
#undef MUSL_DYNAMIC_LINKER
|
||||
#define MUSL_DYNAMIC_LINKER \
|
||||
- "/lib/ld-musl-sh" MUSL_DYNAMIC_LINKER_E MUSL_DYNAMIC_LINKER_FP \
|
||||
+ SYSTEMLIBS_DIR "ld-musl-sh" MUSL_DYNAMIC_LINKER_E MUSL_DYNAMIC_LINKER_FP \
|
||||
"%{mfdpic:-fdpic}.so.1"
|
||||
|
||||
-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2"
|
||||
@@ -230,11 +355,11 @@ index 7558d2f7195..3aaa6c3a078 100644
|
||||
|
||||
#undef SUBTARGET_LINK_EMUL_SUFFIX
|
||||
#define SUBTARGET_LINK_EMUL_SUFFIX "%{mfdpic:_fd;:_linux}"
|
||||
diff --git a/gcc/config/sparc/linux.h b/gcc/config/sparc/linux.h
|
||||
index 2550d7ee8f0..a94f4cd8ba2 100644
|
||||
--- a/gcc/config/sparc/linux.h
|
||||
+++ b/gcc/config/sparc/linux.h
|
||||
@@ -78,7 +78,7 @@ extern const char *host_detect_local_cpu (int argc, const char **argv);
|
||||
Index: gcc-11.3.0/gcc/config/sparc/linux.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/sparc/linux.h
|
||||
+++ gcc-11.3.0/gcc/config/sparc/linux.h
|
||||
@@ -78,7 +78,7 @@ extern const char *host_detect_local_cpu
|
||||
When the -shared link option is used a final link is not being
|
||||
done. */
|
||||
|
||||
@@ -243,11 +368,11 @@ index 2550d7ee8f0..a94f4cd8ba2 100644
|
||||
|
||||
#undef LINK_SPEC
|
||||
#define LINK_SPEC "-m elf32_sparc %{shared:-shared} \
|
||||
diff --git a/gcc/config/sparc/linux64.h b/gcc/config/sparc/linux64.h
|
||||
index 95af8afa9b5..63127afb074 100644
|
||||
--- a/gcc/config/sparc/linux64.h
|
||||
+++ b/gcc/config/sparc/linux64.h
|
||||
@@ -78,8 +78,8 @@ along with GCC; see the file COPYING3. If not see
|
||||
Index: gcc-11.3.0/gcc/config/sparc/linux64.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/sparc/linux64.h
|
||||
+++ gcc-11.3.0/gcc/config/sparc/linux64.h
|
||||
@@ -78,8 +78,8 @@ along with GCC; see the file COPYING3.
|
||||
When the -shared link option is used a final link is not being
|
||||
done. */
|
||||
|
||||
|
||||
@@ -18,13 +18,13 @@ Upstream-Status: Pending
|
||||
gcc/config/arm/linux-eabi.h | 6 +++++-
|
||||
1 file changed, 5 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h
|
||||
index 6bd95855827..77befab5da8 100644
|
||||
--- a/gcc/config/arm/linux-eabi.h
|
||||
+++ b/gcc/config/arm/linux-eabi.h
|
||||
Index: gcc-11.3.0/gcc/config/arm/linux-eabi.h
|
||||
===================================================================
|
||||
--- gcc-11.3.0.orig/gcc/config/arm/linux-eabi.h
|
||||
+++ gcc-11.3.0/gcc/config/arm/linux-eabi.h
|
||||
@@ -91,10 +91,14 @@
|
||||
#define MUSL_DYNAMIC_LINKER \
|
||||
"/lib/ld-musl-arm" MUSL_DYNAMIC_LINKER_E "%{mfloat-abi=hard:hf}%{mfdpic:-fdpic}.so.1"
|
||||
SYSTEMLIBS_DIR "ld-musl-arm" MUSL_DYNAMIC_LINKER_E "%{mfloat-abi=hard:hf}%{mfdpic:-fdpic}.so.1"
|
||||
|
||||
+/* For armv4 we pass --fix-v4bx to linker to support EABI */
|
||||
+#undef TARGET_FIX_V4BX_SPEC
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
From 9ec4db8e910d9a51ae43f6b20d4bf1dac2d8cca8 Mon Sep 17 00:00:00 2001
|
||||
From: Khem Raj <raj.khem@gmail.com>
|
||||
Date: Tue, 2 Feb 2016 10:26:10 -0800
|
||||
Subject: [PATCH] nios2: Define MUSL_DYNAMIC_LINKER
|
||||
|
||||
Upstream-Status: Backport [https://gcc.gnu.org/git/?p=gcc.git;a=commitdiff;h=e5ddbbf992b909d8e38851bd3179d29389e6ac97]
|
||||
|
||||
Signed-off-by: Marek Vasut <marex@denx.de>
|
||||
Signed-off-by: Khem Raj <raj.khem@gmail.com>
|
||||
---
|
||||
gcc/config/nios2/linux.h | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/gcc/config/nios2/linux.h b/gcc/config/nios2/linux.h
|
||||
index 08edf1521f6..15696d86241 100644
|
||||
--- a/gcc/config/nios2/linux.h
|
||||
+++ b/gcc/config/nios2/linux.h
|
||||
@@ -30,6 +30,7 @@
|
||||
#define CPP_SPEC "%{posix:-D_POSIX_SOURCE} %{pthread:-D_REENTRANT}"
|
||||
|
||||
#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux-nios2.so.1"
|
||||
+#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-nios2.so.1"
|
||||
|
||||
#undef LINK_SPEC
|
||||
#define LINK_SPEC LINK_SPEC_ENDIAN \
|
||||
@@ -18,6 +18,8 @@ SRC_URI += "\
|
||||
file://0001-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch \
|
||||
file://CVE-2022-27664.patch \
|
||||
file://0001-net-http-httputil-avoid-query-parameter-smuggling.patch \
|
||||
file://CVE-2022-41715.patch \
|
||||
file://CVE-2022-41717.patch \
|
||||
"
|
||||
SRC_URI[main.sha256sum] = "a1a48b23afb206f95e7bbaa9b898d965f90826f6f1d1fc0c1d784ada0cd300fd"
270
meta/recipes-devtools/go/go-1.18/CVE-2022-41715.patch
Normal file
@@ -0,0 +1,270 @@
|
||||
From e9017c2416ad0ef642f5e0c2eab2dbf3cba4d997 Mon Sep 17 00:00:00 2001
|
||||
From: Russ Cox <rsc@golang.org>
|
||||
Date: Wed, 28 Sep 2022 11:18:51 -0400
|
||||
Subject: [PATCH] [release-branch.go1.18] regexp: limit size of parsed regexps
|
||||
|
||||
Set a 128 MB limit on the amount of space used by []syntax.Inst
|
||||
in the compiled form corresponding to a given regexp.
|
||||
|
||||
Also set a 128 MB limit on the rune storage in the *syntax.Regexp
|
||||
tree itself.
|
||||
|
||||
Thanks to Adam Korczynski (ADA Logics) and OSS-Fuzz for reporting this issue.
|
||||
|
||||
Fixes CVE-2022-41715.
|
||||
Updates #55949.
|
||||
Fixes #55950.
|
||||
|
||||
Change-Id: Ia656baed81564436368cf950e1c5409752f28e1b
|
||||
Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1592136
|
||||
TryBot-Result: Security TryBots <security-trybots@go-security-trybots.iam.gserviceaccount.com>
|
||||
Reviewed-by: Damien Neil <dneil@google.com>
|
||||
Run-TryBot: Roland Shoemaker <bracewell@google.com>
|
||||
Reviewed-by: Julie Qiu <julieqiu@google.com>
|
||||
Reviewed-on: https://go-review.googlesource.com/c/go/+/438501
|
||||
Run-TryBot: Carlos Amedee <carlos@golang.org>
|
||||
Reviewed-by: Carlos Amedee <carlos@golang.org>
|
||||
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
|
||||
TryBot-Result: Gopher Robot <gobot@golang.org>
|
||||
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
|
||||
|
||||
Upstream-Status: Backport [https://github.com/golang/go/commit/e9017c2416ad0ef642f5e0c2eab2dbf3cba4d997]
|
||||
CVE: CVE-2022-41715
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
src/regexp/syntax/parse.go | 145 ++++++++++++++++++++++++++++++--
|
||||
src/regexp/syntax/parse_test.go | 13 +--
|
||||
2 files changed, 148 insertions(+), 10 deletions(-)
|
||||
|
||||
diff --git a/src/regexp/syntax/parse.go b/src/regexp/syntax/parse.go
|
||||
index d7cf2af..3792960 100644
|
||||
--- a/src/regexp/syntax/parse.go
|
||||
+++ b/src/regexp/syntax/parse.go
|
||||
@@ -90,15 +90,49 @@ const (
|
||||
// until we've allocated at least maxHeight Regexp structures.
|
||||
const maxHeight = 1000
|
||||
|
||||
+// maxSize is the maximum size of a compiled regexp in Insts.
|
||||
+// It too is somewhat arbitrarily chosen, but the idea is to be large enough
|
||||
+// to allow significant regexps while at the same time small enough that
|
||||
+// the compiled form will not take up too much memory.
|
||||
+// 128 MB is enough for a 3.3 million Inst structures, which roughly
|
||||
+// corresponds to a 3.3 MB regexp.
|
||||
+const (
|
||||
+ maxSize = 128 << 20 / instSize
|
||||
+ instSize = 5 * 8 // byte, 2 uint32, slice is 5 64-bit words
|
||||
+)
|
||||
+
|
||||
+// maxRunes is the maximum number of runes allowed in a regexp tree
|
||||
+// counting the runes in all the nodes.
|
||||
+// Ignoring character classes p.numRunes is always less than the length of the regexp.
|
||||
+// Character classes can make it much larger: each \pL adds 1292 runes.
|
||||
+// 128 MB is enough for 32M runes, which is over 26k \pL instances.
|
||||
+// Note that repetitions do not make copies of the rune slices,
|
||||
+// so \pL{1000} is only one rune slice, not 1000.
|
||||
+// We could keep a cache of character classes we've seen,
|
||||
+// so that all the \pL we see use the same rune list,
|
||||
+// but that doesn't remove the problem entirely:
|
||||
+// consider something like [\pL01234][\pL01235][\pL01236]...[\pL^&*()].
|
||||
+// And because the Rune slice is exposed directly in the Regexp,
|
||||
+// there is not an opportunity to change the representation to allow
|
||||
+// partial sharing between different character classes.
|
||||
+// So the limit is the best we can do.
|
||||
+const (
|
||||
+ maxRunes = 128 << 20 / runeSize
|
||||
+ runeSize = 4 // rune is int32
|
||||
+)
|
||||
+
|
||||
type parser struct {
|
||||
flags Flags // parse mode flags
|
||||
stack []*Regexp // stack of parsed expressions
|
||||
free *Regexp
|
||||
numCap int // number of capturing groups seen
|
||||
wholeRegexp string
|
||||
- tmpClass []rune // temporary char class work space
|
||||
- numRegexp int // number of regexps allocated
|
||||
- height map[*Regexp]int // regexp height for height limit check
|
||||
+ tmpClass []rune // temporary char class work space
|
||||
+ numRegexp int // number of regexps allocated
|
||||
+ numRunes int // number of runes in char classes
|
||||
+ repeats int64 // product of all repetitions seen
|
||||
+ height map[*Regexp]int // regexp height, for height limit check
|
||||
+ size map[*Regexp]int64 // regexp compiled size, for size limit check
|
||||
}
|
||||
|
||||
func (p *parser) newRegexp(op Op) *Regexp {
|
||||
@@ -122,6 +156,104 @@ func (p *parser) reuse(re *Regexp) {
|
||||
p.free = re
|
||||
}
|
||||
|
||||
+func (p *parser) checkLimits(re *Regexp) {
|
||||
+ if p.numRunes > maxRunes {
|
||||
+ panic(ErrInternalError)
|
||||
+ }
|
||||
+ p.checkSize(re)
|
||||
+ p.checkHeight(re)
|
||||
+}
|
||||
+
|
||||
+func (p *parser) checkSize(re *Regexp) {
|
||||
+ if p.size == nil {
|
||||
+ // We haven't started tracking size yet.
|
||||
+ // Do a relatively cheap check to see if we need to start.
|
||||
+ // Maintain the product of all the repeats we've seen
|
||||
+ // and don't track if the total number of regexp nodes
|
||||
+ // we've seen times the repeat product is in budget.
|
||||
+ if p.repeats == 0 {
|
||||
+ p.repeats = 1
|
||||
+ }
|
||||
+ if re.Op == OpRepeat {
|
||||
+ n := re.Max
|
||||
+ if n == -1 {
|
||||
+ n = re.Min
|
||||
+ }
|
||||
+ if n <= 0 {
|
||||
+ n = 1
|
||||
+ }
|
||||
+ if int64(n) > maxSize/p.repeats {
|
||||
+ p.repeats = maxSize
|
||||
+ } else {
|
||||
+ p.repeats *= int64(n)
|
||||
+ }
|
||||
+ }
|
||||
+ if int64(p.numRegexp) < maxSize/p.repeats {
|
||||
+ return
|
||||
+ }
|
||||
+
|
||||
+ // We need to start tracking size.
|
||||
+ // Make the map and belatedly populate it
|
||||
+ // with info about everything we've constructed so far.
|
||||
+ p.size = make(map[*Regexp]int64)
|
||||
+ for _, re := range p.stack {
|
||||
+ p.checkSize(re)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ if p.calcSize(re, true) > maxSize {
|
||||
+ panic(ErrInternalError)
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func (p *parser) calcSize(re *Regexp, force bool) int64 {
|
||||
+ if !force {
|
||||
+ if size, ok := p.size[re]; ok {
|
||||
+ return size
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ var size int64
|
||||
+ switch re.Op {
|
||||
+ case OpLiteral:
|
||||
+ size = int64(len(re.Rune))
|
||||
+ case OpCapture, OpStar:
|
||||
+ // star can be 1+ or 2+; assume 2 pessimistically
|
||||
+ size = 2 + p.calcSize(re.Sub[0], false)
|
||||
+ case OpPlus, OpQuest:
|
||||
+ size = 1 + p.calcSize(re.Sub[0], false)
|
||||
+ case OpConcat:
|
||||
+ for _, sub := range re.Sub {
|
||||
+ size += p.calcSize(sub, false)
|
||||
+ }
|
||||
+ case OpAlternate:
|
||||
+ for _, sub := range re.Sub {
|
||||
+ size += p.calcSize(sub, false)
|
||||
+ }
|
||||
+ if len(re.Sub) > 1 {
|
||||
+ size += int64(len(re.Sub)) - 1
|
||||
+ }
|
||||
+ case OpRepeat:
|
||||
+ sub := p.calcSize(re.Sub[0], false)
|
||||
+ if re.Max == -1 {
|
||||
+ if re.Min == 0 {
|
||||
+ size = 2 + sub // x*
|
||||
+ } else {
|
||||
+ size = 1 + int64(re.Min)*sub // xxx+
|
||||
+ }
|
||||
+ break
|
||||
+ }
|
||||
+ // x{2,5} = xx(x(x(x)?)?)?
|
||||
+ size = int64(re.Max)*sub + int64(re.Max-re.Min)
|
||||
+ }
|
||||
+
|
||||
+ if size < 1 {
|
||||
+ size = 1
|
||||
+ }
|
||||
+ p.size[re] = size
|
||||
+ return size
|
||||
+}
|
||||
+
|
||||
func (p *parser) checkHeight(re *Regexp) {
|
||||
if p.numRegexp < maxHeight {
|
||||
return
|
||||
@@ -158,6 +290,7 @@ func (p *parser) calcHeight(re *Regexp, force bool) int {
|
||||
|
||||
// push pushes the regexp re onto the parse stack and returns the regexp.
|
||||
func (p *parser) push(re *Regexp) *Regexp {
|
||||
+ p.numRunes += len(re.Rune)
|
||||
if re.Op == OpCharClass && len(re.Rune) == 2 && re.Rune[0] == re.Rune[1] {
|
||||
// Single rune.
|
||||
if p.maybeConcat(re.Rune[0], p.flags&^FoldCase) {
|
||||
@@ -189,7 +322,7 @@ func (p *parser) push(re *Regexp) *Regexp {
|
||||
}
|
||||
|
||||
p.stack = append(p.stack, re)
|
||||
- p.checkHeight(re)
|
||||
+ p.checkLimits(re)
|
||||
return re
|
||||
}
|
||||
|
||||
@@ -299,7 +432,7 @@ func (p *parser) repeat(op Op, min, max int, before, after, lastRepeat string) (
|
||||
re.Sub = re.Sub0[:1]
|
||||
re.Sub[0] = sub
|
||||
p.stack[n-1] = re
|
||||
- p.checkHeight(re)
|
||||
+ p.checkLimits(re)
|
||||
|
||||
if op == OpRepeat && (min >= 2 || max >= 2) && !repeatIsValid(re, 1000) {
|
||||
return "", &Error{ErrInvalidRepeatSize, before[:len(before)-len(after)]}
|
||||
@@ -503,6 +636,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp {
|
||||
|
||||
for j := start; j < i; j++ {
|
||||
sub[j] = p.removeLeadingString(sub[j], len(str))
|
||||
+ p.checkLimits(sub[j])
|
||||
}
|
||||
suffix := p.collapse(sub[start:i], OpAlternate) // recurse
|
||||
|
||||
@@ -560,6 +694,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp {
|
||||
for j := start; j < i; j++ {
|
||||
reuse := j != start // prefix came from sub[start]
|
||||
sub[j] = p.removeLeadingRegexp(sub[j], reuse)
|
||||
+ p.checkLimits(sub[j])
|
||||
}
|
||||
suffix := p.collapse(sub[start:i], OpAlternate) // recurse
|
||||
|
||||
diff --git a/src/regexp/syntax/parse_test.go b/src/regexp/syntax/parse_test.go
|
||||
index 1ef6d8a..67e3c56 100644
|
||||
--- a/src/regexp/syntax/parse_test.go
|
||||
+++ b/src/regexp/syntax/parse_test.go
|
||||
@@ -484,12 +484,15 @@ var invalidRegexps = []string{
|
||||
`(?P<>a)`,
|
||||
`[a-Z]`,
|
||||
`(?i)[a-Z]`,
|
||||
- `a{100000}`,
|
||||
- `a{100000,}`,
|
||||
- "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})",
|
||||
- strings.Repeat("(", 1000) + strings.Repeat(")", 1000),
|
||||
- strings.Repeat("(?:", 1000) + strings.Repeat(")*", 1000),
|
||||
`\Q\E*`,
|
||||
+ `a{100000}`, // too much repetition
|
||||
+ `a{100000,}`, // too much repetition
|
||||
+ "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})", // too much repetition
|
||||
+ strings.Repeat("(", 1000) + strings.Repeat(")", 1000), // too deep
|
||||
+ strings.Repeat("(?:", 1000) + strings.Repeat(")*", 1000), // too deep
|
||||
+ "(" + strings.Repeat("(xx?)", 1000) + "){1000}", // too long
|
||||
+ strings.Repeat("(xx?){1000}", 1000), // too long
|
||||
+ strings.Repeat(`\pL`, 27000), // too many runes
|
||||
}
|
||||
|
||||
var onlyPerl = []string{
|
||||
--
|
89
meta/recipes-devtools/go/go-1.18/CVE-2022-41717.patch
Normal file
@@ -0,0 +1,89 @@
|
||||
From 618120c165669c00a1606505defea6ca755cdc27 Mon Sep 17 00:00:00 2001
|
||||
From: Damien Neil <dneil@google.com>
|
||||
Date: Wed, 30 Nov 2022 16:46:33 -0500
|
||||
Subject: [PATCH] [release-branch.go1.19] net/http: update bundled
|
||||
golang.org/x/net/http2
|
||||
|
||||
Disable cmd/internal/moddeps test, since this update includes PRIVATE
|
||||
track fixes.
|
||||
|
||||
For #56350.
|
||||
For #57009.
|
||||
Fixes CVE-2022-41717.
|
||||
|
||||
Change-Id: I5c6ce546add81f361dcf0d5123fa4eaaf8f0a03b
|
||||
Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1663835
|
||||
Reviewed-by: Tatiana Bradley <tatianabradley@google.com>
|
||||
Reviewed-by: Julie Qiu <julieqiu@google.com>
|
||||
Reviewed-on: https://go-review.googlesource.com/c/go/+/455363
|
||||
TryBot-Result: Gopher Robot <gobot@golang.org>
|
||||
Run-TryBot: Jenny Rakoczy <jenny@golang.org>
|
||||
Reviewed-by: Michael Pratt <mpratt@google.com>
|
||||
|
||||
Upstream-Status: Backport [https://github.com/golang/go/commit/618120c165669c00a1606505defea6ca755cdc27]
|
||||
CVE: CVE-2022-41717
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
src/cmd/internal/moddeps/moddeps_test.go | 1 +
|
||||
src/net/http/h2_bundle.go | 18 +++++++++++-------
|
||||
2 files changed, 12 insertions(+), 7 deletions(-)
|
||||
|
||||
diff --git a/src/cmd/internal/moddeps/moddeps_test.go b/src/cmd/internal/moddeps/moddeps_test.go
|
||||
index 3306e29..d48d43f 100644
|
||||
--- a/src/cmd/internal/moddeps/moddeps_test.go
|
||||
+++ b/src/cmd/internal/moddeps/moddeps_test.go
|
||||
@@ -34,6 +34,7 @@ import (
|
||||
// See issues 36852, 41409, and 43687.
|
||||
// (Also see golang.org/issue/27348.)
|
||||
func TestAllDependencies(t *testing.T) {
|
||||
+ t.Skip("TODO(#57009): 1.19.4 contains unreleased changes from vendored modules")
|
||||
t.Skip("TODO(#53977): 1.18.5 contains unreleased changes from vendored modules")
|
||||
|
||||
goBin := testenv.GoToolPath(t)
|
||||
diff --git a/src/net/http/h2_bundle.go b/src/net/http/h2_bundle.go
|
||||
index 6e2ef30..9d6abd8 100644
|
||||
--- a/src/net/http/h2_bundle.go
|
||||
+++ b/src/net/http/h2_bundle.go
|
||||
@@ -4189,6 +4189,7 @@ type http2serverConn struct {
|
||||
headerTableSize uint32
|
||||
peerMaxHeaderListSize uint32 // zero means unknown (default)
|
||||
canonHeader map[string]string // http2-lower-case -> Go-Canonical-Case
|
||||
+ canonHeaderKeysSize int // canonHeader keys size in bytes
|
||||
writingFrame bool // started writing a frame (on serve goroutine or separate)
|
||||
writingFrameAsync bool // started a frame on its own goroutine but haven't heard back on wroteFrameCh
|
||||
needsFrameFlush bool // last frame write wasn't a flush
|
||||
@@ -4368,6 +4369,13 @@ func (sc *http2serverConn) condlogf(err error, format string, args ...interface{
|
||||
}
|
||||
}
|
||||
|
||||
+// maxCachedCanonicalHeadersKeysSize is an arbitrarily-chosen limit on the size
|
||||
+// of the entries in the canonHeader cache.
|
||||
+// This should be larger than the size of unique, uncommon header keys likely to
|
||||
+// be sent by the peer, while not so high as to permit unreasonable memory usage
|
||||
+// if the peer sends an unbounded number of unique header keys.
|
||||
+const http2maxCachedCanonicalHeadersKeysSize = 2048
|
||||
+
|
||||
func (sc *http2serverConn) canonicalHeader(v string) string {
|
||||
sc.serveG.check()
|
||||
http2buildCommonHeaderMapsOnce()
|
||||
@@ -4383,14 +4391,10 @@ func (sc *http2serverConn) canonicalHeader(v string) string {
|
||||
sc.canonHeader = make(map[string]string)
|
||||
}
|
||||
cv = CanonicalHeaderKey(v)
|
||||
- // maxCachedCanonicalHeaders is an arbitrarily-chosen limit on the number of
|
||||
- // entries in the canonHeader cache. This should be larger than the number
|
||||
- // of unique, uncommon header keys likely to be sent by the peer, while not
|
||||
- // so high as to permit unreaasonable memory usage if the peer sends an unbounded
|
||||
- // number of unique header keys.
|
||||
- const maxCachedCanonicalHeaders = 32
|
||||
- if len(sc.canonHeader) < maxCachedCanonicalHeaders {
|
||||
+ size := 100 + len(v)*2 // 100 bytes of map overhead + key + value
|
||||
+ if sc.canonHeaderKeysSize+size <= http2maxCachedCanonicalHeadersKeysSize {
|
||||
sc.canonHeader[v] = cv
|
||||
+ sc.canonHeaderKeysSize += size
|
||||
}
|
||||
return cv
|
||||
}
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -4,6 +4,8 @@ DEPENDS = "go-native virtual/${TARGET_PREFIX}gcc-crosssdk virtual/nativesdk-${TA
|
||||
PN = "go-crosssdk-${SDK_SYS}"
|
||||
PROVIDES = "virtual/${TARGET_PREFIX}go-crosssdk"
|
||||
|
||||
export GOCACHE = "${B}/.cache"
|
||||
|
||||
do_configure[noexec] = "1"
|
||||
|
||||
do_compile() {
|
||||
|
||||
@@ -0,0 +1,97 @@
|
||||
From 6ebe9231cd34dacd32a964859bc509aaa1e3f5fd Mon Sep 17 00:00:00 2001
|
||||
From: Narpat Mali <narpat.mali@windriver.com>
|
||||
Date: Fri, 6 Jan 2023 14:13:10 +0000
|
||||
Subject: [PATCH] python3-git: CVE-2022-24439 fix from PR 1518
|
||||
|
||||
Fix command injection
|
||||
Add `--` in some commands that receive user input
|
||||
and if interpreted as options could lead to remote
|
||||
code execution (RCE).
|
||||
|
||||
There may be more commands that could benefit from `--`
|
||||
so the input is never interpreted as an option,
|
||||
but most of those aren't dangerous.
|
||||
|
||||
Fixed commands:
|
||||
|
||||
- push
|
||||
- pull
|
||||
- fetch
|
||||
- clone/clone_from and friends
|
||||
- archive (not sure if this one can be exploited, but it doesn't hurt
|
||||
adding `--` :))
|
||||
|
||||
For anyone using GitPython and exposing any of the GitPython methods to users,
|
||||
make sure to always validate the input (like if starts with `--`).
|
||||
And for anyone allowing users to pass arbitrary options, be aware
|
||||
that some options may lead fo RCE, like `--exc`, `--upload-pack`,
|
||||
`--receive-pack`, `--config` (#1516).
|
||||
|
||||
Ref #1517
|
||||
|
||||
CVE: CVE-2022-24439
|
||||
|
||||
Upstream-Status: Backport [https://github.com/gitpython-developers/GitPython/pull/1518]
|
||||
|
||||
Signed-off-by: Narpat Mali <narpat.mali@windriver.com>
|
||||
---
|
||||
git/remote.py | 6 +++---
|
||||
git/repo/base.py | 4 ++--
|
||||
2 files changed, 5 insertions(+), 5 deletions(-)
|
||||
|
||||
diff --git a/git/remote.py b/git/remote.py
|
||||
index 56f3c5b..59681bc 100644
|
||||
--- a/git/remote.py
|
||||
+++ b/git/remote.py
|
||||
@@ -881,7 +881,7 @@ class Remote(LazyMixin, IterableObj):
|
||||
else:
|
||||
args = [refspec]
|
||||
|
||||
- proc = self.repo.git.fetch(self, *args, as_process=True, with_stdout=False,
|
||||
+ proc = self.repo.git.fetch("--", self, *args, as_process=True, with_stdout=False,
|
||||
universal_newlines=True, v=verbose, **kwargs)
|
||||
res = self._get_fetch_info_from_stderr(proc, progress,
|
||||
kill_after_timeout=kill_after_timeout)
|
||||
@@ -905,7 +905,7 @@ class Remote(LazyMixin, IterableObj):
|
||||
# No argument refspec, then ensure the repo's config has a fetch refspec.
|
||||
self._assert_refspec()
|
||||
kwargs = add_progress(kwargs, self.repo.git, progress)
|
||||
- proc = self.repo.git.pull(self, refspec, with_stdout=False, as_process=True,
|
||||
+ proc = self.repo.git.pull("--", self, refspec, with_stdout=False, as_process=True,
|
||||
universal_newlines=True, v=True, **kwargs)
|
||||
res = self._get_fetch_info_from_stderr(proc, progress,
|
||||
kill_after_timeout=kill_after_timeout)
|
||||
@@ -945,7 +945,7 @@ class Remote(LazyMixin, IterableObj):
|
||||
If the operation fails completely, the length of the returned IterableList will
|
||||
be 0."""
|
||||
kwargs = add_progress(kwargs, self.repo.git, progress)
|
||||
- proc = self.repo.git.push(self, refspec, porcelain=True, as_process=True,
|
||||
+ proc = self.repo.git.push("--", self, refspec, porcelain=True, as_process=True,
|
||||
universal_newlines=True,
|
||||
kill_after_timeout=kill_after_timeout,
|
||||
**kwargs)
|
||||
diff --git a/git/repo/base.py b/git/repo/base.py
|
||||
index 7713c91..f14f929 100644
|
||||
--- a/git/repo/base.py
|
||||
+++ b/git/repo/base.py
|
||||
@@ -1072,7 +1072,7 @@ class Repo(object):
|
||||
multi = None
|
||||
if multi_options:
|
||||
multi = shlex.split(' '.join(multi_options))
|
||||
- proc = git.clone(multi, Git.polish_url(str(url)), clone_path, with_extended_output=True, as_process=True,
|
||||
+ proc = git.clone("--", multi, Git.polish_url(str(url)), clone_path, with_extended_output=True, as_process=True,
|
||||
v=True, universal_newlines=True, **add_progress(kwargs, git, progress))
|
||||
if progress:
|
||||
handle_process_output(proc, None, to_progress_instance(progress).new_message_handler(),
|
||||
@@ -1173,7 +1173,7 @@ class Repo(object):
|
||||
if not isinstance(path, (tuple, list)):
|
||||
path = [path]
|
||||
# end assure paths is list
|
||||
- self.git.archive(treeish, *path, **kwargs)
|
||||
+ self.git.archive("--", treeish, *path, **kwargs)
|
||||
return self
|
||||
|
||||
def has_separate_working_tree(self) -> bool:
|
||||
--
|
||||
2.34.1
|
||||
|
||||
@@ -0,0 +1,488 @@
|
||||
From fe9b71628767610a238e47cd46b82d411a7e871a Mon Sep 17 00:00:00 2001
|
||||
From: Narpat Mali <narpat.mali@windriver.com>
|
||||
Date: Sat, 7 Jan 2023 17:16:57 +0000
|
||||
Subject: [PATCH] python3-git: CVE-2022-24439 fix from PR 1521
|
||||
|
||||
Forbid unsafe protocol URLs in Repo.clone{,_from}()
|
||||
Since the URL is passed directly to git clone, and the remote-ext helper
|
||||
will happily execute shell commands, so by default disallow URLs that
|
||||
contain a "::" unless a new unsafe_protocols kwarg is passed.
|
||||
(CVE-2022-24439)
|
||||
|
||||
Fixes #1515
|
||||
|
||||
CVE: CVE-2022-24439
|
||||
|
||||
Upstream-Status: Backport [https://github.com/gitpython-developers/GitPython/pull/1521]
|
||||
|
||||
Signed-off-by: Narpat Mali <narpat.mali@windriver.com>
|
||||
---
|
||||
git/cmd.py | 51 ++++++++++++++++++++++++--
|
||||
git/exc.py | 8 ++++
|
||||
git/objects/submodule/base.py | 19 ++++++----
|
||||
git/remote.py | 69 +++++++++++++++++++++++++++++++----
|
||||
git/repo/base.py | 44 ++++++++++++++++++----
|
||||
5 files changed, 166 insertions(+), 25 deletions(-)
|
||||
|
||||
diff --git a/git/cmd.py b/git/cmd.py
|
||||
index 4f05698..77026d6 100644
|
||||
--- a/git/cmd.py
|
||||
+++ b/git/cmd.py
|
||||
@@ -4,6 +4,7 @@
|
||||
# This module is part of GitPython and is released under
|
||||
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
|
||||
from __future__ import annotations
|
||||
+import re
|
||||
from contextlib import contextmanager
|
||||
import io
|
||||
import logging
|
||||
@@ -31,7 +32,9 @@ from git.util import is_cygwin_git, cygpath, expand_path, remove_password_if_pre
|
||||
|
||||
from .exc import (
|
||||
GitCommandError,
|
||||
- GitCommandNotFound
|
||||
+ GitCommandNotFound,
|
||||
+ UnsafeOptionError,
|
||||
+ UnsafeProtocolError
|
||||
)
|
||||
from .util import (
|
||||
LazyMixin,
|
||||
@@ -225,6 +228,8 @@ class Git(LazyMixin):
|
||||
|
||||
_excluded_ = ('cat_file_all', 'cat_file_header', '_version_info')
|
||||
|
||||
+ re_unsafe_protocol = re.compile("(.+)::.+")
|
||||
+
|
||||
def __getstate__(self) -> Dict[str, Any]:
|
||||
return slots_to_dict(self, exclude=self._excluded_)
|
||||
|
||||
@@ -400,6 +405,44 @@ class Git(LazyMixin):
|
||||
url = url.replace("\\\\", "\\").replace("\\", "/")
|
||||
return url
|
||||
|
||||
+ @classmethod
|
||||
+ def check_unsafe_protocols(cls, url: str) -> None:
|
||||
+ """
|
||||
+ Check for unsafe protocols.
|
||||
+ Apart from the usual protocols (http, git, ssh),
|
||||
+ Git allows "remote helpers" that have the form `<transport>::<address>`,
|
||||
+ one of these helpers (`ext::`) can be used to invoke any arbitrary command.
|
||||
+ See:
|
||||
+ - https://git-scm.com/docs/gitremote-helpers
|
||||
+ - https://git-scm.com/docs/git-remote-ext
|
||||
+ """
|
||||
+ match = cls.re_unsafe_protocol.match(url)
|
||||
+ if match:
|
||||
+ protocol = match.group(1)
|
||||
+ raise UnsafeProtocolError(
|
||||
+ f"The `{protocol}::` protocol looks suspicious, use `allow_unsafe_protocols=True` to allow it."
|
||||
+ )
|
||||
+
|
||||
+ @classmethod
|
||||
+ def check_unsafe_options(cls, options: List[str], unsafe_options: List[str]) -> None:
|
||||
+ """
|
||||
+ Check for unsafe options.
|
||||
+ Some options that are passed to `git <command>` can be used to execute
|
||||
+ arbitrary commands, this are blocked by default.
|
||||
+ """
|
||||
+ # Options can be of the form `foo` or `--foo bar` `--foo=bar`,
|
||||
+ # so we need to check if they start with "--foo" or if they are equal to "foo".
|
||||
+ bare_unsafe_options = [
|
||||
+ option.lstrip("-")
|
||||
+ for option in unsafe_options
|
||||
+ ]
|
||||
+ for option in options:
|
||||
+ for unsafe_option, bare_option in zip(unsafe_options, bare_unsafe_options):
|
||||
+ if option.startswith(unsafe_option) or option == bare_option:
|
||||
+ raise UnsafeOptionError(
|
||||
+ f"{unsafe_option} is not allowed, use `allow_unsafe_options=True` to allow it."
|
||||
+ )
|
||||
+
|
||||
class AutoInterrupt(object):
|
||||
"""Kill/Interrupt the stored process instance once this instance goes out of scope. It is
|
||||
used to prevent processes piling up in case iterators stop reading.
|
||||
@@ -1068,12 +1111,12 @@ class Git(LazyMixin):
|
||||
return args
|
||||
|
||||
@classmethod
|
||||
- def __unpack_args(cls, arg_list: Sequence[str]) -> List[str]:
|
||||
+ def _unpack_args(cls, arg_list: Sequence[str]) -> List[str]:
|
||||
|
||||
outlist = []
|
||||
if isinstance(arg_list, (list, tuple)):
|
||||
for arg in arg_list:
|
||||
- outlist.extend(cls.__unpack_args(arg))
|
||||
+ outlist.extend(cls._unpack_args(arg))
|
||||
else:
|
||||
outlist.append(str(arg_list))
|
||||
|
||||
@@ -1154,7 +1197,7 @@ class Git(LazyMixin):
|
||||
# Prepare the argument list
|
||||
|
||||
opt_args = self.transform_kwargs(**opts_kwargs)
|
||||
- ext_args = self.__unpack_args([a for a in args if a is not None])
|
||||
+ ext_args = self._unpack_args([a for a in args if a is not None])
|
||||
|
||||
if insert_after_this_arg is None:
|
||||
args_list = opt_args + ext_args
|
||||
diff --git a/git/exc.py b/git/exc.py
|
||||
index e8ff784..5c96db2 100644
|
||||
--- a/git/exc.py
|
||||
+++ b/git/exc.py
|
||||
@@ -36,6 +36,14 @@ class NoSuchPathError(GitError, OSError):
|
||||
""" Thrown if a path could not be access by the system. """
|
||||
|
||||
|
||||
+class UnsafeProtocolError(GitError):
|
||||
+ """Thrown if unsafe protocols are passed without being explicitly allowed."""
|
||||
+
|
||||
+
|
||||
+class UnsafeOptionError(GitError):
|
||||
+ """Thrown if unsafe options are passed without being explicitly allowed."""
|
||||
+
|
||||
+
|
||||
class CommandError(GitError):
|
||||
"""Base class for exceptions thrown at every stage of `Popen()` execution.
|
||||
|
||||
diff --git a/git/objects/submodule/base.py b/git/objects/submodule/base.py
|
||||
index f782045..deb224e 100644
|
||||
--- a/git/objects/submodule/base.py
|
||||
+++ b/git/objects/submodule/base.py
|
||||
@@ -264,7 +264,8 @@ class Submodule(IndexObject, TraversableIterableObj):
|
||||
# end
|
||||
|
||||
@classmethod
|
||||
- def _clone_repo(cls, repo: 'Repo', url: str, path: PathLike, name: str, **kwargs: Any) -> 'Repo':
|
||||
+ def _clone_repo(cls, repo: 'Repo', url: str, path: PathLike, name: str,
|
||||
+ allow_unsafe_options: bool = False, allow_unsafe_protocols: bool = False,**kwargs: Any) -> 'Repo':
|
||||
""":return: Repo instance of newly cloned repository
|
||||
:param repo: our parent repository
|
||||
:param url: url to clone from
|
||||
@@ -281,7 +282,8 @@ class Submodule(IndexObject, TraversableIterableObj):
|
||||
module_checkout_path = osp.join(str(repo.working_tree_dir), path)
|
||||
# end
|
||||
|
||||
- clone = git.Repo.clone_from(url, module_checkout_path, **kwargs)
|
||||
+ clone = git.Repo.clone_from(url, module_checkout_path, allow_unsafe_options=allow_unsafe_options,
|
||||
+ allow_unsafe_protocols=allow_unsafe_protocols, **kwargs)
|
||||
if cls._need_gitfile_submodules(repo.git):
|
||||
cls._write_git_file_and_module_config(module_checkout_path, module_abspath)
|
||||
# end
|
||||
@@ -338,8 +340,8 @@ class Submodule(IndexObject, TraversableIterableObj):
|
||||
@classmethod
|
||||
def add(cls, repo: 'Repo', name: str, path: PathLike, url: Union[str, None] = None,
|
||||
branch: Union[str, None] = None, no_checkout: bool = False, depth: Union[int, None] = None,
|
||||
- env: Union[Mapping[str, str], None] = None, clone_multi_options: Union[Sequence[TBD], None] = None
|
||||
- ) -> 'Submodule':
|
||||
+ env: Union[Mapping[str, str], None] = None, clone_multi_options: Union[Sequence[TBD], None] = None,
|
||||
+ allow_unsafe_options: bool = False, allow_unsafe_protocols: bool = False,) -> 'Submodule':
|
||||
"""Add a new submodule to the given repository. This will alter the index
|
||||
as well as the .gitmodules file, but will not create a new commit.
|
||||
If the submodule already exists, no matter if the configuration differs
|
||||
@@ -447,7 +449,8 @@ class Submodule(IndexObject, TraversableIterableObj):
|
||||
kwargs['multi_options'] = clone_multi_options
|
||||
|
||||
# _clone_repo(cls, repo, url, path, name, **kwargs):
|
||||
- mrepo = cls._clone_repo(repo, url, path, name, env=env, **kwargs)
|
||||
+ mrepo = cls._clone_repo(repo, url, path, name, env=env, allow_unsafe_options=allow_unsafe_options,
|
||||
+ allow_unsafe_protocols=allow_unsafe_protocols, **kwargs)
|
||||
# END verify url
|
||||
|
||||
## See #525 for ensuring git urls in config-files valid under Windows.
|
||||
@@ -484,7 +487,8 @@ class Submodule(IndexObject, TraversableIterableObj):
|
||||
def update(self, recursive: bool = False, init: bool = True, to_latest_revision: bool = False,
|
||||
progress: Union['UpdateProgress', None] = None, dry_run: bool = False,
|
||||
force: bool = False, keep_going: bool = False, env: Union[Mapping[str, str], None] = None,
|
||||
- clone_multi_options: Union[Sequence[TBD], None] = None) -> 'Submodule':
|
||||
+ clone_multi_options: Union[Sequence[TBD], None] = None, allow_unsafe_options: bool = False,
|
||||
+ allow_unsafe_protocols: bool = False) -> 'Submodule':
|
||||
"""Update the repository of this submodule to point to the checkout
|
||||
we point at with the binsha of this instance.
|
||||
|
||||
@@ -585,7 +589,8 @@ class Submodule(IndexObject, TraversableIterableObj):
|
||||
(self.url, checkout_module_abspath, self.name))
|
||||
if not dry_run:
|
||||
mrepo = self._clone_repo(self.repo, self.url, self.path, self.name, n=True, env=env,
|
||||
- multi_options=clone_multi_options)
|
||||
+ multi_options=clone_multi_options, allow_unsafe_options=allow_unsafe_options,
|
||||
+ allow_unsafe_protocols=allow_unsafe_protocols)
|
||||
# END handle dry-run
|
||||
progress.update(END | CLONE, 0, 1, prefix + "Done cloning to %s" % checkout_module_abspath)
|
||||
|
||||
diff --git a/git/remote.py b/git/remote.py
|
||||
index 59681bc..cea6b99 100644
|
||||
--- a/git/remote.py
|
||||
+++ b/git/remote.py
|
||||
@@ -473,6 +473,23 @@ class Remote(LazyMixin, IterableObj):
|
||||
__slots__ = ("repo", "name", "_config_reader")
|
||||
_id_attribute_ = "name"
|
||||
|
||||
+ unsafe_git_fetch_options = [
|
||||
+ # This option allows users to execute arbitrary commands.
|
||||
+ # https://git-scm.com/docs/git-fetch#Documentation/git-fetch.txt---upload-packltupload-packgt
|
||||
+ "--upload-pack",
|
||||
+ ]
|
||||
+ unsafe_git_pull_options = [
|
||||
+ # This option allows users to execute arbitrary commands.
|
||||
+ # https://git-scm.com/docs/git-pull#Documentation/git-pull.txt---upload-packltupload-packgt
|
||||
+ "--upload-pack"
|
||||
+ ]
|
||||
+ unsafe_git_push_options = [
|
||||
+ # This option allows users to execute arbitrary commands.
|
||||
+ # https://git-scm.com/docs/git-push#Documentation/git-push.txt---execltgit-receive-packgt
|
||||
+ "--receive-pack",
|
||||
+ "--exec",
|
||||
+ ]
|
||||
+
|
||||
def __init__(self, repo: 'Repo', name: str) -> None:
|
||||
"""Initialize a remote instance
|
||||
|
||||
@@ -549,7 +566,8 @@ class Remote(LazyMixin, IterableObj):
|
||||
yield Remote(repo, section[lbound + 1:rbound])
|
||||
# END for each configuration section
|
||||
|
||||
- def set_url(self, new_url: str, old_url: Optional[str] = None, **kwargs: Any) -> 'Remote':
|
||||
+ def set_url(self, new_url: str, old_url: Optional[str] = None,
|
||||
+ allow_unsafe_protocols: bool = False, **kwargs: Any) -> 'Remote':
|
||||
"""Configure URLs on current remote (cf command git remote set_url)
|
||||
|
||||
This command manages URLs on the remote.
|
||||
@@ -558,15 +576,17 @@ class Remote(LazyMixin, IterableObj):
|
||||
:param old_url: when set, replaces this URL with new_url for the remote
|
||||
:return: self
|
||||
"""
|
||||
+ if not allow_unsafe_protocols:
|
||||
+ Git.check_unsafe_protocols(new_url)
|
||||
scmd = 'set-url'
|
||||
kwargs['insert_kwargs_after'] = scmd
|
||||
if old_url:
|
||||
- self.repo.git.remote(scmd, self.name, new_url, old_url, **kwargs)
|
||||
+ self.repo.git.remote(scmd, "--", self.name, new_url, old_url, **kwargs)
|
||||
else:
|
||||
- self.repo.git.remote(scmd, self.name, new_url, **kwargs)
|
||||
+ self.repo.git.remote(scmd, "--", self.name, new_url, **kwargs)
|
||||
return self
|
||||
|
||||
- def add_url(self, url: str, **kwargs: Any) -> 'Remote':
|
||||
+ def add_url(self, url: str, allow_unsafe_protocols: bool = False, **kwargs: Any) -> 'Remote':
|
||||
"""Adds a new url on current remote (special case of git remote set_url)
|
||||
|
||||
This command adds new URLs to a given remote, making it possible to have
|
||||
@@ -575,7 +595,7 @@ class Remote(LazyMixin, IterableObj):
|
||||
:param url: string being the URL to add as an extra remote URL
|
||||
:return: self
|
||||
"""
|
||||
- return self.set_url(url, add=True)
|
||||
+ return self.set_url(url, add=True, allow_unsafe_protocols=allow_unsafe_protocols)
|
||||
|
||||
def delete_url(self, url: str, **kwargs: Any) -> 'Remote':
|
||||
"""Deletes a new url on current remote (special case of git remote set_url)
|
||||
@@ -667,7 +687,7 @@ class Remote(LazyMixin, IterableObj):
|
||||
return out_refs
|
||||
|
||||
@classmethod
|
||||
- def create(cls, repo: 'Repo', name: str, url: str, **kwargs: Any) -> 'Remote':
|
||||
+ def create(cls, repo: 'Repo', name: str, url: str, allow_unsafe_protocols: bool = False, **kwargs: Any) -> 'Remote':
|
||||
"""Create a new remote to the given repository
|
||||
:param repo: Repository instance that is to receive the new remote
|
||||
:param name: Desired name of the remote
|
||||
@@ -677,7 +697,10 @@ class Remote(LazyMixin, IterableObj):
|
||||
:raise GitCommandError: in case an origin with that name already exists"""
|
||||
scmd = 'add'
|
||||
kwargs['insert_kwargs_after'] = scmd
|
||||
- repo.git.remote(scmd, name, Git.polish_url(url), **kwargs)
|
||||
+ url = Git.polish_url(url)
|
||||
+ if not allow_unsafe_protocols:
|
||||
+ Git.check_unsafe_protocols(url)
|
||||
+ repo.git.remote(scmd, "--", name, url, **kwargs)
|
||||
return cls(repo, name)
|
||||
|
||||
# add is an alias
|
||||
@@ -840,6 +863,8 @@ class Remote(LazyMixin, IterableObj):
|
||||
progress: Union[RemoteProgress, None, 'UpdateProgress'] = None,
|
||||
verbose: bool = True,
|
||||
kill_after_timeout: Union[None, float] = None,
|
||||
+ allow_unsafe_protocols: bool = False,
|
||||
+ allow_unsafe_options: bool = False,
|
||||
**kwargs: Any) -> IterableList[FetchInfo]:
|
||||
"""Fetch the latest changes for this remote
|
||||
|
||||
@@ -881,6 +906,14 @@ class Remote(LazyMixin, IterableObj):
|
||||
else:
|
||||
args = [refspec]
|
||||
|
||||
+ if not allow_unsafe_protocols:
|
||||
+ for ref in args:
|
||||
+ if ref:
|
||||
+ Git.check_unsafe_protocols(ref)
|
||||
+
|
||||
+ if not allow_unsafe_options:
|
||||
+ Git.check_unsafe_options(options=list(kwargs.keys()), unsafe_options=self.unsafe_git_fetch_options)
|
||||
+
|
||||
proc = self.repo.git.fetch("--", self, *args, as_process=True, with_stdout=False,
|
||||
universal_newlines=True, v=verbose, **kwargs)
|
||||
res = self._get_fetch_info_from_stderr(proc, progress,
|
||||
@@ -892,6 +925,8 @@ class Remote(LazyMixin, IterableObj):
|
||||
def pull(self, refspec: Union[str, List[str], None] = None,
|
||||
progress: Union[RemoteProgress, 'UpdateProgress', None] = None,
|
||||
kill_after_timeout: Union[None, float] = None,
|
||||
+ allow_unsafe_protocols: bool = False,
|
||||
+ allow_unsafe_options: bool = False,
|
||||
**kwargs: Any) -> IterableList[FetchInfo]:
|
||||
"""Pull changes from the given branch, being the same as a fetch followed
|
||||
by a merge of branch with your local branch.
|
||||
@@ -905,6 +940,15 @@ class Remote(LazyMixin, IterableObj):
|
||||
# No argument refspec, then ensure the repo's config has a fetch refspec.
|
||||
self._assert_refspec()
|
||||
kwargs = add_progress(kwargs, self.repo.git, progress)
|
||||
+
|
||||
+ refspec = Git._unpack_args(refspec or [])
|
||||
+ if not allow_unsafe_protocols:
|
||||
+ for ref in refspec:
|
||||
+ Git.check_unsafe_protocols(ref)
|
||||
+
|
||||
+ if not allow_unsafe_options:
|
||||
+ Git.check_unsafe_options(options=list(kwargs.keys()), unsafe_options=self.unsafe_git_pull_options)
|
||||
+
|
||||
proc = self.repo.git.pull("--", self, refspec, with_stdout=False, as_process=True,
|
||||
universal_newlines=True, v=True, **kwargs)
|
||||
res = self._get_fetch_info_from_stderr(proc, progress,
|
||||
@@ -916,6 +960,8 @@ class Remote(LazyMixin, IterableObj):
|
||||
def push(self, refspec: Union[str, List[str], None] = None,
|
||||
progress: Union[RemoteProgress, 'UpdateProgress', Callable[..., RemoteProgress], None] = None,
|
||||
kill_after_timeout: Union[None, float] = None,
|
||||
+ allow_unsafe_protocols: bool = False,
|
||||
+ allow_unsafe_options: bool = False,
|
||||
**kwargs: Any) -> IterableList[PushInfo]:
|
||||
"""Push changes from source branch in refspec to target branch in refspec.
|
||||
|
||||
@@ -945,6 +991,15 @@ class Remote(LazyMixin, IterableObj):
|
||||
If the operation fails completely, the length of the returned IterableList will
|
||||
be 0."""
|
||||
kwargs = add_progress(kwargs, self.repo.git, progress)
|
||||
+
|
||||
+ refspec = Git._unpack_args(refspec or [])
|
||||
+ if not allow_unsafe_protocols:
|
||||
+ for ref in refspec:
|
||||
+ Git.check_unsafe_protocols(ref)
|
||||
+
|
||||
+ if not allow_unsafe_options:
|
||||
+ Git.check_unsafe_options(options=list(kwargs.keys()), unsafe_options=self.unsafe_git_push_options)
|
||||
+
|
||||
proc = self.repo.git.push("--", self, refspec, porcelain=True, as_process=True,
|
||||
universal_newlines=True,
|
||||
kill_after_timeout=kill_after_timeout,
|
||||
diff --git a/git/repo/base.py b/git/repo/base.py
|
||||
index f14f929..7b3565b 100644
|
||||
--- a/git/repo/base.py
|
||||
+++ b/git/repo/base.py
|
||||
@@ -24,7 +24,11 @@ from git.compat import (
|
||||
)
|
||||
from git.config import GitConfigParser
|
||||
from git.db import GitCmdObjectDB
|
||||
-from git.exc import InvalidGitRepositoryError, NoSuchPathError, GitCommandError
|
||||
+from git.exc import (
|
||||
+ GitCommandError,
|
||||
+ InvalidGitRepositoryError,
|
||||
+ NoSuchPathError,
|
||||
+)
|
||||
from git.index import IndexFile
|
||||
from git.objects import Submodule, RootModule, Commit
|
||||
from git.refs import HEAD, Head, Reference, TagReference
|
||||
@@ -97,6 +101,18 @@ class Repo(object):
|
||||
re_author_committer_start = re.compile(r'^(author|committer)')
|
||||
re_tab_full_line = re.compile(r'^\t(.*)$')
|
||||
|
||||
+ unsafe_git_clone_options = [
|
||||
+ # This option allows users to execute arbitrary commands.
|
||||
+ # https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---upload-packltupload-packgt
|
||||
+ "--upload-pack",
|
||||
+ "-u",
|
||||
+ # Users can override configuration variables
|
||||
+ # like `protocol.allow` or `core.gitProxy` to execute arbitrary commands.
|
||||
+ # https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---configltkeygtltvaluegt
|
||||
+ "--config",
|
||||
+ "-c",
|
||||
+ ]
|
||||
+
|
||||
# invariants
|
||||
# represents the configuration level of a configuration file
|
||||
config_level: ConfigLevels_Tup = ("system", "user", "global", "repository")
|
||||
@@ -1049,7 +1065,8 @@ class Repo(object):
|
||||
@classmethod
|
||||
def _clone(cls, git: 'Git', url: PathLike, path: PathLike, odb_default_type: Type[GitCmdObjectDB],
|
||||
progress: Union['RemoteProgress', 'UpdateProgress', Callable[..., 'RemoteProgress'], None] = None,
|
||||
- multi_options: Optional[List[str]] = None, **kwargs: Any
|
||||
+ multi_options: Optional[List[str]] = None, allow_unsafe_protocols: bool = False,
|
||||
+ allow_unsafe_options: bool = False, **kwargs: Any
|
||||
) -> 'Repo':
|
||||
odbt = kwargs.pop('odbt', odb_default_type)
|
||||
|
||||
@@ -1072,6 +1089,12 @@ class Repo(object):
|
||||
multi = None
|
||||
if multi_options:
|
||||
multi = shlex.split(' '.join(multi_options))
|
||||
+
|
||||
+ if not allow_unsafe_protocols:
|
||||
+ Git.check_unsafe_protocols(str(url))
|
||||
+ if not allow_unsafe_options and multi_options:
|
||||
+ Git.check_unsafe_options(options=multi_options, unsafe_options=cls.unsafe_git_clone_options)
|
||||
+
|
||||
proc = git.clone("--", multi, Git.polish_url(str(url)), clone_path, with_extended_output=True, as_process=True,
|
||||
v=True, universal_newlines=True, **add_progress(kwargs, git, progress))
|
||||
if progress:
|
||||
@@ -1107,7 +1130,9 @@ class Repo(object):
|
||||
return repo
|
||||
|
||||
def clone(self, path: PathLike, progress: Optional[Callable] = None,
|
||||
- multi_options: Optional[List[str]] = None, **kwargs: Any) -> 'Repo':
|
||||
+ multi_options: Optional[List[str]] = None, unsafe_protocols: bool = False,
|
||||
+ allow_unsafe_protocols: bool = False, allow_unsafe_options: bool = False,
|
||||
+ **kwargs: Any) -> 'Repo':
|
||||
"""Create a clone from this repository.
|
||||
|
||||
:param path: is the full path of the new repo (traditionally ends with ./<name>.git).
|
||||
@@ -1116,18 +1141,21 @@ class Repo(object):
|
||||
option per list item which is passed exactly as specified to clone.
|
||||
For example ['--config core.filemode=false', '--config core.ignorecase',
|
||||
'--recurse-submodule=repo1_path', '--recurse-submodule=repo2_path']
|
||||
+ :param unsafe_protocols: Allow unsafe protocols to be used, like ext
|
||||
:param kwargs:
|
||||
* odbt = ObjectDatabase Type, allowing to determine the object database
|
||||
implementation used by the returned Repo instance
|
||||
* All remaining keyword arguments are given to the git-clone command
|
||||
|
||||
:return: ``git.Repo`` (the newly cloned repo)"""
|
||||
- return self._clone(self.git, self.common_dir, path, type(self.odb), progress, multi_options, **kwargs)
|
||||
+ return self._clone(self.git, self.common_dir, path, type(self.odb), progress, multi_options,
|
||||
+ allow_unsafe_protocols=allow_unsafe_protocols, allow_unsafe_options=allow_unsafe_options, **kwargs)
|
||||
|
||||
@ classmethod
|
||||
def clone_from(cls, url: PathLike, to_path: PathLike, progress: Optional[Callable] = None,
|
||||
- env: Optional[Mapping[str, str]] = None,
|
||||
- multi_options: Optional[List[str]] = None, **kwargs: Any) -> 'Repo':
|
||||
+ env: Optional[Mapping[str, str]] = None, multi_options: Optional[List[str]] = None,
|
||||
+ unsafe_protocols: bool = False, allow_unsafe_protocols: bool = False,
|
||||
+ allow_unsafe_options: bool = False, **kwargs: Any) -> 'Repo':
|
||||
"""Create a clone from the given URL
|
||||
|
||||
:param url: valid git url, see http://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS
|
||||
@@ -1140,12 +1168,14 @@ class Repo(object):
|
||||
If you want to unset some variable, consider providing empty string
|
||||
as its value.
|
||||
:param multi_options: See ``clone`` method
|
||||
+ :param unsafe_protocols: Allow unsafe protocols to be used, like ext
|
||||
:param kwargs: see the ``clone`` method
|
||||
:return: Repo instance pointing to the cloned directory"""
|
||||
git = cls.GitCommandWrapperType(os.getcwd())
|
||||
if env is not None:
|
||||
git.update_environment(**env)
|
||||
- return cls._clone(git, url, to_path, GitCmdObjectDB, progress, multi_options, **kwargs)
|
||||
+ return cls._clone(git, url, to_path, GitCmdObjectDB, progress, multi_options,
|
||||
+ allow_unsafe_protocols=allow_unsafe_protocols, allow_unsafe_options=allow_unsafe_options, **kwargs)
|
||||
|
||||
def archive(self, ostream: Union[TextIO, BinaryIO], treeish: Optional[str] = None,
|
||||
prefix: Optional[str] = None, **kwargs: Any) -> Repo:
|
||||
--
|
||||
2.34.1
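
For context, the backported PR 1518/1521 changes above make GitPython refuse command-executing transports (ext::) and options (--upload-pack, --config, ...) unless the caller opts in. A minimal usage sketch of the intended behaviour, assuming the UnsafeProtocolError exception added by the same PRs in git/exc.py and using made-up local paths:

import git

# Cloning from an ext:: URL is now rejected by default.
try:
    git.Repo.clone_from("ext::sh -c id", "/tmp/clone")
except git.exc.UnsafeProtocolError:
    print("blocked: ext:: transport rejected unless allow_unsafe_protocols=True")

# Opting back in has to be explicit:
# git.Repo.clone_from("https://example.com/repo.git", "/tmp/clone",
#                     allow_unsafe_protocols=True)
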
|
||||
|
||||
@@ -12,6 +12,10 @@ PYPI_PACKAGE = "GitPython"
|
||||
|
||||
inherit pypi python_setuptools_build_meta
|
||||
|
||||
SRC_URI += "file://0001-python3-git-CVE-2022-24439-fix-from-PR-1518.patch \
|
||||
file://0001-python3-git-CVE-2022-24439-fix-from-PR-1521.patch \
|
||||
"
|
||||
|
||||
SRC_URI[sha256sum] = "1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"
|
||||
|
||||
DEPENDS += " ${PYTHON_PN}-gitdb"
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
From 9e9f617a83f6593b476669030b0347d48e831c3f Mon Sep 17 00:00:00 2001
|
||||
From: Narpat Mali <narpat.mali@windriver.com>
|
||||
Date: Mon, 9 Jan 2023 14:45:05 +0000
|
||||
Subject: [PATCH] Limit the amount of whitespace to search/backtrack. Fixes
|
||||
#3659.
|
||||
|
||||
CVE: CVE-2022-40897
|
||||
|
||||
Upstream-Status: Backport [https://github.com/pypa/setuptools/commit/43a9c9bfa6aa626ec2a22540bea28d2ca77964be]
|
||||
|
||||
Signed-off-by: Narpat Mali <narpat.mali@windriver.com>
|
||||
---
|
||||
setuptools/package_index.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
|
||||
index 270e7f3..e93fcc6 100644
|
||||
--- a/setuptools/package_index.py
|
||||
+++ b/setuptools/package_index.py
|
||||
@@ -197,7 +197,7 @@ def unique_values(func):
|
||||
return wrapper
|
||||
|
||||
|
||||
-REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
|
||||
+REL = re.compile(r"""<([^>]*\srel\s{0,10}=\s{0,10}['"]?([^'" >]+)[^>]*)>""", re.I)
|
||||
# this line is here to fix emacs' cruddy broken syntax highlighting
|
||||
|
||||
|
||||
--
|
||||
2.34.1
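
The small regex change above is the whole CVE-2022-40897 fix: the unbounded \s* runs around the rel attribute's '=' allowed catastrophic backtracking on attacker-controlled index pages, and the new pattern caps them at ten characters (and keeps spaces out of the captured rel value). A rough before/after sketch with a made-up snippet of HTML:

import re

OLD_REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
NEW_REL = re.compile(r"""<([^>]*\srel\s{0,10}=\s{0,10}['"]?([^'" >]+)[^>]*)>""", re.I)

page = '<a rel="homepage" href="https://example.org/">project</a>'
print(NEW_REL.search(page).group(2))  # -> homepage, same result as before the fix
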
|
||||
|
||||
@@ -11,6 +11,7 @@ SRC_URI:append:class-native = " file://0001-conditionally-do-not-fetch-code-by-e
|
||||
SRC_URI += "\
|
||||
file://0001-change-shebang-to-python3.patch \
|
||||
file://0001-_distutils-sysconfig-append-STAGING_LIBDIR-python-sy.patch \
|
||||
file://0001-Limit-the-amount-of-whitespace-to-search-backtrack.-.patch \
|
||||
"
|
||||
|
||||
SRC_URI[sha256sum] = "d144f85102f999444d06f9c0e8c737fd0194f10f2f7e5fdb77573f6e2fa4fad0"
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
From a9a0d67a663f20b69903751c23851dd4cd6b49d4 Mon Sep 17 00:00:00 2001
|
||||
From: Narpat Mali <narpat.mali@windriver.com>
|
||||
Date: Wed, 11 Jan 2023 07:45:57 +0000
|
||||
Subject: [PATCH] Fixed potential DoS attack via WHEEL_INFO_RE
|
||||
|
||||
CVE: CVE-2022-40898
|
||||
|
||||
Upstream-Status: Backport [https://github.com/pypa/wheel/commit/88f02bc335d5404991e532e7f3b0fc80437bf4e0]
|
||||
|
||||
Signed-off-by: Narpat Mali <narpat.mali@windriver.com>
|
||||
---
|
||||
src/wheel/wheelfile.py | 4 ++--
|
||||
1 file changed, 2 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/src/wheel/wheelfile.py b/src/wheel/wheelfile.py
|
||||
index 21e7361..ff06edf 100644
|
||||
--- a/src/wheel/wheelfile.py
|
||||
+++ b/src/wheel/wheelfile.py
|
||||
@@ -27,8 +27,8 @@ else:
|
||||
# Non-greedy matching of an optional build number may be too clever (more
|
||||
# invalid wheel filenames will match). Separate regex for .dist-info?
|
||||
WHEEL_INFO_RE = re.compile(
|
||||
- r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.+?))(-(?P<build>\d[^-]*))?
|
||||
- -(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl$""",
|
||||
+ r"""^(?P<namever>(?P<name>[^-]+?)-(?P<ver>[^-]+?))(-(?P<build>\d[^-]*))?
|
||||
+ -(?P<pyver>[^-]+?)-(?P<abi>[^-]+?)-(?P<plat>[^.]+?)\.whl$""",
|
||||
re.VERBOSE)
|
||||
|
||||
|
||||
--
|
||||
2.32.0
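
The tightened WHEEL_INFO_RE above gives every dash-separated field a character class that cannot overlap the separators, which removes the backtracking blow-up behind CVE-2022-40898 while still parsing ordinary wheel names. A quick sketch of the fixed pattern at work:

import re

WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^-]+?)-(?P<ver>[^-]+?))(-(?P<build>\d[^-]*))?
     -(?P<pyver>[^-]+?)-(?P<abi>[^-]+?)-(?P<plat>[^.]+?)\.whl$""",
    re.VERBOSE)

m = WHEEL_INFO_RE.match("wheel-0.37.1-py2.py3-none-any.whl")
print(m.group("name"), m.group("ver"), m.group("pyver"))  # wheel 0.37.1 py2.py3
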
|
||||
|
||||
@@ -8,7 +8,9 @@ SRC_URI[sha256sum] = "e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d495
|
||||
|
||||
inherit python_flit_core pypi
|
||||
|
||||
SRC_URI += " file://0001-Backport-pyproject.toml-from-flit-backend-branch.patch"
|
||||
SRC_URI += "file://0001-Backport-pyproject.toml-from-flit-backend-branch.patch \
|
||||
file://0001-Fixed-potential-DoS-attack-via-WHEEL_INFO_RE.patch \
|
||||
"
|
||||
|
||||
BBCLASSEXTEND = "native nativesdk"
|
||||
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
From 87ef80926ea0ec960a220af89d8ff4db99417b03 Mon Sep 17 00:00:00 2001
|
||||
From: Vivek Kumbhar <vkumbhar@mvista.com>
|
||||
Date: Thu, 24 Nov 2022 17:44:18 +0530
|
||||
Subject: [PATCH] CVE-2022-42919
|
||||
|
||||
Upstream-Status: Backport [https://github.com/python/cpython/commit/eae692eed18892309bcc25a2c0f8980038305ea2]
|
||||
CVE: CVE-2022-42919
|
||||
Signed-off-by: Vivek Kumbhar <vkumbhar@mvista.com>
|
||||
|
||||
[3.10] gh-97514: Don't use Linux abstract sockets for multiprocessing (GH-98501) (GH-98503)
|
||||
|
||||
Linux abstract sockets are insecure as they lack any form of filesystem
|
||||
permissions so their use allows anyone on the system to inject code into
|
||||
the process.
|
||||
|
||||
This removes the default preference for abstract sockets in
|
||||
multiprocessing introduced in Python 3.9+ via
|
||||
https://github.com/python/cpython/pull/18866 while fixing
|
||||
https://github.com/python/cpython/issues/84031.
|
||||
|
||||
Explicit use of an abstract socket by a user now generates a
|
||||
RuntimeWarning. If we choose to keep this warning, it should be
|
||||
backported to the 3.7 and 3.8 branches.
|
||||
(cherry picked from commit 49f61068f49747164988ffc5a442d2a63874fc17)
|
||||
---
|
||||
Lib/multiprocessing/connection.py | 5 -----
|
||||
.../2022-09-07-10-42-00.gh-issue-97514.Yggdsl.rst | 15 +++++++++++++++
|
||||
2 files changed, 15 insertions(+), 5 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2022-09-07-10-42-00.gh-issue-97514.Yggdsl.rst
|
||||
|
||||
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
|
||||
index 510e4b5..8e2facf 100644
|
||||
--- a/Lib/multiprocessing/connection.py
|
||||
+++ b/Lib/multiprocessing/connection.py
|
||||
@@ -73,11 +73,6 @@ def arbitrary_address(family):
|
||||
if family == 'AF_INET':
|
||||
return ('localhost', 0)
|
||||
elif family == 'AF_UNIX':
|
||||
- # Prefer abstract sockets if possible to avoid problems with the address
|
||||
- # size. When coding portable applications, some implementations have
|
||||
- # sun_path as short as 92 bytes in the sockaddr_un struct.
|
||||
- if util.abstract_sockets_supported:
|
||||
- return f"\0listener-{os.getpid()}-{next(_mmap_counter)}"
|
||||
return tempfile.mktemp(prefix='listener-', dir=util.get_temp_dir())
|
||||
elif family == 'AF_PIPE':
|
||||
return tempfile.mktemp(prefix=r'\\.\pipe\pyc-%d-%d-' %
|
||||
diff --git a/Misc/NEWS.d/next/Security/2022-09-07-10-42-00.gh-issue-97514.Yggdsl.rst b/Misc/NEWS.d/next/Security/2022-09-07-10-42-00.gh-issue-97514.Yggdsl.rst
|
||||
new file mode 100644
|
||||
index 0000000..02d95b5
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2022-09-07-10-42-00.gh-issue-97514.Yggdsl.rst
|
||||
@@ -0,0 +1,15 @@
|
||||
+On Linux the :mod:`multiprocessing` module returns to using filesystem backed
|
||||
+unix domain sockets for communication with the *forkserver* process instead of
|
||||
+the Linux abstract socket namespace. Only code that chooses to use the
|
||||
+:ref:`"forkserver" start method <multiprocessing-start-methods>` is affected.
|
||||
+
|
||||
+Abstract sockets have no permissions and could allow any user on the system in
|
||||
+the same `network namespace
|
||||
+<https://man7.org/linux/man-pages/man7/network_namespaces.7.html>`_ (often the
|
||||
+whole system) to inject code into the multiprocessing *forkserver* process.
|
||||
+This was a potential privilege escalation. Filesystem based socket permissions
|
||||
+restrict this to the *forkserver* process user as was the default in Python 3.8
|
||||
+and earlier.
|
||||
+
|
||||
+This prevents Linux `CVE-2022-42919
|
||||
+<https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-42919>`_.
|
||||
--
|
||||
2.25.1
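
The multiprocessing change described above only affects how AF_UNIX listener addresses are chosen on Linux. A small sketch of the observable effect (an assumption, using the module-level multiprocessing.connection.arbitrary_address() helper): with the fix, the address is a filesystem path under the temporary directory rather than an abstract-namespace name starting with a NUL byte.

from multiprocessing import connection

addr = connection.arbitrary_address('AF_UNIX')
print(repr(addr))             # e.g. '/tmp/pymp-xxxxxx/listener-...' (a path, not '\0...')
print(addr.startswith('\0'))  # expected False once the fix is in place
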
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
From 1f66b714c5f2fef80ec5389456ac31756dbfff0e Mon Sep 17 00:00:00 2001
|
||||
From: Theo Buehler <botovq@users.noreply.github.com>
|
||||
Date: Fri, 21 Oct 2022 21:26:01 +0200
|
||||
Subject: [PATCH] gh-98517: Fix buffer overflows in _sha3 module (#98519)
|
||||
|
||||
This is a port of the applicable part of XKCP's fix [1] for
|
||||
CVE-2022-37454 and avoids the segmentation fault and the infinite
|
||||
loop in the test cases published in [2].
|
||||
|
||||
[1]: https://github.com/XKCP/XKCP/commit/fdc6fef075f4e81d6b1bc38364248975e08e340a
|
||||
[2]: https://mouha.be/sha-3-buffer-overflow/
|
||||
|
||||
Regression test added by: Gregory P. Smith [Google LLC] <greg@krypto.org>
|
||||
---
|
||||
|
||||
Patch applied without modification.
|
||||
|
||||
CVE: CVE-2022-37454
|
||||
|
||||
Upstream-Status: Backport [github.com/cpython/cpython.git 0e4e058602d...]
|
||||
|
||||
Signed-off-by: Joe Slater <joe.slater@windriver.com>
|
||||
---
|
||||
Lib/test/test_hashlib.py | 9 +++++++++
|
||||
.../2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst | 1 +
|
||||
Modules/_sha3/kcp/KeccakSponge.inc | 15 ++++++++-------
|
||||
3 files changed, 18 insertions(+), 7 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst
|
||||
|
||||
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
|
||||
index ea31f8b..65330e1 100644
|
||||
--- a/Lib/test/test_hashlib.py
|
||||
+++ b/Lib/test/test_hashlib.py
|
||||
@@ -491,6 +491,15 @@ class HashLibTestCase(unittest.TestCase):
|
||||
def test_case_md5_uintmax(self, size):
|
||||
self.check('md5', b'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
|
||||
|
||||
+ @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems')
|
||||
+ @bigmemtest(size=_4G - 1, memuse=1, dry_run=False)
|
||||
+ def test_sha3_update_overflow(self, size):
|
||||
+ """Regression test for gh-98517 CVE-2022-37454."""
|
||||
+ h = hashlib.sha3_224()
|
||||
+ h.update(b'\x01')
|
||||
+ h.update(b'\x01'*0xffff_ffff)
|
||||
+ self.assertEqual(h.hexdigest(), '80762e8ce6700f114fec0f621fd97c4b9c00147fa052215294cceeed')
|
||||
+
|
||||
# use the three examples from Federal Information Processing Standards
|
||||
# Publication 180-1, Secure Hash Standard, 1995 April 17
|
||||
# http://www.itl.nist.gov/div897/pubs/fip180-1.htm
|
||||
diff --git a/Misc/NEWS.d/next/Security/2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst b/Misc/NEWS.d/next/Security/2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst
|
||||
new file mode 100644
|
||||
index 0000000..2d23a6a
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Port XKCP's fix for the buffer overflows in SHA-3 (CVE-2022-37454).
|
||||
diff --git a/Modules/_sha3/kcp/KeccakSponge.inc b/Modules/_sha3/kcp/KeccakSponge.inc
|
||||
index e10739d..cf92e4d 100644
|
||||
--- a/Modules/_sha3/kcp/KeccakSponge.inc
|
||||
+++ b/Modules/_sha3/kcp/KeccakSponge.inc
|
||||
@@ -171,7 +171,7 @@ int SpongeAbsorb(SpongeInstance *instance, const unsigned char *data, size_t dat
|
||||
i = 0;
|
||||
curData = data;
|
||||
while(i < dataByteLen) {
|
||||
- if ((instance->byteIOIndex == 0) && (dataByteLen >= (i + rateInBytes))) {
|
||||
+ if ((instance->byteIOIndex == 0) && (dataByteLen-i >= rateInBytes)) {
|
||||
#ifdef SnP_FastLoop_Absorb
|
||||
/* processing full blocks first */
|
||||
|
||||
@@ -199,10 +199,10 @@ int SpongeAbsorb(SpongeInstance *instance, const unsigned char *data, size_t dat
|
||||
}
|
||||
else {
|
||||
/* normal lane: using the message queue */
|
||||
-
|
||||
- partialBlock = (unsigned int)(dataByteLen - i);
|
||||
- if (partialBlock+instance->byteIOIndex > rateInBytes)
|
||||
+ if (dataByteLen-i > rateInBytes-instance->byteIOIndex)
|
||||
partialBlock = rateInBytes-instance->byteIOIndex;
|
||||
+ else
|
||||
+ partialBlock = (unsigned int)(dataByteLen - i);
|
||||
#ifdef KeccakReference
|
||||
displayBytes(1, "Block to be absorbed (part)", curData, partialBlock);
|
||||
#endif
|
||||
@@ -281,7 +281,7 @@ int SpongeSqueeze(SpongeInstance *instance, unsigned char *data, size_t dataByte
|
||||
i = 0;
|
||||
curData = data;
|
||||
while(i < dataByteLen) {
|
||||
- if ((instance->byteIOIndex == rateInBytes) && (dataByteLen >= (i + rateInBytes))) {
|
||||
+ if ((instance->byteIOIndex == rateInBytes) && (dataByteLen-i >= rateInBytes)) {
|
||||
for(j=dataByteLen-i; j>=rateInBytes; j-=rateInBytes) {
|
||||
SnP_Permute(instance->state);
|
||||
SnP_ExtractBytes(instance->state, curData, 0, rateInBytes);
|
||||
@@ -299,9 +299,10 @@ int SpongeSqueeze(SpongeInstance *instance, unsigned char *data, size_t dataByte
|
||||
SnP_Permute(instance->state);
|
||||
instance->byteIOIndex = 0;
|
||||
}
|
||||
- partialBlock = (unsigned int)(dataByteLen - i);
|
||||
- if (partialBlock+instance->byteIOIndex > rateInBytes)
|
||||
+ if (dataByteLen-i > rateInBytes-instance->byteIOIndex)
|
||||
partialBlock = rateInBytes-instance->byteIOIndex;
|
||||
+ else
|
||||
+ partialBlock = (unsigned int)(dataByteLen - i);
|
||||
i += partialBlock;
|
||||
|
||||
SnP_ExtractBytes(instance->state, curData, instance->byteIOIndex, partialBlock);
|
||||
--
|
||||
2.32.0
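
Both hunks above make the same change: the comparison now happens on the full-width size_t values before the result is truncated to an unsigned int, whereas the old order truncated first and could wrap for inputs past 4 GiB and skip the clamp (CVE-2022-37454). The same arithmetic sketched in Python, with an explicit 32-bit mask standing in for the C cast:

RATE_IN_BYTES = 144          # SHA3-224 rate
U32 = 0xFFFFFFFF

def old_partial_block(data_byte_len, i, byte_io_index):
    partial = (data_byte_len - i) & U32           # truncate first...
    if partial + byte_io_index > RATE_IN_BYTES:   # ...so the clamp can be skipped
        partial = RATE_IN_BYTES - byte_io_index
    return partial

def new_partial_block(data_byte_len, i, byte_io_index):
    if data_byte_len - i > RATE_IN_BYTES - byte_io_index:   # compare at full width
        return RATE_IN_BYTES - byte_io_index
    return (data_byte_len - i) & U32

# 4 GiB of pending input with one byte already queued, as in the regression test:
print(old_partial_block(0x100000000, 0, 1))  # 0   -> wrapped, clamp skipped
print(new_partial_block(0x100000000, 0, 1))  # 143 -> correctly clamped
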
|
||||
|
||||
@@ -35,7 +35,6 @@ SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \
|
||||
file://0001-setup.py-Do-not-detect-multiarch-paths-when-cross-co.patch \
|
||||
file://deterministic_imports.patch \
|
||||
file://0001-Avoid-shebang-overflow-on-python-config.py.patch \
|
||||
file://CVE-2022-42919.patch \
|
||||
"
|
||||
|
||||
SRC_URI:append:class-native = " \
|
||||
@@ -44,7 +43,7 @@ SRC_URI:append:class-native = " \
|
||||
file://12-distutils-prefix-is-inside-staging-area.patch \
|
||||
file://0001-Don-t-search-system-for-headers-libraries.patch \
|
||||
"
|
||||
SRC_URI[sha256sum] = "6a30ecde59c47048013eb5a658c9b5dec277203d2793667f578df7671f7f03f3"
|
||||
SRC_URI[sha256sum] = "5ae03e308260164baba39921fdb4dbf8e6d03d8235a939d4582b33f0b5e46a83"
|
||||
|
||||
# exclude pre-releases for both python 2.x and 3.x
|
||||
UPSTREAM_CHECK_REGEX = "[Pp]ython-(?P<pver>\d+(\.\d+)+).tar"
|
||||
@@ -92,6 +92,7 @@ SRC_URI = "https://download.qemu.org/${BPN}-${PV}.tar.xz \
|
||||
file://0020-target-ppc-move-xs-n-madd-am-ds-p-xs-n-msub-am-ds-p-.patch \
|
||||
file://0021-target-ppc-implement-xs-n-maddqp-o-xs-n-msubqp-o.patch \
|
||||
file://CVE-2022-3165.patch \
|
||||
file://CVE-2022-4144.patch \
|
||||
"
|
||||
UPSTREAM_CHECK_REGEX = "qemu-(?P<pver>\d+(\.\d+)+)\.tar"
|
||||
|
||||
|
||||
99	meta/recipes-devtools/qemu/qemu/CVE-2022-4144.patch	Normal file
@@ -0,0 +1,99 @@
|
||||
From 6dbbf055148c6f1b7d8a3251a65bd6f3d1e1f622 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Philippe=20Mathieu-Daud=C3=A9?= <philmd@linaro.org>
|
||||
Date: Mon, 28 Nov 2022 21:27:40 +0100
|
||||
Subject: [PATCH] hw/display/qxl: Avoid buffer overrun in qxl_phys2virt
|
||||
(CVE-2022-4144)
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
Have qxl_get_check_slot_offset() return false if the requested
|
||||
buffer size does not fit within the slot memory region.
|
||||
|
||||
Similarly qxl_phys2virt() now returns NULL in such case, and
|
||||
qxl_dirty_one_surface() aborts.
|
||||
|
||||
This avoids buffer overrun in the host pointer returned by
|
||||
memory_region_get_ram_ptr().
|
||||
|
||||
Fixes: CVE-2022-4144 (out-of-bounds read)
|
||||
Reported-by: Wenxu Yin (@awxylitol)
|
||||
Resolves: https://gitlab.com/qemu-project/qemu/-/issues/1336
|
||||
|
||||
CVE: CVE-2022-4144
|
||||
Upstream-Status: Backport [https://gitlab.com/qemu-project/qemu/-/commit/6dbbf055148c6f1b7d8a3251a65bd6f3d1e1f622]
|
||||
Comments: Deleted patch hunk in qxl.h, as it contains a change
in comments which is not present in the current version of qemu
|
||||
|
||||
Signed-off-by: Philippe Mathieu-Daudé <philmd@linaro.org>
|
||||
Signed-off-by: Stefan Hajnoczi <stefanha@redhat.com>
|
||||
Message-Id: <20221128202741.4945-5-philmd@linaro.org>
|
||||
Signed-off-by: Bhabu Bindu <bhabu.bindu@kpit.com>
|
||||
---
|
||||
hw/display/qxl.c | 27 +++++++++++++++++++++++----
|
||||
1 file changed, 23 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/hw/display/qxl.c b/hw/display/qxl.c
|
||||
index 231d733250..0b21626aad 100644
|
||||
--- a/hw/display/qxl.c
|
||||
+++ b/hw/display/qxl.c
|
||||
@@ -1424,11 +1424,13 @@ static void qxl_reset_surfaces(PCIQXLDevice *d)
|
||||
|
||||
/* can be also called from spice server thread context */
|
||||
static bool qxl_get_check_slot_offset(PCIQXLDevice *qxl, QXLPHYSICAL pqxl,
|
||||
- uint32_t *s, uint64_t *o)
|
||||
+ uint32_t *s, uint64_t *o,
|
||||
+ size_t size_requested)
|
||||
{
|
||||
uint64_t phys = le64_to_cpu(pqxl);
|
||||
uint32_t slot = (phys >> (64 - 8)) & 0xff;
|
||||
uint64_t offset = phys & 0xffffffffffff;
|
||||
+ uint64_t size_available;
|
||||
|
||||
if (slot >= NUM_MEMSLOTS) {
|
||||
qxl_set_guest_bug(qxl, "slot too large %d >= %d", slot,
|
||||
@@ -1452,6 +1454,23 @@ static bool qxl_get_check_slot_offset(PCIQXLDevice *qxl, QXLPHYSICAL pqxl,
|
||||
slot, offset, qxl->guest_slots[slot].size);
|
||||
return false;
|
||||
}
|
||||
+ size_available = memory_region_size(qxl->guest_slots[slot].mr);
|
||||
+ if (qxl->guest_slots[slot].offset + offset >= size_available) {
|
||||
+ qxl_set_guest_bug(qxl,
|
||||
+ "slot %d offset %"PRIu64" > region size %"PRIu64"\n",
|
||||
+ slot, qxl->guest_slots[slot].offset + offset,
|
||||
+ size_available);
|
||||
+ return false;
|
||||
+ }
|
||||
+ size_available -= qxl->guest_slots[slot].offset + offset;
|
||||
+ if (size_requested > size_available) {
|
||||
+ qxl_set_guest_bug(qxl,
|
||||
+ "slot %d offset %"PRIu64" size %zu: "
|
||||
+ "overrun by %"PRIu64" bytes\n",
|
||||
+ slot, offset, size_requested,
|
||||
+ size_requested - size_available);
|
||||
+ return false;
|
||||
+ }
|
||||
|
||||
*s = slot;
|
||||
*o = offset;
|
||||
@@ -1471,7 +1490,7 @@ void *qxl_phys2virt(PCIQXLDevice *qxl, QXLPHYSICAL pqxl, int group_id,
|
||||
offset = le64_to_cpu(pqxl) & 0xffffffffffff;
|
||||
return (void *)(intptr_t)offset;
|
||||
case MEMSLOT_GROUP_GUEST:
|
||||
- if (!qxl_get_check_slot_offset(qxl, pqxl, &slot, &offset)) {
|
||||
+ if (!qxl_get_check_slot_offset(qxl, pqxl, &slot, &offset, size)) {
|
||||
return NULL;
|
||||
}
|
||||
ptr = memory_region_get_ram_ptr(qxl->guest_slots[slot].mr);
|
||||
@@ -1937,9 +1956,9 @@ static void qxl_dirty_one_surface(PCIQXLDevice *qxl, QXLPHYSICAL pqxl,
|
||||
uint32_t slot;
|
||||
bool rc;
|
||||
|
||||
- rc = qxl_get_check_slot_offset(qxl, pqxl, &slot, &offset);
|
||||
- assert(rc == true);
|
||||
size = (uint64_t)height * abs(stride);
|
||||
+ rc = qxl_get_check_slot_offset(qxl, pqxl, &slot, &offset, size);
|
||||
+ assert(rc == true);
|
||||
trace_qxl_surfaces_dirty(qxl->id, offset, size);
|
||||
qxl_set_dirty(qxl->guest_slots[slot].mr,
|
||||
qxl->guest_slots[slot].offset + offset,
|
||||
@@ -1,39 +0,0 @@
|
||||
SUMMARY = "An interpreter of object-oriented scripting language"
|
||||
DESCRIPTION = "Ruby is an interpreted scripting language for quick \
|
||||
and easy object-oriented programming. It has many features to process \
|
||||
text files and to do system management tasks (as in Perl). \
|
||||
It is simple, straight-forward, and extensible. \
|
||||
"
|
||||
HOMEPAGE = "http://www.ruby-lang.org/"
|
||||
SECTION = "devel/ruby"
|
||||
LICENSE = "Ruby | BSD-2-Clause | BSD-3-Clause | GPL-2.0-only | ISC | MIT"
|
||||
LIC_FILES_CHKSUM = "file://COPYING;md5=5b8c87559868796979806100db3f3805 \
|
||||
file://BSDL;md5=8b50bc6de8f586dc66790ba11d064d75 \
|
||||
file://GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
|
||||
file://LEGAL;md5=f260190bc1e92e363f0ee3c0463d4c7c \
|
||||
"
|
||||
|
||||
DEPENDS = "zlib openssl libyaml gdbm readline libffi"
|
||||
DEPENDS:append:class-target = " ruby-native"
|
||||
|
||||
SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}"
|
||||
SRC_URI = "http://cache.ruby-lang.org/pub/ruby/${SHRT_VER}/ruby-${PV}.tar.gz \
|
||||
file://0001-extmk-fix-cross-compilation-of-external-gems.patch \
|
||||
file://0002-Obey-LDFLAGS-for-the-link-of-libruby.patch \
|
||||
"
|
||||
UPSTREAM_CHECK_URI = "https://www.ruby-lang.org/en/downloads/"
|
||||
|
||||
inherit autotools ptest pkgconfig
|
||||
|
||||
|
||||
# This snippet lets compiled extensions which rely on external libraries,
|
||||
# such as zlib, compile properly. If we don't do this, then when extmk.rb
|
||||
# runs, it uses the native libraries instead of the target libraries, and so
|
||||
# none of the linking operations succeed -- which makes extconf.rb think
|
||||
# that the libraries aren't available and hence that the extension can't be
|
||||
# built.
|
||||
|
||||
do_configure:prepend() {
|
||||
sed -i "s#%%TARGET_CFLAGS%%#$CFLAGS#; s#%%TARGET_LDFLAGS%%#$LDFLAGS#" ${S}/common.mk
|
||||
rm -rf ${S}/ruby/
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
From 222203297966f312109e8eaa2520f2cf2f59c09d Mon Sep 17 00:00:00 2001
|
||||
From: Alan Wu <XrXr@users.noreply.github.com>
|
||||
Date: Thu, 31 Mar 2022 17:26:28 -0400
|
||||
Subject: [PATCH] Remove dependency on libcapstone
|
||||
|
||||
We have received reports of build failures due to this configuration
|
||||
check modifying compile flags. Since only YJIT devs use this library
|
||||
we can remove it to make Ruby easier to build for users.
|
||||
|
||||
See: https://github.com/rbenv/ruby-build/discussions/1933
|
||||
|
||||
Upstream-Status: Backport
|
||||
---
|
||||
configure.ac | 9 ---------
|
||||
1 file changed, 9 deletions(-)
|
||||
|
||||
Index: ruby-3.1.2/configure.ac
|
||||
===================================================================
|
||||
--- ruby-3.1.2.orig/configure.ac
|
||||
+++ ruby-3.1.2/configure.ac
|
||||
@@ -1244,15 +1244,6 @@ AC_CHECK_LIB(dl, dlopen) # Dynamic linki
|
||||
AC_CHECK_LIB(dld, shl_load) # Dynamic linking for HP-UX
|
||||
AC_CHECK_LIB(socket, shutdown) # SunOS/Solaris
|
||||
|
||||
-if pkg-config --exists capstone; then
|
||||
- CAPSTONE_CFLAGS=`pkg-config --cflags capstone`
|
||||
- CAPSTONE_LIB_L=`pkg-config --libs-only-L capstone`
|
||||
- LDFLAGS="$LDFLAGS $CAPSTONE_LIB_L"
|
||||
- CFLAGS="$CFLAGS $CAPSTONE_CFLAGS"
|
||||
-fi
|
||||
-
|
||||
-AC_CHECK_LIB(capstone, cs_open) # Capstone disassembler for debugging YJIT
|
||||
-
|
||||
dnl Checks for header files.
|
||||
AC_HEADER_DIRENT
|
||||
dnl AC_HEADER_STDC has been checked in AC_USE_SYSTEM_EXTENSIONS
|
||||
@@ -1,8 +1,25 @@
|
||||
require ruby.inc
|
||||
SUMMARY = "An interpreter of object-oriented scripting language"
|
||||
DESCRIPTION = "Ruby is an interpreted scripting language for quick \
|
||||
and easy object-oriented programming. It has many features to process \
|
||||
text files and to do system management tasks (as in Perl). \
|
||||
It is simple, straight-forward, and extensible. \
|
||||
"
|
||||
HOMEPAGE = "http://www.ruby-lang.org/"
|
||||
SECTION = "devel/ruby"
|
||||
LICENSE = "Ruby | BSD-2-Clause | BSD-3-Clause | GPL-2.0-only | ISC | MIT"
|
||||
LIC_FILES_CHKSUM = "file://COPYING;md5=5b8c87559868796979806100db3f3805 \
|
||||
file://BSDL;md5=8b50bc6de8f586dc66790ba11d064d75 \
|
||||
file://GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
|
||||
file://LEGAL;md5=f260190bc1e92e363f0ee3c0463d4c7c \
|
||||
"
|
||||
|
||||
DEPENDS:append:libc-musl = " libucontext"
|
||||
DEPENDS = "zlib openssl libyaml gdbm readline libffi"
|
||||
DEPENDS:append:class-target = " ruby-native"
|
||||
|
||||
SRC_URI += " \
|
||||
SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}"
|
||||
SRC_URI = "http://cache.ruby-lang.org/pub/ruby/${SHRT_VER}/ruby-${PV}.tar.gz \
|
||||
file://0001-extmk-fix-cross-compilation-of-external-gems.patch \
|
||||
file://0002-Obey-LDFLAGS-for-the-link-of-libruby.patch \
|
||||
file://remove_has_include_macros.patch \
|
||||
file://run-ptest \
|
||||
file://0001-template-Makefile.in-do-not-write-host-cross-cc-item.patch \
|
||||
@@ -12,10 +29,27 @@ SRC_URI += " \
|
||||
file://0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch \
|
||||
file://0006-Make-gemspecs-reproducible.patch \
|
||||
file://0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch \
|
||||
file://0001-Remove-dependency-on-libcapstone.patch \
|
||||
"
|
||||
UPSTREAM_CHECK_URI = "https://www.ruby-lang.org/en/downloads/"
|
||||
|
||||
SRC_URI[sha256sum] = "61843112389f02b735428b53bb64cf988ad9fb81858b8248e22e57336f24a83e"
|
||||
inherit autotools ptest pkgconfig
|
||||
|
||||
|
||||
# This snippet lets compiled extensions which rely on external libraries,
|
||||
# such as zlib, compile properly. If we don't do this, then when extmk.rb
|
||||
# runs, it uses the native libraries instead of the target libraries, and so
|
||||
# none of the linking operations succeed -- which makes extconf.rb think
|
||||
# that the libraries aren't available and hence that the extension can't be
|
||||
# built.
|
||||
|
||||
do_configure:prepend() {
|
||||
sed -i "s#%%TARGET_CFLAGS%%#$CFLAGS#; s#%%TARGET_LDFLAGS%%#$LDFLAGS#" ${S}/common.mk
|
||||
rm -rf ${S}/ruby/
|
||||
}
|
||||
|
||||
DEPENDS:append:libc-musl = " libucontext"
|
||||
|
||||
SRC_URI[sha256sum] = "5ea498a35f4cd15875200a52dde42b6eb179e1264e17d78732c3a57cd1c6ab9e"
|
||||
|
||||
PACKAGECONFIG ??= ""
|
||||
PACKAGECONFIG += "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
|
||||
@@ -1,3 +1,4 @@
|
||||
drd/tests/boost_thread
|
||||
gdbserver_tests/hgtls
|
||||
memcheck/tests/dw4
|
||||
memcheck/tests/leak_cpp_interior
|
||||
|
||||
@@ -52,8 +52,10 @@ INITSCRIPT_PARAMS = "defaults"
|
||||
|
||||
SYSTEMD_SERVICE:${PN} = "atd.service"
|
||||
|
||||
do_configure:prepend() {
|
||||
cp -f ${WORKDIR}/posixtm.[ch] ${S}
|
||||
do_patch[postfuncs] += "copy_posix_files"
|
||||
|
||||
copy_posix_files() {
|
||||
cp -f ${WORKDIR}/posixtm.[ch] ${S}
|
||||
}
|
||||
|
||||
do_install () {
|
||||
|
||||
@@ -32,4 +32,4 @@ do_compile:prepend() {
|
||||
ALTERNATIVE:${PN} = "bc dc"
|
||||
ALTERNATIVE_PRIORITY = "100"
|
||||
|
||||
BBCLASSEXTEND = "native"
|
||||
BBCLASSEXTEND = "native nativesdk"
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
From b5332ed6d59ba5113a0a2c67fd82b69fcd5cde68 Mon Sep 17 00:00:00 2001
|
||||
From: obiwac <obiwac@gmail.com>
|
||||
Date: Fri, 22 Jul 2022 22:41:10 +0200
|
||||
Subject: [PATCH] libarchive: CVE-2022-36227 Handle a `calloc` returning NULL
|
||||
(fixes #1754)
|
||||
|
||||
Upstream-Status: Backport [https://github.com/libarchive/libarchive/commit/bff38efe8c110469c5080d387bec62a6ca15b1a5]
|
||||
CVE: CVE-2022-36227
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
libarchive/archive_write.c | 8 ++++++++
|
||||
1 file changed, 8 insertions(+)
|
||||
|
||||
diff --git a/libarchive/archive_write.c b/libarchive/archive_write.c
|
||||
index 66592e8..27626b5 100644
|
||||
--- a/libarchive/archive_write.c
|
||||
+++ b/libarchive/archive_write.c
|
||||
@@ -201,6 +201,10 @@ __archive_write_allocate_filter(struct archive *_a)
|
||||
struct archive_write_filter *f;
|
||||
|
||||
f = calloc(1, sizeof(*f));
|
||||
+
|
||||
+ if (f == NULL)
|
||||
+ return (NULL);
|
||||
+
|
||||
f->archive = _a;
|
||||
f->state = ARCHIVE_WRITE_FILTER_STATE_NEW;
|
||||
if (a->filter_first == NULL)
|
||||
@@ -548,6 +552,10 @@ archive_write_open2(struct archive *_a, void *client_data,
|
||||
a->client_data = client_data;
|
||||
|
||||
client_filter = __archive_write_allocate_filter(_a);
|
||||
+
|
||||
+ if (client_filter == NULL)
|
||||
+ return (ARCHIVE_FATAL);
|
||||
+
|
||||
client_filter->open = archive_write_client_open;
|
||||
client_filter->write = archive_write_client_write;
|
||||
client_filter->close = archive_write_client_close;
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -30,14 +30,12 @@ PACKAGECONFIG[lz4] = "--with-lz4,--without-lz4,lz4,"
|
||||
PACKAGECONFIG[mbedtls] = "--with-mbedtls,--without-mbedtls,mbedtls,"
|
||||
PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd,"
|
||||
|
||||
EXTRA_OECONF += "--enable-largefile"
|
||||
EXTRA_OECONF += "--enable-largefile --without-iconv"
|
||||
|
||||
SRC_URI = "http://libarchive.org/downloads/libarchive-${PV}.tar.gz \
|
||||
file://CVE-2022-36227.patch \
|
||||
"
|
||||
SRC_URI = "http://libarchive.org/downloads/libarchive-${PV}.tar.gz"
|
||||
UPSTREAM_CHECK_URI = "http://libarchive.org/"
|
||||
|
||||
SRC_URI[sha256sum] = "c676146577d989189940f1959d9e3980d28513d74eedfbc6b7f15ea45fe54ee2"
|
||||
SRC_URI[sha256sum] = "ba6d02f15ba04aba9c23fd5f236bb234eab9d5209e95d1c4df85c44d5f19b9b3"
|
||||
|
||||
inherit autotools update-alternatives pkgconfig
|
||||
|
||||
@@ -19,6 +19,15 @@ SRCREV = "005e014e1abdadb2493d8b3ce87b37a2c0a2351d"
|
||||
|
||||
S = "${WORKDIR}/git"
|
||||
|
||||
|
||||
inherit update-alternatives
|
||||
|
||||
ALTERNATIVE_${PN} = "lsof"
|
||||
ALTERNATIVE_LINK_NAME[lsof] = "${sbindir}/lsof"
|
||||
# Make our priority higher than busybox
|
||||
ALTERNATIVE_PRIORITY = "100"
|
||||
|
||||
|
||||
export LSOF_INCLUDE = "${STAGING_INCDIR}"
|
||||
|
||||
do_configure () {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
From 58245b859ffbcb1780575bf1b0a018d55e74e434 Mon Sep 17 00:00:00 2001
|
||||
From 08ba909500412611953aea0fa2fe0d8fe76b6e24 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Andreas=20M=C3=BCller?= <schnitzeltony@googlemail.com>
|
||||
Date: Wed, 21 Sep 2016 21:14:40 +0200
|
||||
Subject: [PATCH] detect gold as GNU linker too
|
||||
@@ -9,23 +9,21 @@ Content-Transfer-Encoding: 8bit
|
||||
Upstream-Status: Pending
|
||||
|
||||
Signed-off-by: Andreas Müller <schnitzeltony@googlemail.com>
|
||||
|
||||
---
|
||||
configure.ac | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 03e8bda..c2fce51 100644
|
||||
index 468c718..cd93f30 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -28,7 +28,7 @@ AC_CHECK_SIZEOF([void *])
|
||||
AC_MSG_CHECKING([for GNU ld])
|
||||
LD=`$CC -print-prog-name=ld 2>&5`
|
||||
LD=$($CC -print-prog-name=ld 2>&5)
|
||||
|
||||
-if test `$LD -v 2>&1 | $ac_cv_path_GREP -c "GNU ld"` = 0; then
|
||||
+if test `$LD -v 2>&1 | $ac_cv_path_GREP -c "GNU "` = 0; then
|
||||
-if test $($LD -v 2>&1 | $ac_cv_path_GREP -c "GNU ld") = 0; then
|
||||
+if test $($LD -v 2>&1 | $ac_cv_path_GREP -c "GNU ") = 0; then
|
||||
# Not
|
||||
GNU_LD=""
|
||||
AC_MSG_RESULT([no])
|
||||
--
|
||||
2.5.5
|
||||
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
From f60dc1063607ca1f201ba4cbda467d8af3f78f64 Mon Sep 17 00:00:00 2001
|
||||
From: Miroslav Lichvar <mlichvar@redhat.com>
|
||||
Date: Tue, 1 Oct 2019 16:37:55 +0200
|
||||
Subject: [PATCH] don't ignore CFLAGS when building snack
|
||||
|
||||
In addition to the flags returned by python-config --cflags, use the
|
||||
user-specified CFLAGS when building the snack object.
|
||||
|
||||
Upstream-Status: Backport from master
|
||||
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
|
||||
---
|
||||
Makefile.in | 4 ++--
|
||||
1 file changed, 2 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/Makefile.in b/Makefile.in
|
||||
index be5f87b..6facd5e 100644
|
||||
--- a/Makefile.in
|
||||
+++ b/Makefile.in
|
||||
@@ -96,8 +96,8 @@ _snack.$(SOEXT): snack.c $(LIBNEWTSH)
|
||||
PIFLAGS=`$$pyconfig --includes`; \
|
||||
PLDFLAGS=`$$pyconfig --ldflags`; \
|
||||
PLFLAGS=`$$pyconfig --libs`; \
|
||||
- echo $(CC) $(SHCFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
|
||||
- $(CC) $(SHCFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
|
||||
+ echo $(CC) $(SHCFLAGS) $(CFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
|
||||
+ $(CC) $(SHCFLAGS) $(CFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
|
||||
echo $(CC) --shared $$PLDFLAGS $$PLFLAGS $(LDFLAGS) -o $$ver/_snack.$(SOEXT) $$ver/snack.o -L. -lnewt $(LIBS); \
|
||||
$(CC) --shared $$PLDFLAGS $$PLFLAGS $(LDFLAGS) -o $$ver/_snack.$(SOEXT) $$ver/snack.o -L. -lnewt $(LIBS); \
|
||||
done || :
|
||||
@@ -21,11 +21,9 @@ SRC_URI = "https://releases.pagure.org/newt/newt-${PV}.tar.gz \
|
||||
file://cross_ar.patch \
|
||||
file://Makefile.in-Add-tinfo-library-to-the-linking-librari.patch \
|
||||
file://0001-detect-gold-as-GNU-linker-too.patch \
|
||||
file://0002-don-t-ignore-CFLAGS-when-building-snack.patch \
|
||||
"
|
||||
|
||||
SRC_URI[md5sum] = "a0a5fd6b53bb167a65e15996b249ebb5"
|
||||
SRC_URI[sha256sum] = "265eb46b55d7eaeb887fca7a1d51fe115658882dfe148164b6c49fccac5abb31"
|
||||
SRC_URI[sha256sum] = "caa372907b14ececfe298f0d512a62f41d33b290610244a58aed07bbc5ada12a"
|
||||
|
||||
S = "${WORKDIR}/newt-${PV}"
|
||||
|
||||
@@ -6,7 +6,7 @@ SECTION = "base"
|
||||
LICENSE = "PD & BSD-3-Clause"
|
||||
LIC_FILES_CHKSUM = "file://LICENSE;md5=c679c9d6b02bc2757b3eaf8f53c43fba"
|
||||
|
||||
PV = "2022d"
|
||||
PV = "2022g"
|
||||
|
||||
SRC_URI =" http://www.iana.org/time-zones/repository/releases/tzcode${PV}.tar.gz;name=tzcode \
|
||||
http://www.iana.org/time-zones/repository/releases/tzdata${PV}.tar.gz;name=tzdata \
|
||||
@@ -14,6 +14,5 @@ SRC_URI =" http://www.iana.org/time-zones/repository/releases/tzcode${PV}.tar.gz
|
||||
|
||||
UPSTREAM_CHECK_URI = "http://www.iana.org/time-zones"
|
||||
|
||||
SRC_URI[tzcode.sha256sum] = "d644ba0f938899374ea8cb554e35fb4afa0f7bd7b716c61777cd00500b8759e0"
|
||||
SRC_URI[tzdata.sha256sum] = "6ecdbee27fa43dcfa49f3d4fd8bb1dfef54c90da1abcd82c9abcf2dc4f321de0"
|
||||
|
||||
SRC_URI[tzcode.sha256sum] = "9610bb0b9656ff404c361a41f3286da53064b5469d84f00c9cb2314c8614da74"
|
||||
SRC_URI[tzdata.sha256sum] = "4491db8281ae94a84d939e427bdd83dc389f26764d27d9a5c52d782c16764478"
|
||||
|
||||
@@ -1,19 +1,20 @@
|
||||
There is a potential infinite-loop in function _arc_error_normalized().
|
||||
There is an assertion in function _cairo_arc_in_direction().
|
||||
|
||||
CVE: CVE-2019-6461
|
||||
Upstream-Status: Pending
|
||||
Signed-off-by: Ross Burton <ross.burton@intel.com>
|
||||
|
||||
diff --git a/src/cairo-arc.c b/src/cairo-arc.c
|
||||
index 390397bae..f9249dbeb 100644
|
||||
index 390397bae..1bde774a4 100644
|
||||
--- a/src/cairo-arc.c
|
||||
+++ b/src/cairo-arc.c
|
||||
@@ -99,7 +99,7 @@ _arc_max_angle_for_tolerance_normalized (double tolerance)
|
||||
do {
|
||||
angle = M_PI / i++;
|
||||
error = _arc_error_normalized (angle);
|
||||
- } while (error > tolerance);
|
||||
+ } while (error > tolerance && error > __DBL_EPSILON__);
|
||||
@@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
|
||||
if (cairo_status (cr))
|
||||
return;
|
||||
|
||||
return angle;
|
||||
}
|
||||
- assert (angle_max >= angle_min);
|
||||
+ if (angle_max < angle_min)
|
||||
+ return;
|
||||
|
||||
if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
|
||||
angle_max = fmod (angle_max - angle_min, 2 * M_PI);
|
||||
|
||||
@@ -1,20 +1,40 @@
|
||||
There is an assertion in function _cairo_arc_in_direction().
|
||||
|
||||
CVE: CVE-2019-6462
|
||||
Upstream-Status: Pending
|
||||
Signed-off-by: Ross Burton <ross.burton@intel.com>
|
||||
Upstream-Status: Backport
|
||||
Signed-off-by: Quentin Schulz <quentin.schulz@theobroma-systems.com>
|
||||
|
||||
From ab2c5ee21e5f3d3ee4b3f67cfcd5811a4f99c3a0 Mon Sep 17 00:00:00 2001
|
||||
From: Heiko Lewin <hlewin@gmx.de>
|
||||
Date: Sun, 1 Aug 2021 11:16:03 +0000
|
||||
Subject: [PATCH] _arc_max_angle_for_tolerance_normalized: fix infinite loop
|
||||
|
||||
---
|
||||
src/cairo-arc.c | 4 +++-
|
||||
1 file changed, 3 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/src/cairo-arc.c b/src/cairo-arc.c
|
||||
index 390397bae..1bde774a4 100644
|
||||
index 390397bae..1c891d1a0 100644
|
||||
--- a/src/cairo-arc.c
|
||||
+++ b/src/cairo-arc.c
|
||||
@@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
|
||||
if (cairo_status (cr))
|
||||
return;
|
||||
@@ -90,16 +90,18 @@ _arc_max_angle_for_tolerance_normalized (double tolerance)
|
||||
{ M_PI / 11.0, 9.81410988043554039085e-09 },
|
||||
};
|
||||
int table_size = ARRAY_LENGTH (table);
|
||||
+ const int max_segments = 1000; /* this value is chosen arbitrarily. this gives an error of about 1.74909e-20 */
|
||||
|
||||
- assert (angle_max >= angle_min);
|
||||
+ if (angle_max < angle_min)
|
||||
+ return;
|
||||
for (i = 0; i < table_size; i++)
|
||||
if (table[i].error < tolerance)
|
||||
return table[i].angle;
|
||||
|
||||
if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
|
||||
angle_max = fmod (angle_max - angle_min, 2 * M_PI);
|
||||
++i;
|
||||
+
|
||||
do {
|
||||
angle = M_PI / i++;
|
||||
error = _arc_error_normalized (angle);
|
||||
- } while (error > tolerance);
|
||||
+ } while (error > tolerance && i < max_segments);
|
||||
|
||||
return angle;
|
||||
}
|
||||
--
|
||||
2.38.1
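
The updated patch above caps the angle search at 1000 segments, so the loop terminates even for tolerances the error can never fall below. A rough Python rendering of the capped loop (same formula as _arc_error_normalized, for illustration only):

import math

def arc_error_normalized(angle):
    return 2.0 / 27.0 * math.sin(angle / 4) ** 6 / math.cos(angle / 4) ** 2

def max_angle_for_tolerance(tolerance, max_segments=1000):
    i, angle, error = 1, math.pi, float("inf")
    while error > tolerance and i < max_segments:
        angle = math.pi / i
        error = arc_error_normalized(angle)
        i += 1
    return angle

print(max_angle_for_tolerance(0.0))  # returns after max_segments steps instead of spinning
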
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=a5927784d823d443c6cae55701d01553 \
|
||||
file://docs/FTL.TXT;md5=9f37b4e6afa3fef9dba8932b16bd3f97 \
|
||||
file://docs/GPLv2.TXT;md5=8ef380476f642c20ebf40fecb0add2ec"
|
||||
|
||||
SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/${BPN}/${BP}.tar.xz \
|
||||
SRC_URI = "${SAVANNAH_GNU_MIRROR}/${BPN}/${BP}.tar.xz \
|
||||
file://CVE-2022-27404.patch \
|
||||
file://CVE-2022-27405.patch \
|
||||
file://CVE-2022-27406.patch \
|
||||
|
||||
@@ -35,9 +35,9 @@ PACKAGES =+ "${PN}-icu ${PN}-icu-dev ${PN}-subset"
|
||||
LEAD_SONAME = "libharfbuzz.so"
|
||||
|
||||
do_install:append() {
|
||||
# If no tools are installed due to PACKAGECONFIG then this directory is
|
||||
#still installed, so remove it to stop packaging wanings.
|
||||
rmdir --ignore-fail-on-non-empty ${D}${bindir}
|
||||
# If no tools are installed due to PACKAGECONFIG then this directory might
|
||||
# still be installed, so remove it to stop packaging warnings.
|
||||
[ ! -d ${D}${bindir} ] || rmdir --ignore-fail-on-non-empty ${D}${bindir}
|
||||
}
|
||||
|
||||
FILES:${PN}-icu = "${libdir}/libharfbuzz-icu.so.*"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.