mirror of
https://git.yoctoproject.org/poky
synced 2026-02-21 08:59:41 +01:00
Compare commits
111 Commits
yocto-3.1.
...
yocto-3.1.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4aad5914ef | ||
|
|
23322786e0 | ||
|
|
139225f0ba | ||
|
|
fc24cd1948 | ||
|
|
1fc880e165 | ||
|
|
9243169d4f | ||
|
|
f97bd9abe6 | ||
|
|
59180eb474 | ||
|
|
2340b1dbb9 | ||
|
|
0b85e5d610 | ||
|
|
ef2da8f28e | ||
|
|
5373e681cf | ||
|
|
98dd6e4cac | ||
|
|
ae4acc9f81 | ||
|
|
cfd2eaa0e1 | ||
|
|
5b956ef359 | ||
|
|
54846f581e | ||
|
|
b361f2a931 | ||
|
|
0c3dfb682d | ||
|
|
7c7fc0de71 | ||
|
|
354f571f61 | ||
|
|
883102b9b8 | ||
|
|
b365d212dc | ||
|
|
c4499b85f7 | ||
|
|
c35c1e15f0 | ||
|
|
820e8891b8 | ||
|
|
b9ae8da74e | ||
|
|
038831674e | ||
|
|
25606f450d | ||
|
|
9e7f4a7db2 | ||
|
|
e4946bd39e | ||
|
|
97810ff2d7 | ||
|
|
d323923047 | ||
|
|
d695bd0d3d | ||
|
|
08bd8cc114 | ||
|
|
eb32f7f5e6 | ||
|
|
88be415b10 | ||
|
|
24fc40faef | ||
|
|
868ebed326 | ||
|
|
17c23e485e | ||
|
|
61ea9f7665 | ||
|
|
b38628041b | ||
|
|
dee08141f2 | ||
|
|
61023f9e61 | ||
|
|
7350f515b3 | ||
|
|
50aa474c84 | ||
|
|
9c5b33ccba | ||
|
|
eb12590623 | ||
|
|
35bcc28983 | ||
|
|
48ea7812c7 | ||
|
|
010094a2ae | ||
|
|
43980058ca | ||
|
|
a985415ec2 | ||
|
|
79ac8cf161 | ||
|
|
3860414240 | ||
|
|
387d23c02e | ||
|
|
232fdbf0e5 | ||
|
|
60a98feb86 | ||
|
|
6a3d60d873 | ||
|
|
1c38d0d3d6 | ||
|
|
ca90350d13 | ||
|
|
159a2de146 | ||
|
|
684c5d4c12 | ||
|
|
8dfc7162e3 | ||
|
|
d2f8a57a30 | ||
|
|
0a0e0663ab | ||
|
|
79b3e05767 | ||
|
|
b6f4778e37 | ||
|
|
6e79d96c6d | ||
|
|
31b4392e6e | ||
|
|
4bc2324a25 | ||
|
|
6013fc2606 | ||
|
|
3f2da49c2b | ||
|
|
02867c9039 | ||
|
|
33a08f7b8f | ||
|
|
07eca06c71 | ||
|
|
9f20f682ff | ||
|
|
6d1f8412be | ||
|
|
872caf23ad | ||
|
|
b9bffd7650 | ||
|
|
0b84202a2b | ||
|
|
ae90fa778a | ||
|
|
fe6c34c48d | ||
|
|
2ae3d43628 | ||
|
|
5582ab6aae | ||
|
|
d4c7b40039 | ||
|
|
a2805141e9 | ||
|
|
7d9d97368b | ||
|
|
69fb63b4fc | ||
|
|
9638dc4826 | ||
|
|
f51a254415 | ||
|
|
1487d68388 | ||
|
|
8a382d8655 | ||
|
|
8d6f9680e4 | ||
|
|
23ed0037b6 | ||
|
|
95cda9d091 | ||
|
|
238fb89434 | ||
|
|
7f694e46a8 | ||
|
|
e873840317 | ||
|
|
9868f99149 | ||
|
|
f2d12bc50b | ||
|
|
6cf824520a | ||
|
|
42bb9689a0 | ||
|
|
7da79fcac2 | ||
|
|
1be2437fd2 | ||
|
|
d3d92d7852 | ||
|
|
6be9d793a3 | ||
|
|
77332ffb9b | ||
|
|
99478d73c5 | ||
|
|
196895a482 | ||
|
|
27877797c7 |
48
bitbake/bin/bitbake-getvar
Executable file
48
bitbake/bin/bitbake-getvar
Executable file
@@ -0,0 +1,48 @@
|
||||
#! /usr/bin/env python3
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Query a single bitbake variable: its value, a flag, or its full history."""

import argparse
import io
import os
import sys

# Put the in-tree 'lib' directory first on sys.path so these bb modules are
# used in preference to any system-installed copy.
bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
sys.path[0:0] = [os.path.join(topdir, 'lib')]

# Import bb.data explicitly: emit_var() below lives there, and relying on
# bb.tinfoil to pull it in transitively is fragile.
import bb.data
import bb.tinfoil

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Bitbake Query Variable")
    parser.add_argument("variable", help="variable name to query")
    parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
    parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
    parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
    parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
    args = parser.parse_args()

    # --unexpand and --flag only affect the plain-value output path, so
    # reject them without --value rather than silently ignoring them.
    if args.unexpand and not args.value:
        print("--unexpand only makes sense with --value")
        sys.exit(1)

    if args.flag and not args.value:
        print("--flag only makes sense with --value")
        sys.exit(1)

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        if args.recipe:
            # Full parse needed to resolve a recipe's datastore.
            tinfoil.prepare(quiet=2)
            d = tinfoil.parse_recipe(args.recipe)
        else:
            # Configuration-only parse is enough for global variables.
            tinfoil.prepare(quiet=2, config_only=True)
            d = tinfoil.config_data
        if args.flag:
            print(str(d.getVarFlag(args.variable, args.flag, expand=(not args.unexpand))))
        elif args.value:
            print(str(d.getVar(args.variable, expand=(not args.unexpand))))
        else:
            # Default output: the variable plus its full assignment history.
            bb.data.emit_var(args.variable, d=d, all=True)
|
||||
@@ -20,6 +20,7 @@ Commands are queued in a CommandQueue
|
||||
|
||||
from collections import OrderedDict, defaultdict
|
||||
|
||||
import io
|
||||
import bb.event
|
||||
import bb.cooker
|
||||
import bb.remotedata
|
||||
@@ -478,6 +479,17 @@ class CommandsSync:
|
||||
d = command.remotedatastores[dsindex].varhistory
|
||||
return getattr(d, method)(*args, **kwargs)
|
||||
|
||||
def dataStoreConnectorVarHistCmdEmit(self, command, params):
    """Render a variable's history via VariableHistory.emit() and return it as a string.

    params: [history datastore index, variable name, oval, val, emit datastore index].
    """
    var, oval, val = params[1], params[2], params[3]
    hist_store = command.remotedatastores[params[0]]
    emit_store = command.remotedatastores[params[4]]

    buf = io.StringIO()
    hist_store.varhistory.emit(var, oval, val, buf, emit_store)
    return buf.getvalue()
|
||||
|
||||
def dataStoreConnectorIncHistCmd(self, command, params):
|
||||
dsindex = params[0]
|
||||
method = params[1]
|
||||
|
||||
@@ -224,7 +224,12 @@ class Git(FetchMethod):
|
||||
ud.shallow = False
|
||||
|
||||
if ud.usehead:
|
||||
ud.unresolvedrev['default'] = 'HEAD'
|
||||
# When usehead is set let's associate 'HEAD' with the unresolved
|
||||
# rev of this repository. This will get resolved into a revision
|
||||
# later. If an actual revision happens to have also been provided
|
||||
# then this setting will be overridden.
|
||||
for name in ud.names:
|
||||
ud.unresolvedrev[name] = 'HEAD'
|
||||
|
||||
ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
|
||||
|
||||
|
||||
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
|
||||
|
||||
|
||||
class Wget(FetchMethod):
|
||||
|
||||
# CDNs like CloudFlare may do a 'browser integrity test' which can fail
|
||||
# with the standard wget/urllib User-Agent, so pretend to be a modern
|
||||
# browser.
|
||||
user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
|
||||
|
||||
"""Class to fetch urls via 'wget'"""
|
||||
def supports(self, ud, d):
|
||||
"""
|
||||
@@ -91,10 +97,9 @@ class Wget(FetchMethod):
|
||||
|
||||
fetchcmd = self.basecmd
|
||||
|
||||
if 'downloadfilename' in ud.parm:
|
||||
localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
|
||||
bb.utils.mkdirhier(os.path.dirname(localpath))
|
||||
fetchcmd += " -O %s" % shlex.quote(localpath)
|
||||
localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
|
||||
bb.utils.mkdirhier(os.path.dirname(localpath))
|
||||
fetchcmd += " -O %s" % shlex.quote(localpath)
|
||||
|
||||
if ud.user and ud.pswd:
|
||||
fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
|
||||
@@ -108,6 +113,10 @@ class Wget(FetchMethod):
|
||||
|
||||
self._runwget(ud, d, fetchcmd, False)
|
||||
|
||||
# Remove the ".tmp" and move the file into position atomically
|
||||
# Our lock prevents multiple writers but mirroring code may grab incomplete files
|
||||
os.rename(localpath, localpath[:-4])
|
||||
|
||||
# Sanity check since wget can pretend it succeed when it didn't
|
||||
# Also, this used to happen if sourceforge sent us to the mirror page
|
||||
if not os.path.exists(ud.localpath):
|
||||
@@ -300,7 +309,7 @@ class Wget(FetchMethod):
|
||||
# Some servers (FusionForge, as used on Alioth) require that the
|
||||
# optional Accept header is set.
|
||||
r.add_header("Accept", "*/*")
|
||||
r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
|
||||
r.add_header("User-Agent", self.user_agent)
|
||||
def add_basic_auth(login_str, request):
|
||||
'''Adds Basic auth to http request, pass in login:password as string'''
|
||||
import base64
|
||||
@@ -404,9 +413,8 @@ class Wget(FetchMethod):
|
||||
"""
|
||||
f = tempfile.NamedTemporaryFile()
|
||||
with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
|
||||
agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
|
||||
fetchcmd = self.basecmd
|
||||
fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
|
||||
fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
|
||||
try:
|
||||
self._runwget(ud, d, fetchcmd, True, workdir=workdir)
|
||||
fetchresult = f.read()
|
||||
|
||||
@@ -650,6 +650,58 @@ class FetcherLocalTest(FetcherTest):
|
||||
with self.assertRaises(bb.fetch2.UnpackError):
|
||||
self.fetchUnpack(['file://a;subdir=/bin/sh'])
|
||||
|
||||
def test_local_gitfetch_usehead(self):
    """usehead=1 must fetch whatever HEAD points at in the source repo."""
    # Build a throwaway git repository whose HEAD is on a non-master branch.
    repo = os.path.abspath(tempfile.mkdtemp(dir=self.tempdir,
                                            prefix='gitfetch_localusehead_'))
    for cmd in ("git init",
                "git commit --allow-empty -m'Dummy commit'",
                # Use other branch than master
                "git checkout -b my-devel",
                "git commit --allow-empty -m'Dummy commit 2'"):
        bb.process.run(cmd, cwd=repo)
    expected_rev = bb.process.run("git rev-parse HEAD", cwd=repo)[0].strip()

    # Fetch with usehead=1 and verify the unpacked tree sits at that revision.
    self.d.setVar("SRCREV", "AUTOINC")
    fetcher = bb.fetch.Fetch(["git://" + repo + ";protocol=file;usehead=1"], self.d)
    fetcher.download()
    fetcher.unpack(self.unpackdir)
    actual_rev = bb.process.run("git rev-parse HEAD",
                                cwd=os.path.join(self.unpackdir, 'git'))[0].strip()
    self.assertEqual(expected_rev, actual_rev)
|
||||
|
||||
def test_local_gitfetch_usehead_withname(self):
    """usehead=1 must also work when the URL carries an explicit name= parameter."""
    # Build a throwaway git repository whose HEAD is on a non-master branch.
    repo = os.path.abspath(tempfile.mkdtemp(dir=self.tempdir,
                                            prefix='gitfetch_localusehead_'))
    for cmd in ("git init",
                "git commit --allow-empty -m'Dummy commit'",
                # Use other branch than master
                "git checkout -b my-devel",
                "git commit --allow-empty -m'Dummy commit 2'"):
        bb.process.run(cmd, cwd=repo)
    expected_rev = bb.process.run("git rev-parse HEAD", cwd=repo)[0].strip()

    # Fetch with usehead=1 plus a named SRC_URI entry; the unpacked tree
    # must still sit at the repository's HEAD revision.
    self.d.setVar("SRCREV", "AUTOINC")
    fetcher = bb.fetch.Fetch(["git://" + repo + ";protocol=file;usehead=1;name=newName"], self.d)
    fetcher.download()
    fetcher.unpack(self.unpackdir)
    actual_rev = bb.process.run("git rev-parse HEAD",
                                cwd=os.path.join(self.unpackdir, 'git'))[0].strip()
    self.assertEqual(expected_rev, actual_rev)
|
||||
|
||||
class FetcherNoNetworkTest(FetcherTest):
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
|
||||
@@ -53,6 +53,10 @@ class TinfoilDataStoreConnectorVarHistory:
|
||||
def remoteCommand(self, cmd, *args, **kwargs):
|
||||
return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)
|
||||
|
||||
def emit(self, var, oval, val, o, d):
    """Proxy VariableHistory.emit() over the tinfoil connection.

    The remote side renders the history to a string, which is written to o.
    """
    rendered = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit',
                                        self.dsindex, var, oval, val, d.dsindex)
    o.write(rendered)
|
||||
|
||||
def __getattr__(self, name):
|
||||
if not hasattr(bb.data_smart.VariableHistory, name):
|
||||
raise AttributeError("VariableHistory has no such method %s" % name)
|
||||
|
||||
@@ -227,7 +227,9 @@ class TerminalFilter(object):
|
||||
|
||||
def keepAlive(self, t):
|
||||
if not self.cuu:
|
||||
print("Bitbake still alive (%ds)" % t)
|
||||
print("Bitbake still alive (no events for %ds). Active tasks:" % t)
|
||||
for t in self.helper.running_tasks:
|
||||
print(t)
|
||||
sys.stdout.flush()
|
||||
|
||||
def updateFooter(self):
|
||||
@@ -597,7 +599,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
warnings = 0
|
||||
taskfailures = []
|
||||
|
||||
printinterval = 5000
|
||||
printintervaldelta = 10 * 60 # 10 minutes
|
||||
printinterval = printintervaldelta
|
||||
lastprint = time.time()
|
||||
|
||||
termfilter = tf(main, helper, console_handlers, params.options.quiet)
|
||||
@@ -607,7 +610,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
try:
|
||||
if (lastprint + printinterval) <= time.time():
|
||||
termfilter.keepAlive(printinterval)
|
||||
printinterval += 5000
|
||||
printinterval += printintervaldelta
|
||||
event = eventHandler.waitEvent(0)
|
||||
if event is None:
|
||||
if main.shutdown > 1:
|
||||
@@ -638,7 +641,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
|
||||
if isinstance(event, logging.LogRecord):
|
||||
lastprint = time.time()
|
||||
printinterval = 5000
|
||||
printinterval = printintervaldelta
|
||||
if event.levelno >= bb.msg.BBLogFormatter.ERROR:
|
||||
errors = errors + 1
|
||||
return_value = 1
|
||||
|
||||
@@ -222,19 +222,10 @@ an entire Linux distribution, including the toolchain, from source.
|
||||
.. tip::
|
||||
|
||||
You can significantly speed up your build and guard against fetcher
|
||||
failures by using mirrors. To use mirrors, add these lines to your
|
||||
local.conf file in the Build directory: ::
|
||||
failures by using mirrors. To use mirrors, add this line to your
|
||||
``local.conf`` file in the :term:`Build Directory`: ::
|
||||
|
||||
SSTATE_MIRRORS = "\
|
||||
file://.* http://sstate.yoctoproject.org/dev/PATH;downloadfilename=PATH \n \
|
||||
file://.* http://sstate.yoctoproject.org/&YOCTO_DOC_VERSION_MINUS_ONE;/PATH;downloadfilename=PATH \n \
|
||||
file://.* http://sstate.yoctoproject.org/&YOCTO_DOC_VERSION;/PATH;downloadfilename=PATH \n \
|
||||
"
|
||||
|
||||
|
||||
The previous examples showed how to add sstate paths for Yocto Project
|
||||
&YOCTO_DOC_VERSION_MINUS_ONE;, &YOCTO_DOC_VERSION;, and a development
|
||||
area. For a complete index of sstate locations, see http://sstate.yoctoproject.org/.
|
||||
SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
|
||||
|
||||
#. **Start the Build:** Continue with the following command to build an OS
|
||||
image for the target, which is ``core-image-sato`` in this example:
|
||||
|
||||
@@ -1986,9 +1986,7 @@ Behind the scenes, the shared state code works by looking in
|
||||
shared state files. Here is an example:
|
||||
::
|
||||
|
||||
SSTATE_MIRRORS ?= "\
|
||||
file://.\* http://someserver.tld/share/sstate/PATH;downloadfilename=PATH \n \
|
||||
file://.\* file:///some/local/dir/sstate/PATH"
|
||||
SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
|
||||
|
||||
.. note::
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
DISTRO : "3.1.17"
|
||||
DISTRO : "3.1.19"
|
||||
DISTRO_NAME_NO_CAP : "dunfell"
|
||||
DISTRO_NAME : "Dunfell"
|
||||
DISTRO_NAME_NO_CAP_MINUS_ONE : "zeus"
|
||||
YOCTO_DOC_VERSION : "3.1.17"
|
||||
YOCTO_DOC_VERSION : "3.1.19"
|
||||
YOCTO_DOC_VERSION_MINUS_ONE : "3.0.4"
|
||||
DISTRO_REL_TAG : "yocto-3.1.17"
|
||||
DOCCONF_VERSION : "3.1.17"
|
||||
DISTRO_REL_TAG : "yocto-3.1.19"
|
||||
DOCCONF_VERSION : "3.1.19"
|
||||
BITBAKE_SERIES : "1.46"
|
||||
POKYVERSION : "23.0.17"
|
||||
POKYVERSION : "23.0.19"
|
||||
YOCTO_POKY : "poky-&DISTRO_NAME_NO_CAP;-&POKYVERSION;"
|
||||
YOCTO_DL_URL : "https://downloads.yoctoproject.org"
|
||||
YOCTO_AB_URL : "https://autobuilder.yoctoproject.org"
|
||||
|
||||
@@ -7542,7 +7542,7 @@ system and gives an overview of their function and contents.
|
||||
``SYSTEMD_BOOT_CFG`` as follows:
|
||||
::
|
||||
|
||||
SYSTEMD_BOOT_CFG ?= "${:term:`S`}/loader.conf"
|
||||
SYSTEMD_BOOT_CFG ?= "${S}/loader.conf"
|
||||
|
||||
For information on Systemd-boot, see the `Systemd-boot
|
||||
documentation <http://www.freedesktop.org/wiki/Software/systemd/systemd-boot/>`__.
|
||||
@@ -8745,4 +8745,22 @@ system and gives an overview of their function and contents.
|
||||
|
||||
The default value of ``XSERVER``, if not specified in the machine
|
||||
configuration, is "xserver-xorg xf86-video-fbdev xf86-input-evdev".
|
||||
|
||||
|
||||
:term:`XZ_THREADS`
|
||||
Specifies the number of parallel threads that should be used when
|
||||
using xz compression.
|
||||
|
||||
By default this scales with core count, but is never set less than 2
|
||||
to ensure that multi-threaded mode is always used so that the output
|
||||
file contents are deterministic. Builds will work with a value of 1
|
||||
but the output will differ compared to the output from the compression
|
||||
generated when more than one thread is used.
|
||||
|
||||
On systems where many tasks run in parallel, setting a limit to this
|
||||
can be helpful in controlling system resource usage.
|
||||
|
||||
:term:`XZ_MEMLIMIT`
|
||||
Specifies the maximum memory the xz compression should use as a percentage
|
||||
of system memory. If unconstrained the xz compressor can use large amounts of
|
||||
memory and become problematic with parallelism elsewhere in the build.
|
||||
"50%" has been found to be a good value.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
DISTRO = "poky"
|
||||
DISTRO_NAME = "Poky (Yocto Project Reference Distro)"
|
||||
DISTRO_VERSION = "3.1.17"
|
||||
DISTRO_VERSION = "3.1.19"
|
||||
DISTRO_CODENAME = "dunfell"
|
||||
SDK_VENDOR = "-pokysdk"
|
||||
SDK_VERSION = "${@d.getVar('DISTRO_VERSION').replace('snapshot-${DATE}', 'snapshot')}"
|
||||
|
||||
@@ -231,7 +231,7 @@ BB_DISKMON_DIRS ??= "\
|
||||
# present in the cache. It assumes you can download something faster than you can build it
|
||||
# which will depend on your network.
|
||||
#
|
||||
#SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/2.5/PATH;downloadfilename=PATH"
|
||||
#SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
|
||||
|
||||
#
|
||||
# Qemu configuration
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
SUMMARY = "An image used during oe-selftest tests"
|
||||
|
||||
IMAGE_INSTALL = "packagegroup-core-boot dropbear"
|
||||
IMAGE_INSTALL = "packagegroup-core-boot packagegroup-core-ssh-dropbear"
|
||||
IMAGE_FEATURES = "debug-tweaks"
|
||||
|
||||
IMAGE_LINGUAS = " "
|
||||
|
||||
@@ -54,9 +54,10 @@ ARCHIVER_MODE[mirror] ?= "split"
|
||||
|
||||
DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
|
||||
ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
|
||||
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
|
||||
ARCHIVER_ARCH = "${TARGET_SYS}"
|
||||
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
|
||||
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
|
||||
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
|
||||
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
|
||||
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
|
||||
|
||||
# When producing a combined mirror directory, allow duplicates for the case
|
||||
@@ -100,6 +101,10 @@ python () {
|
||||
bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
|
||||
return
|
||||
|
||||
# TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
|
||||
if pn.startswith('gcc-source'):
|
||||
d.setVar('ARCHIVER_ARCH', "allarch")
|
||||
|
||||
def hasTask(task):
|
||||
return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
|
||||
|
||||
@@ -578,7 +583,7 @@ python do_dumpdata () {
|
||||
|
||||
SSTATETASKS += "do_deploy_archives"
|
||||
do_deploy_archives () {
|
||||
echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
|
||||
bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
|
||||
}
|
||||
python do_deploy_archives_setscene () {
|
||||
sstate_setscene(d)
|
||||
|
||||
@@ -30,8 +30,9 @@ bin_package_do_install () {
|
||||
bbfatal bin_package has nothing to install. Be sure the SRC_URI unpacks into S.
|
||||
fi
|
||||
cd ${S}
|
||||
install -d ${D}${base_prefix}
|
||||
tar --no-same-owner --exclude='./patches' --exclude='./.pc' -cpf - . \
|
||||
| tar --no-same-owner -xpf - -C ${D}
|
||||
| tar --no-same-owner -xpf - -C ${D}${base_prefix}
|
||||
}
|
||||
|
||||
FILES_${PN} = "/"
|
||||
|
||||
@@ -47,7 +47,9 @@ CVE_CHECK_MANIFEST_JSON ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX
|
||||
CVE_CHECK_COPY_FILES ??= "1"
|
||||
CVE_CHECK_CREATE_MANIFEST ??= "1"
|
||||
|
||||
# Report Patched or Ignored/Whitelisted CVEs
|
||||
CVE_CHECK_REPORT_PATCHED ??= "1"
|
||||
|
||||
CVE_CHECK_SHOW_WARNINGS ??= "1"
|
||||
|
||||
# Provide text output
|
||||
@@ -56,6 +58,9 @@ CVE_CHECK_FORMAT_TEXT ??= "1"
|
||||
# Provide JSON output - disabled by default for backward compatibility
|
||||
CVE_CHECK_FORMAT_JSON ??= "0"
|
||||
|
||||
# Check for packages without CVEs (no issues or missing product name)
|
||||
CVE_CHECK_COVERAGE ??= "1"
|
||||
|
||||
# Whitelist for packages (PN)
|
||||
CVE_CHECK_PN_WHITELIST ?= ""
|
||||
|
||||
@@ -76,16 +81,10 @@ CVE_CHECK_LAYER_INCLUDELIST ??= ""
|
||||
# set to "alphabetical" for version using single alphabetical character as increment release
|
||||
CVE_VERSION_SUFFIX ??= ""
|
||||
|
||||
def update_symlinks(target_path, link_path):
    """Point link_path at target_path's basename, replacing any existing link.

    Does nothing when the two paths are identical or the target is missing.
    """
    if link_path == target_path or not os.path.exists(target_path):
        return
    # Remove a pre-existing link only if it resolves to something real.
    if os.path.exists(os.path.realpath(link_path)):
        os.remove(link_path)
    os.symlink(os.path.basename(target_path), link_path)
|
||||
|
||||
def generate_json_report(d, out_path, link_path):
|
||||
if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")):
|
||||
import json
|
||||
from oe.cve_check import cve_check_merge_jsons
|
||||
from oe.cve_check import cve_check_merge_jsons, update_symlinks
|
||||
|
||||
bb.note("Generating JSON CVE summary")
|
||||
index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
|
||||
@@ -106,6 +105,7 @@ def generate_json_report(d, out_path, link_path):
|
||||
python cve_save_summary_handler () {
|
||||
import shutil
|
||||
import datetime
|
||||
from oe.cve_check import update_symlinks
|
||||
|
||||
cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
|
||||
|
||||
@@ -136,16 +136,17 @@ python do_cve_check () {
|
||||
"""
|
||||
Check recipe for patched and unpatched CVEs
|
||||
"""
|
||||
from oe.cve_check import get_patched_cves
|
||||
|
||||
if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
|
||||
try:
|
||||
patched_cves = get_patches_cves(d)
|
||||
patched_cves = get_patched_cves(d)
|
||||
except FileNotFoundError:
|
||||
bb.fatal("Failure in searching patches")
|
||||
whitelisted, patched, unpatched = check_cves(d, patched_cves)
|
||||
if patched or unpatched:
|
||||
cve_data = get_cve_info(d, patched + unpatched)
|
||||
cve_write_data(d, patched, unpatched, whitelisted, cve_data)
|
||||
whitelisted, patched, unpatched, status = check_cves(d, patched_cves)
|
||||
if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
|
||||
cve_data = get_cve_info(d, patched + unpatched + whitelisted)
|
||||
cve_write_data(d, patched, unpatched, whitelisted, cve_data, status)
|
||||
else:
|
||||
bb.note("No CVE database found, skipping CVE check")
|
||||
|
||||
@@ -164,7 +165,7 @@ python cve_check_cleanup () {
|
||||
}
|
||||
|
||||
addhandler cve_check_cleanup
|
||||
cve_check_cleanup[eventmask] = "bb.cooker.CookerExit"
|
||||
cve_check_cleanup[eventmask] = "bb.event.BuildCompleted"
|
||||
|
||||
python cve_check_write_rootfs_manifest () {
|
||||
"""
|
||||
@@ -174,7 +175,7 @@ python cve_check_write_rootfs_manifest () {
|
||||
import shutil
|
||||
import json
|
||||
from oe.rootfs import image_list_installed_packages
|
||||
from oe.cve_check import cve_check_merge_jsons
|
||||
from oe.cve_check import cve_check_merge_jsons, update_symlinks
|
||||
|
||||
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
|
||||
deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
|
||||
@@ -247,65 +248,6 @@ ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if
|
||||
do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
|
||||
do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
|
||||
|
||||
def get_patches_cves(d):
    """
    Get patches that solve CVEs using the "CVE: " tag.

    Collects CVE IDs claimed as fixed either in a patch's file name or via
    "CVE: " tag lines inside the patch text, for every patch in SRC_URI.
    Returns a set of CVE ID strings. Raises FileNotFoundError when a listed
    patch file is missing on disk.
    """

    import re

    pn = d.getVar("PN")
    # Raw strings: the patterns contain \d (and an escaped '-') which are
    # invalid escape sequences in plain string literals on modern Python.
    cve_match = re.compile(r"CVE:( CVE\-\d{4}\-\d+)+")

    # Matches the last "CVE-YYYY-ID" in the file name, also if written
    # in lowercase. Possible to have multiple CVE IDs in a single
    # file name, but only the last one will be detected from the file name.
    # However, patch files contents addressing multiple CVE IDs are supported
    # (cve_match regular expression)
    cve_file_name_match = re.compile(r".*([Cc][Vv][Ee]\-\d{4}\-\d+)")

    patched_cves = set()
    bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
    for url in src_patches(d):
        patch_file = bb.fetch.decodeurl(url)[2]

        if not os.path.isfile(patch_file):
            bb.error("File Not found: %s" % patch_file)
            raise FileNotFoundError

        # Check patch file name for CVE ID
        fname_match = cve_file_name_match.search(patch_file)
        if fname_match:
            cve = fname_match.group(1).upper()
            patched_cves.add(cve)
            bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))

        with open(patch_file, "r", encoding="utf-8") as f:
            try:
                patch_text = f.read()
            except UnicodeDecodeError:
                # Some patches are not valid UTF-8; retry with a permissive
                # single-byte encoding.
                bb.debug(1, "Failed to read patch %s using UTF-8 encoding"
                        " trying with iso8859-1" % patch_file)
                f.close()
                with open(patch_file, "r", encoding="iso8859-1") as f:
                    patch_text = f.read()

        # Search for one or more "CVE: " lines
        text_match = False
        for match in cve_match.finditer(patch_text):
            # Get only the CVEs without the "CVE: " tag
            cves = patch_text[match.start()+5:match.end()]
            for cve in cves.split():
                bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
                patched_cves.add(cve)
                text_match = True

        if not fname_match and not text_match:
            bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)

    return patched_cves
|
||||
|
||||
def check_cves(d, patched_cves):
|
||||
"""
|
||||
Connect to the NVD database and find unpatched cves.
|
||||
@@ -317,17 +259,20 @@ def check_cves(d, patched_cves):
|
||||
suffix = d.getVar("CVE_VERSION_SUFFIX")
|
||||
|
||||
cves_unpatched = []
|
||||
cves_ignored = []
|
||||
cves_status = []
|
||||
cves_in_recipe = False
|
||||
# CVE_PRODUCT can contain more than one product (eg. curl/libcurl)
|
||||
products = d.getVar("CVE_PRODUCT").split()
|
||||
# If this has been unset then we're not scanning for CVEs here (for example, image recipes)
|
||||
if not products:
|
||||
return ([], [], [])
|
||||
return ([], [], [], [])
|
||||
pv = d.getVar("CVE_VERSION").split("+git")[0]
|
||||
|
||||
# If the recipe has been whitelisted we return empty lists
|
||||
if pn in d.getVar("CVE_CHECK_PN_WHITELIST").split():
|
||||
bb.note("Recipe has been whitelisted, skipping check")
|
||||
return ([], [], [])
|
||||
return ([], [], [], [])
|
||||
|
||||
cve_whitelist = d.getVar("CVE_CHECK_WHITELIST").split()
|
||||
|
||||
@@ -337,6 +282,7 @@ def check_cves(d, patched_cves):
|
||||
|
||||
# For each of the known product names (e.g. curl has CPEs using curl and libcurl)...
|
||||
for product in products:
|
||||
cves_in_product = False
|
||||
if ":" in product:
|
||||
vendor, product = product.split(":", 1)
|
||||
else:
|
||||
@@ -348,17 +294,25 @@ def check_cves(d, patched_cves):
|
||||
|
||||
if cve in cve_whitelist:
|
||||
bb.note("%s-%s has been whitelisted for %s" % (product, pv, cve))
|
||||
# TODO: this should be in the report as 'whitelisted'
|
||||
patched_cves.add(cve)
|
||||
cves_ignored.append(cve)
|
||||
continue
|
||||
elif cve in patched_cves:
|
||||
bb.note("%s has been patched" % (cve))
|
||||
continue
|
||||
# Write status once only for each product
|
||||
if not cves_in_product:
|
||||
cves_status.append([product, True])
|
||||
cves_in_product = True
|
||||
cves_in_recipe = True
|
||||
|
||||
vulnerable = False
|
||||
ignored = False
|
||||
|
||||
for row in conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor)):
|
||||
(_, _, _, version_start, operator_start, version_end, operator_end) = row
|
||||
#bb.debug(2, "Evaluating row " + str(row))
|
||||
if cve in cve_whitelist:
|
||||
ignored = True
|
||||
|
||||
if (operator_start == '=' and pv == version_start) or version_start == '-':
|
||||
vulnerable = True
|
||||
@@ -391,18 +345,25 @@ def check_cves(d, patched_cves):
|
||||
vulnerable = vulnerable_start or vulnerable_end
|
||||
|
||||
if vulnerable:
|
||||
bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
|
||||
cves_unpatched.append(cve)
|
||||
if ignored:
|
||||
bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv))
|
||||
cves_ignored.append(cve)
|
||||
else:
|
||||
bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
|
||||
cves_unpatched.append(cve)
|
||||
break
|
||||
|
||||
if not vulnerable:
|
||||
bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve))
|
||||
# TODO: not patched but not vulnerable
|
||||
patched_cves.add(cve)
|
||||
|
||||
if not cves_in_product:
|
||||
bb.note("No CVE records found for product %s, pn %s" % (product, pn))
|
||||
cves_status.append([product, False])
|
||||
|
||||
conn.close()
|
||||
|
||||
return (list(cve_whitelist), list(patched_cves), cves_unpatched)
|
||||
return (list(cves_ignored), list(patched_cves), cves_unpatched, cves_status)
|
||||
|
||||
def get_cve_info(d, cves):
|
||||
"""
|
||||
@@ -433,7 +394,6 @@ def cve_write_data_text(d, patched, unpatched, whitelisted, cve_data):
|
||||
CVE manifest if enabled.
|
||||
"""
|
||||
|
||||
|
||||
cve_file = d.getVar("CVE_CHECK_LOG")
|
||||
fdir_name = d.getVar("FILE_DIRNAME")
|
||||
layer = fdir_name.split("/")[-3]
|
||||
@@ -441,12 +401,18 @@ def cve_write_data_text(d, patched, unpatched, whitelisted, cve_data):
|
||||
include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
|
||||
exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
|
||||
|
||||
report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
|
||||
|
||||
if exclude_layers and layer in exclude_layers:
|
||||
return
|
||||
|
||||
if include_layers and layer not in include_layers:
|
||||
return
|
||||
|
||||
# Early exit, the text format does not report packages without CVEs
|
||||
if not patched+unpatched+whitelisted:
|
||||
return
|
||||
|
||||
nvd_link = "https://nvd.nist.gov/vuln/detail/"
|
||||
write_string = ""
|
||||
unpatched_cves = []
|
||||
@@ -454,13 +420,16 @@ def cve_write_data_text(d, patched, unpatched, whitelisted, cve_data):
|
||||
|
||||
for cve in sorted(cve_data):
|
||||
is_patched = cve in patched
|
||||
if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"):
|
||||
is_ignored = cve in whitelisted
|
||||
|
||||
if (is_patched or is_ignored) and not report_all:
|
||||
continue
|
||||
|
||||
write_string += "LAYER: %s\n" % layer
|
||||
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
|
||||
write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
|
||||
write_string += "CVE: %s\n" % cve
|
||||
if cve in whitelisted:
|
||||
if is_ignored:
|
||||
write_string += "CVE STATUS: Whitelisted\n"
|
||||
elif is_patched:
|
||||
write_string += "CVE STATUS: Patched\n"
|
||||
@@ -476,23 +445,22 @@ def cve_write_data_text(d, patched, unpatched, whitelisted, cve_data):
|
||||
if unpatched_cves and d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
|
||||
bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file))
|
||||
|
||||
if write_string:
|
||||
with open(cve_file, "w") as f:
|
||||
bb.note("Writing file %s with CVE information" % cve_file)
|
||||
with open(cve_file, "w") as f:
|
||||
bb.note("Writing file %s with CVE information" % cve_file)
|
||||
f.write(write_string)
|
||||
|
||||
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
|
||||
deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
|
||||
bb.utils.mkdirhier(os.path.dirname(deploy_file))
|
||||
with open(deploy_file, "w") as f:
|
||||
f.write(write_string)
|
||||
|
||||
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
|
||||
deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
|
||||
bb.utils.mkdirhier(os.path.dirname(deploy_file))
|
||||
with open(deploy_file, "w") as f:
|
||||
f.write(write_string)
|
||||
if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
|
||||
cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
|
||||
bb.utils.mkdirhier(cvelogpath)
|
||||
|
||||
if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
|
||||
cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
|
||||
bb.utils.mkdirhier(cvelogpath)
|
||||
|
||||
with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
|
||||
f.write("%s" % write_string)
|
||||
with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
|
||||
f.write("%s" % write_string)
|
||||
|
||||
def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file):
|
||||
"""
|
||||
@@ -524,7 +492,7 @@ def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_fi
|
||||
with open(index_path, "a+") as f:
|
||||
f.write("%s\n" % fragment_path)
|
||||
|
||||
def cve_write_data_json(d, patched, unpatched, ignored, cve_data):
|
||||
def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
|
||||
"""
|
||||
Prepare CVE data for the JSON format, then write it.
|
||||
"""
|
||||
@@ -538,6 +506,8 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data):
|
||||
include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
|
||||
exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
|
||||
|
||||
report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
|
||||
|
||||
if exclude_layers and layer in exclude_layers:
|
||||
return
|
||||
|
||||
@@ -546,20 +516,29 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data):
|
||||
|
||||
unpatched_cves = []
|
||||
|
||||
product_data = []
|
||||
for s in cve_status:
|
||||
p = {"product": s[0], "cvesInRecord": "Yes"}
|
||||
if s[1] == False:
|
||||
p["cvesInRecord"] = "No"
|
||||
product_data.append(p)
|
||||
|
||||
package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV"))
|
||||
package_data = {
|
||||
"name" : d.getVar("PN"),
|
||||
"layer" : layer,
|
||||
"version" : package_version
|
||||
"version" : package_version,
|
||||
"products": product_data
|
||||
}
|
||||
cve_list = []
|
||||
|
||||
for cve in sorted(cve_data):
|
||||
is_patched = cve in patched
|
||||
is_ignored = cve in ignored
|
||||
status = "Unpatched"
|
||||
if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"):
|
||||
if (is_patched or is_ignored) and not report_all:
|
||||
continue
|
||||
if cve in ignored:
|
||||
if is_ignored:
|
||||
status = "Ignored"
|
||||
elif is_patched:
|
||||
status = "Patched"
|
||||
@@ -589,7 +568,7 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data):
|
||||
|
||||
cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file)
|
||||
|
||||
def cve_write_data(d, patched, unpatched, ignored, cve_data):
|
||||
def cve_write_data(d, patched, unpatched, ignored, cve_data, status):
|
||||
"""
|
||||
Write CVE data in each enabled format.
|
||||
"""
|
||||
@@ -597,4 +576,4 @@ def cve_write_data(d, patched, unpatched, ignored, cve_data):
|
||||
if d.getVar("CVE_CHECK_FORMAT_TEXT") == "1":
|
||||
cve_write_data_text(d, patched, unpatched, ignored, cve_data)
|
||||
if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
|
||||
cve_write_data_json(d, patched, unpatched, ignored, cve_data)
|
||||
cve_write_data_json(d, patched, unpatched, ignored, cve_data, status)
|
||||
|
||||
@@ -124,7 +124,7 @@ python () {
|
||||
def rootfs_variables(d):
|
||||
from oe.rootfs import variable_depends
|
||||
variables = ['IMAGE_DEVICE_TABLE','IMAGE_DEVICE_TABLES','BUILD_IMAGES_FROM_FEEDS','IMAGE_TYPES_MASKED','IMAGE_ROOTFS_ALIGNMENT','IMAGE_OVERHEAD_FACTOR','IMAGE_ROOTFS_SIZE','IMAGE_ROOTFS_EXTRA_SPACE',
|
||||
'IMAGE_ROOTFS_MAXSIZE','IMAGE_NAME','IMAGE_LINK_NAME','IMAGE_MANIFEST','DEPLOY_DIR_IMAGE','IMAGE_FSTYPES','IMAGE_INSTALL_COMPLEMENTARY','IMAGE_LINGUAS', 'IMAGE_LINGUAS_COMPLEMENTARY',
|
||||
'IMAGE_ROOTFS_MAXSIZE','IMAGE_NAME','IMAGE_LINK_NAME','IMAGE_MANIFEST','DEPLOY_DIR_IMAGE','IMAGE_FSTYPES','IMAGE_INSTALL_COMPLEMENTARY','IMAGE_LINGUAS', 'IMAGE_LINGUAS_COMPLEMENTARY', 'IMAGE_LOCALES_ARCHIVE',
|
||||
'MULTILIBRE_ALLOW_REP','MULTILIB_TEMP_ROOTFS','MULTILIB_VARIANTS','MULTILIBS','ALL_MULTILIB_PACKAGE_ARCHS','MULTILIB_GLOBAL_VARIANTS','BAD_RECOMMENDATIONS','NO_RECOMMENDATIONS',
|
||||
'PACKAGE_ARCHS','PACKAGE_CLASSES','TARGET_VENDOR','TARGET_ARCH','TARGET_OS','OVERRIDES','BBEXTENDVARIANT','FEED_DEPLOYDIR_BASE_URI','INTERCEPT_DIR','USE_DEVFS',
|
||||
'CONVERSIONTYPES', 'IMAGE_GEN_DEBUGFS', 'ROOTFS_RO_UNNEEDED', 'IMGDEPLOYDIR', 'PACKAGE_EXCLUDE_COMPLEMENTARY', 'REPRODUCIBLE_TIMESTAMP_ROOTFS', 'IMAGE_INSTALL_DEBUGFS']
|
||||
@@ -176,6 +176,9 @@ IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
|
||||
|
||||
LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"
|
||||
|
||||
# per default create a locale archive
|
||||
IMAGE_LOCALES_ARCHIVE ?= '1'
|
||||
|
||||
# Prefer image, but use the fallback files for lookups if the image ones
|
||||
# aren't yet available.
|
||||
PSEUDO_PASSWD = "${IMAGE_ROOTFS}:${STAGING_DIR_NATIVE}"
|
||||
|
||||
@@ -452,12 +452,14 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
|
||||
"""
|
||||
Check for build paths inside target files and error if not found in the whitelist
|
||||
"""
|
||||
import stat
|
||||
# Ignore .debug files, not interesting
|
||||
if path.find(".debug") != -1:
|
||||
return
|
||||
|
||||
# Ignore symlinks
|
||||
if os.path.islink(path):
|
||||
# Ignore symlinks/devs/fifos
|
||||
mode = os.lstat(path).st_mode
|
||||
if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode):
|
||||
return
|
||||
|
||||
tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
|
||||
@@ -945,7 +947,7 @@ def package_qa_check_host_user(path, name, d, elf, messages):
|
||||
|
||||
dest = d.getVar('PKGDEST')
|
||||
pn = d.getVar('PN')
|
||||
home = os.path.join(dest, 'home')
|
||||
home = os.path.join(dest, name, 'home')
|
||||
if path == home or path.startswith(home + os.sep):
|
||||
return
|
||||
|
||||
|
||||
@@ -61,7 +61,7 @@ HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"
|
||||
TARGET_AR_KERNEL_ARCH ?= ""
|
||||
HOST_AR_KERNEL_ARCH ?= "${TARGET_AR_KERNEL_ARCH}"
|
||||
|
||||
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_KERNEL_ARCH} -fuse-ld=bfd ${DEBUG_PREFIX_MAP} -fdebug-prefix-map=${STAGING_KERNEL_DIR}=${KERNEL_SRC_PATH}"
|
||||
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_KERNEL_ARCH} -fuse-ld=bfd ${DEBUG_PREFIX_MAP} -fdebug-prefix-map=${STAGING_KERNEL_DIR}=${KERNEL_SRC_PATH} -fdebug-prefix-map=${STAGING_KERNEL_BUILDDIR}=${KERNEL_SRC_PATH}"
|
||||
KERNEL_LD = "${CCACHE}${HOST_PREFIX}ld.bfd ${HOST_LD_KERNEL_ARCH}"
|
||||
KERNEL_AR = "${CCACHE}${HOST_PREFIX}ar ${HOST_AR_KERNEL_ARCH}"
|
||||
TOOLCHAIN = "gcc"
|
||||
|
||||
@@ -56,6 +56,9 @@ FIT_HASH_ALG ?= "sha256"
|
||||
# fitImage Signature Algo
|
||||
FIT_SIGN_ALG ?= "rsa2048"
|
||||
|
||||
# fitImage Padding Algo
|
||||
FIT_PAD_ALG ?= "pkcs-1.5"
|
||||
|
||||
#
|
||||
# Emit the fitImage ITS header
|
||||
#
|
||||
@@ -250,6 +253,7 @@ fitimage_emit_section_config() {
|
||||
|
||||
conf_csum="${FIT_HASH_ALG}"
|
||||
conf_sign_algo="${FIT_SIGN_ALG}"
|
||||
conf_padding_algo="${FIT_PAD_ALG}"
|
||||
if [ "${UBOOT_SIGN_ENABLE}" = "1" ] ; then
|
||||
conf_sign_keyname="${UBOOT_SIGN_KEYNAME}"
|
||||
fi
|
||||
@@ -333,6 +337,7 @@ EOF
|
||||
signature-1 {
|
||||
algo = "${conf_csum},${conf_sign_algo}";
|
||||
key-name-hint = "${conf_sign_keyname}";
|
||||
padding = "${conf_padding_algo}";
|
||||
${sign_line}
|
||||
};
|
||||
EOF
|
||||
|
||||
@@ -269,6 +269,8 @@ do_kernel_metadata() {
|
||||
bbnote "KERNEL_FEATURES: $KERNEL_FEATURES_FINAL"
|
||||
bbnote "Final scc/cfg list: $sccs_defconfig $bsp_definition $sccs $KERNEL_FEATURES_FINAL"
|
||||
fi
|
||||
|
||||
set -e
|
||||
}
|
||||
|
||||
do_patch() {
|
||||
@@ -298,6 +300,8 @@ do_patch() {
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
set -e
|
||||
}
|
||||
|
||||
do_kernel_checkout() {
|
||||
@@ -356,6 +360,8 @@ do_kernel_checkout() {
|
||||
git commit -q -m "baseline commit: creating repo for ${PN}-${PV}"
|
||||
git clean -d -f
|
||||
fi
|
||||
|
||||
set -e
|
||||
}
|
||||
do_kernel_checkout[dirs] = "${S}"
|
||||
|
||||
@@ -523,6 +529,8 @@ do_validate_branches() {
|
||||
kgit-s2q --clean
|
||||
fi
|
||||
fi
|
||||
|
||||
set -e
|
||||
}
|
||||
|
||||
OE_TERMINAL_EXPORTS += "KBUILD_OUTPUT"
|
||||
|
||||
@@ -91,17 +91,17 @@ def copy_license_files(lic_files_paths, destdir):
|
||||
os.link(src, dst)
|
||||
except OSError as err:
|
||||
if err.errno == errno.EXDEV:
|
||||
# Copy license files if hard-link is not possible even if st_dev is the
|
||||
# Copy license files if hardlink is not possible even if st_dev is the
|
||||
# same on source and destination (docker container with device-mapper?)
|
||||
canlink = False
|
||||
else:
|
||||
raise
|
||||
# Only chown if we did hardling, and, we're running under pseudo
|
||||
# Only chown if we did hardlink and we're running under pseudo
|
||||
if canlink and os.environ.get('PSEUDO_DISABLED') == '0':
|
||||
os.chown(dst,0,0)
|
||||
if not canlink:
|
||||
begin_idx = int(beginline)-1 if beginline is not None else None
|
||||
end_idx = int(endline) if endline is not None else None
|
||||
begin_idx = max(0, int(beginline) - 1) if beginline is not None else None
|
||||
end_idx = max(0, int(endline)) if endline is not None else None
|
||||
if begin_idx is None and end_idx is None:
|
||||
shutil.copyfile(src, dst)
|
||||
else:
|
||||
|
||||
@@ -305,7 +305,7 @@ rootfs_trim_schemas () {
|
||||
}
|
||||
|
||||
rootfs_check_host_user_contaminated () {
|
||||
contaminated="${WORKDIR}/host-user-contaminated.txt"
|
||||
contaminated="${S}/host-user-contaminated.txt"
|
||||
HOST_USER_UID="$(PSEUDO_UNLOAD=1 id -u)"
|
||||
HOST_USER_GID="$(PSEUDO_UNLOAD=1 id -g)"
|
||||
|
||||
|
||||
@@ -53,24 +53,23 @@ CVE-2015-4778 CVE-2015-4779 CVE-2015-4780 CVE-2015-4781 CVE-2015-4782 CVE-2015-4
|
||||
CVE-2015-4785 CVE-2015-4786 CVE-2015-4787 CVE-2015-4788 CVE-2015-4789 CVE-2015-4790 CVE-2016-0682 \
|
||||
CVE-2016-0689 CVE-2016-0692 CVE-2016-0694 CVE-2016-3418 CVE-2020-2981"
|
||||
|
||||
#### CPE update pending ####
|
||||
|
||||
# groff:groff-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2000-0803
|
||||
# Appears it was fixed in https://git.savannah.gnu.org/cgit/groff.git/commit/?id=07f95f1674217275ed4612f1dcaa95a88435c6a7
|
||||
# so from 1.17 onwards. Reported to the database for update by RP 2021/5/9. Update accepted 2021/5/10.
|
||||
#CVE_CHECK_WHITELIST += "CVE-2000-0803"
|
||||
|
||||
|
||||
|
||||
#### Upstream still working on ####
|
||||
|
||||
# qemu:qemu-native:qemu-system-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2021-20255
|
||||
# There was a proposed patch https://lists.gnu.org/archive/html/qemu-devel/2021-02/msg06098.html
|
||||
# however qemu maintainers are sure the patch is incorrect and should not be applied.
|
||||
# qemu maintainers say the patch is incorrect and should not be applied
|
||||
# Ignore from OE's perspectivee as the issue is of low impact, at worst sitting in an infinite loop rather than exploitable
|
||||
CVE_CHECK_WHITELIST += "CVE-2021-20255"
|
||||
|
||||
# wget https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2021-31879
|
||||
# https://mail.gnu.org/archive/html/bug-wget/2021-02/msg00002.html
|
||||
# No response upstream as of 2021/5/12
|
||||
# qemu:qemu-native:qemu-system-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-12067
|
||||
# There was a proposed patch but rejected by upstream qemu. It is unclear if the issue can
|
||||
# still be reproduced or where exactly any bug is.
|
||||
# Ignore from OE's perspective as we'll pick up any fix when upstream accepts one.
|
||||
CVE_CHECK_WHITELIST += "CVE-2019-12067"
|
||||
|
||||
# nasm:nasm-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2020-18974
|
||||
# It is a fuzzing related buffer overflow. It is of low impact since most devices
|
||||
# wouldn't expose an assembler. The upstream is inactive and there is little to be
|
||||
# done about the bug, ignore from an OE perspective.
|
||||
CVE_CHECK_WHITELIST += "CVE-2020-18974"
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -79,3 +79,96 @@ def cve_check_merge_jsons(output, data):
|
||||
return
|
||||
|
||||
output["package"].append(data["package"][0])
|
||||
|
||||
def update_symlinks(target_path, link_path):
|
||||
"""
|
||||
Update a symbolic link link_path to point to target_path.
|
||||
Remove the link and recreate it if exist and is different.
|
||||
"""
|
||||
if link_path != target_path and os.path.exists(target_path):
|
||||
if os.path.exists(os.path.realpath(link_path)):
|
||||
os.remove(link_path)
|
||||
os.symlink(os.path.basename(target_path), link_path)
|
||||
|
||||
def get_patched_cves(d):
|
||||
"""
|
||||
Get patches that solve CVEs using the "CVE: " tag.
|
||||
"""
|
||||
|
||||
import re
|
||||
import oe.patch
|
||||
|
||||
pn = d.getVar("PN")
|
||||
cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
|
||||
|
||||
# Matches the last "CVE-YYYY-ID" in the file name, also if written
|
||||
# in lowercase. Possible to have multiple CVE IDs in a single
|
||||
# file name, but only the last one will be detected from the file name.
|
||||
# However, patch files contents addressing multiple CVE IDs are supported
|
||||
# (cve_match regular expression)
|
||||
|
||||
cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
|
||||
|
||||
patched_cves = set()
|
||||
bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
|
||||
for url in oe.patch.src_patches(d):
|
||||
patch_file = bb.fetch.decodeurl(url)[2]
|
||||
|
||||
# Remote compressed patches may not be unpacked, so silently ignore them
|
||||
if not os.path.isfile(patch_file):
|
||||
bb.warn("%s does not exist, cannot extract CVE list" % patch_file)
|
||||
continue
|
||||
|
||||
# Check patch file name for CVE ID
|
||||
fname_match = cve_file_name_match.search(patch_file)
|
||||
if fname_match:
|
||||
cve = fname_match.group(1).upper()
|
||||
patched_cves.add(cve)
|
||||
bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))
|
||||
|
||||
with open(patch_file, "r", encoding="utf-8") as f:
|
||||
try:
|
||||
patch_text = f.read()
|
||||
except UnicodeDecodeError:
|
||||
bb.debug(1, "Failed to read patch %s using UTF-8 encoding"
|
||||
" trying with iso8859-1" % patch_file)
|
||||
f.close()
|
||||
with open(patch_file, "r", encoding="iso8859-1") as f:
|
||||
patch_text = f.read()
|
||||
|
||||
# Search for one or more "CVE: " lines
|
||||
text_match = False
|
||||
for match in cve_match.finditer(patch_text):
|
||||
# Get only the CVEs without the "CVE: " tag
|
||||
cves = patch_text[match.start()+5:match.end()]
|
||||
for cve in cves.split():
|
||||
bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
|
||||
patched_cves.add(cve)
|
||||
text_match = True
|
||||
|
||||
if not fname_match and not text_match:
|
||||
bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
|
||||
|
||||
return patched_cves
|
||||
|
||||
|
||||
def get_cpe_ids(cve_product, version):
|
||||
"""
|
||||
Get list of CPE identifiers for the given product and version
|
||||
"""
|
||||
|
||||
version = version.split("+git")[0]
|
||||
|
||||
cpe_ids = []
|
||||
for product in cve_product.split():
|
||||
# CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not,
|
||||
# use wildcard for vendor.
|
||||
if ":" in product:
|
||||
vendor, product = product.split(":", 1)
|
||||
else:
|
||||
vendor = "*"
|
||||
|
||||
cpe_id = f'cpe:2.3:a:{vendor}:{product}:{version}:*:*:*:*:*:*:*'
|
||||
cpe_ids.append(cpe_id)
|
||||
|
||||
return cpe_ids
|
||||
|
||||
@@ -611,12 +611,13 @@ class PackageManager(object, metaclass=ABCMeta):
|
||||
"'%s' returned %d:\n%s" %
|
||||
(' '.join(cmd), e.returncode, e.output.decode("utf-8")))
|
||||
|
||||
target_arch = self.d.getVar('TARGET_ARCH')
|
||||
localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
|
||||
if os.path.exists(localedir) and os.listdir(localedir):
|
||||
generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
|
||||
# And now delete the binary locales
|
||||
self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)
|
||||
if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
|
||||
target_arch = self.d.getVar('TARGET_ARCH')
|
||||
localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
|
||||
if os.path.exists(localedir) and os.listdir(localedir):
|
||||
generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
|
||||
# And now delete the binary locales
|
||||
self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)
|
||||
|
||||
def deploy_dir_lock(self):
|
||||
if self.deploy_dir is None:
|
||||
|
||||
@@ -321,7 +321,9 @@ class Rootfs(object, metaclass=ABCMeta):
|
||||
if not os.path.exists(kernel_abi_ver_file):
|
||||
bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
|
||||
|
||||
kernel_ver = open(kernel_abi_ver_file).read().strip(' \n')
|
||||
with open(kernel_abi_ver_file) as f:
|
||||
kernel_ver = f.read().strip(' \n')
|
||||
|
||||
versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)
|
||||
|
||||
bb.utils.mkdirhier(versioned_modules_dir)
|
||||
|
||||
@@ -23,7 +23,7 @@ class ScpTest(OERuntimeTestCase):
|
||||
os.remove(cls.tmp_path)
|
||||
|
||||
@OETestDepends(['ssh.SSHTest.test_ssh'])
|
||||
@OEHasPackage(['openssh-scp', 'dropbear'])
|
||||
@OEHasPackage(['openssh-scp'])
|
||||
def test_scp_file(self):
|
||||
dst = '/tmp/test_scp_file'
|
||||
|
||||
|
||||
@@ -117,3 +117,85 @@ CVE_CHECK_FORMAT_JSON = "1"
|
||||
self.assertEqual(report["version"], "1")
|
||||
self.assertEqual(len(report["package"]), 1)
|
||||
self.assertEqual(report["package"][0]["name"], recipename)
|
||||
|
||||
|
||||
def test_recipe_report_json_unpatched(self):
|
||||
config = """
|
||||
INHERIT += "cve-check"
|
||||
CVE_CHECK_FORMAT_JSON = "1"
|
||||
CVE_CHECK_REPORT_PATCHED = "0"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
|
||||
summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
|
||||
recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
|
||||
|
||||
try:
|
||||
os.remove(summary_json)
|
||||
os.remove(recipe_json)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
bitbake("m4-native -c cve_check")
|
||||
|
||||
def check_m4_json(filename):
|
||||
with open(filename) as f:
|
||||
report = json.load(f)
|
||||
self.assertEqual(report["version"], "1")
|
||||
self.assertEqual(len(report["package"]), 1)
|
||||
package = report["package"][0]
|
||||
self.assertEqual(package["name"], "m4-native")
|
||||
#m4 had only Patched CVEs, so the issues array will be empty
|
||||
self.assertEqual(package["issue"], [])
|
||||
|
||||
self.assertExists(summary_json)
|
||||
check_m4_json(summary_json)
|
||||
self.assertExists(recipe_json)
|
||||
check_m4_json(recipe_json)
|
||||
|
||||
|
||||
def test_recipe_report_json_ignored(self):
|
||||
config = """
|
||||
INHERIT += "cve-check"
|
||||
CVE_CHECK_FORMAT_JSON = "1"
|
||||
CVE_CHECK_REPORT_PATCHED = "1"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
|
||||
summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
|
||||
recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
|
||||
|
||||
try:
|
||||
os.remove(summary_json)
|
||||
os.remove(recipe_json)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
bitbake("logrotate -c cve_check")
|
||||
|
||||
def check_m4_json(filename):
|
||||
with open(filename) as f:
|
||||
report = json.load(f)
|
||||
self.assertEqual(report["version"], "1")
|
||||
self.assertEqual(len(report["package"]), 1)
|
||||
package = report["package"][0]
|
||||
self.assertEqual(package["name"], "logrotate")
|
||||
found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
|
||||
# m4 CVE should not be in logrotate
|
||||
self.assertNotIn("CVE-2008-1687", found_cves)
|
||||
# logrotate has both Patched and Ignored CVEs
|
||||
self.assertIn("CVE-2011-1098", found_cves)
|
||||
self.assertEqual(found_cves["CVE-2011-1098"], "Patched")
|
||||
self.assertIn("CVE-2011-1548", found_cves)
|
||||
self.assertEqual(found_cves["CVE-2011-1548"], "Ignored")
|
||||
self.assertIn("CVE-2011-1549", found_cves)
|
||||
self.assertEqual(found_cves["CVE-2011-1549"], "Ignored")
|
||||
self.assertIn("CVE-2011-1550", found_cves)
|
||||
self.assertEqual(found_cves["CVE-2011-1550"], "Ignored")
|
||||
|
||||
self.assertExists(summary_json)
|
||||
check_m4_json(summary_json)
|
||||
self.assertExists(recipe_json)
|
||||
check_m4_json(recipe_json)
|
||||
|
||||
@@ -133,7 +133,8 @@ class OEListPackageconfigTests(OEScriptTests):
|
||||
def check_endlines(self, results, expected_endlines):
|
||||
for line in results.output.splitlines():
|
||||
for el in expected_endlines:
|
||||
if line.split() == el.split():
|
||||
if line and line.split()[0] == el.split()[0] and \
|
||||
' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
|
||||
expected_endlines.remove(el)
|
||||
break
|
||||
|
||||
|
||||
@@ -187,6 +187,8 @@ class TestImage(OESelftestTestCase):
|
||||
self.skipTest('virgl isn\'t working with Fedora 34')
|
||||
if distro and distro == 'fedora-35':
|
||||
self.skipTest('virgl isn\'t working with Fedora 35')
|
||||
if distro and distro == 'fedora-36':
|
||||
self.skipTest('virgl isn\'t working with Fedora 36')
|
||||
if distro and distro == 'opensuseleap-15.0':
|
||||
self.skipTest('virgl isn\'t working with Opensuse 15.0')
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=6626bb1e20189cfa95f2c508ba286393"
|
||||
|
||||
COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"
|
||||
|
||||
SRC_URI = "git://github.com/rhinstaller/efivar.git;branch=master;protocol=https \
|
||||
SRC_URI = "git://github.com/rhinstaller/efivar.git;branch=main;protocol=https \
|
||||
file://determinism.patch \
|
||||
file://no-werror.patch"
|
||||
SRCREV = "c1d6b10e1ed4ba2be07f385eae5bceb694478a10"
|
||||
|
||||
178
meta/recipes-bsp/grub/files/CVE-2021-3695.patch
Normal file
178
meta/recipes-bsp/grub/files/CVE-2021-3695.patch
Normal file
@@ -0,0 +1,178 @@
|
||||
From 0693d672abcf720419f86c56bda6428c540e2bb1 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Wed, 20 Jul 2022 10:01:35 +0530
|
||||
Subject: [PATCH] CVE-2021-3695
|
||||
|
||||
Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=e623866d9286410156e8b9d2c82d6253a1b22d08]
|
||||
CVE: CVE-2021-3695
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
video/readers/png: Drop greyscale support to fix heap out-of-bounds write
|
||||
|
||||
A 16-bit greyscale PNG without alpha is processed in the following loop:
|
||||
|
||||
for (i = 0; i < (data->image_width * data->image_height);
|
||||
i++, d1 += 4, d2 += 2)
|
||||
{
|
||||
d1[R3] = d2[1];
|
||||
d1[G3] = d2[1];
|
||||
d1[B3] = d2[1];
|
||||
}
|
||||
|
||||
The increment of d1 is wrong. d1 is incremented by 4 bytes per iteration,
|
||||
but there are only 3 bytes allocated for storage. This means that image
|
||||
data will overwrite somewhat-attacker-controlled parts of memory - 3 bytes
|
||||
out of every 4 following the end of the image.
|
||||
|
||||
This has existed since greyscale support was added in 2013 in commit
|
||||
3ccf16dff98f (grub-core/video/readers/png.c: Support grayscale).
|
||||
|
||||
Saving starfield.png as a 16-bit greyscale image without alpha in the gimp
|
||||
and attempting to load it causes grub-emu to crash - I don't think this code
|
||||
has ever worked.
|
||||
|
||||
Delete all PNG greyscale support.
|
||||
|
||||
Fixes: CVE-2021-3695
|
||||
|
||||
Signed-off-by: Daniel Axtens <dja@axtens.net>
|
||||
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
|
||||
---
|
||||
grub-core/video/readers/png.c | 89 ++++-------------------------------
|
||||
1 file changed, 8 insertions(+), 81 deletions(-)
|
||||
|
||||
diff --git a/grub-core/video/readers/png.c b/grub-core/video/readers/png.c
|
||||
index 0157ff7..db4a9d4 100644
|
||||
--- a/grub-core/video/readers/png.c
|
||||
+++ b/grub-core/video/readers/png.c
|
||||
@@ -100,7 +100,7 @@ struct grub_png_data
|
||||
|
||||
unsigned image_width, image_height;
|
||||
int bpp, is_16bit;
|
||||
- int raw_bytes, is_gray, is_alpha, is_palette;
|
||||
+ int raw_bytes, is_alpha, is_palette;
|
||||
int row_bytes, color_bits;
|
||||
grub_uint8_t *image_data;
|
||||
|
||||
@@ -280,13 +280,13 @@ grub_png_decode_image_header (struct grub_png_data *data)
|
||||
data->bpp = 3;
|
||||
else
|
||||
{
|
||||
- data->is_gray = 1;
|
||||
- data->bpp = 1;
|
||||
+ return grub_error (GRUB_ERR_BAD_FILE_TYPE,
|
||||
+ "png: color type not supported");
|
||||
}
|
||||
|
||||
if ((color_bits != 8) && (color_bits != 16)
|
||||
&& (color_bits != 4
|
||||
- || !(data->is_gray || data->is_palette)))
|
||||
+ || !data->is_palette))
|
||||
return grub_error (GRUB_ERR_BAD_FILE_TYPE,
|
||||
"png: bit depth must be 8 or 16");
|
||||
|
||||
@@ -315,7 +315,7 @@ grub_png_decode_image_header (struct grub_png_data *data)
|
||||
}
|
||||
|
||||
#ifndef GRUB_CPU_WORDS_BIGENDIAN
|
||||
- if (data->is_16bit || data->is_gray || data->is_palette)
|
||||
+ if (data->is_16bit || data->is_palette)
|
||||
#endif
|
||||
{
|
||||
data->image_data = grub_calloc (data->image_height, data->row_bytes);
|
||||
@@ -859,27 +859,8 @@ grub_png_convert_image (struct grub_png_data *data)
|
||||
int shift;
|
||||
int mask = (1 << data->color_bits) - 1;
|
||||
unsigned j;
|
||||
- if (data->is_gray)
|
||||
- {
|
||||
- /* Generic formula is
|
||||
- (0xff * i) / ((1U << data->color_bits) - 1)
|
||||
- but for allowed bit depth of 1, 2 and for it's
|
||||
- equivalent to
|
||||
- (0xff / ((1U << data->color_bits) - 1)) * i
|
||||
- Precompute the multipliers to avoid division.
|
||||
- */
|
||||
-
|
||||
- const grub_uint8_t multipliers[5] = { 0xff, 0xff, 0x55, 0x24, 0x11 };
|
||||
- for (i = 0; i < (1U << data->color_bits); i++)
|
||||
- {
|
||||
- grub_uint8_t col = multipliers[data->color_bits] * i;
|
||||
- palette[i][0] = col;
|
||||
- palette[i][1] = col;
|
||||
- palette[i][2] = col;
|
||||
- }
|
||||
- }
|
||||
- else
|
||||
- grub_memcpy (palette, data->palette, 3 << data->color_bits);
|
||||
+
|
||||
+ grub_memcpy (palette, data->palette, 3 << data->color_bits);
|
||||
d1c = d1;
|
||||
d2c = d2;
|
||||
for (j = 0; j < data->image_height; j++, d1c += data->image_width * 3,
|
||||
@@ -917,61 +898,7 @@ grub_png_convert_image (struct grub_png_data *data)
|
||||
return;
|
||||
}
|
||||
|
||||
- if (data->is_gray)
|
||||
- {
|
||||
- switch (data->bpp)
|
||||
- {
|
||||
- case 4:
|
||||
- /* 16-bit gray with alpha. */
|
||||
- for (i = 0; i < (data->image_width * data->image_height);
|
||||
- i++, d1 += 4, d2 += 4)
|
||||
- {
|
||||
- d1[R4] = d2[3];
|
||||
- d1[G4] = d2[3];
|
||||
- d1[B4] = d2[3];
|
||||
- d1[A4] = d2[1];
|
||||
- }
|
||||
- break;
|
||||
- case 2:
|
||||
- if (data->is_16bit)
|
||||
- /* 16-bit gray without alpha. */
|
||||
- {
|
||||
- for (i = 0; i < (data->image_width * data->image_height);
|
||||
- i++, d1 += 4, d2 += 2)
|
||||
- {
|
||||
- d1[R3] = d2[1];
|
||||
- d1[G3] = d2[1];
|
||||
- d1[B3] = d2[1];
|
||||
- }
|
||||
- }
|
||||
- else
|
||||
- /* 8-bit gray with alpha. */
|
||||
- {
|
||||
- for (i = 0; i < (data->image_width * data->image_height);
|
||||
- i++, d1 += 4, d2 += 2)
|
||||
- {
|
||||
- d1[R4] = d2[1];
|
||||
- d1[G4] = d2[1];
|
||||
- d1[B4] = d2[1];
|
||||
- d1[A4] = d2[0];
|
||||
- }
|
||||
- }
|
||||
- break;
|
||||
- /* 8-bit gray without alpha. */
|
||||
- case 1:
|
||||
- for (i = 0; i < (data->image_width * data->image_height);
|
||||
- i++, d1 += 3, d2++)
|
||||
- {
|
||||
- d1[R3] = d2[0];
|
||||
- d1[G3] = d2[0];
|
||||
- d1[B3] = d2[0];
|
||||
- }
|
||||
- break;
|
||||
- }
|
||||
- return;
|
||||
- }
|
||||
-
|
||||
- {
|
||||
+ {
|
||||
/* Only copy the upper 8 bit. */
|
||||
#ifndef GRUB_CPU_WORDS_BIGENDIAN
|
||||
for (i = 0; i < (data->image_width * data->image_height * data->bpp >> 1);
|
||||
--
|
||||
2.25.1
|
||||
|
||||
46
meta/recipes-bsp/grub/files/CVE-2021-3696.patch
Normal file
46
meta/recipes-bsp/grub/files/CVE-2021-3696.patch
Normal file
@@ -0,0 +1,46 @@
|
||||
From b18ce59d6496a9313d75f9497a0efac61dcf4191 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Wed, 20 Jul 2022 10:05:42 +0530
|
||||
Subject: [PATCH] CVE-2021-3696
|
||||
|
||||
Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=210245129c932dc9e1c2748d9d35524fb95b5042]
|
||||
CVE: CVE-2021-3696
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
video/readers/png: Avoid heap OOB R/W inserting huff table items
|
||||
|
||||
In fuzzing we observed crashes where a code would attempt to be inserted
|
||||
into a huffman table before the start, leading to a set of heap OOB reads
|
||||
and writes as table entries with negative indices were shifted around and
|
||||
the new code written in.
|
||||
|
||||
Catch the case where we would underflow the array and bail.
|
||||
|
||||
Fixes: CVE-2021-3696
|
||||
Signed-off-by: Daniel Axtens <dja@axtens.net>
|
||||
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
|
||||
---
|
||||
grub-core/video/readers/png.c | 7 +++++++
|
||||
1 file changed, 7 insertions(+)
|
||||
|
||||
diff --git a/grub-core/video/readers/png.c b/grub-core/video/readers/png.c
|
||||
index 36b3f10..3c05951 100644
|
||||
--- a/grub-core/video/readers/png.c
|
||||
+++ b/grub-core/video/readers/png.c
|
||||
@@ -416,6 +416,13 @@ grub_png_insert_huff_item (struct huff_table *ht, int code, int len)
|
||||
for (i = len; i < ht->max_length; i++)
|
||||
n += ht->maxval[i];
|
||||
|
||||
+ if (n > ht->num_values)
|
||||
+ {
|
||||
+ grub_error (GRUB_ERR_BAD_FILE_TYPE,
|
||||
+ "png: out of range inserting huffman table item");
|
||||
+ return;
|
||||
+ }
|
||||
+
|
||||
for (i = 0; i < n; i++)
|
||||
ht->values[ht->num_values - i] = ht->values[ht->num_values - i - 1];
|
||||
|
||||
--
|
||||
2.25.1
|
||||
|
||||
82
meta/recipes-bsp/grub/files/CVE-2021-3697.patch
Normal file
82
meta/recipes-bsp/grub/files/CVE-2021-3697.patch
Normal file
@@ -0,0 +1,82 @@
|
||||
From 4de9de9d14f4ac27229e45514627534e32cc4406 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Tue, 19 Jul 2022 11:13:02 +0530
|
||||
Subject: [PATCH] CVE-2021-3697
|
||||
|
||||
Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=22a3f97d39f6a10b08ad7fd1cc47c4dcd10413f6]
|
||||
CVE: CVE-2021-3697
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
video/readers/jpeg: Block int underflow -> wild pointer write
|
||||
|
||||
Certain 1 px wide images caused a wild pointer write in
|
||||
grub_jpeg_ycrcb_to_rgb(). This was caused because in grub_jpeg_decode_data(),
|
||||
we have the following loop:
|
||||
|
||||
for (; data->r1 < nr1 && (!data->dri || rst);
|
||||
data->r1++, data->bitmap_ptr += (vb * data->image_width - hb * nc1) * 3)
|
||||
|
||||
We did not check if vb * width >= hb * nc1.
|
||||
|
||||
On a 64-bit platform, if that turns out to be negative, it will underflow,
|
||||
be interpreted as unsigned 64-bit, then be added to the 64-bit pointer, so
|
||||
we see data->bitmap_ptr jump, e.g.:
|
||||
|
||||
0x6180_0000_0480 to
|
||||
0x6181_0000_0498
|
||||
^
|
||||
~--- carry has occurred and this pointer is now far away from
|
||||
any object.
|
||||
|
||||
On a 32-bit platform, it will decrement the pointer, creating a pointer
|
||||
that won't crash but will overwrite random data.
|
||||
|
||||
Catch the underflow and error out.
|
||||
|
||||
Fixes: CVE-2021-3697
|
||||
|
||||
Signed-off-by: Daniel Axtens <dja@axtens.net>
|
||||
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
|
||||
---
|
||||
grub-core/video/readers/jpeg.c | 10 +++++++++-
|
||||
1 file changed, 9 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/grub-core/video/readers/jpeg.c b/grub-core/video/readers/jpeg.c
|
||||
index 31359a4..545a60b 100644
|
||||
--- a/grub-core/video/readers/jpeg.c
|
||||
+++ b/grub-core/video/readers/jpeg.c
|
||||
@@ -23,6 +23,7 @@
|
||||
#include <grub/mm.h>
|
||||
#include <grub/misc.h>
|
||||
#include <grub/bufio.h>
|
||||
+#include <grub/safemath.h>
|
||||
|
||||
GRUB_MOD_LICENSE ("GPLv3+");
|
||||
|
||||
@@ -617,6 +618,7 @@ static grub_err_t
|
||||
grub_jpeg_decode_data (struct grub_jpeg_data *data)
|
||||
{
|
||||
unsigned c1, vb, hb, nr1, nc1;
|
||||
+ unsigned stride_a, stride_b, stride;
|
||||
int rst = data->dri;
|
||||
|
||||
vb = 8 << data->log_vs;
|
||||
@@ -624,8 +626,14 @@ grub_jpeg_decode_data (struct grub_jpeg_data *data)
|
||||
nr1 = (data->image_height + vb - 1) >> (3 + data->log_vs);
|
||||
nc1 = (data->image_width + hb - 1) >> (3 + data->log_hs);
|
||||
|
||||
+ if (grub_mul(vb, data->image_width, &stride_a) ||
|
||||
+ grub_mul(hb, nc1, &stride_b) ||
|
||||
+ grub_sub(stride_a, stride_b, &stride))
|
||||
+ return grub_error (GRUB_ERR_BAD_FILE_TYPE,
|
||||
+ "jpeg: cannot decode image with these dimensions");
|
||||
+
|
||||
for (; data->r1 < nr1 && (!data->dri || rst);
|
||||
- data->r1++, data->bitmap_ptr += (vb * data->image_width - hb * nc1) * 3)
|
||||
+ data->r1++, data->bitmap_ptr += stride * 3)
|
||||
for (c1 = 0; c1 < nc1 && (!data->dri || rst);
|
||||
c1++, rst--, data->bitmap_ptr += hb * 3)
|
||||
{
|
||||
--
|
||||
2.25.1
|
||||
|
||||
32
meta/recipes-bsp/grub/files/CVE-2021-3981.patch
Normal file
32
meta/recipes-bsp/grub/files/CVE-2021-3981.patch
Normal file
@@ -0,0 +1,32 @@
|
||||
From 67740c43c9326956ea5cd6be77f813b5499a56a5 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Mon, 27 Jun 2022 10:15:29 +0530
|
||||
Subject: [PATCH] CVE-2021-3981
|
||||
|
||||
Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/grub.git/diff/util/grub-mkconfig.in?id=0adec29674561034771c13e446069b41ef41e4d4]
|
||||
CVE: CVE-2021-3981
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
util/grub-mkconfig.in | 6 +++++-
|
||||
1 file changed, 5 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/util/grub-mkconfig.in b/util/grub-mkconfig.in
|
||||
index 9f477ff..ead94a6 100644
|
||||
--- a/util/grub-mkconfig.in
|
||||
+++ b/util/grub-mkconfig.in
|
||||
@@ -287,7 +287,11 @@ and /etc/grub.d/* files or please file a bug report with
|
||||
exit 1
|
||||
else
|
||||
# none of the children aborted with error, install the new grub.cfg
|
||||
- mv -f ${grub_cfg}.new ${grub_cfg}
|
||||
+ oldumask=$(umask)
|
||||
+ umask 077
|
||||
+ cat ${grub_cfg}.new > ${grub_cfg}
|
||||
+ umask $oldumask
|
||||
+ rm -f ${grub_cfg}.new
|
||||
fi
|
||||
fi
|
||||
|
||||
--
|
||||
2.25.1
|
||||
|
||||
60
meta/recipes-bsp/grub/files/CVE-2022-28733.patch
Normal file
60
meta/recipes-bsp/grub/files/CVE-2022-28733.patch
Normal file
@@ -0,0 +1,60 @@
|
||||
From 415fb5eb83cbd3b5cfc25ac1290f2de4fe3d231c Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Mon, 1 Aug 2022 10:48:34 +0530
|
||||
Subject: [PATCH] CVE-2022-28733
|
||||
|
||||
Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=3e4817538de828319ba6d59ced2fbb9b5ca13287]
|
||||
CVE: CVE-2022-28733
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
net/ip: Do IP fragment maths safely
|
||||
|
||||
We can receive packets with invalid IP fragmentation information. This
|
||||
can lead to rsm->total_len underflowing and becoming very large.
|
||||
|
||||
Then, in grub_netbuff_alloc(), we add to this very large number, which can
|
||||
cause it to overflow and wrap back around to a small positive number.
|
||||
The allocation then succeeds, but the resulting buffer is too small and
|
||||
subsequent operations can write past the end of the buffer.
|
||||
|
||||
Catch the underflow here.
|
||||
|
||||
Fixes: CVE-2022-28733
|
||||
|
||||
Signed-off-by: Daniel Axtens <dja@axtens.net>
|
||||
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
|
||||
---
|
||||
grub-core/net/ip.c | 10 +++++++++-
|
||||
1 file changed, 9 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/grub-core/net/ip.c b/grub-core/net/ip.c
|
||||
index ea5edf8..74e4e8b 100644
|
||||
--- a/grub-core/net/ip.c
|
||||
+++ b/grub-core/net/ip.c
|
||||
@@ -25,6 +25,7 @@
|
||||
#include <grub/net/netbuff.h>
|
||||
#include <grub/mm.h>
|
||||
#include <grub/priority_queue.h>
|
||||
+#include <grub/safemath.h>
|
||||
#include <grub/time.h>
|
||||
|
||||
struct iphdr {
|
||||
@@ -512,7 +513,14 @@ grub_net_recv_ip4_packets (struct grub_net_buff *nb,
|
||||
{
|
||||
rsm->total_len = (8 * (grub_be_to_cpu16 (iph->frags) & OFFSET_MASK)
|
||||
+ (nb->tail - nb->data));
|
||||
- rsm->total_len -= ((iph->verhdrlen & 0xf) * sizeof (grub_uint32_t));
|
||||
+
|
||||
+ if (grub_sub (rsm->total_len, (iph->verhdrlen & 0xf) * sizeof (grub_uint32_t),
|
||||
+ &rsm->total_len))
|
||||
+ {
|
||||
+ grub_dprintf ("net", "IP reassembly size underflow\n");
|
||||
+ return GRUB_ERR_NONE;
|
||||
+ }
|
||||
+
|
||||
rsm->asm_netbuff = grub_netbuff_alloc (rsm->total_len);
|
||||
if (!rsm->asm_netbuff)
|
||||
{
|
||||
--
|
||||
2.25.1
|
||||
|
||||
67
meta/recipes-bsp/grub/files/CVE-2022-28734.patch
Normal file
67
meta/recipes-bsp/grub/files/CVE-2022-28734.patch
Normal file
@@ -0,0 +1,67 @@
|
||||
From f03f09c2a07eae7f3a4646e33a406ae2689afb9e Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Mon, 1 Aug 2022 10:59:41 +0530
|
||||
Subject: [PATCH] CVE-2022-28734
|
||||
|
||||
Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=b26b4c08e7119281ff30d0fb4a6169bd2afa8fe4]
|
||||
CVE: CVE-2022-28734
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
net/http: Fix OOB write for split http headers
|
||||
|
||||
GRUB has special code for handling an http header that is split
|
||||
across two packets.
|
||||
|
||||
The code tracks the end of line by looking for a "\n" byte. The
|
||||
code for split headers has always advanced the pointer just past the
|
||||
end of the line, whereas the code that handles unsplit headers does
|
||||
not advance the pointer. This extra advance causes the length to be
|
||||
one greater, which breaks an assumption in parse_line(), leading to
|
||||
it writing a NUL byte one byte past the end of the buffer where we
|
||||
reconstruct the line from the two packets.
|
||||
|
||||
It's conceivable that an attacker controlled set of packets could
|
||||
cause this to zero out the first byte of the "next" pointer of the
|
||||
grub_mm_region structure following the current_line buffer.
|
||||
|
||||
Do not advance the pointer in the split header case.
|
||||
|
||||
Fixes: CVE-2022-28734
|
||||
---
|
||||
grub-core/net/http.c | 12 +++++++++---
|
||||
1 file changed, 9 insertions(+), 3 deletions(-)
|
||||
|
||||
diff --git a/grub-core/net/http.c b/grub-core/net/http.c
|
||||
index 5aa4ad3..a220d21 100644
|
||||
--- a/grub-core/net/http.c
|
||||
+++ b/grub-core/net/http.c
|
||||
@@ -68,7 +68,15 @@ parse_line (grub_file_t file, http_data_t data, char *ptr, grub_size_t len)
|
||||
char *end = ptr + len;
|
||||
while (end > ptr && *(end - 1) == '\r')
|
||||
end--;
|
||||
+
|
||||
+ /* LF without CR. */
|
||||
+ if (end == ptr + len)
|
||||
+ {
|
||||
+ data->errmsg = grub_strdup (_("invalid HTTP header - LF without CR"));
|
||||
+ return GRUB_ERR_NONE;
|
||||
+ }
|
||||
*end = 0;
|
||||
+
|
||||
/* Trailing CRLF. */
|
||||
if (data->in_chunk_len == 1)
|
||||
{
|
||||
@@ -190,9 +198,7 @@ http_receive (grub_net_tcp_socket_t sock __attribute__ ((unused)),
|
||||
int have_line = 1;
|
||||
char *t;
|
||||
ptr = grub_memchr (nb->data, '\n', nb->tail - nb->data);
|
||||
- if (ptr)
|
||||
- ptr++;
|
||||
- else
|
||||
+ if (ptr == NULL)
|
||||
{
|
||||
have_line = 0;
|
||||
ptr = (char *) nb->tail;
|
||||
--
|
||||
2.25.1
|
||||
|
||||
275
meta/recipes-bsp/grub/files/CVE-2022-28736.patch
Normal file
275
meta/recipes-bsp/grub/files/CVE-2022-28736.patch
Normal file
@@ -0,0 +1,275 @@
|
||||
From 431a111c60095fc973d83fe9209f26f29ce78784 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Mon, 1 Aug 2022 11:17:17 +0530
|
||||
Subject: [PATCH] CVE-2022-28736
|
||||
|
||||
Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=04c86e0bb7b58fc2f913f798cdb18934933e532d]
|
||||
CVE: CVE-2022-28736
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
loader/efi/chainloader: Use grub_loader_set_ex()
|
||||
|
||||
This ports the EFI chainloader to use grub_loader_set_ex() in order to fix
|
||||
a use-after-free bug that occurs when grub_cmd_chainloader() is executed
|
||||
more than once before a boot attempt is performed.
|
||||
|
||||
Fixes: CVE-2022-28736
|
||||
|
||||
Signed-off-by: Chris Coulson <chris.coulson@canonical.com>
|
||||
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
|
||||
---
|
||||
grub-core/commands/boot.c | 66 ++++++++++++++++++++++++++----
|
||||
grub-core/loader/efi/chainloader.c | 46 +++++++++++----------
|
||||
include/grub/loader.h | 5 +++
|
||||
3 files changed, 87 insertions(+), 30 deletions(-)
|
||||
|
||||
diff --git a/grub-core/commands/boot.c b/grub-core/commands/boot.c
|
||||
index bbca81e..6151478 100644
|
||||
--- a/grub-core/commands/boot.c
|
||||
+++ b/grub-core/commands/boot.c
|
||||
@@ -27,10 +27,20 @@
|
||||
|
||||
GRUB_MOD_LICENSE ("GPLv3+");
|
||||
|
||||
-static grub_err_t (*grub_loader_boot_func) (void);
|
||||
-static grub_err_t (*grub_loader_unload_func) (void);
|
||||
+static grub_err_t (*grub_loader_boot_func) (void *context);
|
||||
+static grub_err_t (*grub_loader_unload_func) (void *context);
|
||||
+static void *grub_loader_context;
|
||||
static int grub_loader_flags;
|
||||
|
||||
+struct grub_simple_loader_hooks
|
||||
+{
|
||||
+ grub_err_t (*boot) (void);
|
||||
+ grub_err_t (*unload) (void);
|
||||
+};
|
||||
+
|
||||
+/* Don't heap allocate this to avoid making grub_loader_set() fallible. */
|
||||
+static struct grub_simple_loader_hooks simple_loader_hooks;
|
||||
+
|
||||
struct grub_preboot
|
||||
{
|
||||
grub_err_t (*preboot_func) (int);
|
||||
@@ -44,6 +54,29 @@ static int grub_loader_loaded;
|
||||
static struct grub_preboot *preboots_head = 0,
|
||||
*preboots_tail = 0;
|
||||
|
||||
+static grub_err_t
|
||||
+grub_simple_boot_hook (void *context)
|
||||
+{
|
||||
+ struct grub_simple_loader_hooks *hooks;
|
||||
+
|
||||
+ hooks = (struct grub_simple_loader_hooks *) context;
|
||||
+ return hooks->boot ();
|
||||
+}
|
||||
+
|
||||
+static grub_err_t
|
||||
+grub_simple_unload_hook (void *context)
|
||||
+{
|
||||
+ struct grub_simple_loader_hooks *hooks;
|
||||
+ grub_err_t ret;
|
||||
+
|
||||
+ hooks = (struct grub_simple_loader_hooks *) context;
|
||||
+
|
||||
+ ret = hooks->unload ();
|
||||
+ grub_memset (hooks, 0, sizeof (*hooks));
|
||||
+
|
||||
+ return ret;
|
||||
+}
|
||||
+
|
||||
int
|
||||
grub_loader_is_loaded (void)
|
||||
{
|
||||
@@ -110,28 +143,45 @@ grub_loader_unregister_preboot_hook (struct grub_preboot *hnd)
|
||||
}
|
||||
|
||||
void
|
||||
-grub_loader_set (grub_err_t (*boot) (void),
|
||||
- grub_err_t (*unload) (void),
|
||||
- int flags)
|
||||
+grub_loader_set_ex (grub_err_t (*boot) (void *context),
|
||||
+ grub_err_t (*unload) (void *context),
|
||||
+ void *context,
|
||||
+ int flags)
|
||||
{
|
||||
if (grub_loader_loaded && grub_loader_unload_func)
|
||||
- grub_loader_unload_func ();
|
||||
+ grub_loader_unload_func (grub_loader_context);
|
||||
|
||||
grub_loader_boot_func = boot;
|
||||
grub_loader_unload_func = unload;
|
||||
+ grub_loader_context = context;
|
||||
grub_loader_flags = flags;
|
||||
|
||||
grub_loader_loaded = 1;
|
||||
}
|
||||
|
||||
+void
|
||||
+grub_loader_set (grub_err_t (*boot) (void),
|
||||
+ grub_err_t (*unload) (void),
|
||||
+ int flags)
|
||||
+{
|
||||
+ grub_loader_set_ex (grub_simple_boot_hook,
|
||||
+ grub_simple_unload_hook,
|
||||
+ &simple_loader_hooks,
|
||||
+ flags);
|
||||
+
|
||||
+ simple_loader_hooks.boot = boot;
|
||||
+ simple_loader_hooks.unload = unload;
|
||||
+}
|
||||
+
|
||||
void
|
||||
grub_loader_unset(void)
|
||||
{
|
||||
if (grub_loader_loaded && grub_loader_unload_func)
|
||||
- grub_loader_unload_func ();
|
||||
+ grub_loader_unload_func (grub_loader_context);
|
||||
|
||||
grub_loader_boot_func = 0;
|
||||
grub_loader_unload_func = 0;
|
||||
+ grub_loader_context = 0;
|
||||
|
||||
grub_loader_loaded = 0;
|
||||
}
|
||||
@@ -158,7 +208,7 @@ grub_loader_boot (void)
|
||||
return err;
|
||||
}
|
||||
}
|
||||
- err = (grub_loader_boot_func) ();
|
||||
+ err = (grub_loader_boot_func) (grub_loader_context);
|
||||
|
||||
for (cur = preboots_tail; cur; cur = cur->prev)
|
||||
if (! err)
|
||||
diff --git a/grub-core/loader/efi/chainloader.c b/grub-core/loader/efi/chainloader.c
|
||||
index a8d7b91..93a028a 100644
|
||||
--- a/grub-core/loader/efi/chainloader.c
|
||||
+++ b/grub-core/loader/efi/chainloader.c
|
||||
@@ -44,33 +44,28 @@ GRUB_MOD_LICENSE ("GPLv3+");
|
||||
|
||||
static grub_dl_t my_mod;
|
||||
|
||||
-static grub_efi_physical_address_t address;
|
||||
-static grub_efi_uintn_t pages;
|
||||
-static grub_efi_device_path_t *file_path;
|
||||
-static grub_efi_handle_t image_handle;
|
||||
-static grub_efi_char16_t *cmdline;
|
||||
-
|
||||
static grub_err_t
|
||||
-grub_chainloader_unload (void)
|
||||
+grub_chainloader_unload (void *context)
|
||||
{
|
||||
+ grub_efi_handle_t image_handle = (grub_efi_handle_t) context;
|
||||
+ grub_efi_loaded_image_t *loaded_image;
|
||||
grub_efi_boot_services_t *b;
|
||||
|
||||
+ loaded_image = grub_efi_get_loaded_image (image_handle);
|
||||
+ if (loaded_image != NULL)
|
||||
+ grub_free (loaded_image->load_options);
|
||||
+
|
||||
b = grub_efi_system_table->boot_services;
|
||||
efi_call_1 (b->unload_image, image_handle);
|
||||
- efi_call_2 (b->free_pages, address, pages);
|
||||
-
|
||||
- grub_free (file_path);
|
||||
- grub_free (cmdline);
|
||||
- cmdline = 0;
|
||||
- file_path = 0;
|
||||
|
||||
grub_dl_unref (my_mod);
|
||||
return GRUB_ERR_NONE;
|
||||
}
|
||||
|
||||
static grub_err_t
|
||||
-grub_chainloader_boot (void)
|
||||
+grub_chainloader_boot (void *context)
|
||||
{
|
||||
+ grub_efi_handle_t image_handle = (grub_efi_handle_t) context;
|
||||
grub_efi_boot_services_t *b;
|
||||
grub_efi_status_t status;
|
||||
grub_efi_uintn_t exit_data_size;
|
||||
@@ -139,7 +134,7 @@ make_file_path (grub_efi_device_path_t *dp, const char *filename)
|
||||
char *dir_start;
|
||||
char *dir_end;
|
||||
grub_size_t size;
|
||||
- grub_efi_device_path_t *d;
|
||||
+ grub_efi_device_path_t *d, *file_path;
|
||||
|
||||
dir_start = grub_strchr (filename, ')');
|
||||
if (! dir_start)
|
||||
@@ -215,11 +210,15 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),
|
||||
grub_efi_status_t status;
|
||||
grub_efi_boot_services_t *b;
|
||||
grub_device_t dev = 0;
|
||||
- grub_efi_device_path_t *dp = 0;
|
||||
+ grub_efi_device_path_t *dp = NULL, *file_path = NULL;
|
||||
grub_efi_loaded_image_t *loaded_image;
|
||||
char *filename;
|
||||
void *boot_image = 0;
|
||||
grub_efi_handle_t dev_handle = 0;
|
||||
+ grub_efi_physical_address_t address = 0;
|
||||
+ grub_efi_uintn_t pages = 0;
|
||||
+ grub_efi_char16_t *cmdline = NULL;
|
||||
+ grub_efi_handle_t image_handle = NULL;
|
||||
|
||||
if (argc == 0)
|
||||
return grub_error (GRUB_ERR_BAD_ARGUMENT, N_("filename expected"));
|
||||
@@ -227,11 +226,6 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),
|
||||
|
||||
grub_dl_ref (my_mod);
|
||||
|
||||
- /* Initialize some global variables. */
|
||||
- address = 0;
|
||||
- image_handle = 0;
|
||||
- file_path = 0;
|
||||
-
|
||||
b = grub_efi_system_table->boot_services;
|
||||
|
||||
file = grub_file_open (filename, GRUB_FILE_TYPE_EFI_CHAINLOADED_IMAGE);
|
||||
@@ -401,7 +395,11 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),
|
||||
grub_file_close (file);
|
||||
grub_device_close (dev);
|
||||
|
||||
- grub_loader_set (grub_chainloader_boot, grub_chainloader_unload, 0);
|
||||
+ /* We're finished with the source image buffer and file path now. */
|
||||
+ efi_call_2 (b->free_pages, address, pages);
|
||||
+ grub_free (file_path);
|
||||
+
|
||||
+ grub_loader_set_ex (grub_chainloader_boot, grub_chainloader_unload, image_handle, 0);
|
||||
return 0;
|
||||
|
||||
fail:
|
||||
@@ -412,11 +410,15 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),
|
||||
if (file)
|
||||
grub_file_close (file);
|
||||
|
||||
+ grub_free (cmdline);
|
||||
grub_free (file_path);
|
||||
|
||||
if (address)
|
||||
efi_call_2 (b->free_pages, address, pages);
|
||||
|
||||
+ if (image_handle != NULL)
|
||||
+ efi_call_1 (b->unload_image, image_handle);
|
||||
+
|
||||
grub_dl_unref (my_mod);
|
||||
|
||||
return grub_errno;
|
||||
diff --git a/include/grub/loader.h b/include/grub/loader.h
|
||||
index 7f82a49..3071a50 100644
|
||||
--- a/include/grub/loader.h
|
||||
+++ b/include/grub/loader.h
|
||||
@@ -39,6 +39,11 @@ void EXPORT_FUNC (grub_loader_set) (grub_err_t (*boot) (void),
|
||||
grub_err_t (*unload) (void),
|
||||
int flags);
|
||||
|
||||
+void EXPORT_FUNC (grub_loader_set_ex) (grub_err_t (*boot) (void *context),
|
||||
+ grub_err_t (*unload) (void *context),
|
||||
+ void *context,
|
||||
+ int flags);
|
||||
+
|
||||
/* Unset current loader, if any. */
|
||||
void EXPORT_FUNC (grub_loader_unset) (void);
|
||||
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -95,6 +95,13 @@ SRC_URI = "${GNU_MIRROR}/grub/grub-${PV}.tar.gz \
|
||||
file://0044-script-execute-Fix-NULL-dereference-in-grub_script_e.patch \
|
||||
file://0045-commands-ls-Require-device_name-is-not-NULL-before-p.patch \
|
||||
file://0046-script-execute-Avoid-crash-when-using-outside-a-func.patch \
|
||||
file://CVE-2021-3981.patch \
|
||||
file://CVE-2021-3695.patch \
|
||||
file://CVE-2021-3696.patch \
|
||||
file://CVE-2021-3697.patch \
|
||||
file://CVE-2022-28733.patch \
|
||||
file://CVE-2022-28734.patch \
|
||||
file://CVE-2022-28736.patch \
|
||||
"
|
||||
SRC_URI[md5sum] = "5ce674ca6b2612d8939b9e6abed32934"
|
||||
SRC_URI[sha256sum] = "f10c85ae3e204dbaec39ae22fa3c5e99f0665417e91c2cb49b7e5031658ba6ea"
|
||||
|
||||
@@ -60,6 +60,13 @@ CVE_CHECK_WHITELIST += "CVE-2008-3844"
|
||||
# https://ubuntu.com/security/CVE-2016-20012
|
||||
CVE_CHECK_WHITELIST += "CVE-2016-20012"
|
||||
|
||||
# As per debian, the issue is fixed by a feature called "agent restriction" in openssh 8.9
|
||||
# Urgency is unimportant as per debian, Hence this CVE is whitelisting.
|
||||
# https://security-tracker.debian.org/tracker/CVE-2021-36368
|
||||
# https://bugzilla.mindrot.org/show_bug.cgi?id=3316#c2
|
||||
# https://docs.ssh-mitm.at/trivialauth.html
|
||||
CVE_CHECK_WHITELIST += "CVE-2021-36368"
|
||||
|
||||
PAM_SRC_URI = "file://sshd"
|
||||
|
||||
inherit manpages useradd update-rc.d update-alternatives systemd
|
||||
@@ -183,12 +190,17 @@ FILES_${PN}-sftp-server = "${libexecdir}/sftp-server"
|
||||
FILES_${PN}-misc = "${bindir}/ssh* ${libexecdir}/ssh*"
|
||||
FILES_${PN}-keygen = "${bindir}/ssh-keygen"
|
||||
|
||||
RDEPENDS_${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen"
|
||||
RDEPENDS_${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen ${PN}-sftp-server"
|
||||
RDEPENDS_${PN}-sshd += "${PN}-keygen ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-keyinit pam-plugin-loginuid', '', d)}"
|
||||
RRECOMMENDS_${PN}-sshd_append_class-target = "\
|
||||
${@bb.utils.filter('PACKAGECONFIG', 'rng-tools', d)} \
|
||||
"
|
||||
|
||||
# break dependency on base package for -dev package
|
||||
# otherwise SDK fails to build as the main openssh and dropbear packages
|
||||
# conflict with each other
|
||||
RDEPENDS:${PN}-dev = ""
|
||||
|
||||
# gdb would make attach-ptrace test pass rather than skip but not worth the build dependencies
|
||||
RDEPENDS_${PN}-ptest += "${PN}-sftp ${PN}-misc ${PN}-sftp-server make sed sudo coreutils"
|
||||
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
From 770aea88c3888cc5cb3ebc94ffcef706c68bc1d2 Mon Sep 17 00:00:00 2001
|
||||
From: Tomas Mraz <tomas@openssl.org>
|
||||
Date: Wed, 1 Jun 2022 12:06:33 +0200
|
||||
Subject: [PATCH] Update expired SCT issuer certificate
|
||||
|
||||
Fixes #15179
|
||||
|
||||
Reviewed-by: Matt Caswell <matt@openssl.org>
|
||||
Reviewed-by: Dmitry Belyavskiy <beldmit@gmail.com>
|
||||
(Merged from https://github.com/openssl/openssl/pull/18444)
|
||||
|
||||
Upstream-Status: Backport
|
||||
[Fixes ptest failures in OE-Core]
|
||||
---
|
||||
test/certs/embeddedSCTs1_issuer.pem | 30 ++++++++++++++---------------
|
||||
1 file changed, 15 insertions(+), 15 deletions(-)
|
||||
|
||||
diff --git a/test/certs/embeddedSCTs1_issuer.pem b/test/certs/embeddedSCTs1_issuer.pem
|
||||
index 1fa449d5a098..6aa9455f09ed 100644
|
||||
--- a/test/certs/embeddedSCTs1_issuer.pem
|
||||
+++ b/test/certs/embeddedSCTs1_issuer.pem
|
||||
@@ -1,18 +1,18 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
-MIIC0DCCAjmgAwIBAgIBADANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJHQjEk
|
||||
+MIIC0jCCAjugAwIBAgIBADANBgkqhkiG9w0BAQsFADBVMQswCQYDVQQGEwJHQjEk
|
||||
MCIGA1UEChMbQ2VydGlmaWNhdGUgVHJhbnNwYXJlbmN5IENBMQ4wDAYDVQQIEwVX
|
||||
-YWxlczEQMA4GA1UEBxMHRXJ3IFdlbjAeFw0xMjA2MDEwMDAwMDBaFw0yMjA2MDEw
|
||||
-MDAwMDBaMFUxCzAJBgNVBAYTAkdCMSQwIgYDVQQKExtDZXJ0aWZpY2F0ZSBUcmFu
|
||||
-c3BhcmVuY3kgQ0ExDjAMBgNVBAgTBVdhbGVzMRAwDgYDVQQHEwdFcncgV2VuMIGf
|
||||
-MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDVimhTYhCicRmTbneDIRgcKkATxtB7
|
||||
-jHbrkVfT0PtLO1FuzsvRyY2RxS90P6tjXVUJnNE6uvMa5UFEJFGnTHgW8iQ8+EjP
|
||||
-KDHM5nugSlojgZ88ujfmJNnDvbKZuDnd/iYx0ss6hPx7srXFL8/BT/9Ab1zURmnL
|
||||
-svfP34b7arnRsQIDAQABo4GvMIGsMB0GA1UdDgQWBBRfnYgNyHPmVNT4DdjmsMEk
|
||||
-tEfDVTB9BgNVHSMEdjB0gBRfnYgNyHPmVNT4DdjmsMEktEfDVaFZpFcwVTELMAkG
|
||||
-A1UEBhMCR0IxJDAiBgNVBAoTG0NlcnRpZmljYXRlIFRyYW5zcGFyZW5jeSBDQTEO
|
||||
-MAwGA1UECBMFV2FsZXMxEDAOBgNVBAcTB0VydyBXZW6CAQAwDAYDVR0TBAUwAwEB
|
||||
-/zANBgkqhkiG9w0BAQUFAAOBgQAGCMxKbWTyIF4UbASydvkrDvqUpdryOvw4BmBt
|
||||
-OZDQoeojPUApV2lGOwRmYef6HReZFSCa6i4Kd1F2QRIn18ADB8dHDmFYT9czQiRy
|
||||
-f1HWkLxHqd81TbD26yWVXeGJPE3VICskovPkQNJ0tU4b03YmnKliibduyqQQkOFP
|
||||
-OwqULg==
|
||||
+YWxlczEQMA4GA1UEBxMHRXJ3IFdlbjAgFw0yMjA2MDExMDM4MDJaGA8yMTIyMDUw
|
||||
+ODEwMzgwMlowVTELMAkGA1UEBhMCR0IxJDAiBgNVBAoTG0NlcnRpZmljYXRlIFRy
|
||||
+YW5zcGFyZW5jeSBDQTEOMAwGA1UECBMFV2FsZXMxEDAOBgNVBAcTB0VydyBXZW4w
|
||||
+gZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANWKaFNiEKJxGZNud4MhGBwqQBPG
|
||||
+0HuMduuRV9PQ+0s7UW7Oy9HJjZHFL3Q/q2NdVQmc0Tq68xrlQUQkUadMeBbyJDz4
|
||||
+SM8oMczme6BKWiOBnzy6N+Yk2cO9spm4Od3+JjHSyzqE/HuytcUvz8FP/0BvXNRG
|
||||
+acuy98/fhvtqudGxAgMBAAGjga8wgawwHQYDVR0OBBYEFF+diA3Ic+ZU1PgN2Oaw
|
||||
+wSS0R8NVMH0GA1UdIwR2MHSAFF+diA3Ic+ZU1PgN2OawwSS0R8NVoVmkVzBVMQsw
|
||||
+CQYDVQQGEwJHQjEkMCIGA1UEChMbQ2VydGlmaWNhdGUgVHJhbnNwYXJlbmN5IENB
|
||||
+MQ4wDAYDVQQIEwVXYWxlczEQMA4GA1UEBxMHRXJ3IFdlboIBADAMBgNVHRMEBTAD
|
||||
+AQH/MA0GCSqGSIb3DQEBCwUAA4GBAD0aYh9OkFYfXV7kBfhrtD0PJG2U47OV/1qq
|
||||
++uFpqB0S1WO06eJT0pzYf1ebUcxjBkajbJZm/FHT85VthZ1lFHsky87aFD8XlJCo
|
||||
+2IOhKOkvvWKPUdFLoO/ZVXqEVKkcsS1eXK1glFvb07eJZya3JVG0KdMhV2YoDg6c
|
||||
+Doud4XrO
|
||||
-----END CERTIFICATE-----
|
||||
@@ -18,14 +18,13 @@ SRC_URI = "http://www.openssl.org/source/openssl-${PV}.tar.gz \
|
||||
file://afalg.patch \
|
||||
file://reproducible.patch \
|
||||
file://reproducibility.patch \
|
||||
file://770aea88c3888cc5cb3ebc94ffcef706c68bc1d2.patch \
|
||||
"
|
||||
|
||||
SRC_URI_append_class-nativesdk = " \
|
||||
file://environment.d-openssl.sh \
|
||||
"
|
||||
|
||||
SRC_URI[sha256sum] = "9384a2b0570dd80358841464677115df785edb941c71211f75076d72fe6b438f"
|
||||
SRC_URI[sha256sum] = "d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca"
|
||||
|
||||
inherit lib_package multilib_header multilib_script ptest
|
||||
MULTILIB_SCRIPTS = "${PN}-bin:${bindir}/c_rehash"
|
||||
@@ -12,6 +12,11 @@ DEPENDS = "zlib virtual/crypt"
|
||||
RPROVIDES_${PN} = "ssh sshd"
|
||||
RCONFLICTS_${PN} = "openssh-sshd openssh"
|
||||
|
||||
# break dependency on base package for -dev package
|
||||
# otherwise SDK fails to build as the main openssh and dropbear packages
|
||||
# conflict with each other
|
||||
RDEPENDS:${PN}-dev = ""
|
||||
|
||||
DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
|
||||
|
||||
SRC_URI = "http://matt.ucc.asn.au/dropbear/releases/dropbear-${PV}.tar.bz2 \
|
||||
|
||||
@@ -24,7 +24,7 @@ IMAGE_FSTYPES = "wic.vmdk"
|
||||
|
||||
inherit core-image setuptools3
|
||||
|
||||
SRCREV ?= "8a7fd5f633a2b72185501d4c4a8a51ed1fc7cea1"
|
||||
SRCREV ?= "23322786e02469c08e3db007043da1091bf0f466"
|
||||
SRC_URI = "git://git.yoctoproject.org/poky;branch=dunfell \
|
||||
file://Yocto_Build_Appliance.vmx \
|
||||
file://Yocto_Build_Appliance.vmxf \
|
||||
|
||||
@@ -14,6 +14,15 @@ finish_run() {
|
||||
|
||||
info "Switching root to '$ROOTFS_DIR'..."
|
||||
|
||||
debug "Moving basic mounts onto rootfs"
|
||||
for dir in `awk '/\/dev.* \/run\/media/{print $2}' /proc/mounts`; do
|
||||
# Parse any OCT or HEX encoded chars such as spaces
|
||||
# in the mount points to actual ASCII chars
|
||||
dir=`printf $dir`
|
||||
mkdir -p "${ROOTFS_DIR}/media/${dir##*/}"
|
||||
mount -n --move "$dir" "${ROOTFS_DIR}/media/${dir##*/}"
|
||||
done
|
||||
|
||||
debug "Moving /dev, /proc and /sys onto rootfs..."
|
||||
mount --move /dev $ROOTFS_DIR/dev
|
||||
mount --move /proc $ROOTFS_DIR/proc
|
||||
|
||||
@@ -129,7 +129,7 @@ do_install () {
|
||||
update-rc.d -r ${D} rmnologin.sh start 99 2 3 4 5 .
|
||||
update-rc.d -r ${D} sendsigs start 20 0 6 .
|
||||
update-rc.d -r ${D} urandom start 38 S 0 6 .
|
||||
update-rc.d -r ${D} umountnfs.sh start 31 0 1 6 .
|
||||
update-rc.d -r ${D} umountnfs.sh stop 31 0 1 6 .
|
||||
update-rc.d -r ${D} umountfs start 40 0 6 .
|
||||
update-rc.d -r ${D} reboot start 90 6 .
|
||||
update-rc.d -r ${D} halt start 90 0 .
|
||||
|
||||
@@ -0,0 +1,813 @@
|
||||
From b5125000917810731bc28055c0445d571121f80e Mon Sep 17 00:00:00 2001
|
||||
From: Nick Wellnhofer <wellnhofer@aevum.de>
|
||||
Date: Thu, 21 Apr 2022 00:45:58 +0200
|
||||
Subject: [PATCH] Port gentest.py to Python 3
|
||||
|
||||
Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/libxml2/-/commit/343fc1421cdae097fa6c4cffeb1a065a40be6bbb]
|
||||
|
||||
* fixes:
|
||||
|
||||
make[1]: 'testReader' is up to date.
|
||||
File "../libxml2-2.9.10/gentest.py", line 11
|
||||
print "libxml2 python bindings not available, skipping testapi.c generation"
|
||||
^
|
||||
SyntaxError: Missing parentheses in call to 'print'. Did you mean print("libxml2 python bindings not available, skipping testapi.c generation")?
|
||||
make[1]: [Makefile:2078: testapi.c] Error 1 (ignored)
|
||||
|
||||
...
|
||||
|
||||
make[1]: 'testReader' is up to date.
|
||||
File "../libxml2-2.9.10/gentest.py", line 271
|
||||
return 1
|
||||
^
|
||||
TabError: inconsistent use of tabs and spaces in indentation
|
||||
make[1]: [Makefile:2078: testapi.c] Error 1 (ignored)
|
||||
|
||||
...
|
||||
|
||||
aarch64-oe-linux-gcc: error: testapi.c: No such file or directory
|
||||
aarch64-oe-linux-gcc: fatal error: no input files
|
||||
compilation terminated.
|
||||
make[1]: *** [Makefile:1275: testapi.o] Error 1
|
||||
|
||||
But there is still a bit mystery why it worked before, because check-am
|
||||
calls gentest.py with $(PYTHON), so it ignores the shebang in the script
|
||||
and libxml2 is using python3native (through python3targetconfig.bbclass)
|
||||
so something like:
|
||||
|
||||
libxml2/2.9.10-r0/recipe-sysroot-native/usr/bin/python3-native/python3 gentest.py
|
||||
|
||||
But that still fails (now without SyntaxError) with:
|
||||
libxml2 python bindings not available, skipping testapi.c generation
|
||||
|
||||
because we don't have dependency on libxml2-native (to provide libxml2
|
||||
python bindings form python3native) and exported PYTHON_SITE_PACKAGES
|
||||
might be useless (e.g. /usr/lib/python3.8/site-packages on Ubuntu-22.10
|
||||
which uses python 3.10 and there is no site-packages with libxml2)
|
||||
|
||||
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
|
||||
---
|
||||
gentest.py | 421 ++++++++++++++++++++++++++---------------------------
|
||||
1 file changed, 209 insertions(+), 212 deletions(-)
|
||||
|
||||
diff --git a/gentest.py b/gentest.py
|
||||
index b763300..0756706 100755
|
||||
--- a/gentest.py
|
||||
+++ b/gentest.py
|
||||
@@ -8,7 +8,7 @@ import string
|
||||
try:
|
||||
import libxml2
|
||||
except:
|
||||
- print "libxml2 python bindings not available, skipping testapi.c generation"
|
||||
+ print("libxml2 python bindings not available, skipping testapi.c generation")
|
||||
sys.exit(0)
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
@@ -227,7 +227,7 @@ extra_post_call = {
|
||||
if (old != NULL) {
|
||||
xmlUnlinkNode(old);
|
||||
xmlFreeNode(old) ; old = NULL ; }
|
||||
- ret_val = NULL;""",
|
||||
+\t ret_val = NULL;""",
|
||||
"xmlTextMerge":
|
||||
"""if ((first != NULL) && (first->type != XML_TEXT_NODE)) {
|
||||
xmlUnlinkNode(second);
|
||||
@@ -236,7 +236,7 @@ extra_post_call = {
|
||||
"""if ((ret_val != NULL) && (ret_val != ncname) &&
|
||||
(ret_val != prefix) && (ret_val != memory))
|
||||
xmlFree(ret_val);
|
||||
- ret_val = NULL;""",
|
||||
+\t ret_val = NULL;""",
|
||||
"xmlNewDocElementContent":
|
||||
"""xmlFreeDocElementContent(doc, ret_val); ret_val = NULL;""",
|
||||
"xmlDictReference": "xmlDictFree(dict);",
|
||||
@@ -268,29 +268,29 @@ modules = []
|
||||
def is_skipped_module(name):
|
||||
for mod in skipped_modules:
|
||||
if mod == name:
|
||||
- return 1
|
||||
+ return 1
|
||||
return 0
|
||||
|
||||
def is_skipped_function(name):
|
||||
for fun in skipped_functions:
|
||||
if fun == name:
|
||||
- return 1
|
||||
+ return 1
|
||||
# Do not test destructors
|
||||
- if string.find(name, 'Free') != -1:
|
||||
+ if name.find('Free') != -1:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def is_skipped_memcheck(name):
|
||||
for fun in skipped_memcheck:
|
||||
if fun == name:
|
||||
- return 1
|
||||
+ return 1
|
||||
return 0
|
||||
|
||||
missing_types = {}
|
||||
def add_missing_type(name, func):
|
||||
try:
|
||||
list = missing_types[name]
|
||||
- list.append(func)
|
||||
+ list.append(func)
|
||||
except:
|
||||
missing_types[name] = [func]
|
||||
|
||||
@@ -310,7 +310,7 @@ def add_missing_functions(name, module):
|
||||
missing_functions_nr = missing_functions_nr + 1
|
||||
try:
|
||||
list = missing_functions[module]
|
||||
- list.append(name)
|
||||
+ list.append(name)
|
||||
except:
|
||||
missing_functions[module] = [name]
|
||||
|
||||
@@ -319,45 +319,45 @@ def add_missing_functions(name, module):
|
||||
#
|
||||
|
||||
def type_convert(str, name, info, module, function, pos):
|
||||
-# res = string.replace(str, " ", " ")
|
||||
-# res = string.replace(str, " ", " ")
|
||||
-# res = string.replace(str, " ", " ")
|
||||
- res = string.replace(str, " *", "_ptr")
|
||||
-# res = string.replace(str, "*", "_ptr")
|
||||
- res = string.replace(res, " ", "_")
|
||||
+# res = str.replace(" ", " ")
|
||||
+# res = str.replace(" ", " ")
|
||||
+# res = str.replace(" ", " ")
|
||||
+ res = str.replace(" *", "_ptr")
|
||||
+# res = str.replace("*", "_ptr")
|
||||
+ res = res.replace(" ", "_")
|
||||
if res == 'const_char_ptr':
|
||||
- if string.find(name, "file") != -1 or \
|
||||
- string.find(name, "uri") != -1 or \
|
||||
- string.find(name, "URI") != -1 or \
|
||||
- string.find(info, "filename") != -1 or \
|
||||
- string.find(info, "URI") != -1 or \
|
||||
- string.find(info, "URL") != -1:
|
||||
- if string.find(function, "Save") != -1 or \
|
||||
- string.find(function, "Create") != -1 or \
|
||||
- string.find(function, "Write") != -1 or \
|
||||
- string.find(function, "Fetch") != -1:
|
||||
- return('fileoutput')
|
||||
- return('filepath')
|
||||
+ if name.find("file") != -1 or \
|
||||
+ name.find("uri") != -1 or \
|
||||
+ name.find("URI") != -1 or \
|
||||
+ info.find("filename") != -1 or \
|
||||
+ info.find("URI") != -1 or \
|
||||
+ info.find("URL") != -1:
|
||||
+ if function.find("Save") != -1 or \
|
||||
+ function.find("Create") != -1 or \
|
||||
+ function.find("Write") != -1 or \
|
||||
+ function.find("Fetch") != -1:
|
||||
+ return('fileoutput')
|
||||
+ return('filepath')
|
||||
if res == 'void_ptr':
|
||||
if module == 'nanoftp' and name == 'ctx':
|
||||
- return('xmlNanoFTPCtxtPtr')
|
||||
+ return('xmlNanoFTPCtxtPtr')
|
||||
if function == 'xmlNanoFTPNewCtxt' or \
|
||||
- function == 'xmlNanoFTPConnectTo' or \
|
||||
- function == 'xmlNanoFTPOpen':
|
||||
- return('xmlNanoFTPCtxtPtr')
|
||||
+ function == 'xmlNanoFTPConnectTo' or \
|
||||
+ function == 'xmlNanoFTPOpen':
|
||||
+ return('xmlNanoFTPCtxtPtr')
|
||||
if module == 'nanohttp' and name == 'ctx':
|
||||
- return('xmlNanoHTTPCtxtPtr')
|
||||
- if function == 'xmlNanoHTTPMethod' or \
|
||||
- function == 'xmlNanoHTTPMethodRedir' or \
|
||||
- function == 'xmlNanoHTTPOpen' or \
|
||||
- function == 'xmlNanoHTTPOpenRedir':
|
||||
- return('xmlNanoHTTPCtxtPtr');
|
||||
+ return('xmlNanoHTTPCtxtPtr')
|
||||
+ if function == 'xmlNanoHTTPMethod' or \
|
||||
+ function == 'xmlNanoHTTPMethodRedir' or \
|
||||
+ function == 'xmlNanoHTTPOpen' or \
|
||||
+ function == 'xmlNanoHTTPOpenRedir':
|
||||
+ return('xmlNanoHTTPCtxtPtr');
|
||||
if function == 'xmlIOHTTPOpen':
|
||||
- return('xmlNanoHTTPCtxtPtr')
|
||||
- if string.find(name, "data") != -1:
|
||||
- return('userdata')
|
||||
- if string.find(name, "user") != -1:
|
||||
- return('userdata')
|
||||
+ return('xmlNanoHTTPCtxtPtr')
|
||||
+ if name.find("data") != -1:
|
||||
+ return('userdata')
|
||||
+ if name.find("user") != -1:
|
||||
+ return('userdata')
|
||||
if res == 'xmlDoc_ptr':
|
||||
res = 'xmlDocPtr'
|
||||
if res == 'xmlNode_ptr':
|
||||
@@ -366,18 +366,18 @@ def type_convert(str, name, info, module, function, pos):
|
||||
res = 'xmlDictPtr'
|
||||
if res == 'xmlNodePtr' and pos != 0:
|
||||
if (function == 'xmlAddChild' and pos == 2) or \
|
||||
- (function == 'xmlAddChildList' and pos == 2) or \
|
||||
+ (function == 'xmlAddChildList' and pos == 2) or \
|
||||
(function == 'xmlAddNextSibling' and pos == 2) or \
|
||||
(function == 'xmlAddSibling' and pos == 2) or \
|
||||
(function == 'xmlDocSetRootElement' and pos == 2) or \
|
||||
(function == 'xmlReplaceNode' and pos == 2) or \
|
||||
(function == 'xmlTextMerge') or \
|
||||
- (function == 'xmlAddPrevSibling' and pos == 2):
|
||||
- return('xmlNodePtr_in');
|
||||
+ (function == 'xmlAddPrevSibling' and pos == 2):
|
||||
+ return('xmlNodePtr_in');
|
||||
if res == 'const xmlBufferPtr':
|
||||
res = 'xmlBufferPtr'
|
||||
if res == 'xmlChar_ptr' and name == 'name' and \
|
||||
- string.find(function, "EatName") != -1:
|
||||
+ function.find("EatName") != -1:
|
||||
return('eaten_name')
|
||||
if res == 'void_ptr*':
|
||||
res = 'void_ptr_ptr'
|
||||
@@ -393,7 +393,7 @@ def type_convert(str, name, info, module, function, pos):
|
||||
res = 'debug_FILE_ptr';
|
||||
if res == 'int' and name == 'options':
|
||||
if module == 'parser' or module == 'xmlreader':
|
||||
- res = 'parseroptions'
|
||||
+ res = 'parseroptions'
|
||||
|
||||
return res
|
||||
|
||||
@@ -402,28 +402,28 @@ known_param_types = []
|
||||
def is_known_param_type(name):
|
||||
for type in known_param_types:
|
||||
if type == name:
|
||||
- return 1
|
||||
+ return 1
|
||||
return name[-3:] == 'Ptr' or name[-4:] == '_ptr'
|
||||
|
||||
def generate_param_type(name, rtype):
|
||||
global test
|
||||
for type in known_param_types:
|
||||
if type == name:
|
||||
- return
|
||||
+ return
|
||||
for type in generated_param_types:
|
||||
if type == name:
|
||||
- return
|
||||
+ return
|
||||
|
||||
if name[-3:] == 'Ptr' or name[-4:] == '_ptr':
|
||||
if rtype[0:6] == 'const ':
|
||||
- crtype = rtype[6:]
|
||||
- else:
|
||||
- crtype = rtype
|
||||
+ crtype = rtype[6:]
|
||||
+ else:
|
||||
+ crtype = rtype
|
||||
|
||||
define = 0
|
||||
- if modules_defines.has_key(module):
|
||||
- test.write("#ifdef %s\n" % (modules_defines[module]))
|
||||
- define = 1
|
||||
+ if module in modules_defines:
|
||||
+ test.write("#ifdef %s\n" % (modules_defines[module]))
|
||||
+ define = 1
|
||||
test.write("""
|
||||
#define gen_nb_%s 1
|
||||
static %s gen_%s(int no ATTRIBUTE_UNUSED, int nr ATTRIBUTE_UNUSED) {
|
||||
@@ -433,7 +433,7 @@ static void des_%s(int no ATTRIBUTE_UNUSED, %s val ATTRIBUTE_UNUSED, int nr ATTR
|
||||
}
|
||||
""" % (name, crtype, name, name, rtype))
|
||||
if define == 1:
|
||||
- test.write("#endif\n\n")
|
||||
+ test.write("#endif\n\n")
|
||||
add_generated_param_type(name)
|
||||
|
||||
#
|
||||
@@ -445,7 +445,7 @@ known_return_types = []
|
||||
def is_known_return_type(name):
|
||||
for type in known_return_types:
|
||||
if type == name:
|
||||
- return 1
|
||||
+ return 1
|
||||
return 0
|
||||
|
||||
#
|
||||
@@ -471,7 +471,7 @@ def compare_and_save():
|
||||
try:
|
||||
os.system("rm testapi.c; mv testapi.c.new testapi.c")
|
||||
except:
|
||||
- os.system("mv testapi.c.new testapi.c")
|
||||
+ os.system("mv testapi.c.new testapi.c")
|
||||
print("Updated testapi.c")
|
||||
else:
|
||||
print("Generated testapi.c is identical")
|
||||
@@ -481,17 +481,17 @@ while line != "":
|
||||
if line == "/* CUT HERE: everything below that line is generated */\n":
|
||||
break;
|
||||
if line[0:15] == "#define gen_nb_":
|
||||
- type = string.split(line[15:])[0]
|
||||
- known_param_types.append(type)
|
||||
+ type = line[15:].split()[0]
|
||||
+ known_param_types.append(type)
|
||||
if line[0:19] == "static void desret_":
|
||||
- type = string.split(line[19:], '(')[0]
|
||||
- known_return_types.append(type)
|
||||
+ type = line[19:].split('(')[0]
|
||||
+ known_return_types.append(type)
|
||||
test.write(line)
|
||||
line = input.readline()
|
||||
input.close()
|
||||
|
||||
if line == "":
|
||||
- print "Could not find the CUT marker in testapi.c skipping generation"
|
||||
+ print("Could not find the CUT marker in testapi.c skipping generation")
|
||||
test.close()
|
||||
sys.exit(0)
|
||||
|
||||
@@ -505,7 +505,7 @@ test.write("/* CUT HERE: everything below that line is generated */\n")
|
||||
#
|
||||
doc = libxml2.readFile(srcPref + 'doc/libxml2-api.xml', None, 0)
|
||||
if doc == None:
|
||||
- print "Failed to load doc/libxml2-api.xml"
|
||||
+ print("Failed to load doc/libxml2-api.xml")
|
||||
sys.exit(1)
|
||||
ctxt = doc.xpathNewContext()
|
||||
|
||||
@@ -519,9 +519,9 @@ for arg in args:
|
||||
mod = arg.xpathEval('string(../@file)')
|
||||
func = arg.xpathEval('string(../@name)')
|
||||
if (mod not in skipped_modules) and (func not in skipped_functions):
|
||||
- type = arg.xpathEval('string(@type)')
|
||||
- if not argtypes.has_key(type):
|
||||
- argtypes[type] = func
|
||||
+ type = arg.xpathEval('string(@type)')
|
||||
+ if type not in argtypes:
|
||||
+ argtypes[type] = func
|
||||
|
||||
# similarly for return types
|
||||
rettypes = {}
|
||||
@@ -531,8 +531,8 @@ for ret in rets:
|
||||
func = ret.xpathEval('string(../@name)')
|
||||
if (mod not in skipped_modules) and (func not in skipped_functions):
|
||||
type = ret.xpathEval('string(@type)')
|
||||
- if not rettypes.has_key(type):
|
||||
- rettypes[type] = func
|
||||
+ if type not in rettypes:
|
||||
+ rettypes[type] = func
|
||||
|
||||
#
|
||||
# Generate constructors and return type handling for all enums
|
||||
@@ -549,49 +549,49 @@ for enum in enums:
|
||||
continue;
|
||||
define = 0
|
||||
|
||||
- if argtypes.has_key(name) and is_known_param_type(name) == 0:
|
||||
- values = ctxt.xpathEval("/api/symbols/enum[@type='%s']" % name)
|
||||
- i = 0
|
||||
- vals = []
|
||||
- for value in values:
|
||||
- vname = value.xpathEval('string(@name)')
|
||||
- if vname == None:
|
||||
- continue;
|
||||
- i = i + 1
|
||||
- if i >= 5:
|
||||
- break;
|
||||
- vals.append(vname)
|
||||
- if vals == []:
|
||||
- print "Didn't find any value for enum %s" % (name)
|
||||
- continue
|
||||
- if modules_defines.has_key(module):
|
||||
- test.write("#ifdef %s\n" % (modules_defines[module]))
|
||||
- define = 1
|
||||
- test.write("#define gen_nb_%s %d\n" % (name, len(vals)))
|
||||
- test.write("""static %s gen_%s(int no, int nr ATTRIBUTE_UNUSED) {\n""" %
|
||||
- (name, name))
|
||||
- i = 1
|
||||
- for value in vals:
|
||||
- test.write(" if (no == %d) return(%s);\n" % (i, value))
|
||||
- i = i + 1
|
||||
- test.write(""" return(0);
|
||||
+ if (name in argtypes) and is_known_param_type(name) == 0:
|
||||
+ values = ctxt.xpathEval("/api/symbols/enum[@type='%s']" % name)
|
||||
+ i = 0
|
||||
+ vals = []
|
||||
+ for value in values:
|
||||
+ vname = value.xpathEval('string(@name)')
|
||||
+ if vname == None:
|
||||
+ continue;
|
||||
+ i = i + 1
|
||||
+ if i >= 5:
|
||||
+ break;
|
||||
+ vals.append(vname)
|
||||
+ if vals == []:
|
||||
+ print("Didn't find any value for enum %s" % (name))
|
||||
+ continue
|
||||
+ if module in modules_defines:
|
||||
+ test.write("#ifdef %s\n" % (modules_defines[module]))
|
||||
+ define = 1
|
||||
+ test.write("#define gen_nb_%s %d\n" % (name, len(vals)))
|
||||
+ test.write("""static %s gen_%s(int no, int nr ATTRIBUTE_UNUSED) {\n""" %
|
||||
+ (name, name))
|
||||
+ i = 1
|
||||
+ for value in vals:
|
||||
+ test.write(" if (no == %d) return(%s);\n" % (i, value))
|
||||
+ i = i + 1
|
||||
+ test.write(""" return(0);
|
||||
}
|
||||
|
||||
static void des_%s(int no ATTRIBUTE_UNUSED, %s val ATTRIBUTE_UNUSED, int nr ATTRIBUTE_UNUSED) {
|
||||
}
|
||||
|
||||
""" % (name, name));
|
||||
- known_param_types.append(name)
|
||||
+ known_param_types.append(name)
|
||||
|
||||
if (is_known_return_type(name) == 0) and (name in rettypes):
|
||||
- if define == 0 and modules_defines.has_key(module):
|
||||
- test.write("#ifdef %s\n" % (modules_defines[module]))
|
||||
- define = 1
|
||||
+ if define == 0 and (module in modules_defines):
|
||||
+ test.write("#ifdef %s\n" % (modules_defines[module]))
|
||||
+ define = 1
|
||||
test.write("""static void desret_%s(%s val ATTRIBUTE_UNUSED) {
|
||||
}
|
||||
|
||||
""" % (name, name))
|
||||
- known_return_types.append(name)
|
||||
+ known_return_types.append(name)
|
||||
if define == 1:
|
||||
test.write("#endif\n\n")
|
||||
|
||||
@@ -615,9 +615,9 @@ for file in headers:
|
||||
# do not test deprecated APIs
|
||||
#
|
||||
desc = file.xpathEval('string(description)')
|
||||
- if string.find(desc, 'DEPRECATED') != -1:
|
||||
- print "Skipping deprecated interface %s" % name
|
||||
- continue;
|
||||
+ if desc.find('DEPRECATED') != -1:
|
||||
+ print("Skipping deprecated interface %s" % name)
|
||||
+ continue;
|
||||
|
||||
test.write("#include <libxml/%s.h>\n" % name)
|
||||
modules.append(name)
|
||||
@@ -679,7 +679,7 @@ def generate_test(module, node):
|
||||
# and store the informations for the generation
|
||||
#
|
||||
try:
|
||||
- args = node.xpathEval("arg")
|
||||
+ args = node.xpathEval("arg")
|
||||
except:
|
||||
args = []
|
||||
t_args = []
|
||||
@@ -687,37 +687,37 @@ def generate_test(module, node):
|
||||
for arg in args:
|
||||
n = n + 1
|
||||
rtype = arg.xpathEval("string(@type)")
|
||||
- if rtype == 'void':
|
||||
- break;
|
||||
- info = arg.xpathEval("string(@info)")
|
||||
- nam = arg.xpathEval("string(@name)")
|
||||
+ if rtype == 'void':
|
||||
+ break;
|
||||
+ info = arg.xpathEval("string(@info)")
|
||||
+ nam = arg.xpathEval("string(@name)")
|
||||
type = type_convert(rtype, nam, info, module, name, n)
|
||||
- if is_known_param_type(type) == 0:
|
||||
- add_missing_type(type, name);
|
||||
- no_gen = 1
|
||||
+ if is_known_param_type(type) == 0:
|
||||
+ add_missing_type(type, name);
|
||||
+ no_gen = 1
|
||||
if (type[-3:] == 'Ptr' or type[-4:] == '_ptr') and \
|
||||
- rtype[0:6] == 'const ':
|
||||
- crtype = rtype[6:]
|
||||
- else:
|
||||
- crtype = rtype
|
||||
- t_args.append((nam, type, rtype, crtype, info))
|
||||
+ rtype[0:6] == 'const ':
|
||||
+ crtype = rtype[6:]
|
||||
+ else:
|
||||
+ crtype = rtype
|
||||
+ t_args.append((nam, type, rtype, crtype, info))
|
||||
|
||||
try:
|
||||
- rets = node.xpathEval("return")
|
||||
+ rets = node.xpathEval("return")
|
||||
except:
|
||||
rets = []
|
||||
t_ret = None
|
||||
for ret in rets:
|
||||
rtype = ret.xpathEval("string(@type)")
|
||||
- info = ret.xpathEval("string(@info)")
|
||||
+ info = ret.xpathEval("string(@info)")
|
||||
type = type_convert(rtype, 'return', info, module, name, 0)
|
||||
- if rtype == 'void':
|
||||
- break
|
||||
- if is_known_return_type(type) == 0:
|
||||
- add_missing_type(type, name);
|
||||
- no_gen = 1
|
||||
- t_ret = (type, rtype, info)
|
||||
- break
|
||||
+ if rtype == 'void':
|
||||
+ break
|
||||
+ if is_known_return_type(type) == 0:
|
||||
+ add_missing_type(type, name);
|
||||
+ no_gen = 1
|
||||
+ t_ret = (type, rtype, info)
|
||||
+ break
|
||||
|
||||
if no_gen == 0:
|
||||
for t_arg in t_args:
|
||||
@@ -733,7 +733,7 @@ test_%s(void) {
|
||||
|
||||
if no_gen == 1:
|
||||
add_missing_functions(name, module)
|
||||
- test.write("""
|
||||
+ test.write("""
|
||||
/* missing type support */
|
||||
return(test_ret);
|
||||
}
|
||||
@@ -742,22 +742,22 @@ test_%s(void) {
|
||||
return
|
||||
|
||||
try:
|
||||
- conds = node.xpathEval("cond")
|
||||
- for cond in conds:
|
||||
- test.write("#if %s\n" % (cond.get_content()))
|
||||
- nb_cond = nb_cond + 1
|
||||
+ conds = node.xpathEval("cond")
|
||||
+ for cond in conds:
|
||||
+ test.write("#if %s\n" % (cond.get_content()))
|
||||
+ nb_cond = nb_cond + 1
|
||||
except:
|
||||
pass
|
||||
|
||||
define = 0
|
||||
- if function_defines.has_key(name):
|
||||
+ if name in function_defines:
|
||||
test.write("#ifdef %s\n" % (function_defines[name]))
|
||||
- define = 1
|
||||
+ define = 1
|
||||
|
||||
# Declare the memory usage counter
|
||||
no_mem = is_skipped_memcheck(name)
|
||||
if no_mem == 0:
|
||||
- test.write(" int mem_base;\n");
|
||||
+ test.write(" int mem_base;\n");
|
||||
|
||||
# Declare the return value
|
||||
if t_ret != None:
|
||||
@@ -766,29 +766,29 @@ test_%s(void) {
|
||||
# Declare the arguments
|
||||
for arg in t_args:
|
||||
(nam, type, rtype, crtype, info) = arg;
|
||||
- # add declaration
|
||||
- test.write(" %s %s; /* %s */\n" % (crtype, nam, info))
|
||||
- test.write(" int n_%s;\n" % (nam))
|
||||
+ # add declaration
|
||||
+ test.write(" %s %s; /* %s */\n" % (crtype, nam, info))
|
||||
+ test.write(" int n_%s;\n" % (nam))
|
||||
test.write("\n")
|
||||
|
||||
# Cascade loop on of each argument list of values
|
||||
for arg in t_args:
|
||||
(nam, type, rtype, crtype, info) = arg;
|
||||
- #
|
||||
- test.write(" for (n_%s = 0;n_%s < gen_nb_%s;n_%s++) {\n" % (
|
||||
- nam, nam, type, nam))
|
||||
+ #
|
||||
+ test.write(" for (n_%s = 0;n_%s < gen_nb_%s;n_%s++) {\n" % (
|
||||
+ nam, nam, type, nam))
|
||||
|
||||
# log the memory usage
|
||||
if no_mem == 0:
|
||||
- test.write(" mem_base = xmlMemBlocks();\n");
|
||||
+ test.write(" mem_base = xmlMemBlocks();\n");
|
||||
|
||||
# prepare the call
|
||||
i = 0;
|
||||
for arg in t_args:
|
||||
(nam, type, rtype, crtype, info) = arg;
|
||||
- #
|
||||
- test.write(" %s = gen_%s(n_%s, %d);\n" % (nam, type, nam, i))
|
||||
- i = i + 1;
|
||||
+ #
|
||||
+ test.write(" %s = gen_%s(n_%s, %d);\n" % (nam, type, nam, i))
|
||||
+ i = i + 1;
|
||||
|
||||
# add checks to avoid out-of-bounds array access
|
||||
i = 0;
|
||||
@@ -797,7 +797,7 @@ test_%s(void) {
|
||||
# assume that "size", "len", and "start" parameters apply to either
|
||||
# the nearest preceding or following char pointer
|
||||
if type == "int" and (nam == "size" or nam == "len" or nam == "start"):
|
||||
- for j in range(i - 1, -1, -1) + range(i + 1, len(t_args)):
|
||||
+ for j in (*range(i - 1, -1, -1), *range(i + 1, len(t_args))):
|
||||
(bnam, btype) = t_args[j][:2]
|
||||
if btype == "const_char_ptr" or btype == "const_xmlChar_ptr":
|
||||
test.write(
|
||||
@@ -806,42 +806,42 @@ test_%s(void) {
|
||||
" continue;\n"
|
||||
% (bnam, nam, bnam))
|
||||
break
|
||||
- i = i + 1;
|
||||
+ i = i + 1;
|
||||
|
||||
# do the call, and clanup the result
|
||||
- if extra_pre_call.has_key(name):
|
||||
- test.write(" %s\n"% (extra_pre_call[name]))
|
||||
+ if name in extra_pre_call:
|
||||
+ test.write(" %s\n"% (extra_pre_call[name]))
|
||||
if t_ret != None:
|
||||
- test.write("\n ret_val = %s(" % (name))
|
||||
- need = 0
|
||||
- for arg in t_args:
|
||||
- (nam, type, rtype, crtype, info) = arg
|
||||
- if need:
|
||||
- test.write(", ")
|
||||
- else:
|
||||
- need = 1
|
||||
- if rtype != crtype:
|
||||
- test.write("(%s)" % rtype)
|
||||
- test.write("%s" % nam);
|
||||
- test.write(");\n")
|
||||
- if extra_post_call.has_key(name):
|
||||
- test.write(" %s\n"% (extra_post_call[name]))
|
||||
- test.write(" desret_%s(ret_val);\n" % t_ret[0])
|
||||
+ test.write("\n ret_val = %s(" % (name))
|
||||
+ need = 0
|
||||
+ for arg in t_args:
|
||||
+ (nam, type, rtype, crtype, info) = arg
|
||||
+ if need:
|
||||
+ test.write(", ")
|
||||
+ else:
|
||||
+ need = 1
|
||||
+ if rtype != crtype:
|
||||
+ test.write("(%s)" % rtype)
|
||||
+ test.write("%s" % nam);
|
||||
+ test.write(");\n")
|
||||
+ if name in extra_post_call:
|
||||
+ test.write(" %s\n"% (extra_post_call[name]))
|
||||
+ test.write(" desret_%s(ret_val);\n" % t_ret[0])
|
||||
else:
|
||||
- test.write("\n %s(" % (name));
|
||||
- need = 0;
|
||||
- for arg in t_args:
|
||||
- (nam, type, rtype, crtype, info) = arg;
|
||||
- if need:
|
||||
- test.write(", ")
|
||||
- else:
|
||||
- need = 1
|
||||
- if rtype != crtype:
|
||||
- test.write("(%s)" % rtype)
|
||||
- test.write("%s" % nam)
|
||||
- test.write(");\n")
|
||||
- if extra_post_call.has_key(name):
|
||||
- test.write(" %s\n"% (extra_post_call[name]))
|
||||
+ test.write("\n %s(" % (name));
|
||||
+ need = 0;
|
||||
+ for arg in t_args:
|
||||
+ (nam, type, rtype, crtype, info) = arg;
|
||||
+ if need:
|
||||
+ test.write(", ")
|
||||
+ else:
|
||||
+ need = 1
|
||||
+ if rtype != crtype:
|
||||
+ test.write("(%s)" % rtype)
|
||||
+ test.write("%s" % nam)
|
||||
+ test.write(");\n")
|
||||
+ if name in extra_post_call:
|
||||
+ test.write(" %s\n"% (extra_post_call[name]))
|
||||
|
||||
test.write(" call_tests++;\n");
|
||||
|
||||
@@ -849,32 +849,32 @@ test_%s(void) {
|
||||
i = 0;
|
||||
for arg in t_args:
|
||||
(nam, type, rtype, crtype, info) = arg;
|
||||
- # This is a hack to prevent generating a destructor for the
|
||||
- # 'input' argument in xmlTextReaderSetup. There should be
|
||||
- # a better, more generic way to do this!
|
||||
- if string.find(info, 'destroy') == -1:
|
||||
- test.write(" des_%s(n_%s, " % (type, nam))
|
||||
- if rtype != crtype:
|
||||
- test.write("(%s)" % rtype)
|
||||
- test.write("%s, %d);\n" % (nam, i))
|
||||
- i = i + 1;
|
||||
+ # This is a hack to prevent generating a destructor for the
|
||||
+ # 'input' argument in xmlTextReaderSetup. There should be
|
||||
+ # a better, more generic way to do this!
|
||||
+ if info.find('destroy') == -1:
|
||||
+ test.write(" des_%s(n_%s, " % (type, nam))
|
||||
+ if rtype != crtype:
|
||||
+ test.write("(%s)" % rtype)
|
||||
+ test.write("%s, %d);\n" % (nam, i))
|
||||
+ i = i + 1;
|
||||
|
||||
test.write(" xmlResetLastError();\n");
|
||||
# Check the memory usage
|
||||
if no_mem == 0:
|
||||
- test.write(""" if (mem_base != xmlMemBlocks()) {
|
||||
+ test.write(""" if (mem_base != xmlMemBlocks()) {
|
||||
printf("Leak of %%d blocks found in %s",
|
||||
- xmlMemBlocks() - mem_base);
|
||||
- test_ret++;
|
||||
+\t xmlMemBlocks() - mem_base);
|
||||
+\t test_ret++;
|
||||
""" % (name));
|
||||
- for arg in t_args:
|
||||
- (nam, type, rtype, crtype, info) = arg;
|
||||
- test.write(""" printf(" %%d", n_%s);\n""" % (nam))
|
||||
- test.write(""" printf("\\n");\n""")
|
||||
- test.write(" }\n")
|
||||
+ for arg in t_args:
|
||||
+ (nam, type, rtype, crtype, info) = arg;
|
||||
+ test.write(""" printf(" %%d", n_%s);\n""" % (nam))
|
||||
+ test.write(""" printf("\\n");\n""")
|
||||
+ test.write(" }\n")
|
||||
|
||||
for arg in t_args:
|
||||
- test.write(" }\n")
|
||||
+ test.write(" }\n")
|
||||
|
||||
test.write(" function_tests++;\n")
|
||||
#
|
||||
@@ -882,7 +882,7 @@ test_%s(void) {
|
||||
#
|
||||
while nb_cond > 0:
|
||||
test.write("#endif\n")
|
||||
- nb_cond = nb_cond -1
|
||||
+ nb_cond = nb_cond -1
|
||||
if define == 1:
|
||||
test.write("#endif\n")
|
||||
|
||||
@@ -900,10 +900,10 @@ test_%s(void) {
|
||||
for module in modules:
|
||||
# gather all the functions exported by that module
|
||||
try:
|
||||
- functions = ctxt.xpathEval("/api/symbols/function[@file='%s']" % (module))
|
||||
+ functions = ctxt.xpathEval("/api/symbols/function[@file='%s']" % (module))
|
||||
except:
|
||||
- print "Failed to gather functions from module %s" % (module)
|
||||
- continue;
|
||||
+ print("Failed to gather functions from module %s" % (module))
|
||||
+ continue;
|
||||
|
||||
# iterate over all functions in the module generating the test
|
||||
i = 0
|
||||
@@ -923,14 +923,14 @@ test_%s(void) {
|
||||
# iterate over all functions in the module generating the call
|
||||
for function in functions:
|
||||
name = function.xpathEval('string(@name)')
|
||||
- if is_skipped_function(name):
|
||||
- continue
|
||||
- test.write(" test_ret += test_%s();\n" % (name))
|
||||
+ if is_skipped_function(name):
|
||||
+ continue
|
||||
+ test.write(" test_ret += test_%s();\n" % (name))
|
||||
|
||||
# footer
|
||||
test.write("""
|
||||
if (test_ret != 0)
|
||||
- printf("Module %s: %%d errors\\n", test_ret);
|
||||
+\tprintf("Module %s: %%d errors\\n", test_ret);
|
||||
return(test_ret);
|
||||
}
|
||||
""" % (module))
|
||||
@@ -948,7 +948,7 @@ test.write(""" return(0);
|
||||
}
|
||||
""");
|
||||
|
||||
-print "Generated test for %d modules and %d functions" %(len(modules), nb_tests)
|
||||
+print("Generated test for %d modules and %d functions" %(len(modules), nb_tests))
|
||||
|
||||
compare_and_save()
|
||||
|
||||
@@ -960,11 +960,8 @@ for missing in missing_types.keys():
|
||||
n = len(missing_types[missing])
|
||||
missing_list.append((n, missing))
|
||||
|
||||
-def compare_missing(a, b):
|
||||
- return b[0] - a[0]
|
||||
-
|
||||
-missing_list.sort(compare_missing)
|
||||
-print "Missing support for %d functions and %d types see missing.lst" % (missing_functions_nr, len(missing_list))
|
||||
+missing_list.sort(key=lambda a: a[0])
|
||||
+print("Missing support for %d functions and %d types see missing.lst" % (missing_functions_nr, len(missing_list)))
|
||||
lst = open("missing.lst", "w")
|
||||
lst.write("Missing support for %d types" % (len(missing_list)))
|
||||
lst.write("\n")
|
||||
@@ -974,9 +971,9 @@ for miss in missing_list:
|
||||
for n in missing_types[miss[1]]:
|
||||
i = i + 1
|
||||
if i > 5:
|
||||
- lst.write(" ...")
|
||||
- break
|
||||
- lst.write(" %s" % (n))
|
||||
+ lst.write(" ...")
|
||||
+ break
|
||||
+ lst.write(" %s" % (n))
|
||||
lst.write("\n")
|
||||
lst.write("\n")
|
||||
lst.write("\n")
|
||||
@@ -32,6 +32,7 @@ SRC_URI += "http://www.w3.org/XML/Test/xmlts20080827.tar.gz;subdir=${BP};name=te
|
||||
file://CVE-2022-23308-fix-regression.patch \
|
||||
file://CVE-2022-29824-dependent.patch \
|
||||
file://CVE-2022-29824.patch \
|
||||
file://0001-Port-gentest.py-to-Python-3.patch \
|
||||
"
|
||||
|
||||
SRC_URI[archive.sha256sum] = "593b7b751dd18c2d6abcd0c4bcb29efc203d0b4373a6df98e3a455ea74ae2813"
|
||||
@@ -89,6 +90,16 @@ do_configure_prepend () {
|
||||
}
|
||||
|
||||
do_compile_ptest() {
|
||||
# Make sure that testapi.c is newer than gentests.py, because
|
||||
# with reproducible builds, they will both get e.g. Jan 1 1970
|
||||
# modification time from SOURCE_DATE_EPOCH and then check-am
|
||||
# might try to rebuild_testapi, which will fail even with
|
||||
# 0001-Port-gentest.py-to-Python-3.patch, because it needs
|
||||
# libxml2 module (libxml2-native dependency and correctly
|
||||
# set PYTHON_SITE_PACKAGES), it's easier to
|
||||
# just rely on pre-generated testapi.c from the release
|
||||
touch ${S}/testapi.c
|
||||
|
||||
oe_runmake check-am
|
||||
}
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ deltask do_populate_sysroot
|
||||
|
||||
# CVE database update interval, in seconds. By default: once a day (24*60*60).
|
||||
# Use 0 to force the update
|
||||
# Use a negative value to skip the update
|
||||
CVE_DB_UPDATE_INTERVAL ?= "86400"
|
||||
|
||||
python () {
|
||||
@@ -51,8 +52,9 @@ python do_fetch() {
|
||||
try:
|
||||
import time
|
||||
update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL"))
|
||||
if (update_interval < 0):
|
||||
update_interval = 0
|
||||
if update_interval < 0:
|
||||
bb.note("CVE database update skipped")
|
||||
return
|
||||
if time.time() - os.path.getmtime(db_file) < update_interval:
|
||||
return
|
||||
|
||||
|
||||
@@ -4,3 +4,4 @@ PR = "r1"
|
||||
inherit packagegroup
|
||||
|
||||
RDEPENDS_${PN} = "dropbear"
|
||||
RRECOMMENDS_${PN} = "openssh-sftp-server"
|
||||
|
||||
@@ -11,6 +11,7 @@ import re
|
||||
import sys
|
||||
|
||||
from collections import namedtuple
|
||||
from itertools import chain
|
||||
from pathlib import Path
|
||||
|
||||
version = 1.0
|
||||
@@ -25,12 +26,16 @@ locations = list()
|
||||
|
||||
class SystemdFile():
|
||||
"""Class representing a single systemd configuration file"""
|
||||
def __init__(self, root, path):
|
||||
def __init__(self, root, path, instance_unit_name):
|
||||
self.sections = dict()
|
||||
self._parse(root, path)
|
||||
dirname = os.path.basename(path.name) + ".d"
|
||||
for location in locations:
|
||||
for path2 in sorted((root / location / "system" / dirname).glob("*.conf")):
|
||||
files = (root / location / "system" / dirname).glob("*.conf")
|
||||
if instance_unit_name:
|
||||
inst_dirname = instance_unit_name + ".d"
|
||||
files = chain(files, (root / location / "system" / inst_dirname).glob("*.conf"))
|
||||
for path2 in sorted(files):
|
||||
self._parse(root, path2)
|
||||
|
||||
def _parse(self, root, path):
|
||||
@@ -193,8 +198,11 @@ class SystemdUnit():
|
||||
# if we're enabling an instance, first extract the actual instance
|
||||
# then figure out what the template unit is
|
||||
template = re.match(r"[^@]+@(?P<instance>[^\.]*)\.", self.unit)
|
||||
instance_unit_name = None
|
||||
if template:
|
||||
instance = template.group('instance')
|
||||
if instance != "":
|
||||
instance_unit_name = self.unit
|
||||
unit = re.sub(r"@[^\.]*\.", "@.", self.unit, 1)
|
||||
else:
|
||||
instance = None
|
||||
@@ -206,7 +214,7 @@ class SystemdUnit():
|
||||
# ignore aliases
|
||||
return
|
||||
|
||||
config = SystemdFile(self.root, path)
|
||||
config = SystemdFile(self.root, path, instance_unit_name)
|
||||
if instance == "":
|
||||
try:
|
||||
default_instance = config.get('Install', 'DefaultInstance')[0]
|
||||
|
||||
44
meta/recipes-core/zlib/zlib/CVE-2022-37434.patch
Normal file
44
meta/recipes-core/zlib/zlib/CVE-2022-37434.patch
Normal file
@@ -0,0 +1,44 @@
|
||||
From 8617d83d6939754ae3a04fc2d22daa18eeea2a43 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Wed, 17 Aug 2022 10:15:57 +0530
|
||||
Subject: [PATCH] CVE-2022-37434
|
||||
|
||||
Upstream-Status: Backport [https://github.com/madler/zlib/commit/eff308af425b67093bab25f80f1ae950166bece1 & https://github.com/madler/zlib/commit/1eb7682f845ac9e9bf9ae35bbfb3bad5dacbd91d]
|
||||
CVE: CVE-2022-37434
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
Fix a bug when getting a gzip header extra field with inflate().
|
||||
|
||||
If the extra field was larger than the space the user provided with
|
||||
inflateGetHeader(), and if multiple calls of inflate() delivered
|
||||
the extra header data, then there could be a buffer overflow of the
|
||||
provided space. This commit assures that provided space is not
|
||||
exceeded.
|
||||
|
||||
Fix extra field processing bug that dereferences NULL state->head.
|
||||
|
||||
The recent commit to fix a gzip header extra field processing bug
|
||||
introduced the new bug fixed here.
|
||||
---
|
||||
inflate.c | 5 +++--
|
||||
1 file changed, 3 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/inflate.c b/inflate.c
|
||||
index ac333e8..cd01857 100644
|
||||
--- a/inflate.c
|
||||
+++ b/inflate.c
|
||||
@@ -759,8 +759,9 @@ int flush;
|
||||
if (copy > have) copy = have;
|
||||
if (copy) {
|
||||
if (state->head != Z_NULL &&
|
||||
- state->head->extra != Z_NULL) {
|
||||
- len = state->head->extra_len - state->length;
|
||||
+ state->head->extra != Z_NULL &&
|
||||
+ (len = state->head->extra_len - state->length) <
|
||||
+ state->head->extra_max) {
|
||||
zmemcpy(state->head->extra + len, next,
|
||||
len + copy > state->head->extra_max ?
|
||||
state->head->extra_max - len : copy);
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -10,6 +10,7 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/libpng/${BPN}/${PV}/${BPN}-${PV}.tar.xz \
|
||||
file://ldflags-tests.patch \
|
||||
file://CVE-2018-25032.patch \
|
||||
file://run-ptest \
|
||||
file://CVE-2022-37434.patch \
|
||||
"
|
||||
UPSTREAM_CHECK_URI = "http://zlib.net/"
|
||||
|
||||
|
||||
@@ -18,5 +18,5 @@ SRC_URI_append_class-native = " \
|
||||
file://tweak-options-require-tar-1.27.patch \
|
||||
"
|
||||
|
||||
SRC_URI[md5sum] = "60f57c5494e6dfa177504d47bfa0e383"
|
||||
SRC_URI[sha256sum] = "4c27fededf620c0aa522fff1a48577ba08144445341257502e7730f2b1a296e8"
|
||||
SRC_URI[md5sum] = "9d170c8baa1aa36b09698c909f304508"
|
||||
SRC_URI[sha256sum] = "2632c00b0cf0ea19ed7bd6700e6ec5faca93f0045af629d356dc03ad74ae6f10"
|
||||
@@ -0,0 +1,42 @@
|
||||
From a66071ed6a0d1fa666d22dcb78fa6fcb3bf22df3 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Fri, 27 May 2022 14:01:50 +0530
|
||||
Subject: [PATCH] CVE-2022-1304
|
||||
|
||||
Upstream-Status: Backport [https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/commit/?h=maint&id=ab51d587bb9b229b1fade1afd02e1574c1ba5c76]
|
||||
CVE: CVE-2022-1304
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
---
|
||||
lib/ext2fs/extent.c | 8 ++++++++
|
||||
1 file changed, 8 insertions(+)
|
||||
|
||||
diff --git a/lib/ext2fs/extent.c b/lib/ext2fs/extent.c
|
||||
index ac3dbfec9..a1b1905cd 100644
|
||||
--- a/lib/ext2fs/extent.c
|
||||
+++ b/lib/ext2fs/extent.c
|
||||
@@ -495,6 +495,10 @@ retry:
|
||||
ext2fs_le16_to_cpu(eh->eh_entries);
|
||||
newpath->max_entries = ext2fs_le16_to_cpu(eh->eh_max);
|
||||
|
||||
+ /* Make sure there is at least one extent present */
|
||||
+ if (newpath->left <= 0)
|
||||
+ return EXT2_ET_EXTENT_NO_DOWN;
|
||||
+
|
||||
if (path->left > 0) {
|
||||
ix++;
|
||||
newpath->end_blk = ext2fs_le32_to_cpu(ix->ei_block);
|
||||
@@ -1630,6 +1634,10 @@ errcode_t ext2fs_extent_delete(ext2_extent_handle_t handle, int flags)
|
||||
|
||||
cp = path->curr;
|
||||
|
||||
+ /* Sanity check before memmove() */
|
||||
+ if (path->left < 0)
|
||||
+ return EXT2_ET_EXTENT_LEAF_BAD;
|
||||
+
|
||||
if (path->left) {
|
||||
memmove(cp, cp + sizeof(struct ext3_extent_idx),
|
||||
path->left * sizeof(struct ext3_extent_idx));
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -6,6 +6,7 @@ SRC_URI += "file://remove.ldconfig.call.patch \
|
||||
file://mkdir_p.patch \
|
||||
file://0001-configure.ac-correct-AM_GNU_GETTEXT.patch \
|
||||
file://0001-intl-do-not-try-to-use-gettext-defines-that-no-longe.patch \
|
||||
file://CVE-2022-1304.patch \
|
||||
"
|
||||
|
||||
SRC_URI_append_class-native = " file://e2fsprogs-fix-missing-check-for-permission-denied.patch \
|
||||
@@ -53,6 +54,7 @@ do_install () {
|
||||
oe_multilib_header ext2fs/ext2_types.h
|
||||
install -d ${D}${base_bindir}
|
||||
mv ${D}${bindir}/chattr ${D}${base_bindir}/chattr.e2fsprogs
|
||||
mv ${D}${bindir}/lsattr ${D}${base_bindir}/lsattr.e2fsprogs
|
||||
|
||||
install -v -m 755 ${S}/contrib/populate-extfs.sh ${D}${base_sbindir}/
|
||||
|
||||
@@ -101,10 +103,12 @@ FILES_libe2p = "${base_libdir}/libe2p.so.*"
|
||||
FILES_libext2fs = "${libdir}/e2initrd_helper ${base_libdir}/libext2fs.so.*"
|
||||
FILES_${PN}-dev += "${datadir}/*/*.awk ${datadir}/*/*.sed ${base_libdir}/*.so ${bindir}/compile_et ${bindir}/mk_cmds"
|
||||
|
||||
ALTERNATIVE_${PN} = "chattr"
|
||||
ALTERNATIVE_${PN} = "chattr lsattr"
|
||||
ALTERNATIVE_PRIORITY = "100"
|
||||
ALTERNATIVE_LINK_NAME[chattr] = "${base_bindir}/chattr"
|
||||
ALTERNATIVE_TARGET[chattr] = "${base_bindir}/chattr.e2fsprogs"
|
||||
ALTERNATIVE_LINK_NAME[lsattr] = "${base_bindir}/lsattr"
|
||||
ALTERNATIVE_TARGET[lsattr] = "${base_bindir}/lsattr.e2fsprogs"
|
||||
|
||||
ALTERNATIVE_${PN}-doc = "fsck.8"
|
||||
ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8"
|
||||
|
||||
@@ -100,7 +100,7 @@ BINV = "${PV}"
|
||||
#S = "${WORKDIR}/gcc-${PV}"
|
||||
S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/gcc-${PV}"
|
||||
|
||||
B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
|
||||
B ?= "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
|
||||
|
||||
target_includedir ?= "${includedir}"
|
||||
target_libdir ?= "${libdir}"
|
||||
|
||||
@@ -18,6 +18,7 @@ INHIBIT_DEFAULT_DEPS = "1"
|
||||
DEPENDS = ""
|
||||
PACKAGES = ""
|
||||
|
||||
B = "${WORKDIR}/build"
|
||||
|
||||
# This needs to be Python to avoid lots of shell variables becoming dependencies.
|
||||
python do_preconfigure () {
|
||||
|
||||
@@ -22,6 +22,9 @@ SRC_URI += "\
|
||||
file://CVE-2021-38297.patch \
|
||||
file://CVE-2022-23806.patch \
|
||||
file://CVE-2022-23772.patch \
|
||||
file://CVE-2021-44717.patch \
|
||||
file://CVE-2022-24675.patch \
|
||||
file://CVE-2021-31525.patch \
|
||||
"
|
||||
|
||||
SRC_URI_append_libc-musl = " file://0009-ld-replace-glibc-dynamic-linker-with-musl.patch"
|
||||
|
||||
38
meta/recipes-devtools/go/go-1.14/CVE-2021-31525.patch
Normal file
38
meta/recipes-devtools/go/go-1.14/CVE-2021-31525.patch
Normal file
@@ -0,0 +1,38 @@
|
||||
From efb465ada003d23353a91ef930be408eb575dba6 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Thu, 16 Jun 2022 17:40:12 +0530
|
||||
Subject: [PATCH] CVE-2021-31525
|
||||
|
||||
Upstream-Status: Backport [https://github.com/argoheyard/lang-net/commit/701957006ef151feb43f86aa99c8a1f474f69282]
|
||||
CVE: CVE-2021-31525
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
---
|
||||
src/vendor/golang.org/x/net/http/httpguts/httplex.go | 10 ++++++----
|
||||
1 file changed, 6 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/src/vendor/golang.org/x/net/http/httpguts/httplex.go b/src/vendor/golang.org/x/net/http/httpguts/httplex.go
|
||||
index e7de24e..c79aa73 100644
|
||||
--- a/src/vendor/golang.org/x/net/http/httpguts/httplex.go
|
||||
+++ b/src/vendor/golang.org/x/net/http/httpguts/httplex.go
|
||||
@@ -137,11 +137,13 @@ func trimOWS(x string) string {
|
||||
// contains token amongst its comma-separated tokens, ASCII
|
||||
// case-insensitively.
|
||||
func headerValueContainsToken(v string, token string) bool {
|
||||
- v = trimOWS(v)
|
||||
- if comma := strings.IndexByte(v, ','); comma != -1 {
|
||||
- return tokenEqual(trimOWS(v[:comma]), token) || headerValueContainsToken(v[comma+1:], token)
|
||||
+ for comma := strings.IndexByte(v, ','); comma != -1; comma = strings.IndexByte(v, ',') {
|
||||
+ if tokenEqual(trimOWS(v[:comma]), token) {
|
||||
+ return true
|
||||
+ }
|
||||
+ v = v[comma+1:]
|
||||
}
|
||||
- return tokenEqual(v, token)
|
||||
+ return tokenEqual(trimOWS(v), token)
|
||||
}
|
||||
|
||||
// lowerASCII returns the ASCII lowercase version of b.
|
||||
--
|
||||
2.25.1
|
||||
|
||||
83
meta/recipes-devtools/go/go-1.14/CVE-2021-44717.patch
Normal file
83
meta/recipes-devtools/go/go-1.14/CVE-2021-44717.patch
Normal file
@@ -0,0 +1,83 @@
|
||||
From 9171c664e7af479aa26bc72f2e7cf4e69d8e0a6f Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Fri, 17 Jun 2022 10:22:47 +0530
|
||||
Subject: [PATCH] CVE-2021-44717
|
||||
|
||||
Upstream-Status: Backport [https://github.com/golang/go/commit/44a3fb49]
|
||||
CVE: CVE-2021-44717
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
syscall: fix ForkLock spurious close(0) on pipe failure
|
||||
Pipe (and therefore forkLockPipe) does not make any guarantees
|
||||
about the state of p after a failed Pipe(p). Avoid that assumption
|
||||
and the too-clever goto, so that we don't accidentally Close a real fd
|
||||
if the failed pipe leaves p[0] or p[1] set >= 0.
|
||||
|
||||
Updates #50057
|
||||
Fixes CVE-2021-44717
|
||||
|
||||
Change-Id: Iff8e19a6efbba0c73cc8b13ecfae381c87600bb4
|
||||
Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1291270
|
||||
Reviewed-by: Ian Lance Taylor <iant@google.com>
|
||||
Reviewed-on: https://go-review.googlesource.com/c/go/+/370514
|
||||
Trust: Filippo Valsorda <filippo@golang.org>
|
||||
Run-TryBot: Filippo Valsorda <filippo@golang.org>
|
||||
TryBot-Result: Gopher Robot <gobot@golang.org>
|
||||
Reviewed-by: Alex Rakoczy <alex@golang.org>
|
||||
---
|
||||
src/syscall/exec_unix.go | 20 ++++++--------------
|
||||
1 file changed, 6 insertions(+), 14 deletions(-)
|
||||
|
||||
diff --git a/src/syscall/exec_unix.go b/src/syscall/exec_unix.go
|
||||
index b3798b6..b73782c 100644
|
||||
--- a/src/syscall/exec_unix.go
|
||||
+++ b/src/syscall/exec_unix.go
|
||||
@@ -151,9 +151,6 @@ func forkExec(argv0 string, argv []string, attr *ProcAttr) (pid int, err error)
|
||||
sys = &zeroSysProcAttr
|
||||
}
|
||||
|
||||
- p[0] = -1
|
||||
- p[1] = -1
|
||||
-
|
||||
// Convert args to C form.
|
||||
argv0p, err := BytePtrFromString(argv0)
|
||||
if err != nil {
|
||||
@@ -194,14 +191,17 @@ func forkExec(argv0 string, argv []string, attr *ProcAttr) (pid int, err error)
|
||||
|
||||
// Allocate child status pipe close on exec.
|
||||
if err = forkExecPipe(p[:]); err != nil {
|
||||
- goto error
|
||||
+ ForkLock.Unlock()
|
||||
+ return 0, err
|
||||
}
|
||||
|
||||
// Kick off child.
|
||||
pid, err1 = forkAndExecInChild(argv0p, argvp, envvp, chroot, dir, attr, sys, p[1])
|
||||
if err1 != 0 {
|
||||
- err = Errno(err1)
|
||||
- goto error
|
||||
+ Close(p[0])
|
||||
+ Close(p[1])
|
||||
+ ForkLock.Unlock()
|
||||
+ return 0, Errno(err1)
|
||||
}
|
||||
ForkLock.Unlock()
|
||||
|
||||
@@ -228,14 +228,6 @@ func forkExec(argv0 string, argv []string, attr *ProcAttr) (pid int, err error)
|
||||
|
||||
// Read got EOF, so pipe closed on exec, so exec succeeded.
|
||||
return pid, nil
|
||||
-
|
||||
-error:
|
||||
- if p[0] >= 0 {
|
||||
- Close(p[0])
|
||||
- Close(p[1])
|
||||
- }
|
||||
- ForkLock.Unlock()
|
||||
- return 0, err
|
||||
}
|
||||
|
||||
// Combination of fork and exec, careful to be thread safe.
|
||||
--
|
||||
2.25.1
|
||||
|
||||
271
meta/recipes-devtools/go/go-1.14/CVE-2022-24675.patch
Normal file
271
meta/recipes-devtools/go/go-1.14/CVE-2022-24675.patch
Normal file
@@ -0,0 +1,271 @@
|
||||
From 1eb931d60a24501a9668e5cb4647593e19115507 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Fri, 17 Jun 2022 12:22:53 +0530
|
||||
Subject: [PATCH] CVE-2022-24675
|
||||
|
||||
Upstream-Status: Backport [https://go-review.googlesource.com/c/go/+/399816/]
|
||||
CVE: CVE-2022-24675
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
src/encoding/pem/pem.go | 174 +++++++++++++++--------------------
|
||||
src/encoding/pem/pem_test.go | 28 +++++-
|
||||
2 files changed, 101 insertions(+), 101 deletions(-)
|
||||
|
||||
diff --git a/src/encoding/pem/pem.go b/src/encoding/pem/pem.go
|
||||
index a7272da..1bee1c1 100644
|
||||
--- a/src/encoding/pem/pem.go
|
||||
+++ b/src/encoding/pem/pem.go
|
||||
@@ -87,123 +87,97 @@ func Decode(data []byte) (p *Block, rest []byte) {
|
||||
// pemStart begins with a newline. However, at the very beginning of
|
||||
// the byte array, we'll accept the start string without it.
|
||||
rest = data
|
||||
- if bytes.HasPrefix(data, pemStart[1:]) {
|
||||
- rest = rest[len(pemStart)-1 : len(data)]
|
||||
- } else if i := bytes.Index(data, pemStart); i >= 0 {
|
||||
- rest = rest[i+len(pemStart) : len(data)]
|
||||
- } else {
|
||||
- return nil, data
|
||||
- }
|
||||
-
|
||||
- typeLine, rest := getLine(rest)
|
||||
- if !bytes.HasSuffix(typeLine, pemEndOfLine) {
|
||||
- return decodeError(data, rest)
|
||||
- }
|
||||
- typeLine = typeLine[0 : len(typeLine)-len(pemEndOfLine)]
|
||||
-
|
||||
- p = &Block{
|
||||
- Headers: make(map[string]string),
|
||||
- Type: string(typeLine),
|
||||
- }
|
||||
-
|
||||
for {
|
||||
- // This loop terminates because getLine's second result is
|
||||
- // always smaller than its argument.
|
||||
- if len(rest) == 0 {
|
||||
+ if bytes.HasPrefix(rest, pemStart[1:]) {
|
||||
+ rest = rest[len(pemStart)-1:]
|
||||
+ } else if i := bytes.Index(rest, pemStart); i >= 0 {
|
||||
+ rest = rest[i+len(pemStart) : len(rest)]
|
||||
+ } else {
|
||||
return nil, data
|
||||
}
|
||||
- line, next := getLine(rest)
|
||||
|
||||
- i := bytes.IndexByte(line, ':')
|
||||
- if i == -1 {
|
||||
- break
|
||||
+ var typeLine []byte
|
||||
+ typeLine, rest = getLine(rest)
|
||||
+ if !bytes.HasSuffix(typeLine, pemEndOfLine) {
|
||||
+ continue
|
||||
}
|
||||
+ typeLine = typeLine[0 : len(typeLine)-len(pemEndOfLine)]
|
||||
|
||||
- // TODO(agl): need to cope with values that spread across lines.
|
||||
- key, val := line[:i], line[i+1:]
|
||||
- key = bytes.TrimSpace(key)
|
||||
- val = bytes.TrimSpace(val)
|
||||
- p.Headers[string(key)] = string(val)
|
||||
- rest = next
|
||||
- }
|
||||
+ p = &Block{
|
||||
+ Headers: make(map[string]string),
|
||||
+ Type: string(typeLine),
|
||||
+ }
|
||||
|
||||
- var endIndex, endTrailerIndex int
|
||||
+ for {
|
||||
+ // This loop terminates because getLine's second result is
|
||||
+ // always smaller than its argument.
|
||||
+ if len(rest) == 0 {
|
||||
+ return nil, data
|
||||
+ }
|
||||
+ line, next := getLine(rest)
|
||||
|
||||
- // If there were no headers, the END line might occur
|
||||
- // immediately, without a leading newline.
|
||||
- if len(p.Headers) == 0 && bytes.HasPrefix(rest, pemEnd[1:]) {
|
||||
- endIndex = 0
|
||||
- endTrailerIndex = len(pemEnd) - 1
|
||||
- } else {
|
||||
- endIndex = bytes.Index(rest, pemEnd)
|
||||
- endTrailerIndex = endIndex + len(pemEnd)
|
||||
- }
|
||||
+ i := bytes.IndexByte(line, ':')
|
||||
+ if i == -1 {
|
||||
+ break
|
||||
+ }
|
||||
|
||||
- if endIndex < 0 {
|
||||
- return decodeError(data, rest)
|
||||
- }
|
||||
+ // TODO(agl): need to cope with values that spread across lines.
|
||||
+ key, val := line[:i], line[i+1:]
|
||||
+ key = bytes.TrimSpace(key)
|
||||
+ val = bytes.TrimSpace(val)
|
||||
+ p.Headers[string(key)] = string(val)
|
||||
+ rest = next
|
||||
+ }
|
||||
|
||||
- // After the "-----" of the ending line, there should be the same type
|
||||
- // and then a final five dashes.
|
||||
- endTrailer := rest[endTrailerIndex:]
|
||||
- endTrailerLen := len(typeLine) + len(pemEndOfLine)
|
||||
- if len(endTrailer) < endTrailerLen {
|
||||
- return decodeError(data, rest)
|
||||
- }
|
||||
+ var endIndex, endTrailerIndex int
|
||||
|
||||
- restOfEndLine := endTrailer[endTrailerLen:]
|
||||
- endTrailer = endTrailer[:endTrailerLen]
|
||||
- if !bytes.HasPrefix(endTrailer, typeLine) ||
|
||||
- !bytes.HasSuffix(endTrailer, pemEndOfLine) {
|
||||
- return decodeError(data, rest)
|
||||
- }
|
||||
+ // If there were no headers, the END line might occur
|
||||
+ // immediately, without a leading newline.
|
||||
+ if len(p.Headers) == 0 && bytes.HasPrefix(rest, pemEnd[1:]) {
|
||||
+ endIndex = 0
|
||||
+ endTrailerIndex = len(pemEnd) - 1
|
||||
+ } else {
|
||||
+ endIndex = bytes.Index(rest, pemEnd)
|
||||
+ endTrailerIndex = endIndex + len(pemEnd)
|
||||
+ }
|
||||
|
||||
- // The line must end with only whitespace.
|
||||
- if s, _ := getLine(restOfEndLine); len(s) != 0 {
|
||||
- return decodeError(data, rest)
|
||||
- }
|
||||
+ if endIndex < 0 {
|
||||
+ continue
|
||||
+ }
|
||||
|
||||
- base64Data := removeSpacesAndTabs(rest[:endIndex])
|
||||
- p.Bytes = make([]byte, base64.StdEncoding.DecodedLen(len(base64Data)))
|
||||
- n, err := base64.StdEncoding.Decode(p.Bytes, base64Data)
|
||||
- if err != nil {
|
||||
- return decodeError(data, rest)
|
||||
- }
|
||||
- p.Bytes = p.Bytes[:n]
|
||||
+ // After the "-----" of the ending line, there should be the same type
|
||||
+ // and then a final five dashes.
|
||||
+ endTrailer := rest[endTrailerIndex:]
|
||||
+ endTrailerLen := len(typeLine) + len(pemEndOfLine)
|
||||
+ if len(endTrailer) < endTrailerLen {
|
||||
+ continue
|
||||
+ }
|
||||
+
|
||||
+ restOfEndLine := endTrailer[endTrailerLen:]
|
||||
+ endTrailer = endTrailer[:endTrailerLen]
|
||||
+ if !bytes.HasPrefix(endTrailer, typeLine) ||
|
||||
+ !bytes.HasSuffix(endTrailer, pemEndOfLine) {
|
||||
+ continue
|
||||
+ }
|
||||
|
||||
- // the -1 is because we might have only matched pemEnd without the
|
||||
- // leading newline if the PEM block was empty.
|
||||
- _, rest = getLine(rest[endIndex+len(pemEnd)-1:])
|
||||
+ // The line must end with only whitespace.
|
||||
+ if s, _ := getLine(restOfEndLine); len(s) != 0 {
|
||||
+ continue
|
||||
+ }
|
||||
|
||||
- return
|
||||
-}
|
||||
+ base64Data := removeSpacesAndTabs(rest[:endIndex])
|
||||
+ p.Bytes = make([]byte, base64.StdEncoding.DecodedLen(len(base64Data)))
|
||||
+ n, err := base64.StdEncoding.Decode(p.Bytes, base64Data)
|
||||
+ if err != nil {
|
||||
+ continue
|
||||
+ }
|
||||
+ p.Bytes = p.Bytes[:n]
|
||||
|
||||
-func decodeError(data, rest []byte) (*Block, []byte) {
|
||||
- // If we get here then we have rejected a likely looking, but
|
||||
- // ultimately invalid PEM block. We need to start over from a new
|
||||
- // position. We have consumed the preamble line and will have consumed
|
||||
- // any lines which could be header lines. However, a valid preamble
|
||||
- // line is not a valid header line, therefore we cannot have consumed
|
||||
- // the preamble line for the any subsequent block. Thus, we will always
|
||||
- // find any valid block, no matter what bytes precede it.
|
||||
- //
|
||||
- // For example, if the input is
|
||||
- //
|
||||
- // -----BEGIN MALFORMED BLOCK-----
|
||||
- // junk that may look like header lines
|
||||
- // or data lines, but no END line
|
||||
- //
|
||||
- // -----BEGIN ACTUAL BLOCK-----
|
||||
- // realdata
|
||||
- // -----END ACTUAL BLOCK-----
|
||||
- //
|
||||
- // we've failed to parse using the first BEGIN line
|
||||
- // and now will try again, using the second BEGIN line.
|
||||
- p, rest := Decode(rest)
|
||||
- if p == nil {
|
||||
- rest = data
|
||||
+ // the -1 is because we might have only matched pemEnd without the
|
||||
+ // leading newline if the PEM block was empty.
|
||||
+ _, rest = getLine(rest[endIndex+len(pemEnd)-1:])
|
||||
+ return p, rest
|
||||
}
|
||||
- return p, rest
|
||||
}
|
||||
|
||||
const pemLineLength = 64
|
||||
diff --git a/src/encoding/pem/pem_test.go b/src/encoding/pem/pem_test.go
|
||||
index 8515b46..4485581 100644
|
||||
--- a/src/encoding/pem/pem_test.go
|
||||
+++ b/src/encoding/pem/pem_test.go
|
||||
@@ -107,6 +107,12 @@ const pemMissingEndingSpace = `
|
||||
dGVzdA==
|
||||
-----ENDBAR-----`
|
||||
|
||||
+const pemMissingEndLine = `
|
||||
+-----BEGIN FOO-----
|
||||
+Header: 1`
|
||||
+
|
||||
+var pemRepeatingBegin = strings.Repeat("-----BEGIN \n", 10)
|
||||
+
|
||||
var badPEMTests = []struct {
|
||||
name string
|
||||
input string
|
||||
@@ -131,14 +137,34 @@ var badPEMTests = []struct {
|
||||
"missing ending space",
|
||||
pemMissingEndingSpace,
|
||||
},
|
||||
+ {
|
||||
+ "repeating begin",
|
||||
+ pemRepeatingBegin,
|
||||
+ },
|
||||
+ {
|
||||
+ "missing end line",
|
||||
+ pemMissingEndLine,
|
||||
+ },
|
||||
}
|
||||
|
||||
func TestBadDecode(t *testing.T) {
|
||||
for _, test := range badPEMTests {
|
||||
- result, _ := Decode([]byte(test.input))
|
||||
+ result, rest := Decode([]byte(test.input))
|
||||
if result != nil {
|
||||
t.Errorf("unexpected success while parsing %q", test.name)
|
||||
}
|
||||
+ if string(rest) != test.input {
|
||||
+ t.Errorf("unexpected rest: %q; want = %q", rest, test.input)
|
||||
+ }
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func TestCVE202224675(t *testing.T) {
|
||||
+ // Prior to CVE-2022-24675, this input would cause a stack overflow.
|
||||
+ input := []byte(strings.Repeat("-----BEGIN \n", 10000000))
|
||||
+ result, rest := Decode(input)
|
||||
+ if result != nil || !reflect.DeepEqual(rest, input) {
|
||||
+ t.Errorf("Encode of %#v decoded as %#v", input, rest)
|
||||
}
|
||||
}
|
||||
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -37,6 +37,7 @@ EXTRA_CPAN_BUILD_FLAGS = "--create_packlist=0"
|
||||
|
||||
do_install_append () {
|
||||
rm -rf ${D}${docdir}/perl/html
|
||||
sed -i "s:^#!.*:#!/usr/bin/env perl:" ${D}${bindir}/config_data
|
||||
}
|
||||
|
||||
do_install_ptest() {
|
||||
|
||||
48
meta/recipes-devtools/python/python3-pip/CVE-2021-3572.patch
Normal file
48
meta/recipes-devtools/python/python3-pip/CVE-2021-3572.patch
Normal file
@@ -0,0 +1,48 @@
|
||||
From c4fd13410b9a219f77fc30775d4a0ac9f69725bd Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Thu, 16 Jun 2022 09:52:43 +0530
|
||||
Subject: [PATCH] CVE-2021-3572
|
||||
|
||||
Upstream-Status: Backport [https://github.com/pypa/pip/commit/e46bdda9711392fec0c45c1175bae6db847cb30b]
|
||||
CVE: CVE-2021-3572
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
news/9827.bugfix.rst | 3 +++
|
||||
src/pip/_internal/vcs/git.py | 10 ++++++++--
|
||||
2 files changed, 11 insertions(+), 2 deletions(-)
|
||||
create mode 100644 news/9827.bugfix.rst
|
||||
|
||||
diff --git a/news/9827.bugfix.rst b/news/9827.bugfix.rst
|
||||
new file mode 100644
|
||||
index 0000000..e0d27c3
|
||||
--- /dev/null
|
||||
+++ b/news/9827.bugfix.rst
|
||||
@@ -0,0 +1,3 @@
|
||||
+**SECURITY**: Stop splitting on unicode separators in git references,
|
||||
+which could be maliciously used to install a different revision on the
|
||||
+repository.
|
||||
diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py
|
||||
index 7483303..1b895f6 100644
|
||||
--- a/src/pip/_internal/vcs/git.py
|
||||
+++ b/src/pip/_internal/vcs/git.py
|
||||
@@ -137,9 +137,15 @@ class Git(VersionControl):
|
||||
output = cls.run_command(['show-ref', rev], cwd=dest,
|
||||
show_stdout=False, on_returncode='ignore')
|
||||
refs = {}
|
||||
- for line in output.strip().splitlines():
|
||||
+ # NOTE: We do not use splitlines here since that would split on other
|
||||
+ # unicode separators, which can be maliciously used to install a
|
||||
+ # different revision.
|
||||
+ for line in output.strip().split("\n"):
|
||||
+ line = line.rstrip("\r")
|
||||
+ if not line:
|
||||
+ continue
|
||||
try:
|
||||
- sha, ref = line.split()
|
||||
+ ref_sha, ref_name = line.split(" ", maxsplit=2)
|
||||
except ValueError:
|
||||
# Include the offending line to simplify troubleshooting if
|
||||
# this error ever occurs.
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -6,6 +6,7 @@ LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=8ba06d529c955048e5ddd7c45459eb2e"
|
||||
|
||||
DEPENDS += "python3 python3-setuptools-native"
|
||||
|
||||
SRC_URI = "file://CVE-2021-3572.patch "
|
||||
SRC_URI[md5sum] = "7d42ba49b809604f0df3d55df1c3fd86"
|
||||
SRC_URI[sha256sum] = "7db0c8ea4c7ea51c8049640e8e6e7fde949de672bfa4949920675563a5a6967f"
|
||||
|
||||
|
||||
@@ -98,6 +98,8 @@ SRC_URI = "https://download.qemu.org/${BPN}-${PV}.tar.xz \
|
||||
file://CVE-2020-13253_4.patch \
|
||||
file://CVE-2020-13253_5.patch \
|
||||
file://CVE-2020-13791.patch \
|
||||
file://CVE-2022-35414.patch \
|
||||
file://CVE-2020-27821.patch \
|
||||
"
|
||||
UPSTREAM_CHECK_REGEX = "qemu-(?P<pver>\d+(\.\d+)+)\.tar"
|
||||
|
||||
@@ -254,6 +256,7 @@ PACKAGECONFIG[xkbcommon] = "--enable-xkbcommon,--disable-xkbcommon,libxkbcommon"
|
||||
PACKAGECONFIG[libudev] = "--enable-libudev,--disable-libudev,eudev"
|
||||
PACKAGECONFIG[libxml2] = "--enable-libxml2,--disable-libxml2,libxml2"
|
||||
PACKAGECONFIG[seccomp] = "--enable-seccomp,--disable-seccomp,libseccomp"
|
||||
PACKAGECONFIG[capstone] = "--enable-capstone,--disable-capstone"
|
||||
|
||||
INSANE_SKIP_${PN} = "arch"
|
||||
|
||||
|
||||
73
meta/recipes-devtools/qemu/qemu/CVE-2020-27821.patch
Normal file
73
meta/recipes-devtools/qemu/qemu/CVE-2020-27821.patch
Normal file
@@ -0,0 +1,73 @@
|
||||
From 15222d4636d742f3395fd211fad0cd7e36d9f43e Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Tue, 16 Aug 2022 10:07:01 +0530
|
||||
Subject: [PATCH] CVE-2020-27821
|
||||
|
||||
Upstream-Status: Backport [https://git.qemu.org/?p=qemu.git;a=commit;h=4bfb024bc76973d40a359476dc0291f46e435442]
|
||||
CVE: CVE-2020-27821
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
|
||||
memory: clamp cached translation in case it points to an MMIO region
|
||||
|
||||
In using the address_space_translate_internal API, address_space_cache_init
|
||||
forgot one piece of advice that can be found in the code for
|
||||
address_space_translate_internal:
|
||||
|
||||
/* MMIO registers can be expected to perform full-width accesses based only
|
||||
* on their address, without considering adjacent registers that could
|
||||
* decode to completely different MemoryRegions. When such registers
|
||||
* exist (e.g. I/O ports 0xcf8 and 0xcf9 on most PC chipsets), MMIO
|
||||
* regions overlap wildly. For this reason we cannot clamp the accesses
|
||||
* here.
|
||||
*
|
||||
* If the length is small (as is the case for address_space_ldl/stl),
|
||||
* everything works fine. If the incoming length is large, however,
|
||||
* the caller really has to do the clamping through memory_access_size.
|
||||
*/
|
||||
|
||||
address_space_cache_init is exactly one such case where "the incoming length
|
||||
is large", therefore we need to clamp the resulting length---not to
|
||||
memory_access_size though, since we are not doing an access yet, but to
|
||||
the size of the resulting section. This ensures that subsequent accesses
|
||||
to the cached MemoryRegionSection will be in range.
|
||||
|
||||
With this patch, the enclosed testcase notices that the used ring does
|
||||
not fit into the MSI-X table and prints a "qemu-system-x86_64: Cannot map used"
|
||||
error.
|
||||
|
||||
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
|
||||
---
|
||||
exec.c | 10 ++++++++++
|
||||
1 file changed, 10 insertions(+)
|
||||
|
||||
diff --git a/exec.c b/exec.c
|
||||
index 2d6add46..1360051a 100644
|
||||
--- a/exec.c
|
||||
+++ b/exec.c
|
||||
@@ -3632,6 +3632,7 @@ int64_t address_space_cache_init(MemoryRegionCache *cache,
|
||||
AddressSpaceDispatch *d;
|
||||
hwaddr l;
|
||||
MemoryRegion *mr;
|
||||
+ Int128 diff;
|
||||
|
||||
assert(len > 0);
|
||||
|
||||
@@ -3640,6 +3641,15 @@ int64_t address_space_cache_init(MemoryRegionCache *cache,
|
||||
d = flatview_to_dispatch(cache->fv);
|
||||
cache->mrs = *address_space_translate_internal(d, addr, &cache->xlat, &l, true);
|
||||
|
||||
+ /*
|
||||
+ * cache->xlat is now relative to cache->mrs.mr, not to the section itself.
|
||||
+ * Take that into account to compute how many bytes are there between
|
||||
+ * cache->xlat and the end of the section.
|
||||
+ */
|
||||
+ diff = int128_sub(cache->mrs.size,
|
||||
+ int128_make64(cache->xlat - cache->mrs.offset_within_region));
|
||||
+ l = int128_get64(int128_min(diff, int128_make64(l)));
|
||||
+
|
||||
mr = cache->mrs.mr;
|
||||
memory_region_ref(mr);
|
||||
if (memory_access_is_direct(mr, is_write)) {
|
||||
--
|
||||
2.25.1
|
||||
|
||||
53
meta/recipes-devtools/qemu/qemu/CVE-2022-35414.patch
Normal file
53
meta/recipes-devtools/qemu/qemu/CVE-2022-35414.patch
Normal file
@@ -0,0 +1,53 @@
|
||||
From 09a07b5b39c87423df9e8f6574c19a14d36beac5 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Wed, 27 Jul 2022 10:34:12 +0530
|
||||
Subject: [PATCH] CVE-2022-35414
|
||||
|
||||
Upstream-Status: Backport [https://github.com/qemu/qemu/commit/418ade7849ce7641c0f7333718caf5091a02fd4c]
|
||||
CVE: CVE-2022-35414
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
exec.c | 13 ++++++++++++-
|
||||
1 file changed, 12 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/exec.c b/exec.c
|
||||
index 43c70ffb..2d6add46 100644
|
||||
--- a/exec.c
|
||||
+++ b/exec.c
|
||||
@@ -685,7 +685,7 @@ static void tcg_iommu_free_notifier_list(CPUState *cpu)
|
||||
|
||||
/* Called from RCU critical section */
|
||||
MemoryRegionSection *
|
||||
-address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
|
||||
+address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr orig_addr,
|
||||
hwaddr *xlat, hwaddr *plen,
|
||||
MemTxAttrs attrs, int *prot)
|
||||
{
|
||||
@@ -694,6 +694,7 @@ address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
|
||||
IOMMUMemoryRegionClass *imrc;
|
||||
IOMMUTLBEntry iotlb;
|
||||
int iommu_idx;
|
||||
+ hwaddr addr = orig_addr;
|
||||
AddressSpaceDispatch *d = atomic_rcu_read(&cpu->cpu_ases[asidx].memory_dispatch);
|
||||
|
||||
for (;;) {
|
||||
@@ -737,6 +738,16 @@ address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
|
||||
return section;
|
||||
|
||||
translate_fail:
|
||||
+ /*
|
||||
+ * We should be given a page-aligned address -- certainly
|
||||
+ * tlb_set_page_with_attrs() does so. The page offset of xlat
|
||||
+ * is used to index sections[], and PHYS_SECTION_UNASSIGNED = 0.
|
||||
+ * The page portion of xlat will be logged by memory_region_access_valid()
|
||||
+ * when this memory access is rejected, so use the original untranslated
|
||||
+ * physical address.
|
||||
+ */
|
||||
+ assert((orig_addr & ~TARGET_PAGE_MASK) == 0);
|
||||
+ *xlat = orig_addr;
|
||||
return &d->map.sections[PHYS_SECTION_UNASSIGNED];
|
||||
}
|
||||
#endif
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -13,6 +13,7 @@ SRC_URI = "https://github.com/apple/cups/releases/download/v${PV}/${BP}-source.t
|
||||
file://0002-don-t-try-to-run-generated-binaries.patch \
|
||||
file://0003-cups_1.4.6.bb-Fix-build-on-ppc64.patch \
|
||||
file://0004-cups-fix-multilib-install-file-conflicts.patch\
|
||||
file://CVE-2022-26691.patch \
|
||||
"
|
||||
|
||||
UPSTREAM_CHECK_URI = "https://github.com/apple/cups/releases"
|
||||
@@ -119,4 +120,4 @@ cups_sysroot_preprocess () {
|
||||
|
||||
# -25317 concerns /var/log/cups having lp ownership. Our /var/log/cups is
|
||||
# root:root, so this doesn't apply.
|
||||
CVE_CHECK_WHITELIST += "CVE-2021-25317"
|
||||
CVE_CHECK_WHITELIST += "CVE-2021-25317"
|
||||
|
||||
33
meta/recipes-extended/cups/cups/CVE-2022-26691.patch
Normal file
33
meta/recipes-extended/cups/cups/CVE-2022-26691.patch
Normal file
@@ -0,0 +1,33 @@
|
||||
From de4f8c196106033e4c372dce3e91b9d42b0b9444 Mon Sep 17 00:00:00 2001
|
||||
From: Zdenek Dohnal <zdohnal@redhat.com>
|
||||
Date: Thu, 26 May 2022 06:27:04 +0200
|
||||
Subject: [PATCH] scheduler/cert.c: Fix string comparison (fixes
|
||||
CVE-2022-26691)
|
||||
|
||||
The previous algorithm didn't expect the strings can have a different
|
||||
length, so one string can be a substring of the other and such substring
|
||||
was reported as equal to the longer string.
|
||||
|
||||
CVE: CVE-2022-26691
|
||||
Upstream-Status: Backport [https://github.com/OpenPrinting/cups/commit/de4f8c196106033e4c372dce3e91b9d42b0b9444]
|
||||
Signed-off-by: Steve Sakoman
|
||||
|
||||
---
|
||||
diff --git a/scheduler/cert.c b/scheduler/cert.c
|
||||
index b268bf1b2..9b65b96c9 100644
|
||||
--- a/scheduler/cert.c
|
||||
+++ b/scheduler/cert.c
|
||||
@@ -434,5 +434,12 @@ ctcompare(const char *a, /* I - First string */
|
||||
b ++;
|
||||
}
|
||||
|
||||
- return (result);
|
||||
+ /*
|
||||
+ * The while loop finishes when *a == '\0' or *b == '\0'
|
||||
+ * so after the while loop either both *a and *b == '\0',
|
||||
+ * or one points inside a string, so when we apply logical OR on *a,
|
||||
+ * *b and result, we get a non-zero return value if the compared strings don't match.
|
||||
+ */
|
||||
+
|
||||
+ return (result | *a | *b);
|
||||
}
|
||||
155
meta/recipes-extended/libtirpc/libtirpc/CVE-2021-46828.patch
Normal file
155
meta/recipes-extended/libtirpc/libtirpc/CVE-2021-46828.patch
Normal file
@@ -0,0 +1,155 @@
|
||||
From 48309e7cb230fc539c3edab0b3363f8ce973194f Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Thu, 28 Jul 2022 09:11:04 +0530
|
||||
Subject: [PATCH] CVE-2021-46828
|
||||
|
||||
Upstream-Status: Backport [http://git.linux-nfs.org/?p=steved/libtirpc.git;a=commit;h=86529758570cef4c73fb9b9c4104fdc510f701ed}
|
||||
CVE: CVE-2021-46828
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
src/svc.c | 17 +++++++++++++-
|
||||
src/svc_vc.c | 62 +++++++++++++++++++++++++++++++++++++++++++++++++++-
|
||||
2 files changed, 77 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/src/svc.c b/src/svc.c
|
||||
index 6db164b..3a8709f 100644
|
||||
--- a/src/svc.c
|
||||
+++ b/src/svc.c
|
||||
@@ -57,7 +57,7 @@
|
||||
|
||||
#define max(a, b) (a > b ? a : b)
|
||||
|
||||
-static SVCXPRT **__svc_xports;
|
||||
+SVCXPRT **__svc_xports;
|
||||
int __svc_maxrec;
|
||||
|
||||
/*
|
||||
@@ -194,6 +194,21 @@ __xprt_do_unregister (xprt, dolock)
|
||||
rwlock_unlock (&svc_fd_lock);
|
||||
}
|
||||
|
||||
+int
|
||||
+svc_open_fds()
|
||||
+{
|
||||
+ int ix;
|
||||
+ int nfds = 0;
|
||||
+
|
||||
+ rwlock_rdlock (&svc_fd_lock);
|
||||
+ for (ix = 0; ix < svc_max_pollfd; ++ix) {
|
||||
+ if (svc_pollfd[ix].fd != -1)
|
||||
+ nfds++;
|
||||
+ }
|
||||
+ rwlock_unlock (&svc_fd_lock);
|
||||
+ return (nfds);
|
||||
+}
|
||||
+
|
||||
/*
|
||||
* Add a service program to the callout list.
|
||||
* The dispatch routine will be called when a rpc request for this
|
||||
diff --git a/src/svc_vc.c b/src/svc_vc.c
|
||||
index c23cd36..1729963 100644
|
||||
--- a/src/svc_vc.c
|
||||
+++ b/src/svc_vc.c
|
||||
@@ -64,6 +64,8 @@
|
||||
|
||||
|
||||
extern rwlock_t svc_fd_lock;
|
||||
+extern SVCXPRT **__svc_xports;
|
||||
+extern int svc_open_fds();
|
||||
|
||||
static SVCXPRT *makefd_xprt(int, u_int, u_int);
|
||||
static bool_t rendezvous_request(SVCXPRT *, struct rpc_msg *);
|
||||
@@ -82,6 +84,7 @@ static void svc_vc_ops(SVCXPRT *);
|
||||
static bool_t svc_vc_control(SVCXPRT *xprt, const u_int rq, void *in);
|
||||
static bool_t svc_vc_rendezvous_control (SVCXPRT *xprt, const u_int rq,
|
||||
void *in);
|
||||
+static int __svc_destroy_idle(int timeout);
|
||||
|
||||
struct cf_rendezvous { /* kept in xprt->xp_p1 for rendezvouser */
|
||||
u_int sendsize;
|
||||
@@ -312,13 +315,14 @@ done:
|
||||
return (xprt);
|
||||
}
|
||||
|
||||
+
|
||||
/*ARGSUSED*/
|
||||
static bool_t
|
||||
rendezvous_request(xprt, msg)
|
||||
SVCXPRT *xprt;
|
||||
struct rpc_msg *msg;
|
||||
{
|
||||
- int sock, flags;
|
||||
+ int sock, flags, nfds, cnt;
|
||||
struct cf_rendezvous *r;
|
||||
struct cf_conn *cd;
|
||||
struct sockaddr_storage addr;
|
||||
@@ -378,6 +382,16 @@ again:
|
||||
|
||||
gettimeofday(&cd->last_recv_time, NULL);
|
||||
|
||||
+ nfds = svc_open_fds();
|
||||
+ if (nfds >= (_rpc_dtablesize() / 5) * 4) {
|
||||
+ /* destroy idle connections */
|
||||
+ cnt = __svc_destroy_idle(15);
|
||||
+ if (cnt == 0) {
|
||||
+ /* destroy least active */
|
||||
+ __svc_destroy_idle(0);
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
return (FALSE); /* there is never an rpc msg to be processed */
|
||||
}
|
||||
|
||||
@@ -819,3 +833,49 @@ __svc_clean_idle(fd_set *fds, int timeout, bool_t cleanblock)
|
||||
{
|
||||
return FALSE;
|
||||
}
|
||||
+
|
||||
+static int
|
||||
+__svc_destroy_idle(int timeout)
|
||||
+{
|
||||
+ int i, ncleaned = 0;
|
||||
+ SVCXPRT *xprt, *least_active;
|
||||
+ struct timeval tv, tdiff, tmax;
|
||||
+ struct cf_conn *cd;
|
||||
+
|
||||
+ gettimeofday(&tv, NULL);
|
||||
+ tmax.tv_sec = tmax.tv_usec = 0;
|
||||
+ least_active = NULL;
|
||||
+ rwlock_wrlock(&svc_fd_lock);
|
||||
+
|
||||
+ for (i = 0; i <= svc_max_pollfd; i++) {
|
||||
+ if (svc_pollfd[i].fd == -1)
|
||||
+ continue;
|
||||
+ xprt = __svc_xports[i];
|
||||
+ if (xprt == NULL || xprt->xp_ops == NULL ||
|
||||
+ xprt->xp_ops->xp_recv != svc_vc_recv)
|
||||
+ continue;
|
||||
+ cd = (struct cf_conn *)xprt->xp_p1;
|
||||
+ if (!cd->nonblock)
|
||||
+ continue;
|
||||
+ if (timeout == 0) {
|
||||
+ timersub(&tv, &cd->last_recv_time, &tdiff);
|
||||
+ if (timercmp(&tdiff, &tmax, >)) {
|
||||
+ tmax = tdiff;
|
||||
+ least_active = xprt;
|
||||
+ }
|
||||
+ continue;
|
||||
+ }
|
||||
+ if (tv.tv_sec - cd->last_recv_time.tv_sec > timeout) {
|
||||
+ __xprt_unregister_unlocked(xprt);
|
||||
+ __svc_vc_dodestroy(xprt);
|
||||
+ ncleaned++;
|
||||
+ }
|
||||
+ }
|
||||
+ if (timeout == 0 && least_active != NULL) {
|
||||
+ __xprt_unregister_unlocked(least_active);
|
||||
+ __svc_vc_dodestroy(least_active);
|
||||
+ ncleaned++;
|
||||
+ }
|
||||
+ rwlock_unlock(&svc_fd_lock);
|
||||
+ return (ncleaned);
|
||||
+}
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -9,7 +9,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=f835cce8852481e4b2bbbdd23b5e47f3 \
|
||||
|
||||
PROVIDES = "virtual/librpc"
|
||||
|
||||
SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2"
|
||||
SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2 \
|
||||
file://CVE-2021-46828.patch \
|
||||
"
|
||||
UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/libtirpc/files/libtirpc/"
|
||||
UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)/"
|
||||
SRC_URI[md5sum] = "b25f9cc18bfad50f7c446c77f4ae00bb"
|
||||
|
||||
67
meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch
Normal file
67
meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch
Normal file
@@ -0,0 +1,67 @@
|
||||
From 731d698377dbd1f5b1b90efeb8094602ed59fc40 Mon Sep 17 00:00:00 2001
|
||||
From: Nils Bars <nils.bars@t-online.de>
|
||||
Date: Mon, 17 Jan 2022 16:53:16 +0000
|
||||
Subject: [PATCH] Fix null pointer dereference and use of uninitialized data
|
||||
|
||||
This fixes a bug that causes use of uninitialized heap data if `readbuf` fails
|
||||
to read as many bytes as indicated by the extra field length attribute.
|
||||
Furthermore, this fixes a null pointer dereference if an archive contains an
|
||||
`EF_UNIPATH` extra field but does not have a filename set.
|
||||
---
|
||||
fileio.c | 5 ++++-
|
||||
process.c | 6 +++++-
|
||||
2 files changed, 9 insertions(+), 2 deletions(-)
|
||||
---
|
||||
|
||||
Patch from:
|
||||
https://bugs.launchpad.net/ubuntu/+source/unzip/+bug/1957077
|
||||
https://launchpadlibrarian.net/580782282/0001-Fix-null-pointer-dereference-and-use-of-uninitialized-data.patch
|
||||
Regenerated to apply without offsets.
|
||||
|
||||
CVE: CVE-2021-4217
|
||||
|
||||
Upstream-Status: Pending [infozip upstream inactive]
|
||||
|
||||
Signed-off-by: Joe Slater <joe.slater@windriver.com>
|
||||
|
||||
|
||||
diff --git a/fileio.c b/fileio.c
|
||||
index 14460f3..1dc319e 100644
|
||||
--- a/fileio.c
|
||||
+++ b/fileio.c
|
||||
@@ -2301,8 +2301,11 @@ int do_string(__G__ length, option) /* return PK-type error code */
|
||||
seek_zipf(__G__ G.cur_zipfile_bufstart - G.extra_bytes +
|
||||
(G.inptr-G.inbuf) + length);
|
||||
} else {
|
||||
- if (readbuf(__G__ (char *)G.extra_field, length) == 0)
|
||||
+ unsigned bytes_read = readbuf(__G__ (char *)G.extra_field, length);
|
||||
+ if (bytes_read == 0)
|
||||
return PK_EOF;
|
||||
+ if (bytes_read != length)
|
||||
+ return PK_ERR;
|
||||
/* Looks like here is where extra fields are read */
|
||||
if (getZip64Data(__G__ G.extra_field, length) != PK_COOL)
|
||||
{
|
||||
diff --git a/process.c b/process.c
|
||||
index 5f8f6c6..de843a5 100644
|
||||
--- a/process.c
|
||||
+++ b/process.c
|
||||
@@ -2058,10 +2058,14 @@ int getUnicodeData(__G__ ef_buf, ef_len)
|
||||
G.unipath_checksum = makelong(offset + ef_buf);
|
||||
offset += 4;
|
||||
|
||||
+ if (!G.filename_full) {
|
||||
+ /* Check if we have a unicode extra section but no filename set */
|
||||
+ return PK_ERR;
|
||||
+ }
|
||||
+
|
||||
/*
|
||||
* Compute 32-bit crc
|
||||
*/
|
||||
-
|
||||
chksum = crc32(chksum, (uch *)(G.filename_full),
|
||||
strlen(G.filename_full));
|
||||
|
||||
--
|
||||
2.32.0
|
||||
|
||||
39
meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch
Normal file
39
meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch
Normal file
@@ -0,0 +1,39 @@
|
||||
https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1010355
|
||||
|
||||
CVE: CVE-2022-0529
|
||||
Upstream-Status: Inactive-Upstream [need a new release]
|
||||
|
||||
diff --git a/process.c b/process.c
|
||||
index d2a846e..99b9c7b 100644
|
||||
--- a/process.c
|
||||
+++ b/process.c
|
||||
@@ -2507,13 +2507,15 @@ char *wide_to_local_string(wide_string, escape_all)
|
||||
char buf[9];
|
||||
char *buffer = NULL;
|
||||
char *local_string = NULL;
|
||||
+ size_t buffer_size;
|
||||
|
||||
for (wsize = 0; wide_string[wsize]; wsize++) ;
|
||||
|
||||
if (max_bytes < MAX_ESCAPE_BYTES)
|
||||
max_bytes = MAX_ESCAPE_BYTES;
|
||||
|
||||
- if ((buffer = (char *)malloc(wsize * max_bytes + 1)) == NULL) {
|
||||
+ buffer_size = wsize * max_bytes + 1;
|
||||
+ if ((buffer = (char *)malloc(buffer_size)) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
@@ -2552,7 +2554,11 @@ char *wide_to_local_string(wide_string, escape_all)
|
||||
/* no MB for this wide */
|
||||
/* use escape for wide character */
|
||||
char *escape_string = wide_to_escape_string(wide_string[i]);
|
||||
- strcat(buffer, escape_string);
|
||||
+ size_t buffer_len = strlen(buffer);
|
||||
+ size_t escape_string_len = strlen(escape_string);
|
||||
+ if (buffer_len + escape_string_len + 1 > buffer_size)
|
||||
+ escape_string_len = buffer_size - buffer_len - 1;
|
||||
+ strncat(buffer, escape_string, escape_string_len);
|
||||
free(escape_string);
|
||||
}
|
||||
}
|
||||
33
meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch
Normal file
33
meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch
Normal file
@@ -0,0 +1,33 @@
|
||||
https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1010355
|
||||
|
||||
CVE: CVE-2022-0530
|
||||
Upstream-Status: Inactive-Upstream [need a new release]
|
||||
|
||||
diff --git a/fileio.c b/fileio.c
|
||||
index 6290824..77e4b5f 100644
|
||||
--- a/fileio.c
|
||||
+++ b/fileio.c
|
||||
@@ -2361,6 +2361,9 @@ int do_string(__G__ length, option) /* return PK-type error code */
|
||||
/* convert UTF-8 to local character set */
|
||||
fn = utf8_to_local_string(G.unipath_filename,
|
||||
G.unicode_escape_all);
|
||||
+ if (fn == NULL)
|
||||
+ return PK_ERR;
|
||||
+
|
||||
/* make sure filename is short enough */
|
||||
if (strlen(fn) >= FILNAMSIZ) {
|
||||
fn[FILNAMSIZ - 1] = '\0';
|
||||
diff --git a/process.c b/process.c
|
||||
index d2a846e..715bc0f 100644
|
||||
--- a/process.c
|
||||
+++ b/process.c
|
||||
@@ -2605,6 +2605,8 @@ char *utf8_to_local_string(utf8_string, escape_all)
|
||||
int escape_all;
|
||||
{
|
||||
zwchar *wide = utf8_to_wide_string(utf8_string);
|
||||
+ if (wide == NULL)
|
||||
+ return NULL;
|
||||
char *loc = wide_to_local_string(wide, escape_all);
|
||||
free(wide);
|
||||
return loc;
|
||||
|
||||
@@ -26,6 +26,9 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/infozip/UnZip%206.x%20%28latest%29/UnZip%206.0/
|
||||
file://CVE-2019-13232_p1.patch \
|
||||
file://CVE-2019-13232_p2.patch \
|
||||
file://CVE-2019-13232_p3.patch \
|
||||
file://CVE-2021-4217.patch \
|
||||
file://CVE-2022-0529.patch \
|
||||
file://CVE-2022-0530.patch \
|
||||
"
|
||||
UPSTREAM_VERSION_UNKNOWN = "1"
|
||||
|
||||
|
||||
@@ -0,0 +1,61 @@
|
||||
From bdf3a2630c02a63803309cf0ad4b274234c814ce Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Tue, 9 Aug 2022 09:45:42 +0530
|
||||
Subject: [PATCH] CVE-2021-46829
|
||||
|
||||
Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/gdk-pixbuf/-/commit/5398f04d772f7f8baf5265715696ed88db0f0512]
|
||||
CVE: CVE-2021-46829
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
gdk-pixbuf/io-gif-animation.c | 21 +++++++++++++--------
|
||||
1 file changed, 13 insertions(+), 8 deletions(-)
|
||||
|
||||
diff --git a/gdk-pixbuf/io-gif-animation.c b/gdk-pixbuf/io-gif-animation.c
|
||||
index d742963..9544391 100644
|
||||
--- a/gdk-pixbuf/io-gif-animation.c
|
||||
+++ b/gdk-pixbuf/io-gif-animation.c
|
||||
@@ -364,7 +364,7 @@ composite_frame (GdkPixbufGifAnim *anim, GdkPixbufFrame *frame)
|
||||
for (i = 0; i < n_indexes; i++) {
|
||||
guint8 index = index_buffer[i];
|
||||
guint x, y;
|
||||
- int offset;
|
||||
+ gsize offset;
|
||||
|
||||
if (index == frame->transparent_index)
|
||||
continue;
|
||||
@@ -374,11 +374,13 @@ composite_frame (GdkPixbufGifAnim *anim, GdkPixbufFrame *frame)
|
||||
if (x >= anim->width || y >= anim->height)
|
||||
continue;
|
||||
|
||||
- offset = y * gdk_pixbuf_get_rowstride (anim->last_frame_data) + x * 4;
|
||||
- pixels[offset + 0] = frame->color_map[index * 3 + 0];
|
||||
- pixels[offset + 1] = frame->color_map[index * 3 + 1];
|
||||
- pixels[offset + 2] = frame->color_map[index * 3 + 2];
|
||||
- pixels[offset + 3] = 255;
|
||||
+ if (g_size_checked_mul (&offset, gdk_pixbuf_get_rowstride (anim->last_frame_data), y) &&
|
||||
+ g_size_checked_add (&offset, offset, x * 4)) {
|
||||
+ pixels[offset + 0] = frame->color_map[index * 3 + 0];
|
||||
+ pixels[offset + 1] = frame->color_map[index * 3 + 1];
|
||||
+ pixels[offset + 2] = frame->color_map[index * 3 + 2];
|
||||
+ pixels[offset + 3] = 255;
|
||||
+ }
|
||||
}
|
||||
|
||||
out:
|
||||
@@ -443,8 +445,11 @@ gdk_pixbuf_gif_anim_iter_get_pixbuf (GdkPixbufAnimationIter *anim_iter)
|
||||
x_end = MIN (anim->last_frame->x_offset + anim->last_frame->width, anim->width);
|
||||
y_end = MIN (anim->last_frame->y_offset + anim->last_frame->height, anim->height);
|
||||
for (y = anim->last_frame->y_offset; y < y_end; y++) {
|
||||
- guchar *line = pixels + y * gdk_pixbuf_get_rowstride (anim->last_frame_data) + anim->last_frame->x_offset * 4;
|
||||
- memset (line, 0, (x_end - anim->last_frame->x_offset) * 4);
|
||||
+ gsize offset;
|
||||
+ if (g_size_checked_mul (&offset, gdk_pixbuf_get_rowstride (anim->last_frame_data), y) &&
|
||||
+ g_size_checked_add (&offset, offset, anim->last_frame->x_offset * 4)) {
|
||||
+ memset (pixels + offset, 0, (x_end - anim->last_frame->x_offset) * 4);
|
||||
+ }
|
||||
}
|
||||
break;
|
||||
case GDK_PIXBUF_FRAME_REVERT:
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -26,6 +26,7 @@ SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz \
|
||||
file://missing-test-data.patch \
|
||||
file://CVE-2020-29385.patch \
|
||||
file://CVE-2021-20240.patch \
|
||||
file://CVE-2021-46829.patch \
|
||||
"
|
||||
|
||||
SRC_URI_append_class-target = " \
|
||||
|
||||
133
meta/recipes-graphics/jpeg/files/CVE-2021-46822.patch
Normal file
133
meta/recipes-graphics/jpeg/files/CVE-2021-46822.patch
Normal file
@@ -0,0 +1,133 @@
|
||||
From f35fd27ec641c42d6b115bfa595e483ec58188d2 Mon Sep 17 00:00:00 2001
|
||||
From: DRC <information@libjpeg-turbo.org>
|
||||
Date: Tue, 6 Apr 2021 12:51:03 -0500
|
||||
Subject: [PATCH] tjLoadImage: Fix issues w/loading 16-bit PPMs/PGMs
|
||||
|
||||
- The PPM reader now throws an error rather than segfaulting (due to a
|
||||
buffer overrun) if an application attempts to load a 16-bit PPM file
|
||||
into a grayscale uncompressed image buffer. No known applications
|
||||
allowed that (not even the test applications in libjpeg-turbo),
|
||||
because that mode of operation was never expected to work and did not
|
||||
work under any circumstances. (In fact, it was necessary to modify
|
||||
TJBench in order to reproduce the issue outside of a fuzzing
|
||||
environment.) This was purely a matter of making the library bow out
|
||||
gracefully rather than crash if an application tries to do something
|
||||
really stupid.
|
||||
|
||||
- The PPM reader now throws an error rather than generating incorrect
|
||||
pixels if an application attempts to load a 16-bit PGM file into an
|
||||
RGB uncompressed image buffer.
|
||||
|
||||
- The PPM reader now correctly loads 16-bit PPM files into extended
|
||||
RGB uncompressed image buffers. (Previously it generated incorrect
|
||||
pixels unless the input colorspace was JCS_RGB or JCS_EXT_RGB.)
|
||||
|
||||
The only way that users could have potentially encountered these issues
|
||||
was through the tjLoadImage() function. cjpeg and TJBench were
|
||||
unaffected.
|
||||
|
||||
CVE: CVE-2021-46822
|
||||
Upstream-Status: Backport [https://github.com/libjpeg-turbo/libjpeg-turbo/commit/f35fd27ec641c42d6b115bfa595e483ec58188d2.patch]
|
||||
Comment: Refreshed hunks from ChangeLog.md
|
||||
Refreshed hunks from rdppm.c
|
||||
|
||||
Signed-off-by: Bhabu Bindu <bhabu.bindu@kpit.com>
|
||||
|
||||
---
|
||||
ChangeLog.md | 10 ++++++++++
|
||||
rdppm.c | 26 ++++++++++++++++++++------
|
||||
2 files changed, 30 insertions(+), 6 deletions(-)
|
||||
|
||||
diff --git a/ChangeLog.md b/ChangeLog.md
|
||||
index 968969c6b..12e730a0e 100644
|
||||
--- a/ChangeLog.md
|
||||
+++ b/ChangeLog.md
|
||||
@@ -44,6 +44,15 @@
|
||||
that maximum value was less than 255. libjpeg-turbo 1.5.0 already included a
|
||||
similar fix for binary PPM/PGM files with maximum values greater than 255.
|
||||
|
||||
+7. The PPM reader now throws an error, rather than segfaulting (due to a buffer
|
||||
+overrun) or generating incorrect pixels, if an application attempts to use the
|
||||
+`tjLoadImage()` function to load a 16-bit binary PPM file (a binary PPM file
|
||||
+with a maximum value greater than 255) into a grayscale image buffer or to load
|
||||
+a 16-bit binary PGM file into an RGB image buffer.
|
||||
+
|
||||
+8. Fixed an issue in the PPM reader that caused incorrect pixels to be
|
||||
+generated when using the `tjLoadImage()` function to load a 16-bit binary PPM
|
||||
+file into an extended RGB image buffer.
|
||||
|
||||
2.0.3
|
||||
=====
|
||||
diff --git a/rdppm.c b/rdppm.c
|
||||
index c4c937e8a..6ac8fdbf7 100644
|
||||
--- a/rdppm.c
|
||||
+++ b/rdppm.c
|
||||
@@ -5,7 +5,7 @@
|
||||
* Copyright (C) 1991-1997, Thomas G. Lane.
|
||||
* Modified 2009 by Bill Allombert, Guido Vollbeding.
|
||||
* libjpeg-turbo Modifications:
|
||||
- * Copyright (C) 2015-2017, 2020, D. R. Commander.
|
||||
+ * Copyright (C) 2015-2017, 2020-2021, D. R. Commander.
|
||||
* For conditions of distribution and use, see the accompanying README.ijg
|
||||
* file.
|
||||
*
|
||||
@@ -516,6 +516,11 @@ get_word_rgb_row(j_compress_ptr cinfo, cjpeg_source_ptr sinfo)
|
||||
register JSAMPLE *rescale = source->rescale;
|
||||
JDIMENSION col;
|
||||
unsigned int maxval = source->maxval;
|
||||
+ register int rindex = rgb_red[cinfo->in_color_space];
|
||||
+ register int gindex = rgb_green[cinfo->in_color_space];
|
||||
+ register int bindex = rgb_blue[cinfo->in_color_space];
|
||||
+ register int aindex = alpha_index[cinfo->in_color_space];
|
||||
+ register int ps = rgb_pixelsize[cinfo->in_color_space];
|
||||
|
||||
if (!ReadOK(source->pub.input_file, source->iobuffer, source->buffer_width))
|
||||
ERREXIT(cinfo, JERR_INPUT_EOF);
|
||||
@@ -527,17 +532,20 @@ get_word_rgb_row(j_compress_ptr cinfo, cjpeg_source_ptr sinfo)
|
||||
temp |= UCH(*bufferptr++);
|
||||
if (temp > maxval)
|
||||
ERREXIT(cinfo, JERR_PPM_OUTOFRANGE);
|
||||
- *ptr++ = rescale[temp];
|
||||
+ ptr[rindex] = rescale[temp];
|
||||
temp = UCH(*bufferptr++) << 8;
|
||||
temp |= UCH(*bufferptr++);
|
||||
if (temp > maxval)
|
||||
ERREXIT(cinfo, JERR_PPM_OUTOFRANGE);
|
||||
- *ptr++ = rescale[temp];
|
||||
+ ptr[gindex] = rescale[temp];
|
||||
temp = UCH(*bufferptr++) << 8;
|
||||
temp |= UCH(*bufferptr++);
|
||||
if (temp > maxval)
|
||||
ERREXIT(cinfo, JERR_PPM_OUTOFRANGE);
|
||||
- *ptr++ = rescale[temp];
|
||||
+ ptr[bindex] = rescale[temp];
|
||||
+ if (aindex >= 0)
|
||||
+ ptr[aindex] = 0xFF;
|
||||
+ ptr += ps;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
@@ -624,7 +632,10 @@ start_input_ppm(j_compress_ptr cinfo, cjpeg_source_ptr sinfo)
|
||||
cinfo->in_color_space = JCS_GRAYSCALE;
|
||||
TRACEMS2(cinfo, 1, JTRC_PGM, w, h);
|
||||
if (maxval > 255) {
|
||||
- source->pub.get_pixel_rows = get_word_gray_row;
|
||||
+ if (cinfo->in_color_space == JCS_GRAYSCALE)
|
||||
+ source->pub.get_pixel_rows = get_word_gray_row;
|
||||
+ else
|
||||
+ ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE);
|
||||
} else if (maxval == MAXJSAMPLE && sizeof(JSAMPLE) == sizeof(U_CHAR) &&
|
||||
cinfo->in_color_space == JCS_GRAYSCALE) {
|
||||
source->pub.get_pixel_rows = get_raw_row;
|
||||
@@ -657,7 +657,10 @@
|
||||
cinfo->in_color_space = JCS_EXT_RGB;
|
||||
TRACEMS2(cinfo, 1, JTRC_PPM, w, h);
|
||||
if (maxval > 255) {
|
||||
- source->pub.get_pixel_rows = get_word_rgb_row;
|
||||
+ if (IsExtRGB(cinfo->in_color_space))
|
||||
+ source->pub.get_pixel_rows = get_word_rgb_row;
|
||||
+ else
|
||||
+ ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE);
|
||||
} else if (maxval == MAXJSAMPLE && sizeof(JSAMPLE) == sizeof(U_CHAR) &&
|
||||
(cinfo->in_color_space == JCS_EXT_RGB
|
||||
#if RGB_RED == 0 && RGB_GREEN == 1 && RGB_BLUE == 2 && RGB_PIXELSIZE == 3
|
||||
@@ -13,6 +13,7 @@ DEPENDS_append_x86_class-target = " nasm-native"
|
||||
SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz \
|
||||
file://0001-libjpeg-turbo-fix-package_qa-error.patch \
|
||||
file://CVE-2020-13790.patch \
|
||||
file://CVE-2021-46822.patch \
|
||||
"
|
||||
|
||||
SRC_URI[md5sum] = "d01d9e0c28c27bc0de9f4e2e8ff49855"
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
From 71514e74f35f2b51ca24062573d6d913525b30db Mon Sep 17 00:00:00 2001
|
||||
From: Konrad Weihmann <kweihmann@outlook.com>
|
||||
Date: Mon, 9 May 2022 12:57:57 +0200
|
||||
Subject: [PATCH] Makefile: replace mkdir by install
|
||||
|
||||
mkdir -p creates paths that are bound to user's settings and therefore
|
||||
can lead to different file mode bits of the base paths accross different
|
||||
machines.
|
||||
Use install instead, as this tool is not prone to such behavior.
|
||||
|
||||
Signed-off-by: Konrad Weihmann <kweihmann@outlook.com>
|
||||
Upstream-Status: Submitted [https://lore.kernel.org/linux-firmware/PR2PR09MB310088EA719E6D7CA5C268F1A8C69@PR2PR09MB3100.eurprd09.prod.outlook.com/]
|
||||
---
|
||||
Makefile | 2 +-
|
||||
carl9170fw/toolchain/Makefile | 4 ++--
|
||||
copy-firmware.sh | 6 +++---
|
||||
3 files changed, 6 insertions(+), 6 deletions(-)
|
||||
|
||||
diff --git a/Makefile b/Makefile
|
||||
index e1c362f..83a0ec6 100644
|
||||
--- a/Makefile
|
||||
+++ b/Makefile
|
||||
@@ -9,5 +9,5 @@ check:
|
||||
@./check_whence.py
|
||||
|
||||
install:
|
||||
- mkdir -p $(DESTDIR)$(FIRMWAREDIR)
|
||||
+ install -d $(DESTDIR)$(FIRMWAREDIR)
|
||||
./copy-firmware.sh $(DESTDIR)$(FIRMWAREDIR)
|
||||
diff --git a/carl9170fw/toolchain/Makefile b/carl9170fw/toolchain/Makefile
|
||||
index 2b25ffe..aaea8e8 100644
|
||||
--- a/carl9170fw/toolchain/Makefile
|
||||
+++ b/carl9170fw/toolchain/Makefile
|
||||
@@ -46,14 +46,14 @@ src/gcc-$(GCC_VER): src/$(GCC_TAR) src/newlib-$(NEWLIB_VER)
|
||||
ln -s $(BASEDIR)/src/newlib-$(NEWLIB_VER)/libgloss $@
|
||||
|
||||
binutils: src/binutils-$(BINUTILS_VER)
|
||||
- mkdir -p build/binutils
|
||||
+ install -d build/binutils
|
||||
cd build/binutils; \
|
||||
$(BASEDIR)/$</configure --target=sh-elf --prefix=$(BASEDIR)/inst; \
|
||||
$(MAKE) -j3; \
|
||||
$(MAKE) install
|
||||
|
||||
gcc: src/gcc-$(GCC_VER) binutils
|
||||
- mkdir -p build/gcc
|
||||
+ install -d build/gcc
|
||||
cd build/gcc; \
|
||||
$(BASEDIR)/$</configure --target=sh-elf --prefix=$(BASEDIR)/inst -enable-languages=c --without-pkgversion --with-newlib; \
|
||||
$(MAKE) -j3; \
|
||||
diff --git a/copy-firmware.sh b/copy-firmware.sh
|
||||
index 9b46b63..bbacb92 100755
|
||||
--- a/copy-firmware.sh
|
||||
+++ b/copy-firmware.sh
|
||||
@@ -34,7 +34,7 @@ done
|
||||
grep '^File:' WHENCE | sed -e's/^File: *//g' -e's/"//g' | while read f; do
|
||||
test -f "$f" || continue
|
||||
$verbose "copying file $f"
|
||||
- mkdir -p $destdir/$(dirname "$f")
|
||||
+ install -d $destdir/$(dirname "$f")
|
||||
cp -d "$f" $destdir/"$f"
|
||||
done
|
||||
|
||||
@@ -42,7 +42,7 @@ grep -E '^Link:' WHENCE | sed -e's/^Link: *//g' -e's/-> //g' | while read f d; d
|
||||
if test -L "$f"; then
|
||||
test -f "$destdir/$f" && continue
|
||||
$verbose "copying link $f"
|
||||
- mkdir -p $destdir/$(dirname "$f")
|
||||
+ install -d $destdir/$(dirname "$f")
|
||||
cp -d "$f" $destdir/"$f"
|
||||
|
||||
if test "x$d" != "x"; then
|
||||
@@ -63,7 +63,7 @@ grep -E '^Link:' WHENCE | sed -e's/^Link: *//g' -e's/-> //g' | while read f d; d
|
||||
fi
|
||||
else
|
||||
$verbose "creating link $f -> $d"
|
||||
- mkdir -p $destdir/$(dirname "$f")
|
||||
+ install -d $destdir/$(dirname "$f")
|
||||
ln -sf "$d" "$destdir/$f"
|
||||
fi
|
||||
done
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -27,7 +27,6 @@ LICENSE = "\
|
||||
& Firmware-go7007 \
|
||||
& Firmware-GPLv2 \
|
||||
& Firmware-hfi1_firmware \
|
||||
& Firmware-i2400m \
|
||||
& Firmware-i915 \
|
||||
& Firmware-ibt_firmware \
|
||||
& Firmware-ice \
|
||||
@@ -57,7 +56,6 @@ LICENSE = "\
|
||||
& Firmware-rtlwifi_firmware \
|
||||
& Firmware-imx-sdma_firmware \
|
||||
& Firmware-siano \
|
||||
& Firmware-tda7706-firmware \
|
||||
& Firmware-ti-connectivity \
|
||||
& Firmware-ti-keystone \
|
||||
& Firmware-ueagle-atm4-firmware \
|
||||
@@ -91,7 +89,6 @@ LIC_FILES_CHKSUM = "file://LICENCE.Abilis;md5=b5ee3f410780e56711ad48eadc22b8bc \
|
||||
file://LICENCE.go7007;md5=c0bb9f6aaaba55b0529ee9b30aa66beb \
|
||||
file://GPL-2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
|
||||
file://LICENSE.hfi1_firmware;md5=5e7b6e586ce7339d12689e49931ad444 \
|
||||
file://LICENCE.i2400m;md5=14b901969e23c41881327c0d9e4b7d36 \
|
||||
file://LICENSE.i915;md5=2b0b2e0d20984affd4490ba2cba02570 \
|
||||
file://LICENCE.ibt_firmware;md5=fdbee1ddfe0fb7ab0b2fcd6b454a366b \
|
||||
file://LICENSE.ice;md5=742ab4850f2670792940e6d15c974b2f \
|
||||
@@ -123,7 +120,6 @@ LIC_FILES_CHKSUM = "file://LICENCE.Abilis;md5=b5ee3f410780e56711ad48eadc22b8bc \
|
||||
file://LICENCE.rtlwifi_firmware.txt;md5=00d06cfd3eddd5a2698948ead2ad54a5 \
|
||||
file://LICENSE.sdma_firmware;md5=51e8c19ecc2270f4b8ea30341ad63ce9 \
|
||||
file://LICENCE.siano;md5=4556c1bf830067f12ca151ad953ec2a5 \
|
||||
file://LICENCE.tda7706-firmware.txt;md5=835997cf5e3c131d0dddd695c7d9103e \
|
||||
file://LICENCE.ti-connectivity;md5=c5e02be633f1499c109d1652514d85ec \
|
||||
file://LICENCE.ti-keystone;md5=3a86335d32864b0bef996bee26cc0f2c \
|
||||
file://LICENCE.ueagle-atm4-firmware;md5=4ed7ea6b507ccc583b9d594417714118 \
|
||||
@@ -132,8 +128,11 @@ LIC_FILES_CHKSUM = "file://LICENCE.Abilis;md5=b5ee3f410780e56711ad48eadc22b8bc \
|
||||
file://LICENCE.xc4000;md5=0ff51d2dc49fce04814c9155081092f0 \
|
||||
file://LICENCE.xc5000;md5=1e170c13175323c32c7f4d0998d53f66 \
|
||||
file://LICENCE.xc5000c;md5=12b02efa3049db65d524aeb418dd87ca \
|
||||
file://WHENCE;md5=d3eb82686904888f8bbbe8d865371404 \
|
||||
file://WHENCE;md5=${WHENCE_CHKSUM} \
|
||||
"
|
||||
# WHENCE checksum is defined separately to ease overriding it if
|
||||
# class-devupstream is selected.
|
||||
WHENCE_CHKSUM = "def08711eb23ba967fb7e1f8cff66178"
|
||||
|
||||
# These are not common licenses, set NO_GENERIC_LICENSE for them
|
||||
# so that the license files will be copied from fetched source
|
||||
@@ -159,7 +158,6 @@ NO_GENERIC_LICENSE[Firmware-fw_sst_0f28] = "LICENCE.fw_sst_0f28"
|
||||
NO_GENERIC_LICENSE[Firmware-go7007] = "LICENCE.go7007"
|
||||
NO_GENERIC_LICENSE[Firmware-GPLv2] = "GPL-2"
|
||||
NO_GENERIC_LICENSE[Firmware-hfi1_firmware] = "LICENSE.hfi1_firmware"
|
||||
NO_GENERIC_LICENSE[Firmware-i2400m] = "LICENCE.i2400m"
|
||||
NO_GENERIC_LICENSE[Firmware-i915] = "LICENSE.i915"
|
||||
NO_GENERIC_LICENSE[Firmware-ibt_firmware] = "LICENCE.ibt_firmware"
|
||||
NO_GENERIC_LICENSE[Firmware-ice] = "LICENSE.ice"
|
||||
@@ -190,7 +188,6 @@ NO_GENERIC_LICENSE[Firmware-ralink-firmware] = "LICENCE.ralink-firmware.txt"
|
||||
NO_GENERIC_LICENSE[Firmware-rtlwifi_firmware] = "LICENCE.rtlwifi_firmware.txt"
|
||||
NO_GENERIC_LICENSE[Firmware-siano] = "LICENCE.siano"
|
||||
NO_GENERIC_LICENSE[Firmware-imx-sdma_firmware] = "LICENSE.sdma_firmware"
|
||||
NO_GENERIC_LICENSE[Firmware-tda7706-firmware] = "LICENCE.tda7706-firmware.txt"
|
||||
NO_GENERIC_LICENSE[Firmware-ti-connectivity] = "LICENCE.ti-connectivity"
|
||||
NO_GENERIC_LICENSE[Firmware-ti-keystone] = "LICENCE.ti-keystone"
|
||||
NO_GENERIC_LICENSE[Firmware-ueagle-atm4-firmware] = "LICENCE.ueagle-atm4-firmware"
|
||||
@@ -205,10 +202,14 @@ PE = "1"
|
||||
|
||||
SRC_URI = "\
|
||||
${KERNELORG_MIRROR}/linux/kernel/firmware/${BPN}-${PV}.tar.xz \
|
||||
file://0001-Makefile-replace-mkdir-by-install.patch \
|
||||
"
|
||||
|
||||
SRC_URI[sha256sum] = "376e0b3d7b4f8aaa2abf7f5ab74803dcf14b06b94e3d841b1467cd9a2848255e"
|
||||
BBCLASSEXTEND = "devupstream:target"
|
||||
SRC_URI:class-devupstream = "git://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git;protocol=https;branch=main"
|
||||
# Pin this to the 20220509 release, override this in local.conf
|
||||
SRCREV:class-devupstream ?= "b19cbdca78ab2adfd210c91be15a22568e8b8cae"
|
||||
|
||||
SRC_URI[sha256sum] = "0abec827a035c82bdcabdf82aa37ded247bc682ef05861bd409ea6f477bab81d"
|
||||
|
||||
inherit allarch
|
||||
|
||||
@@ -1015,7 +1016,6 @@ LICENSE_${PN} = "\
|
||||
& Firmware-fw_sst_0f28 \
|
||||
& Firmware-go7007 \
|
||||
& Firmware-hfi1_firmware \
|
||||
& Firmware-i2400m \
|
||||
& Firmware-ibt_firmware \
|
||||
& Firmware-it913x \
|
||||
& Firmware-IntcSST2 \
|
||||
@@ -1036,7 +1036,6 @@ LICENSE_${PN} = "\
|
||||
& Firmware-ralink-firmware \
|
||||
& Firmware-imx-sdma_firmware \
|
||||
& Firmware-siano \
|
||||
& Firmware-tda7706-firmware \
|
||||
& Firmware-ti-connectivity \
|
||||
& Firmware-ti-keystone \
|
||||
& Firmware-ueagle-atm4-firmware \
|
||||
@@ -11,13 +11,13 @@ python () {
|
||||
raise bb.parse.SkipRecipe("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
|
||||
}
|
||||
|
||||
SRCREV_machine ?= "24d323fa0e17bcd62c9cfe1fd4153c304a06f38c"
|
||||
SRCREV_meta ?= "3fecb08507e286d1458497faaf31d1a07cc7d373"
|
||||
SRCREV_machine ?= "f6c9d6db383201a730e8d638995eae82acd4d8e7"
|
||||
SRCREV_meta ?= "028688aaad2b64e353d771ba5505a8666cd01696"
|
||||
|
||||
SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine \
|
||||
git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.4;destsuffix=${KMETA}"
|
||||
|
||||
LINUX_VERSION ?= "5.4.192"
|
||||
LINUX_VERSION ?= "5.4.209"
|
||||
|
||||
LIC_FILES_CHKSUM = "file://COPYING;md5=bbea815ee2795b2f4230826c0c6b8814"
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ KCONFIG_MODE = "--allnoconfig"
|
||||
|
||||
require recipes-kernel/linux/linux-yocto.inc
|
||||
|
||||
LINUX_VERSION ?= "5.4.192"
|
||||
LINUX_VERSION ?= "5.4.209"
|
||||
LIC_FILES_CHKSUM = "file://COPYING;md5=bbea815ee2795b2f4230826c0c6b8814"
|
||||
|
||||
DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
|
||||
@@ -15,9 +15,9 @@ DEPENDS += "openssl-native util-linux-native"
|
||||
KMETA = "kernel-meta"
|
||||
KCONF_BSP_AUDIT_LEVEL = "2"
|
||||
|
||||
SRCREV_machine_qemuarm ?= "460de085c07ab1a221317e6804c13657456c5368"
|
||||
SRCREV_machine ?= "b414a2fc5ce5f68c33d297d9cde4fef5437b773b"
|
||||
SRCREV_meta ?= "3fecb08507e286d1458497faaf31d1a07cc7d373"
|
||||
SRCREV_machine_qemuarm ?= "8f087017ff03465fa8d318c06a7e4e072c533daf"
|
||||
SRCREV_machine ?= "a4b7263158de8713dc85c5171aed99e3424a9f7c"
|
||||
SRCREV_meta ?= "028688aaad2b64e353d771ba5505a8666cd01696"
|
||||
|
||||
PV = "${LINUX_VERSION}+git${SRCPV}"
|
||||
|
||||
|
||||
@@ -12,16 +12,16 @@ KBRANCH_qemux86 ?= "v5.4/standard/base"
|
||||
KBRANCH_qemux86-64 ?= "v5.4/standard/base"
|
||||
KBRANCH_qemumips64 ?= "v5.4/standard/mti-malta64"
|
||||
|
||||
SRCREV_machine_qemuarm ?= "68a2ce69aaf2e8d96eef4aaccd70fc0ef7368a46"
|
||||
SRCREV_machine_qemuarm64 ?= "acfed0930d37a714d705645ff7cfbfbd0ad040e7"
|
||||
SRCREV_machine_qemumips ?= "e7046a2c8972e925cd2e6ac7f392abe87cbec5f5"
|
||||
SRCREV_machine_qemuppc ?= "997e06e0af674c27627eaa76a60b2f63cb16f38d"
|
||||
SRCREV_machine_qemuriscv64 ?= "85f0668fea1442bbcc2c8b1509d9f711b4b73649"
|
||||
SRCREV_machine_qemux86 ?= "85f0668fea1442bbcc2c8b1509d9f711b4b73649"
|
||||
SRCREV_machine_qemux86-64 ?= "85f0668fea1442bbcc2c8b1509d9f711b4b73649"
|
||||
SRCREV_machine_qemumips64 ?= "7b526cde12d78604b6f1e1ad62da31dcb729f35f"
|
||||
SRCREV_machine ?= "85f0668fea1442bbcc2c8b1509d9f711b4b73649"
|
||||
SRCREV_meta ?= "3fecb08507e286d1458497faaf31d1a07cc7d373"
|
||||
SRCREV_machine_qemuarm ?= "4fefb5a57ecb9bc5c6aab38319f773b02c894e6b"
|
||||
SRCREV_machine_qemuarm64 ?= "407b5fa877ca8993a405542fa4c3d73584e8ea98"
|
||||
SRCREV_machine_qemumips ?= "1bfe5d39c9f954f0ac2480115f4750f39500d4f4"
|
||||
SRCREV_machine_qemuppc ?= "753def987b630ed41686223b5dc252436757e893"
|
||||
SRCREV_machine_qemuriscv64 ?= "90d5f03a7c79ccd5c02e0579049d22cf2686da9b"
|
||||
SRCREV_machine_qemux86 ?= "90d5f03a7c79ccd5c02e0579049d22cf2686da9b"
|
||||
SRCREV_machine_qemux86-64 ?= "90d5f03a7c79ccd5c02e0579049d22cf2686da9b"
|
||||
SRCREV_machine_qemumips64 ?= "b391bfc877fe8ae41e579ffd4bcd814b4ad438ea"
|
||||
SRCREV_machine ?= "90d5f03a7c79ccd5c02e0579049d22cf2686da9b"
|
||||
SRCREV_meta ?= "028688aaad2b64e353d771ba5505a8666cd01696"
|
||||
|
||||
# remap qemuarm to qemuarma15 for the 5.4 kernel
|
||||
# KMACHINE_qemuarm ?= "qemuarma15"
|
||||
@@ -30,7 +30,7 @@ SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;name=machine;branch=${KBRA
|
||||
git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.4;destsuffix=${KMETA}"
|
||||
|
||||
LIC_FILES_CHKSUM = "file://COPYING;md5=bbea815ee2795b2f4230826c0c6b8814"
|
||||
LINUX_VERSION ?= "5.4.192"
|
||||
LINUX_VERSION ?= "5.4.209"
|
||||
|
||||
DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
|
||||
DEPENDS += "openssl-native util-linux-native"
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
From 25b70c486bb96de0caf7cea1da42ed07801cca84 Mon Sep 17 00:00:00 2001
|
||||
From: Michael Jeanson <mjeanson@efficios.com>
|
||||
Date: Mon, 4 Apr 2022 14:33:42 -0400
|
||||
Subject: [PATCH 17/19] fix: random: remove unused tracepoints (v5.18)
|
||||
|
||||
See upstream commit :
|
||||
|
||||
commit 14c174633f349cb41ea90c2c0aaddac157012f74
|
||||
Author: Jason A. Donenfeld <Jason@zx2c4.com>
|
||||
Date: Thu Feb 10 16:40:44 2022 +0100
|
||||
|
||||
random: remove unused tracepoints
|
||||
|
||||
These explicit tracepoints aren't really used and show sign of aging.
|
||||
It's work to keep these up to date, and before I attempted to keep them
|
||||
up to date, they weren't up to date, which indicates that they're not
|
||||
really used. These days there are better ways of introspecting anyway.
|
||||
|
||||
Upstream-Status: Backport [369d82bb1746447514c877088d7c5fd0f39140f8]
|
||||
Change-Id: I3b8c3e2732e7efdd76ce63204ac53a48784d0df6
|
||||
Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
|
||||
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
|
||||
---
|
||||
probes/Kbuild | 5 ++++-
|
||||
1 file changed, 4 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/probes/Kbuild b/probes/Kbuild
|
||||
index 3ae2d39e..58da82b8 100644
|
||||
--- a/probes/Kbuild
|
||||
+++ b/probes/Kbuild
|
||||
@@ -215,8 +215,11 @@ ifneq ($(CONFIG_FRAME_WARN),0)
|
||||
CFLAGS_lttng-probe-printk.o += -Wframe-larger-than=2200
|
||||
endif
|
||||
|
||||
+# Introduced in v3.6, remove in v5.18
|
||||
obj-$(CONFIG_LTTNG) += $(shell \
|
||||
- if [ $(VERSION) -ge 4 \
|
||||
+ if [ \( ! \( $(VERSION) -ge 6 -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -ge 18 \) \) \) \
|
||||
+ -a \
|
||||
+ $(VERSION) -ge 4 \
|
||||
-o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -ge 6 \) \
|
||||
-o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -eq 5 -a $(SUBLEVEL) -ge 2 \) \
|
||||
-o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -eq 4 -a $(SUBLEVEL) -ge 9 \) \
|
||||
--
|
||||
2.35.1
|
||||
|
||||
@@ -0,0 +1,45 @@
|
||||
From da956d1444139883f5d01078d945078738ffade4 Mon Sep 17 00:00:00 2001
|
||||
From: He Zhe <zhe.he@windriver.com>
|
||||
Date: Thu, 2 Jun 2022 06:36:08 +0000
|
||||
Subject: [PATCH 18/19] fix: random: remove unused tracepoints (v5.10, v5.15)
|
||||
|
||||
The following kernel commit has been back ported to v5.10.119 and v5.15.44.
|
||||
|
||||
commit 14c174633f349cb41ea90c2c0aaddac157012f74
|
||||
Author: Jason A. Donenfeld <Jason@zx2c4.com>
|
||||
Date: Thu Feb 10 16:40:44 2022 +0100
|
||||
|
||||
random: remove unused tracepoints
|
||||
|
||||
These explicit tracepoints aren't really used and show sign of aging.
|
||||
It's work to keep these up to date, and before I attempted to keep them
|
||||
up to date, they weren't up to date, which indicates that they're not
|
||||
really used. These days there are better ways of introspecting anyway.
|
||||
|
||||
Upstream-Status: Backport [1901e0eb58795e850e8fdcb5e1c235e4397b470d]
|
||||
Signed-off-by: He Zhe <zhe.he@windriver.com>
|
||||
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
|
||||
Change-Id: I0b7eb8aa78b5bd2039e20ae3e1da4c5eb9018789
|
||||
---
|
||||
probes/Kbuild | 5 ++++-
|
||||
1 file changed, 4 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/probes/Kbuild b/probes/Kbuild
|
||||
index 58da82b8..87f2d681 100644
|
||||
--- a/probes/Kbuild
|
||||
+++ b/probes/Kbuild
|
||||
@@ -217,7 +217,10 @@ endif
|
||||
|
||||
# Introduced in v3.6, remove in v5.18
|
||||
obj-$(CONFIG_LTTNG) += $(shell \
|
||||
- if [ \( ! \( $(VERSION) -ge 6 -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -ge 18 \) \) \) \
|
||||
+ if [ \( ! \( $(VERSION) -ge 6 \
|
||||
+ -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -ge 18 \) \
|
||||
+ -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -eq 15 -a $(SUBLEVEL) -ge 44 \) \
|
||||
+ -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -eq 10 -a $(SUBLEVEL) -ge 119\) \) \) \
|
||||
-a \
|
||||
$(VERSION) -ge 4 \
|
||||
-o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -ge 6 \) \
|
||||
--
|
||||
2.35.1
|
||||
|
||||
@@ -0,0 +1,51 @@
|
||||
From 2c98e0cd03eba0aa935796bc7413c51b5e4b055c Mon Sep 17 00:00:00 2001
|
||||
From: Michael Jeanson <mjeanson@efficios.com>
|
||||
Date: Tue, 31 May 2022 15:24:48 -0400
|
||||
Subject: [PATCH 19/19] fix: 'random' tracepoints removed in stable kernels
|
||||
|
||||
The upstream commit 14c174633f349cb41ea90c2c0aaddac157012f74 removing
|
||||
the 'random' tracepoints is being backported to multiple stable kernel
|
||||
branches, I don't see how that qualifies as a fix but here we are.
|
||||
|
||||
Use the presence of 'include/trace/events/random.h' in the kernel source
|
||||
tree instead of the rather tortuous version check to determine if we
|
||||
need to build 'lttng-probe-random.ko'.
|
||||
|
||||
Upstream-Status: Backport [ed1149ef88fb62c365ac66cf62c58ac6abd8d7e8]
|
||||
Change-Id: I8f5f2f4c9e09c61127c49c7949b22dd3fab0460d
|
||||
Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
|
||||
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
|
||||
---
|
||||
probes/Kbuild | 16 ++++------------
|
||||
1 file changed, 4 insertions(+), 12 deletions(-)
|
||||
|
||||
diff --git a/probes/Kbuild b/probes/Kbuild
|
||||
index 87f2d681..f09d6b65 100644
|
||||
--- a/probes/Kbuild
|
||||
+++ b/probes/Kbuild
|
||||
@@ -216,18 +216,10 @@ ifneq ($(CONFIG_FRAME_WARN),0)
|
||||
endif
|
||||
|
||||
# Introduced in v3.6, remove in v5.18
|
||||
-obj-$(CONFIG_LTTNG) += $(shell \
|
||||
- if [ \( ! \( $(VERSION) -ge 6 \
|
||||
- -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -ge 18 \) \
|
||||
- -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -eq 15 -a $(SUBLEVEL) -ge 44 \) \
|
||||
- -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -eq 10 -a $(SUBLEVEL) -ge 119\) \) \) \
|
||||
- -a \
|
||||
- $(VERSION) -ge 4 \
|
||||
- -o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -ge 6 \) \
|
||||
- -o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -eq 5 -a $(SUBLEVEL) -ge 2 \) \
|
||||
- -o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -eq 4 -a $(SUBLEVEL) -ge 9 \) \
|
||||
- -o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -eq 0 -a $(SUBLEVEL) -ge 41 \) ] ; then \
|
||||
- echo "lttng-probe-random.o" ; fi;)
|
||||
+random_dep = $(srctree)/include/trace/events/random.h
|
||||
+ifneq ($(wildcard $(random_dep)),)
|
||||
+ obj-$(CONFIG_LTTNG) += lttng-probe-random.o
|
||||
+endif
|
||||
|
||||
obj-$(CONFIG_LTTNG) += $(shell \
|
||||
if [ $(VERSION) -ge 4 \
|
||||
--
|
||||
2.35.1
|
||||
|
||||
@@ -28,6 +28,9 @@ SRC_URI = "https://lttng.org/files/${BPN}/${BPN}-${PV}.tar.bz2 \
|
||||
file://0014-Revert-fix-include-order-for-older-kernels.patch \
|
||||
file://0015-fix-backport-of-fix-tracepoint-Optimize-using-static.patch \
|
||||
file://0016-fix-adjust-version-range-for-trace_find_free_extent.patch \
|
||||
file://0017-fix-random-remove-unused-tracepoints-v5.18.patch \
|
||||
file://0018-fix-random-remove-unused-tracepoints-v5.10-v5.15.patch \
|
||||
file://0019-fix-random-tracepoints-removed-in-stable-kernels.patch \
|
||||
"
|
||||
|
||||
SRC_URI[md5sum] = "8ef09fdfcdec669d33f7fc1c1c80f2c4"
|
||||
|
||||
@@ -5,7 +5,7 @@ LICENSE = "ISC"
|
||||
LIC_FILES_CHKSUM = "file://LICENSE;md5=07c4f6dea3845b02a18dc00c8c87699c"
|
||||
|
||||
SRC_URI = "https://www.kernel.org/pub/software/network/${BPN}/${BP}.tar.xz"
|
||||
SRC_URI[sha256sum] = "884ba2e3c1e8b98762b6dc25ff60b5ec75c8d33a39e019b3ed4aa615491460d3"
|
||||
SRC_URI[sha256sum] = "ac00f97efecce5046ed069d1d93f3365fdf994c7c7854a8fc50831e959537230"
|
||||
|
||||
inherit bin_package allarch
|
||||
|
||||
@@ -36,7 +36,7 @@ PACKAGECONFIG ??= "\
|
||||
speexdsp \
|
||||
${@bb.utils.filter('DISTRO_FEATURES', 'pulseaudio', d)} \
|
||||
"
|
||||
PACKAGECONFIG[aaf] = "--enable-aaf,--disable-aaf,avtp"
|
||||
PACKAGECONFIG[aaf] = "--enable-aaf,--disable-aaf,libavtp"
|
||||
PACKAGECONFIG[jack] = "--enable-jack,--disable-jack,jack"
|
||||
PACKAGECONFIG[libav] = "--enable-libav,--disable-libav,libav"
|
||||
PACKAGECONFIG[maemo-plugin] = "--enable-maemo-plugin,--disable-maemo-plugin"
|
||||
|
||||
@@ -41,7 +41,7 @@ PACKAGECONFIG[unwind] = "-Dlibunwind=enabled,-Dlibunwind=disabled,libunwind"
|
||||
PACKAGECONFIG[dw] = "-Dlibdw=enabled,-Dlibdw=disabled,elfutils"
|
||||
PACKAGECONFIG[bash-completion] = "-Dbash-completion=enabled,-Dbash-completion=disabled,bash-completion"
|
||||
PACKAGECONFIG[tools] = "-Dtools=enabled,-Dtools=disabled"
|
||||
PACKAGECONFIG[setcap] = ",,libcap libcap-native"
|
||||
PACKAGECONFIG[setcap] = "-Dptp-helper-permissions=capabilities,,libcap libcap-native"
|
||||
|
||||
# TODO: put this in a gettext.bbclass patch
|
||||
def gettext_oemeson(d):
|
||||
|
||||
@@ -0,0 +1,183 @@
|
||||
From 8261237113a53cd21029c4a8cbb62c47b4c19523 Mon Sep 17 00:00:00 2001
|
||||
From: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
Date: Wed, 27 Jul 2022 11:30:18 +0530
|
||||
Subject: [PATCH] CVE-2022-2056 CVE-2022-2057 CVE-2022-2058
|
||||
|
||||
Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/commit/dd1bcc7abb26094e93636e85520f0d8f81ab0fab]
|
||||
CVE: CVE-2022-2056 CVE-2022-2057 CVE-2022-2058
|
||||
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
|
||||
---
|
||||
libtiff/tif_aux.c | 9 +++++++
|
||||
libtiff/tiffiop.h | 1 +
|
||||
tools/tiffcrop.c | 62 ++++++++++++++++++++++++++---------------------
|
||||
3 files changed, 44 insertions(+), 28 deletions(-)
|
||||
|
||||
diff --git a/libtiff/tif_aux.c b/libtiff/tif_aux.c
|
||||
index 8188db5..3dac542 100644
|
||||
--- a/libtiff/tif_aux.c
|
||||
+++ b/libtiff/tif_aux.c
|
||||
@@ -402,6 +402,15 @@ float _TIFFClampDoubleToFloat( double val )
|
||||
return (float)val;
|
||||
}
|
||||
|
||||
+uint32 _TIFFClampDoubleToUInt32(double val)
|
||||
+{
|
||||
+ if( val < 0 )
|
||||
+ return 0;
|
||||
+ if( val > 0xFFFFFFFFU || val != val )
|
||||
+ return 0xFFFFFFFFU;
|
||||
+ return (uint32)val;
|
||||
+}
|
||||
+
|
||||
int _TIFFSeekOK(TIFF* tif, toff_t off)
|
||||
{
|
||||
/* Huge offsets, especially -1 / UINT64_MAX, can cause issues */
|
||||
diff --git a/libtiff/tiffiop.h b/libtiff/tiffiop.h
|
||||
index 45a7932..c6f6f93 100644
|
||||
--- a/libtiff/tiffiop.h
|
||||
+++ b/libtiff/tiffiop.h
|
||||
@@ -393,6 +393,7 @@ extern double _TIFFUInt64ToDouble(uint64);
|
||||
extern float _TIFFUInt64ToFloat(uint64);
|
||||
|
||||
extern float _TIFFClampDoubleToFloat(double);
|
||||
+extern uint32 _TIFFClampDoubleToUInt32(double);
|
||||
|
||||
extern tmsize_t
|
||||
_TIFFReadEncodedStripAndAllocBuffer(TIFF* tif, uint32 strip,
|
||||
diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
|
||||
index c2c2052..79dd0a0 100644
|
||||
--- a/tools/tiffcrop.c
|
||||
+++ b/tools/tiffcrop.c
|
||||
@@ -5141,17 +5141,17 @@ computeInputPixelOffsets(struct crop_mask *crop, struct image_data *image,
|
||||
{
|
||||
if ((crop->res_unit == RESUNIT_INCH) || (crop->res_unit == RESUNIT_CENTIMETER))
|
||||
{
|
||||
- x1 = (uint32) (crop->corners[i].X1 * scale * xres);
|
||||
- x2 = (uint32) (crop->corners[i].X2 * scale * xres);
|
||||
- y1 = (uint32) (crop->corners[i].Y1 * scale * yres);
|
||||
- y2 = (uint32) (crop->corners[i].Y2 * scale * yres);
|
||||
+ x1 = _TIFFClampDoubleToUInt32(crop->corners[i].X1 * scale * xres);
|
||||
+ x2 = _TIFFClampDoubleToUInt32(crop->corners[i].X2 * scale * xres);
|
||||
+ y1 = _TIFFClampDoubleToUInt32(crop->corners[i].Y1 * scale * yres);
|
||||
+ y2 = _TIFFClampDoubleToUInt32(crop->corners[i].Y2 * scale * yres);
|
||||
}
|
||||
else
|
||||
{
|
||||
- x1 = (uint32) (crop->corners[i].X1);
|
||||
- x2 = (uint32) (crop->corners[i].X2);
|
||||
- y1 = (uint32) (crop->corners[i].Y1);
|
||||
- y2 = (uint32) (crop->corners[i].Y2);
|
||||
+ x1 = _TIFFClampDoubleToUInt32(crop->corners[i].X1);
|
||||
+ x2 = _TIFFClampDoubleToUInt32(crop->corners[i].X2);
|
||||
+ y1 = _TIFFClampDoubleToUInt32(crop->corners[i].Y1);
|
||||
+ y2 = _TIFFClampDoubleToUInt32(crop->corners[i].Y2);
|
||||
}
|
||||
if (x1 < 1)
|
||||
crop->regionlist[i].x1 = 0;
|
||||
@@ -5214,17 +5214,17 @@ computeInputPixelOffsets(struct crop_mask *crop, struct image_data *image,
|
||||
{
|
||||
if (crop->res_unit != RESUNIT_INCH && crop->res_unit != RESUNIT_CENTIMETER)
|
||||
{ /* User has specified pixels as reference unit */
|
||||
- tmargin = (uint32)(crop->margins[0]);
|
||||
- lmargin = (uint32)(crop->margins[1]);
|
||||
- bmargin = (uint32)(crop->margins[2]);
|
||||
- rmargin = (uint32)(crop->margins[3]);
|
||||
+ tmargin = _TIFFClampDoubleToUInt32(crop->margins[0]);
|
||||
+ lmargin = _TIFFClampDoubleToUInt32(crop->margins[1]);
|
||||
+ bmargin = _TIFFClampDoubleToUInt32(crop->margins[2]);
|
||||
+ rmargin = _TIFFClampDoubleToUInt32(crop->margins[3]);
|
||||
}
|
||||
else
|
||||
{ /* inches or centimeters specified */
|
||||
- tmargin = (uint32)(crop->margins[0] * scale * yres);
|
||||
- lmargin = (uint32)(crop->margins[1] * scale * xres);
|
||||
- bmargin = (uint32)(crop->margins[2] * scale * yres);
|
||||
- rmargin = (uint32)(crop->margins[3] * scale * xres);
|
||||
+ tmargin = _TIFFClampDoubleToUInt32(crop->margins[0] * scale * yres);
|
||||
+ lmargin = _TIFFClampDoubleToUInt32(crop->margins[1] * scale * xres);
|
||||
+ bmargin = _TIFFClampDoubleToUInt32(crop->margins[2] * scale * yres);
|
||||
+ rmargin = _TIFFClampDoubleToUInt32(crop->margins[3] * scale * xres);
|
||||
}
|
||||
|
||||
if ((lmargin + rmargin) > image->width)
|
||||
@@ -5254,24 +5254,24 @@ computeInputPixelOffsets(struct crop_mask *crop, struct image_data *image,
|
||||
if (crop->res_unit != RESUNIT_INCH && crop->res_unit != RESUNIT_CENTIMETER)
|
||||
{
|
||||
if (crop->crop_mode & CROP_WIDTH)
|
||||
- width = (uint32)crop->width;
|
||||
+ width = _TIFFClampDoubleToUInt32(crop->width);
|
||||
else
|
||||
width = image->width - lmargin - rmargin;
|
||||
|
||||
if (crop->crop_mode & CROP_LENGTH)
|
||||
- length = (uint32)crop->length;
|
||||
+ length = _TIFFClampDoubleToUInt32(crop->length);
|
||||
else
|
||||
length = image->length - tmargin - bmargin;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (crop->crop_mode & CROP_WIDTH)
|
||||
- width = (uint32)(crop->width * scale * image->xres);
|
||||
+ width = _TIFFClampDoubleToUInt32(crop->width * scale * image->xres);
|
||||
else
|
||||
width = image->width - lmargin - rmargin;
|
||||
|
||||
if (crop->crop_mode & CROP_LENGTH)
|
||||
- length = (uint32)(crop->length * scale * image->yres);
|
||||
+ length = _TIFFClampDoubleToUInt32(crop->length * scale * image->yres);
|
||||
else
|
||||
length = image->length - tmargin - bmargin;
|
||||
}
|
||||
@@ -5670,13 +5670,13 @@ computeOutputPixelOffsets (struct crop_mask *crop, struct image_data *image,
|
||||
{
|
||||
if (page->res_unit == RESUNIT_INCH || page->res_unit == RESUNIT_CENTIMETER)
|
||||
{ /* inches or centimeters specified */
|
||||
- hmargin = (uint32)(page->hmargin * scale * page->hres * ((image->bps + 7)/ 8));
|
||||
- vmargin = (uint32)(page->vmargin * scale * page->vres * ((image->bps + 7)/ 8));
|
||||
+ hmargin = _TIFFClampDoubleToUInt32(page->hmargin * scale * page->hres * ((image->bps + 7) / 8));
|
||||
+ vmargin = _TIFFClampDoubleToUInt32(page->vmargin * scale * page->vres * ((image->bps + 7) / 8));
|
||||
}
|
||||
else
|
||||
{ /* Otherwise user has specified pixels as reference unit */
|
||||
- hmargin = (uint32)(page->hmargin * scale * ((image->bps + 7)/ 8));
|
||||
- vmargin = (uint32)(page->vmargin * scale * ((image->bps + 7)/ 8));
|
||||
+ hmargin = _TIFFClampDoubleToUInt32(page->hmargin * scale * ((image->bps + 7) / 8));
|
||||
+ vmargin = _TIFFClampDoubleToUInt32(page->vmargin * scale * ((image->bps + 7) / 8));
|
||||
}
|
||||
|
||||
if ((hmargin * 2.0) > (pwidth * page->hres))
|
||||
@@ -5714,13 +5714,13 @@ computeOutputPixelOffsets (struct crop_mask *crop, struct image_data *image,
|
||||
{
|
||||
if (page->mode & PAGE_MODE_PAPERSIZE )
|
||||
{
|
||||
- owidth = (uint32)((pwidth * page->hres) - (hmargin * 2));
|
||||
- olength = (uint32)((plength * page->vres) - (vmargin * 2));
|
||||
+ owidth = _TIFFClampDoubleToUInt32((pwidth * page->hres) - (hmargin * 2));
|
||||
+ olength = _TIFFClampDoubleToUInt32((plength * page->vres) - (vmargin * 2));
|
||||
}
|
||||
else
|
||||
{
|
||||
- owidth = (uint32)(iwidth - (hmargin * 2 * page->hres));
|
||||
- olength = (uint32)(ilength - (vmargin * 2 * page->vres));
|
||||
+ owidth = _TIFFClampDoubleToUInt32(iwidth - (hmargin * 2 * page->hres));
|
||||
+ olength = _TIFFClampDoubleToUInt32(ilength - (vmargin * 2 * page->vres));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5729,6 +5729,12 @@ computeOutputPixelOffsets (struct crop_mask *crop, struct image_data *image,
|
||||
if (olength > ilength)
|
||||
olength = ilength;
|
||||
|
||||
+ if (owidth == 0 || olength == 0)
|
||||
+ {
|
||||
+ TIFFError("computeOutputPixelOffsets", "Integer overflow when calculating the number of pages");
|
||||
+ exit(EXIT_FAILURE);
|
||||
+ }
|
||||
+
|
||||
/* Compute the number of pages required for Portrait or Landscape */
|
||||
switch (page->orient)
|
||||
{
|
||||
--
|
||||
2.25.1
|
||||
|
||||
@@ -24,6 +24,7 @@ SRC_URI = "http://download.osgeo.org/libtiff/tiff-${PV}.tar.gz \
|
||||
file://CVE-2022-0909.patch \
|
||||
file://CVE-2022-0891.patch \
|
||||
file://CVE-2022-0924.patch \
|
||||
file://CVE-2022-2056-CVE-2022-2057-CVE-2022-2058.patch \
|
||||
"
|
||||
SRC_URI[md5sum] = "2165e7aba557463acc0664e71a3ed424"
|
||||
SRC_URI[sha256sum] = "5d29f32517dadb6dbcd1255ea5bbc93a2b54b94fbf83653b4d65c7d6775b8634"
|
||||
|
||||
45
meta/recipes-support/curl/curl/CVE-2022-27774-1.patch
Normal file
45
meta/recipes-support/curl/curl/CVE-2022-27774-1.patch
Normal file
@@ -0,0 +1,45 @@
|
||||
From 2a797e099731facf62a2c675396334bc2ad3bc7c Mon Sep 17 00:00:00 2001
|
||||
From: Daniel Stenberg <daniel@haxx.se>
|
||||
Date: Mon, 25 Apr 2022 16:24:33 +0200
|
||||
Subject: [PATCH] connect: store "conn_remote_port" in the info struct
|
||||
|
||||
To make it available after the connection ended.
|
||||
|
||||
Prerequisite for the patches that address CVE-2022-27774.
|
||||
|
||||
Upstream-Status: Backport [https://github.com/curl/curl/commit/08b8ef4e726ba10f45081ecda5b3cea788d3c839]
|
||||
Signed-off-by: Robert Joslyn <robert.joslyn@redrectangle.org>
|
||||
---
|
||||
lib/connect.c | 1 +
|
||||
lib/urldata.h | 6 +++++-
|
||||
2 files changed, 6 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/lib/connect.c b/lib/connect.c
|
||||
index b3d4057..a977d67 100644
|
||||
--- a/lib/connect.c
|
||||
+++ b/lib/connect.c
|
||||
@@ -624,6 +624,7 @@ void Curl_persistconninfo(struct connectdata *conn)
|
||||
conn->data->info.conn_scheme = conn->handler->scheme;
|
||||
conn->data->info.conn_protocol = conn->handler->protocol;
|
||||
conn->data->info.conn_primary_port = conn->primary_port;
|
||||
+ conn->data->info.conn_remote_port = conn->remote_port;
|
||||
conn->data->info.conn_local_port = conn->local_port;
|
||||
}
|
||||
|
||||
diff --git a/lib/urldata.h b/lib/urldata.h
|
||||
index fafb7a3..ab1b267 100644
|
||||
--- a/lib/urldata.h
|
||||
+++ b/lib/urldata.h
|
||||
@@ -1148,7 +1148,11 @@ struct PureInfo {
|
||||
reused, in the connection cache. */
|
||||
|
||||
char conn_primary_ip[MAX_IPADR_LEN];
|
||||
- long conn_primary_port;
|
||||
+ long conn_primary_port; /* this is the destination port to the connection,
|
||||
+ which might have been a proxy */
|
||||
+ long conn_remote_port; /* this is the "remote port", which is the port
|
||||
+ number of the used URL, independent of proxy or
|
||||
+ not */
|
||||
char conn_local_ip[MAX_IPADR_LEN];
|
||||
long conn_local_port;
|
||||
const char *conn_scheme;
|
||||
80
meta/recipes-support/curl/curl/CVE-2022-27774-2.patch
Normal file
80
meta/recipes-support/curl/curl/CVE-2022-27774-2.patch
Normal file
@@ -0,0 +1,80 @@
|
||||
From 5c2f3b3a5f115625134669d90d591de9c5aafc8e Mon Sep 17 00:00:00 2001
|
||||
From: Daniel Stenberg <daniel@haxx.se>
|
||||
Date: Mon, 25 Apr 2022 16:24:33 +0200
|
||||
Subject: [PATCH] transfer: redirects to other protocols or ports clear auth
|
||||
|
||||
... unless explicitly permitted.
|
||||
|
||||
Bug: https://curl.se/docs/CVE-2022-27774.html
|
||||
Reported-by: Harry Sintonen
|
||||
Closes #8748
|
||||
|
||||
Upstream-Status: Backport [https://github.com/curl/curl/commit/620ea21410030a9977396b4661806bc187231b79]
|
||||
Signed-off-by: Robert Joslyn <robert.joslyn@redrectangle.org>
|
||||
---
|
||||
lib/transfer.c | 49 ++++++++++++++++++++++++++++++++++++++++++++++++-
|
||||
1 file changed, 48 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/lib/transfer.c b/lib/transfer.c
|
||||
index 744e1c0..ac69d27 100644
|
||||
--- a/lib/transfer.c
|
||||
+++ b/lib/transfer.c
|
||||
@@ -1627,10 +1627,57 @@ CURLcode Curl_follow(struct Curl_easy *data,
|
||||
return CURLE_OUT_OF_MEMORY;
|
||||
}
|
||||
else {
|
||||
-
|
||||
uc = curl_url_get(data->state.uh, CURLUPART_URL, &newurl, 0);
|
||||
if(uc)
|
||||
return Curl_uc_to_curlcode(uc);
|
||||
+
|
||||
+ /* Clear auth if this redirects to a different port number or protocol,
|
||||
+ unless permitted */
|
||||
+ if(!data->set.allow_auth_to_other_hosts && (type != FOLLOW_FAKE)) {
|
||||
+ char *portnum;
|
||||
+ int port;
|
||||
+ bool clear = FALSE;
|
||||
+
|
||||
+ if(data->set.use_port && data->state.allow_port)
|
||||
+ /* a custom port is used */
|
||||
+ port = (int)data->set.use_port;
|
||||
+ else {
|
||||
+ uc = curl_url_get(data->state.uh, CURLUPART_PORT, &portnum,
|
||||
+ CURLU_DEFAULT_PORT);
|
||||
+ if(uc) {
|
||||
+ free(newurl);
|
||||
+ return Curl_uc_to_curlcode(uc);
|
||||
+ }
|
||||
+ port = atoi(portnum);
|
||||
+ free(portnum);
|
||||
+ }
|
||||
+ if(port != data->info.conn_remote_port) {
|
||||
+ infof(data, "Clear auth, redirects to port from %u to %u",
|
||||
+ data->info.conn_remote_port, port);
|
||||
+ clear = TRUE;
|
||||
+ }
|
||||
+ else {
|
||||
+ char *scheme;
|
||||
+ const struct Curl_handler *p;
|
||||
+ uc = curl_url_get(data->state.uh, CURLUPART_SCHEME, &scheme, 0);
|
||||
+ if(uc) {
|
||||
+ free(newurl);
|
||||
+ return Curl_uc_to_curlcode(uc);
|
||||
+ }
|
||||
+
|
||||
+ p = Curl_builtin_scheme(scheme);
|
||||
+ if(p && (p->protocol != data->info.conn_protocol)) {
|
||||
+ infof(data, "Clear auth, redirects scheme from %s to %s",
|
||||
+ data->info.conn_scheme, scheme);
|
||||
+ clear = TRUE;
|
||||
+ }
|
||||
+ free(scheme);
|
||||
+ }
|
||||
+ if(clear) {
|
||||
+ Curl_safefree(data->set.str[STRING_USERNAME]);
|
||||
+ Curl_safefree(data->set.str[STRING_PASSWORD]);
|
||||
+ }
|
||||
+ }
|
||||
}
|
||||
|
||||
if(type == FOLLOW_FAKE) {
|
||||
83
meta/recipes-support/curl/curl/CVE-2022-27774-3.patch
Normal file
83
meta/recipes-support/curl/curl/CVE-2022-27774-3.patch
Normal file
@@ -0,0 +1,83 @@
|
||||
From 5dccf21ad49eed925e8f76b0cb844877239ce23d Mon Sep 17 00:00:00 2001
|
||||
From: Daniel Stenberg <daniel@haxx.se>
|
||||
Date: Mon, 25 Apr 2022 17:59:15 +0200
|
||||
Subject: [PATCH] openssl: don't leak the SRP credentials in redirects either
|
||||
|
||||
Follow-up to 620ea21410030
|
||||
|
||||
Reported-by: Harry Sintonen
|
||||
Closes #8751
|
||||
|
||||
Upstream-Status: Backport [https://github.com/curl/curl/commit/139a54ed0a172adaaf1a78d6f4fff50b2c3f9e08]
|
||||
Signed-off-by: Robert Joslyn <robert.joslyn@redrectangle.org>
|
||||
---
|
||||
lib/http.c | 10 +++++-----
|
||||
lib/http.h | 6 ++++++
|
||||
lib/vtls/openssl.c | 3 ++-
|
||||
3 files changed, 13 insertions(+), 6 deletions(-)
|
||||
|
||||
diff --git a/lib/http.c b/lib/http.c
|
||||
index 8b16c09..5291c07 100644
|
||||
--- a/lib/http.c
|
||||
+++ b/lib/http.c
|
||||
@@ -732,10 +732,10 @@ output_auth_headers(struct connectdata *conn,
|
||||
}
|
||||
|
||||
/*
|
||||
- * allow_auth_to_host() tells if autentication, cookies or other "sensitive
|
||||
- * data" can (still) be sent to this host.
|
||||
+ * Curl_allow_auth_to_host() tells if authentication, cookies or other
|
||||
+ * "sensitive data" can (still) be sent to this host.
|
||||
*/
|
||||
-static bool allow_auth_to_host(struct Curl_easy *data)
|
||||
+bool Curl_allow_auth_to_host(struct Curl_easy *data)
|
||||
{
|
||||
struct connectdata *conn = data->conn;
|
||||
return (!data->state.this_is_a_follow ||
|
||||
@@ -816,7 +816,7 @@ Curl_http_output_auth(struct connectdata *conn,
|
||||
|
||||
/* To prevent the user+password to get sent to other than the original host
|
||||
due to a location-follow */
|
||||
- if(allow_auth_to_host(data)
|
||||
+ if(Curl_allow_auth_to_host(data)
|
||||
|| conn->bits.netrc
|
||||
)
|
||||
result = output_auth_headers(conn, authhost, request, path, FALSE);
|
||||
@@ -1891,7 +1891,7 @@ CURLcode Curl_add_custom_headers(struct connectdata *conn,
|
||||
checkprefix("Cookie:", compare)) &&
|
||||
/* be careful of sending this potentially sensitive header to
|
||||
other hosts */
|
||||
- !allow_auth_to_host(data))
|
||||
+ !Curl_allow_auth_to_host(data))
|
||||
;
|
||||
else {
|
||||
result = Curl_add_bufferf(&req_buffer, "%s\r\n", compare);
|
||||
diff --git a/lib/http.h b/lib/http.h
|
||||
index 4c1825f..4fbae1d 100644
|
||||
--- a/lib/http.h
|
||||
+++ b/lib/http.h
|
||||
@@ -273,4 +273,10 @@ Curl_http_output_auth(struct connectdata *conn,
|
||||
bool proxytunnel); /* TRUE if this is the request setting
|
||||
up the proxy tunnel */
|
||||
|
||||
+/*
|
||||
+ * Curl_allow_auth_to_host() tells if authentication, cookies or other
|
||||
+ * "sensitive data" can (still) be sent to this host.
|
||||
+ */
|
||||
+bool Curl_allow_auth_to_host(struct Curl_easy *data);
|
||||
+
|
||||
#endif /* HEADER_CURL_HTTP_H */
|
||||
diff --git a/lib/vtls/openssl.c b/lib/vtls/openssl.c
|
||||
index 006a8c8..a14cecc 100644
|
||||
--- a/lib/vtls/openssl.c
|
||||
+++ b/lib/vtls/openssl.c
|
||||
@@ -2739,7 +2739,8 @@ static CURLcode ossl_connect_step1(struct connectdata *conn, int sockindex)
|
||||
#endif
|
||||
|
||||
#ifdef USE_TLS_SRP
|
||||
- if(ssl_authtype == CURL_TLSAUTH_SRP) {
|
||||
+ if((ssl_authtype == CURL_TLSAUTH_SRP) &&
|
||||
+ Curl_allow_auth_to_host(data)) {
|
||||
char * const ssl_username = SSL_SET_OPTION(username);
|
||||
|
||||
infof(data, "Using TLS-SRP username: %s\n", ssl_username);
|
||||
35
meta/recipes-support/curl/curl/CVE-2022-27774-4.patch
Normal file
35
meta/recipes-support/curl/curl/CVE-2022-27774-4.patch
Normal file
@@ -0,0 +1,35 @@
|
||||
From 7395752e2f7b87dc8c8f2a7137075e2da554aaea Mon Sep 17 00:00:00 2001
|
||||
From: Daniel Stenberg <daniel@haxx.se>
|
||||
Date: Tue, 26 Apr 2022 07:46:19 +0200
|
||||
Subject: [PATCH] gnutls: don't leak the SRP credentials in redirects
|
||||
|
||||
Follow-up to 620ea21410030 and 139a54ed0a172a
|
||||
|
||||
Reported-by: Harry Sintonen
|
||||
Closes #8752
|
||||
|
||||
Upstream-Status: Backport [https://github.com/curl/curl/commit/093531556203decd92d92bccd431edbe5561781c]
|
||||
Signed-off-by: Robert Joslyn <robert.joslyn@redrectangle.org>
|
||||
---
|
||||
lib/vtls/gtls.c | 6 +++---
|
||||
1 file changed, 3 insertions(+), 3 deletions(-)
|
||||
|
||||
diff --git a/lib/vtls/gtls.c b/lib/vtls/gtls.c
|
||||
index 8c05102..3d0758d 100644
|
||||
--- a/lib/vtls/gtls.c
|
||||
+++ b/lib/vtls/gtls.c
|
||||
@@ -581,11 +581,11 @@ gtls_connect_step1(struct connectdata *conn,
|
||||
}
|
||||
|
||||
#ifdef USE_TLS_SRP
|
||||
- if(SSL_SET_OPTION(authtype) == CURL_TLSAUTH_SRP) {
|
||||
+ if((SSL_SET_OPTION(authtype) == CURL_TLSAUTH_SRP) &&
|
||||
+ Curl_allow_auth_to_host(data)) {
|
||||
infof(data, "Using TLS-SRP username: %s\n", SSL_SET_OPTION(username));
|
||||
|
||||
- rc = gnutls_srp_allocate_client_credentials(
|
||||
- &BACKEND->srp_client_cred);
|
||||
+ rc = gnutls_srp_allocate_client_credentials(&BACKEND->srp_client_cred);
|
||||
if(rc != GNUTLS_E_SUCCESS) {
|
||||
failf(data, "gnutls_srp_allocate_client_cred() failed: %s",
|
||||
gnutls_strerror(rc));
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user