Mirror of https://git.yoctoproject.org/poky, synced 2026-02-20 16:39:40 +01:00.

Compare commits: yocto-3.1.16...yocto-3.1.19 (166 commits).
Commit SHA1s in the range (the author, date, and message columns are empty in this mirror):

4aad5914ef, 23322786e0, 139225f0ba, fc24cd1948, 1fc880e165, 9243169d4f, f97bd9abe6, 59180eb474, 2340b1dbb9, 0b85e5d610, ef2da8f28e, 5373e681cf, 98dd6e4cac, ae4acc9f81, cfd2eaa0e1, 5b956ef359, 54846f581e, b361f2a931, 0c3dfb682d, 7c7fc0de71, 354f571f61, 883102b9b8, b365d212dc, c4499b85f7, c35c1e15f0, 820e8891b8, b9ae8da74e, 038831674e, 25606f450d, 9e7f4a7db2, e4946bd39e, 97810ff2d7, d323923047, d695bd0d3d, 08bd8cc114, eb32f7f5e6, 88be415b10, 24fc40faef, 868ebed326, 17c23e485e, 61ea9f7665, b38628041b, dee08141f2, 61023f9e61, 7350f515b3, 50aa474c84, 9c5b33ccba, eb12590623, 35bcc28983, 48ea7812c7, 010094a2ae, 43980058ca, a985415ec2, 79ac8cf161, 3860414240, 387d23c02e, 232fdbf0e5, 60a98feb86, 6a3d60d873, 1c38d0d3d6, ca90350d13, 159a2de146, 684c5d4c12, 8dfc7162e3, d2f8a57a30, 0a0e0663ab, 79b3e05767, b6f4778e37, 6e79d96c6d, 31b4392e6e, 4bc2324a25, 6013fc2606, 3f2da49c2b, 02867c9039, 33a08f7b8f, 07eca06c71, 9f20f682ff, 6d1f8412be, 872caf23ad, b9bffd7650, 0b84202a2b, ae90fa778a, fe6c34c48d, 2ae3d43628, 5582ab6aae, d4c7b40039, a2805141e9, 7d9d97368b, 69fb63b4fc, 9638dc4826, f51a254415, 1487d68388, 8a382d8655, 8d6f9680e4, 23ed0037b6, 95cda9d091, 238fb89434, 7f694e46a8, e873840317, 9868f99149, f2d12bc50b, 6cf824520a, 42bb9689a0, 7da79fcac2, 1be2437fd2, d3d92d7852, 6be9d793a3, 77332ffb9b, 99478d73c5, 196895a482, 27877797c7, 1e298a4222, bb6c7e09e3, 86146334f1, 898aedf585, cc3cefdb43, b0cff6d434, 08fb6eb2e0, 34aaa93bfe, c2bd2eae86, 396373610c, 38b588a1a1, f0d18846de, d6941efc0b, df1129b022, 0ca0aec7aa, 0f83e5bd42, 475b0d3fad, 232b5533de, b4e5bf3e7f, e8255f5086, afc8929c5b, 72385662c8, 5c61613405, dfd1497961, 5999f70889, 37bbb105c9, fec7f76cfc, 61c36064c8, a48231b5bf, a75678145b, 31970fb2a4, 6327db048b, c408846f41, 49cd9f898f, 46e00399e5, 2120a39b09, dd76704ea5, 66b0097533, 24f305b4dd, d68406497e, 5daf9735c9, de24466823, a03e13a00b, eba0e64a88, 3a9cef8dbe, 90cf135b04, e47d35353c, b4ba37ce13, 4dfdb53c8a, 64f632c93f, 319ca9f460, dcd40cfa37, 5b0093ecee, 49032f1e8d, 86cdd92b15

The file-by-file diff between the two tags follows.
bitbake/bin/bitbake-getvar — new executable file (48 lines):

```python
#! /usr/bin/env python3
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import argparse
import io
import os
import sys

bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
sys.path[0:0] = [os.path.join(topdir, 'lib')]

import bb.tinfoil

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Bitbake Query Variable")
    parser.add_argument("variable", help="variable name to query")
    parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
    parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
    parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
    parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
    args = parser.parse_args()

    if args.unexpand and not args.value:
        print("--unexpand only makes sense with --value")
        sys.exit(1)

    if args.flag and not args.value:
        print("--flag only makes sense with --value")
        sys.exit(1)

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        if args.recipe:
            tinfoil.prepare(quiet=2)
            d = tinfoil.parse_recipe(args.recipe)
        else:
            tinfoil.prepare(quiet=2, config_only=True)
            d = tinfoil.config_data
        if args.flag:
            print(str(d.getVarFlag(args.variable, args.flag, expand=(not args.unexpand))))
        elif args.value:
            print(str(d.getVar(args.variable, expand=(not args.unexpand))))
        else:
            bb.data.emit_var(args.variable, d=d, all=True)
```
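The new helper is a thin wrapper around the tinfoil API, so the same query can be issued from your own tooling. A minimal interactive equivalent (a sketch, assuming an initialized build environment; `MACHINE` is just an illustrative variable name):

```python
# Sketch of what bitbake-getvar does internally (assumes an initialized
# build environment; MACHINE is an illustrative variable name).
import bb.tinfoil

with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
    # config_only=True parses just the configuration, as the script does
    # when no --recipe is given
    tinfoil.prepare(quiet=2, config_only=True)
    d = tinfoil.config_data
    print(d.getVar("MACHINE"))                 # expanded, like --value
    print(d.getVar("MACHINE", expand=False))   # like --value --unexpand
```

From a shell the equivalent query is simply `bitbake-getvar --value MACHINE`.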
bitbake/lib/bb/command.py:

```diff
@@ -20,6 +20,7 @@ Commands are queued in a CommandQueue
 from collections import OrderedDict, defaultdict

+import io
 import bb.event
 import bb.cooker
 import bb.remotedata
@@ -478,6 +479,17 @@ class CommandsSync:
         d = command.remotedatastores[dsindex].varhistory
         return getattr(d, method)(*args, **kwargs)

+    def dataStoreConnectorVarHistCmdEmit(self, command, params):
+        dsindex = params[0]
+        var = params[1]
+        oval = params[2]
+        val = params[3]
+        d = command.remotedatastores[params[4]]
+
+        o = io.StringIO()
+        command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d)
+        return o.getvalue()
+
     def dataStoreConnectorIncHistCmd(self, command, params):
         dsindex = params[0]
         method = params[1]
```
bitbake/lib/bb/fetch2/git.py:

```diff
@@ -224,7 +224,12 @@ class Git(FetchMethod):
         ud.shallow = False

         if ud.usehead:
-            ud.unresolvedrev['default'] = 'HEAD'
+            # When usehead is set let's associate 'HEAD' with the unresolved
+            # rev of this repository. This will get resolved into a revision
+            # later. If an actual revision happens to have also been provided
+            # then this setting will be overridden.
+            for name in ud.names:
+                ud.unresolvedrev[name] = 'HEAD'

         ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
```
bitbake/lib/bb/fetch2/wget.py:

```diff
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):


 class Wget(FetchMethod):
+
+    # CDNs like CloudFlare may do a 'browser integrity test' which can fail
+    # with the standard wget/urllib User-Agent, so pretend to be a modern
+    # browser.
+    user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+
     """Class to fetch urls via 'wget'"""
     def supports(self, ud, d):
         """
@@ -91,10 +97,9 @@ class Wget(FetchMethod):

         fetchcmd = self.basecmd

-        if 'downloadfilename' in ud.parm:
-            localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
-            bb.utils.mkdirhier(os.path.dirname(localpath))
-            fetchcmd += " -O %s" % shlex.quote(localpath)
+        localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+        bb.utils.mkdirhier(os.path.dirname(localpath))
+        fetchcmd += " -O %s" % shlex.quote(localpath)

         if ud.user and ud.pswd:
             fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
@@ -108,6 +113,10 @@ class Wget(FetchMethod):

         self._runwget(ud, d, fetchcmd, False)

+        # Remove the ".tmp" and move the file into position atomically
+        # Our lock prevents multiple writers but mirroring code may grab incomplete files
+        os.rename(localpath, localpath[:-4])
+
         # Sanity check since wget can pretend it succeed when it didn't
         # Also, this used to happen if sourceforge sent us to the mirror page
         if not os.path.exists(ud.localpath):
@@ -300,7 +309,7 @@ class Wget(FetchMethod):
             # Some servers (FusionForge, as used on Alioth) require that the
             # optional Accept header is set.
             r.add_header("Accept", "*/*")
-            r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
+            r.add_header("User-Agent", self.user_agent)
             def add_basic_auth(login_str, request):
                 '''Adds Basic auth to http request, pass in login:password as string'''
                 import base64
@@ -404,9 +413,8 @@ class Wget(FetchMethod):
         """
-        f = tempfile.NamedTemporaryFile()
-        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
-        fetchcmd = self.basecmd
-        fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
-        try:
-            self._runwget(ud, d, fetchcmd, True)
-            fetchresult = f.read()
+        with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
+            fetchcmd = self.basecmd
+            fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
+            try:
+                self._runwget(ud, d, fetchcmd, True, workdir=workdir)
+                fetchresult = f.read()
```
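The `.tmp` suffix plus `os.rename()` above is what makes a completed download appear atomically to concurrent readers such as the mirror code. The same pattern in isolation (a standalone sketch with hypothetical names, not part of the patch):

```python
# Standalone sketch of the download-to-temp-then-rename pattern
# (hypothetical example; the patch itself drives wget, not urllib).
import os
import urllib.request

def fetch_atomic(url, localpath):
    tmppath = localpath + ".tmp"
    os.makedirs(os.path.dirname(localpath), exist_ok=True)
    urllib.request.urlretrieve(url, tmppath)
    # os.rename() is atomic on POSIX when source and destination are on
    # the same filesystem, so readers never see a partially written file
    os.rename(tmppath, localpath)
```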
bitbake/lib/bb/tests/fetch.py:

```diff
@@ -650,6 +650,58 @@ class FetcherLocalTest(FetcherTest):
         with self.assertRaises(bb.fetch2.UnpackError):
             self.fetchUnpack(['file://a;subdir=/bin/sh'])

+    def test_local_gitfetch_usehead(self):
+        # Create dummy local Git repo
+        src_dir = tempfile.mkdtemp(dir=self.tempdir,
+                                   prefix='gitfetch_localusehead_')
+        src_dir = os.path.abspath(src_dir)
+        bb.process.run("git init", cwd=src_dir)
+        bb.process.run("git commit --allow-empty -m'Dummy commit'",
+                       cwd=src_dir)
+        # Use other branch than master
+        bb.process.run("git checkout -b my-devel", cwd=src_dir)
+        bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+                       cwd=src_dir)
+        stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+        orig_rev = stdout[0].strip()
+
+        # Fetch and check revision
+        self.d.setVar("SRCREV", "AUTOINC")
+        url = "git://" + src_dir + ";protocol=file;usehead=1"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+        stdout = bb.process.run("git rev-parse HEAD",
+                                cwd=os.path.join(self.unpackdir, 'git'))
+        unpack_rev = stdout[0].strip()
+        self.assertEqual(orig_rev, unpack_rev)
+
+    def test_local_gitfetch_usehead_withname(self):
+        # Create dummy local Git repo
+        src_dir = tempfile.mkdtemp(dir=self.tempdir,
+                                   prefix='gitfetch_localusehead_')
+        src_dir = os.path.abspath(src_dir)
+        bb.process.run("git init", cwd=src_dir)
+        bb.process.run("git commit --allow-empty -m'Dummy commit'",
+                       cwd=src_dir)
+        # Use other branch than master
+        bb.process.run("git checkout -b my-devel", cwd=src_dir)
+        bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+                       cwd=src_dir)
+        stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+        orig_rev = stdout[0].strip()
+
+        # Fetch and check revision
+        self.d.setVar("SRCREV", "AUTOINC")
+        url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+        stdout = bb.process.run("git rev-parse HEAD",
+                                cwd=os.path.join(self.unpackdir, 'git'))
+        unpack_rev = stdout[0].strip()
+        self.assertEqual(orig_rev, unpack_rev)
+
 class FetcherNoNetworkTest(FetcherTest):
     def setUp(self):
         super().setUp()
```
bitbake/lib/bb/tinfoil.py:

```diff
@@ -53,6 +53,10 @@ class TinfoilDataStoreConnectorVarHistory:
     def remoteCommand(self, cmd, *args, **kwargs):
         return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)

+    def emit(self, var, oval, val, o, d):
+        ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex)
+        o.write(ret)
+
     def __getattr__(self, name):
         if not hasattr(bb.data_smart.VariableHistory, name):
             raise AttributeError("VariableHistory has no such method %s" % name)
```
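Together with the new `dataStoreConnectorVarHistCmdEmit` command added to `bb/command.py` above, this gives remote datastores a working `emit()`. A sketch of how a tinfoil client can now dump a variable's history (assumes an initialized build environment; `MACHINE` is illustrative):

```python
# Sketch: printing a variable's assignment history over tinfoil; the
# emit() call is proxied to the server via dataStoreConnectorVarHistCmdEmit.
import sys
import bb.tinfoil

with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
    tinfoil.prepare(quiet=2, config_only=True)
    d = tinfoil.config_data
    oval = d.getVar("MACHINE", expand=False)   # unexpanded value
    val = d.getVar("MACHINE")                  # expanded value
    d.varhistory.emit("MACHINE", oval, val, sys.stdout, d)
```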
bitbake/lib/bb/ui/knotty.py:

```diff
@@ -227,7 +227,9 @@ class TerminalFilter(object):
     def keepAlive(self, t):
         if not self.cuu:
-            print("Bitbake still alive (%ds)" % t)
+            print("Bitbake still alive (no events for %ds). Active tasks:" % t)
+            for t in self.helper.running_tasks:
+                print(t)
             sys.stdout.flush()

     def updateFooter(self):
@@ -597,7 +599,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
     warnings = 0
     taskfailures = []

-    printinterval = 5000
+    printintervaldelta = 10 * 60 # 10 minutes
+    printinterval = printintervaldelta
     lastprint = time.time()

     termfilter = tf(main, helper, console_handlers, params.options.quiet)
@@ -607,7 +610,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
         try:
             if (lastprint + printinterval) <= time.time():
                 termfilter.keepAlive(printinterval)
-                printinterval += 5000
+                printinterval += printintervaldelta
             event = eventHandler.waitEvent(0)
             if event is None:
                 if main.shutdown > 1:
@@ -638,7 +641,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):

         if isinstance(event, logging.LogRecord):
             lastprint = time.time()
-            printinterval = 5000
+            printinterval = printintervaldelta
             if event.levelno >= bb.msg.BBLogFormatter.ERROR:
                 errors = errors + 1
                 return_value = 1
```
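The old counter started at 5000 and was compared against `time.time()` in seconds, so the first keep-alive could not appear until well over an hour of silence had passed; the new schedule prints roughly every ten minutes and names the running tasks. A small simulation of the new arithmetic (a sketch, not part of the patch):

```python
# Simulating the new keep-alive schedule during a quiet build.
# lastprint is only reset when a log event arrives, as in knotty.
printintervaldelta = 10 * 60          # 10 minutes, matching the patch
printinterval = printintervaldelta
lastprint = 0                         # pretend the last event was at t=0

fired = []
for now in range(0, 3600):            # one quiet hour, one-second ticks
    if (lastprint + printinterval) <= now:
        fired.append(now)
        printinterval += printintervaldelta

print(fired)  # [600, 1200, 1800, 2400, 3000] -> one message per 10 minutes
```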
documentation — Yocto Project Quick Build:

```diff
@@ -222,19 +222,10 @@ an entire Linux distribution, including the toolchain, from source.
 .. tip::

    You can significantly speed up your build and guard against fetcher
-   failures by using mirrors. To use mirrors, add these lines to your
-   local.conf file in the Build directory: ::
+   failures by using mirrors. To use mirrors, add this line to your
+   ``local.conf`` file in the :term:`Build Directory`: ::

-      SSTATE_MIRRORS = "\
-      file://.* http://sstate.yoctoproject.org/dev/PATH;downloadfilename=PATH \n \
-      file://.* http://sstate.yoctoproject.org/&YOCTO_DOC_VERSION_MINUS_ONE;/PATH;downloadfilename=PATH \n \
-      file://.* http://sstate.yoctoproject.org/&YOCTO_DOC_VERSION;/PATH;downloadfilename=PATH \n \
-      "
-
-   The previous examples showed how to add sstate paths for Yocto Project
-   &YOCTO_DOC_VERSION_MINUS_ONE;, &YOCTO_DOC_VERSION;, and a development
-   area. For a complete index of sstate locations, see http://sstate.yoctoproject.org/.
+      SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"

 #. **Start the Build:** Continue with the following command to build an OS
    image for the target, which is ``core-image-sato`` in this example:
```
documentation — dev manual (multilib example):

```diff
@@ -4967,7 +4967,7 @@ configuration would be as follows:
    require conf/multilib.conf
    MULTILIBS = "multilib:lib32"
    DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
-   IMAGE_INSTALL_append = "lib32-glib-2.0"
+   IMAGE_INSTALL_append = " lib32-glib-2.0"

 This example enables an additional library named
 ``lib32`` alongside the normal target packages. When combining these
```
documentation — kernel development manual:

```diff
@@ -1100,7 +1100,7 @@ Section.
 ::

    FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
-   SRC_URI_append = "file://0001-calibrate.c-Added-some-printk-statements.patch"
+   SRC_URI_append = " file://0001-calibrate.c-Added-some-printk-statements.patch"

 The :term:`FILESEXTRAPATHS` and :term:`SRC_URI` statements
 enable the OpenEmbedded build system to find the patch file.
```
documentation — shared state mirrors section:

```diff
@@ -1986,9 +1986,7 @@ Behind the scenes, the shared state code works by looking in
 shared state files. Here is an example:
 ::

-   SSTATE_MIRRORS ?= "\
-       file://.\* http://someserver.tld/share/sstate/PATH;downloadfilename=PATH \n \
-       file://.\* file:///some/local/dir/sstate/PATH"
+   SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"

 .. note::
```
documentation/poky.yaml:

```diff
@@ -1,13 +1,13 @@
-DISTRO : "3.1.16"
+DISTRO : "3.1.19"
 DISTRO_NAME_NO_CAP : "dunfell"
 DISTRO_NAME : "Dunfell"
 DISTRO_NAME_NO_CAP_MINUS_ONE : "zeus"
-YOCTO_DOC_VERSION : "3.1.16"
+YOCTO_DOC_VERSION : "3.1.19"
 YOCTO_DOC_VERSION_MINUS_ONE : "3.0.4"
-DISTRO_REL_TAG : "yocto-3.1.16"
-DOCCONF_VERSION : "3.1.16"
+DISTRO_REL_TAG : "yocto-3.1.19"
+DOCCONF_VERSION : "3.1.19"
 BITBAKE_SERIES : "1.46"
-POKYVERSION : "23.0.16"
+POKYVERSION : "23.0.19"
 YOCTO_POKY : "poky-&DISTRO_NAME_NO_CAP;-&POKYVERSION;"
 YOCTO_DL_URL : "https://downloads.yoctoproject.org"
 YOCTO_AB_URL : "https://autobuilder.yoctoproject.org"
```
documentation — reference manual variables glossary:

```diff
@@ -3846,10 +3846,10 @@ system and gives an overview of their function and contents.
    ::

       KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
-      KERNEL_FEATURES_append = "${KERNEL_EXTRA_FEATURES}"
-      KERNEL_FEATURES_append_qemuall = "cfg/virtio.scc"
+      KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+      KERNEL_FEATURES_append_qemuall = " cfg/virtio.scc"
       KERNEL_FEATURES_append_qemux86 = " cfg/sound.scc cfg/paravirt_kvm.scc"
-      KERNEL_FEATURES_append_qemux86-64 = "cfg/sound.scc"
+      KERNEL_FEATURES_append_qemux86-64 = " cfg/sound.scc"

 :term:`KERNEL_FIT_LINK_NAME`
    The link name of the kernel flattened image tree (FIT) image. This
```
```diff
@@ -4048,7 +4048,7 @@ system and gives an overview of their function and contents.
       SRCREV_machine_core2-32-intel-common = "43b9eced9ba8a57add36af07736344dcc383f711"
       KMACHINE_core2-32-intel-common = "intel-core2-32"
       KBRANCH_core2-32-intel-common = "standard/base"
-      KERNEL_FEATURES_append_core2-32-intel-common = "${KERNEL_FEATURES_INTEL_COMMON}"
+      KERNEL_FEATURES_append_core2-32-intel-common = " ${KERNEL_FEATURES_INTEL_COMMON}"

    The ``KMACHINE`` statement says
    that the kernel understands the machine name as "intel-core2-32".
```
```diff
@@ -7542,7 +7542,7 @@ system and gives an overview of their function and contents.
    ``SYSTEMD_BOOT_CFG`` as follows:
    ::

-      SYSTEMD_BOOT_CFG ?= "${:term:`S`}/loader.conf"
+      SYSTEMD_BOOT_CFG ?= "${S}/loader.conf"

    For information on Systemd-boot, see the `Systemd-boot
    documentation <http://www.freedesktop.org/wiki/Software/systemd/systemd-boot/>`__.
```
```diff
@@ -8745,4 +8745,22 @@ system and gives an overview of their function and contents.

    The default value of ``XSERVER``, if not specified in the machine
    configuration, is "xserver-xorg xf86-video-fbdev xf86-input-evdev".

+:term:`XZ_THREADS`
+   Specifies the number of parallel threads that should be used when
+   using xz compression.
+
+   By default this scales with core count, but is never set less than 2
+   to ensure that multi-threaded mode is always used so that the output
+   file contents are deterministic. Builds will work with a value of 1
+   but the output will differ compared to the output from the compression
+   generated when more than one thread is used.
+
+   On systems where many tasks run in parallel, setting a limit to this
+   can be helpful in controlling system resource usage.
+
+:term:`XZ_MEMLIMIT`
+   Specifies the maximum memory the xz compression should use as a percentage
+   of system memory. If unconstrained the xz compressor can use large amounts of
+   memory and become problematic with parallelism elsewhere in the build.
+   "50%" has been found to be a good value.
```
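In practice both knobs are plain `local.conf` assignments, e.g. `XZ_THREADS = "4"` to cap xz parallelism on a heavily loaded builder, or `XZ_MEMLIMIT = "50%"` as the entry above recommends (both values illustrative).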
meta-poky/conf/distro/poky.conf:

```diff
@@ -1,6 +1,6 @@
 DISTRO = "poky"
 DISTRO_NAME = "Poky (Yocto Project Reference Distro)"
-DISTRO_VERSION = "3.1.16"
+DISTRO_VERSION = "3.1.19"
 DISTRO_CODENAME = "dunfell"
 SDK_VENDOR = "-pokysdk"
 SDK_VERSION = "${@d.getVar('DISTRO_VERSION').replace('snapshot-${DATE}', 'snapshot')}"
```
meta-poky/conf/local.conf.sample:

```diff
@@ -231,7 +231,7 @@ BB_DISKMON_DIRS ??= "\
 # present in the cache. It assumes you can download something faster than you can build it
 # which will depend on your network.
 #
-#SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/2.5/PATH;downloadfilename=PATH"
+#SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"

 #
 # Qemu configuration
```
meta-selftest/recipes-test/images/oe-selftest-image.bb:

```diff
@@ -1,6 +1,6 @@
 SUMMARY = "An image used during oe-selftest tests"

-IMAGE_INSTALL = "packagegroup-core-boot dropbear"
+IMAGE_INSTALL = "packagegroup-core-boot packagegroup-core-ssh-dropbear"
 IMAGE_FEATURES = "debug-tweaks"

 IMAGE_LINGUAS = " "
```
meta/classes/archiver.bbclass:

```diff
@@ -54,9 +54,10 @@ ARCHIVER_MODE[mirror] ?= "split"

 DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
 ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
-ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_ARCH = "${TARGET_SYS}"
+ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
 ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
-ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
 ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

 # When producing a combined mirror directory, allow duplicates for the case
@@ -100,6 +101,10 @@ python () {
             bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
             return

+    # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
+    if pn.startswith('gcc-source'):
+        d.setVar('ARCHIVER_ARCH', "allarch")
+
     def hasTask(task):
         return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

@@ -578,7 +583,7 @@ python do_dumpdata () {

 SSTATETASKS += "do_deploy_archives"
 do_deploy_archives () {
-    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
+    bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
 }
 python do_deploy_archives_setscene () {
     sstate_setscene(d)
```
meta/classes/base.bbclass:

```diff
@@ -122,6 +122,10 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
     tools = d.getVar(toolsvar).split()
     origbbenv = d.getVar("BB_ORIGENV", False)
     path = origbbenv.getVar("PATH")
+    # Need to ignore our own scripts directories to avoid circular links
+    for p in path.split(":"):
+        if p.endswith("/scripts"):
+            path = path.replace(p, "/ignoreme")
     bb.utils.mkdirhier(dest)
     notfound = []
     for tool in tools:
```
meta/classes/bin_package.bbclass:

```diff
@@ -30,8 +30,9 @@ bin_package_do_install () {
         bbfatal bin_package has nothing to install. Be sure the SRC_URI unpacks into S.
     fi
     cd ${S}
+    install -d ${D}${base_prefix}
     tar --no-same-owner --exclude='./patches' --exclude='./.pc' -cpf - . \
-        | tar --no-same-owner -xpf - -C ${D}
+        | tar --no-same-owner -xpf - -C ${D}${base_prefix}
 }

 FILES_${PN} = "/"
```
meta/classes/cve-check.bbclass:

```diff
@@ -34,15 +34,33 @@ CVE_CHECK_TMP_FILE ?= "${TMPDIR}/cve_check"
 CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve"
 CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary"
 CVE_CHECK_SUMMARY_FILE ?= "${CVE_CHECK_SUMMARY_DIR}/${CVE_CHECK_SUMMARY_FILE_NAME}"
+CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json"
+CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt"
+
+CVE_CHECK_LOG_JSON ?= "${T}/cve.json"

 CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
 CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
+CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
 CVE_CHECK_MANIFEST ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve"
+CVE_CHECK_MANIFEST_JSON ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.json"
 CVE_CHECK_COPY_FILES ??= "1"
 CVE_CHECK_CREATE_MANIFEST ??= "1"

 # Report Patched or Ignored/Whitelisted CVEs
 CVE_CHECK_REPORT_PATCHED ??= "1"

+CVE_CHECK_SHOW_WARNINGS ??= "1"
+
+# Provide text output
+CVE_CHECK_FORMAT_TEXT ??= "1"
+
+# Provide JSON output - disabled by default for backward compatibility
+CVE_CHECK_FORMAT_JSON ??= "0"
+
+# Check for packages without CVEs (no issues or missing product name)
+CVE_CHECK_COVERAGE ??= "1"
+
 # Whitelist for packages (PN)
 CVE_CHECK_PN_WHITELIST ?= ""
@@ -63,9 +81,31 @@ CVE_CHECK_LAYER_INCLUDELIST ??= ""
 # set to "alphabetical" for version using single alphabetical character as increment release
 CVE_VERSION_SUFFIX ??= ""

+def generate_json_report(d, out_path, link_path):
+    if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")):
+        import json
+        from oe.cve_check import cve_check_merge_jsons, update_symlinks
+
+        bb.note("Generating JSON CVE summary")
+        index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
+        summary = {"version":"1", "package": []}
+        with open(index_file) as f:
+            filename = f.readline()
+            while filename:
+                with open(filename.rstrip()) as j:
+                    data = json.load(j)
+                    cve_check_merge_jsons(summary, data)
+                filename = f.readline()
+
+        with open(out_path, "w") as f:
+            json.dump(summary, f, indent=2)
+
+        update_symlinks(out_path, link_path)
+
 python cve_save_summary_handler () {
     import shutil
     import datetime
+    from oe.cve_check import update_symlinks

     cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
@@ -78,13 +118,15 @@ python cve_save_summary_handler () {

     if os.path.exists(cve_tmp_file):
         shutil.copyfile(cve_tmp_file, cve_summary_file)
+        cvefile_link = os.path.join(cvelogpath, cve_summary_name)
+        update_symlinks(cve_summary_file, cvefile_link)
+        bb.plain("Complete CVE report summary created at: %s" % cvefile_link)

-    if cve_summary_file and os.path.exists(cve_summary_file):
-        cvefile_link = os.path.join(cvelogpath, cve_summary_name)
-
-        if os.path.exists(os.path.realpath(cvefile_link)):
-            os.remove(cvefile_link)
-        os.symlink(os.path.basename(cve_summary_file), cvefile_link)
+        if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
+            json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
+            json_summary_name = os.path.join(cvelogpath, "%s-%s.json" % (cve_summary_name, timestamp))
+            generate_json_report(d, json_summary_name, json_summary_link_name)
+            bb.plain("Complete CVE JSON report summary created at: %s" % json_summary_link_name)
 }

 addhandler cve_save_summary_handler
@@ -94,22 +136,23 @@ python do_cve_check () {
     """
     Check recipe for patched and unpatched CVEs
     """
+    from oe.cve_check import get_patched_cves

     if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
         try:
-            patched_cves = get_patches_cves(d)
+            patched_cves = get_patched_cves(d)
         except FileNotFoundError:
             bb.fatal("Failure in searching patches")
-        whitelisted, patched, unpatched = check_cves(d, patched_cves)
-        if patched or unpatched:
-            cve_data = get_cve_info(d, patched + unpatched)
-            cve_write_data(d, patched, unpatched, whitelisted, cve_data)
+        whitelisted, patched, unpatched, status = check_cves(d, patched_cves)
+        if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
+            cve_data = get_cve_info(d, patched + unpatched + whitelisted)
+            cve_write_data(d, patched, unpatched, whitelisted, cve_data, status)
     else:
         bb.note("No CVE database found, skipping CVE check")

 }

-addtask cve_check before do_build after do_fetch
+addtask cve_check before do_build
 do_cve_check[depends] = "cve-update-db-native:do_fetch"
 do_cve_check[nostamp] = "1"
@@ -118,10 +161,11 @@ python cve_check_cleanup () {
     Delete the file used to gather all the CVE information.
     """
     bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE"))
+    bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH"))
 }

 addhandler cve_check_cleanup
-cve_check_cleanup[eventmask] = "bb.cooker.CookerExit"
+cve_check_cleanup[eventmask] = "bb.event.BuildCompleted"

 python cve_check_write_rootfs_manifest () {
     """
```
```diff
@@ -129,92 +173,80 @@ python cve_check_write_rootfs_manifest () {
     """

     import shutil
+    import json
     from oe.rootfs import image_list_installed_packages
+    from oe.cve_check import cve_check_merge_jsons, update_symlinks

     if d.getVar("CVE_CHECK_COPY_FILES") == "1":
         deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
         if os.path.exists(deploy_file):
             bb.utils.remove(deploy_file)
+        deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
+        if os.path.exists(deploy_file_json):
+            bb.utils.remove(deploy_file_json)

-    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
-        bb.note("Writing rootfs CVE manifest")
-        deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
-        link_name = d.getVar("IMAGE_LINK_NAME")
+    # Create a list of relevant recipies
+    recipies = set()
+    for pkg in list(image_list_installed_packages(d)):
+        pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
+                                'runtime-reverse', pkg)
+        pkg_data = oe.packagedata.read_pkgdatafile(pkg_info)
+        recipies.add(pkg_data["PN"])
+
+    bb.note("Writing rootfs CVE manifest")
+    deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
+    link_name = d.getVar("IMAGE_LINK_NAME")
+
+    json_data = {"version":"1", "package": []}
+    text_data = ""
+    enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1"
+    enable_text = d.getVar("CVE_CHECK_FORMAT_TEXT") == "1"
+
+    save_pn = d.getVar("PN")
+
+    for pkg in recipies:
+        # To be able to use the CVE_CHECK_RECIPE_FILE variable we have to evaluate
+        # it with the different PN names set each time.
+        d.setVar("PN", pkg)
+        if enable_text:
+            pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE")
+            if os.path.exists(pkgfilepath):
+                with open(pkgfilepath) as pfile:
+                    text_data += pfile.read()
+
+        if enable_json:
+            pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
+            if os.path.exists(pkgfilepath):
+                with open(pkgfilepath) as j:
+                    data = json.load(j)
+                    cve_check_merge_jsons(json_data, data)
+
+    d.setVar("PN", save_pn)
+
+    if enable_text:
+        link_path = os.path.join(deploy_dir, "%s.cve" % link_name)
         manifest_name = d.getVar("CVE_CHECK_MANIFEST")
-        cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")

-        bb.utils.mkdirhier(os.path.dirname(manifest_name))
-        shutil.copyfile(cve_tmp_file, manifest_name)
+        with open(manifest_name, "w") as f:
+            f.write(text_data)

-        if manifest_name and os.path.exists(manifest_name):
-            manifest_link = os.path.join(deploy_dir, "%s.cve" % link_name)
-            # If we already have another manifest, update symlinks
-            if os.path.exists(os.path.realpath(manifest_link)):
-                os.remove(manifest_link)
-            os.symlink(os.path.basename(manifest_name), manifest_link)
-            bb.plain("Image CVE report stored in: %s" % manifest_name)
+        update_symlinks(manifest_name, link_path)
+        bb.plain("Image CVE report stored in: %s" % manifest_name)
+
+    if enable_json:
+        link_path = os.path.join(deploy_dir, "%s.json" % link_name)
+        manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")
+
+        with open(manifest_name, "w") as f:
+            json.dump(json_data, f, indent=2)
+
+        update_symlinks(manifest_name, link_path)
+        bb.plain("Image CVE JSON report stored in: %s" % manifest_name)
 }

 ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
 do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
+do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"

-def get_patches_cves(d):
-    """
-    Get patches that solve CVEs using the "CVE: " tag.
-    """
-
-    import re
-
-    pn = d.getVar("PN")
-    cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
-
-    # Matches the last "CVE-YYYY-ID" in the file name, also if written
-    # in lowercase. Possible to have multiple CVE IDs in a single
-    # file name, but only the last one will be detected from the file name.
-    # However, patch files contents addressing multiple CVE IDs are supported
-    # (cve_match regular expression)
-
-    cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
-
-    patched_cves = set()
-    bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
-    for url in src_patches(d):
-        patch_file = bb.fetch.decodeurl(url)[2]
-
-        if not os.path.isfile(patch_file):
-            bb.error("File Not found: %s" % patch_file)
-            raise FileNotFoundError
-
-        # Check patch file name for CVE ID
-        fname_match = cve_file_name_match.search(patch_file)
-        if fname_match:
-            cve = fname_match.group(1).upper()
-            patched_cves.add(cve)
-            bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))
-
-        with open(patch_file, "r", encoding="utf-8") as f:
-            try:
-                patch_text = f.read()
-            except UnicodeDecodeError:
-                bb.debug(1, "Failed to read patch %s using UTF-8 encoding"
-                        " trying with iso8859-1" % patch_file)
-                f.close()
-                with open(patch_file, "r", encoding="iso8859-1") as f:
-                    patch_text = f.read()
-
-        # Search for one or more "CVE: " lines
-        text_match = False
-        for match in cve_match.finditer(patch_text):
-            # Get only the CVEs without the "CVE: " tag
-            cves = patch_text[match.start()+5:match.end()]
-            for cve in cves.split():
-                bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
-                patched_cves.add(cve)
-                text_match = True
-
-        if not fname_match and not text_match:
-            bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
-
-    return patched_cves

 def check_cves(d, patched_cves):
     """
```
```diff
@@ -227,17 +259,20 @@ def check_cves(d, patched_cves):
     suffix = d.getVar("CVE_VERSION_SUFFIX")

     cves_unpatched = []
+    cves_ignored = []
+    cves_status = []
+    cves_in_recipe = False
     # CVE_PRODUCT can contain more than one product (eg. curl/libcurl)
     products = d.getVar("CVE_PRODUCT").split()
     # If this has been unset then we're not scanning for CVEs here (for example, image recipes)
     if not products:
-        return ([], [], [])
+        return ([], [], [], [])
     pv = d.getVar("CVE_VERSION").split("+git")[0]

     # If the recipe has been whitelisted we return empty lists
     if pn in d.getVar("CVE_CHECK_PN_WHITELIST").split():
         bb.note("Recipe has been whitelisted, skipping check")
-        return ([], [], [])
+        return ([], [], [], [])

     cve_whitelist = d.getVar("CVE_CHECK_WHITELIST").split()
@@ -247,6 +282,7 @@ def check_cves(d, patched_cves):

     # For each of the known product names (e.g. curl has CPEs using curl and libcurl)...
     for product in products:
+        cves_in_product = False
         if ":" in product:
             vendor, product = product.split(":", 1)
         else:
@@ -258,17 +294,25 @@ def check_cves(d, patched_cves):

             if cve in cve_whitelist:
                 bb.note("%s-%s has been whitelisted for %s" % (product, pv, cve))
                 # TODO: this should be in the report as 'whitelisted'
                 patched_cves.add(cve)
+                cves_ignored.append(cve)
                 continue
             elif cve in patched_cves:
                 bb.note("%s has been patched" % (cve))
                 continue
+            # Write status once only for each product
+            if not cves_in_product:
+                cves_status.append([product, True])
+                cves_in_product = True
+                cves_in_recipe = True

             vulnerable = False
+            ignored = False
+
             for row in conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor)):
                 (_, _, _, version_start, operator_start, version_end, operator_end) = row
                 #bb.debug(2, "Evaluating row " + str(row))
+                if cve in cve_whitelist:
+                    ignored = True

                 if (operator_start == '=' and pv == version_start) or version_start == '-':
                     vulnerable = True
@@ -301,18 +345,25 @@ def check_cves(d, patched_cves):
                 vulnerable = vulnerable_start or vulnerable_end

             if vulnerable:
-                bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
-                cves_unpatched.append(cve)
+                if ignored:
+                    bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv))
+                    cves_ignored.append(cve)
+                else:
+                    bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
+                    cves_unpatched.append(cve)
                 break

             if not vulnerable:
                 bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve))
                 # TODO: not patched but not vulnerable
                 patched_cves.add(cve)

+        if not cves_in_product:
+            bb.note("No CVE records found for product %s, pn %s" % (product, pn))
+            cves_status.append([product, False])
+
     conn.close()

-    return (list(cve_whitelist), list(patched_cves), cves_unpatched)
+    return (list(cves_ignored), list(patched_cves), cves_unpatched, cves_status)
@@ -337,13 +388,12 @@ def get_cve_info(d, cves):
     conn.close()
     return cve_data

-def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
+def cve_write_data_text(d, patched, unpatched, whitelisted, cve_data):
     """
     Write CVE information in WORKDIR; and to CVE_CHECK_DIR, and
     CVE manifest if enabled.
     """
-
     cve_file = d.getVar("CVE_CHECK_LOG")
     fdir_name  = d.getVar("FILE_DIRNAME")
     layer = fdir_name.split("/")[-3]
@@ -351,12 +401,18 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
     include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
     exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()

+    report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
+
     if exclude_layers and layer in exclude_layers:
         return

     if include_layers and layer not in include_layers:
         return

+    # Early exit, the text format does not report packages without CVEs
+    if not patched+unpatched+whitelisted:
+        return
+
     nvd_link = "https://nvd.nist.gov/vuln/detail/"
     write_string = ""
     unpatched_cves = []
@@ -364,13 +420,16 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):

     for cve in sorted(cve_data):
         is_patched = cve in patched
-        if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"):
+        is_ignored = cve in whitelisted
+
+        if (is_patched or is_ignored) and not report_all:
             continue
+
         write_string += "LAYER: %s\n" % layer
         write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
         write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
         write_string += "CVE: %s\n" % cve
-        if cve in whitelisted:
+        if is_ignored:
             write_string += "CVE STATUS: Whitelisted\n"
         elif is_patched:
             write_string += "CVE STATUS: Patched\n"
```
```diff
@@ -383,23 +442,138 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
         write_string += "VECTOR: %s\n" % cve_data[cve]["vector"]
         write_string += "MORE INFORMATION: %s%s\n\n" % (nvd_link, cve)

-    if unpatched_cves:
+    if unpatched_cves and d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
         bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file))

-    if write_string:
-        with open(cve_file, "w") as f:
-            bb.note("Writing file %s with CVE information" % cve_file)
-            f.write(write_string)
-
-        if d.getVar("CVE_CHECK_COPY_FILES") == "1":
-            deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
-            bb.utils.mkdirhier(os.path.dirname(deploy_file))
-            with open(deploy_file, "w") as f:
-                f.write(write_string)
-
-        if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
-            cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
-            bb.utils.mkdirhier(cvelogpath)
-
-            with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
-                f.write("%s" % write_string)
+    with open(cve_file, "w") as f:
+        bb.note("Writing file %s with CVE information" % cve_file)
+        f.write(write_string)
+
+    if d.getVar("CVE_CHECK_COPY_FILES") == "1":
+        deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
+        bb.utils.mkdirhier(os.path.dirname(deploy_file))
+        with open(deploy_file, "w") as f:
+            f.write(write_string)
+
+    if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
+        cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
+        bb.utils.mkdirhier(cvelogpath)
+
+        with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
+            f.write("%s" % write_string)
+
+def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file):
+    """
+    Write CVE information in the JSON format: to WORKDIR; and to
+    CVE_CHECK_DIR, if CVE manifest if enabled, write fragment
+    files that will be assembled at the end in cve_check_write_rootfs_manifest.
+    """
+
+    import json
+
+    write_string = json.dumps(output, indent=2)
+    with open(direct_file, "w") as f:
+        bb.note("Writing file %s with CVE information" % direct_file)
+        f.write(write_string)
+
+    if d.getVar("CVE_CHECK_COPY_FILES") == "1":
+        bb.utils.mkdirhier(os.path.dirname(deploy_file))
+        with open(deploy_file, "w") as f:
+            f.write(write_string)
+
+    if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
+        cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
+        index_path = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
+        bb.utils.mkdirhier(cvelogpath)
+        fragment_file = os.path.basename(deploy_file)
+        fragment_path = os.path.join(cvelogpath, fragment_file)
+        with open(fragment_path, "w") as f:
+            f.write(write_string)
+        with open(index_path, "a+") as f:
+            f.write("%s\n" % fragment_path)
+
+def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
+    """
+    Prepare CVE data for the JSON format, then write it.
+    """
+
+    output = {"version":"1", "package": []}
+    nvd_link = "https://nvd.nist.gov/vuln/detail/"
+
+    fdir_name  = d.getVar("FILE_DIRNAME")
+    layer = fdir_name.split("/")[-3]
+
+    include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
+    exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
+
+    report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
+
+    if exclude_layers and layer in exclude_layers:
+        return
+
+    if include_layers and layer not in include_layers:
+        return
+
+    unpatched_cves = []
+
+    product_data = []
+    for s in cve_status:
+        p = {"product": s[0], "cvesInRecord": "Yes"}
+        if s[1] == False:
+            p["cvesInRecord"] = "No"
+        product_data.append(p)
+
+    package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV"))
+    package_data = {
+        "name" : d.getVar("PN"),
+        "layer" : layer,
+        "version" : package_version,
+        "products": product_data
+    }
+    cve_list = []
+
+    for cve in sorted(cve_data):
+        is_patched = cve in patched
+        is_ignored = cve in ignored
+        status = "Unpatched"
+        if (is_patched or is_ignored) and not report_all:
+            continue
+        if is_ignored:
+            status = "Ignored"
+        elif is_patched:
+            status = "Patched"
+        else:
+            # default value of status is Unpatched
+            unpatched_cves.append(cve)
+
+        issue_link = "%s%s" % (nvd_link, cve)
+
+        cve_item = {
+            "id" : cve,
+            "summary" : cve_data[cve]["summary"],
+            "scorev2" : cve_data[cve]["scorev2"],
+            "scorev3" : cve_data[cve]["scorev3"],
+            "vector" : cve_data[cve]["vector"],
+            "status" : status,
+            "link": issue_link
+        }
+        cve_list.append(cve_item)
+
+    package_data["issue"] = cve_list
+    output["package"].append(package_data)
+
+    direct_file = d.getVar("CVE_CHECK_LOG_JSON")
+    deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
+    manifest_file = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON")
+
+    cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file)
+
+def cve_write_data(d, patched, unpatched, ignored, cve_data, status):
+    """
+    Write CVE data in each enabled format.
+    """
+
+    if d.getVar("CVE_CHECK_FORMAT_TEXT") == "1":
+        cve_write_data_text(d, patched, unpatched, ignored, cve_data)
+    if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
+        cve_write_data_json(d, patched, unpatched, ignored, cve_data, status)
```
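To make the new JSON format concrete, the `output` dictionary assembled by `cve_write_data_json()` above has the following shape (an illustrative example with made-up values; the keys are exactly those set in the code):

```python
# Illustrative example (made-up values) of the structure produced by
# cve_write_data_json() and later merged by cve_check_merge_jsons().
example_output = {
    "version": "1",
    "package": [{
        "name": "curl",
        "layer": "meta",
        "version": "7.69.1",
        "products": [{"product": "curl", "cvesInRecord": "Yes"},
                     {"product": "libcurl", "cvesInRecord": "Yes"}],
        "issue": [{
            "id": "CVE-2021-22898",
            "summary": "made-up summary text for illustration",
            "scorev2": "2.6",
            "scorev3": "3.1",
            "vector": "NETWORK",
            "status": "Patched",
            "link": "https://nvd.nist.gov/vuln/detail/CVE-2021-22898",
        }],
    }],
}
```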
meta/classes/devshell.bbclass:

```diff
@@ -2,6 +2,8 @@ inherit terminal

 DEVSHELL = "${SHELL}"

+PATH:prepend:task-devshell = "${COREBASE}/scripts/git-intercept:"
+
 python do_devshell () {
     if d.getVarFlag("do_devshell", "manualfakeroot"):
        d.prependVar("DEVSHELL", "pseudo ")
```
meta/classes/image.bbclass:

```diff
@@ -124,7 +124,7 @@ python () {
 def rootfs_variables(d):
     from oe.rootfs import variable_depends
     variables = ['IMAGE_DEVICE_TABLE','IMAGE_DEVICE_TABLES','BUILD_IMAGES_FROM_FEEDS','IMAGE_TYPES_MASKED','IMAGE_ROOTFS_ALIGNMENT','IMAGE_OVERHEAD_FACTOR','IMAGE_ROOTFS_SIZE','IMAGE_ROOTFS_EXTRA_SPACE',
-                 'IMAGE_ROOTFS_MAXSIZE','IMAGE_NAME','IMAGE_LINK_NAME','IMAGE_MANIFEST','DEPLOY_DIR_IMAGE','IMAGE_FSTYPES','IMAGE_INSTALL_COMPLEMENTARY','IMAGE_LINGUAS', 'IMAGE_LINGUAS_COMPLEMENTARY',
+                 'IMAGE_ROOTFS_MAXSIZE','IMAGE_NAME','IMAGE_LINK_NAME','IMAGE_MANIFEST','DEPLOY_DIR_IMAGE','IMAGE_FSTYPES','IMAGE_INSTALL_COMPLEMENTARY','IMAGE_LINGUAS', 'IMAGE_LINGUAS_COMPLEMENTARY', 'IMAGE_LOCALES_ARCHIVE',
                  'MULTILIBRE_ALLOW_REP','MULTILIB_TEMP_ROOTFS','MULTILIB_VARIANTS','MULTILIBS','ALL_MULTILIB_PACKAGE_ARCHS','MULTILIB_GLOBAL_VARIANTS','BAD_RECOMMENDATIONS','NO_RECOMMENDATIONS',
                  'PACKAGE_ARCHS','PACKAGE_CLASSES','TARGET_VENDOR','TARGET_ARCH','TARGET_OS','OVERRIDES','BBEXTENDVARIANT','FEED_DEPLOYDIR_BASE_URI','INTERCEPT_DIR','USE_DEVFS',
                  'CONVERSIONTYPES', 'IMAGE_GEN_DEBUGFS', 'ROOTFS_RO_UNNEEDED', 'IMGDEPLOYDIR', 'PACKAGE_EXCLUDE_COMPLEMENTARY', 'REPRODUCIBLE_TIMESTAMP_ROOTFS', 'IMAGE_INSTALL_DEBUGFS']
@@ -176,6 +176,9 @@ IMAGE_LINGUAS ?= "de-de fr-fr en-gb"

 LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"

+# per default create a locale archive
+IMAGE_LOCALES_ARCHIVE ?= '1'
+
 # Prefer image, but use the fallback files for lookups if the image ones
 # aren't yet available.
 PSEUDO_PASSWD = "${IMAGE_ROOTFS}:${STAGING_DIR_NATIVE}"
```
meta/classes/insane.bbclass:

```diff
@@ -452,12 +452,14 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
     """
     Check for build paths inside target files and error if not found in the whitelist
     """
+    import stat
     # Ignore .debug files, not interesting
     if path.find(".debug") != -1:
         return

-    # Ignore symlinks
-    if os.path.islink(path):
+    # Ignore symlinks/devs/fifos
+    mode = os.lstat(path).st_mode
+    if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode):
         return

     tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
@@ -945,7 +947,7 @@ def package_qa_check_host_user(path, name, d, elf, messages):

     dest = d.getVar('PKGDEST')
     pn = d.getVar('PN')
-    home = os.path.join(dest, 'home')
+    home = os.path.join(dest, name, 'home')
     if path == home or path.startswith(home + os.sep):
         return
```
meta/classes/kernel-arch.bbclass:

```diff
@@ -61,7 +61,7 @@ HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"
 TARGET_AR_KERNEL_ARCH ?= ""
 HOST_AR_KERNEL_ARCH ?= "${TARGET_AR_KERNEL_ARCH}"

-KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_KERNEL_ARCH} -fuse-ld=bfd ${DEBUG_PREFIX_MAP} -fdebug-prefix-map=${STAGING_KERNEL_DIR}=${KERNEL_SRC_PATH}"
+KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_KERNEL_ARCH} -fuse-ld=bfd ${DEBUG_PREFIX_MAP} -fdebug-prefix-map=${STAGING_KERNEL_DIR}=${KERNEL_SRC_PATH} -fdebug-prefix-map=${STAGING_KERNEL_BUILDDIR}=${KERNEL_SRC_PATH}"
 KERNEL_LD = "${CCACHE}${HOST_PREFIX}ld.bfd ${HOST_LD_KERNEL_ARCH}"
 KERNEL_AR = "${CCACHE}${HOST_PREFIX}ar ${HOST_AR_KERNEL_ARCH}"
 TOOLCHAIN = "gcc"
```
meta/classes/kernel-fitimage.bbclass:

```diff
@@ -56,6 +56,9 @@ FIT_HASH_ALG ?= "sha256"
 # fitImage Signature Algo
 FIT_SIGN_ALG ?= "rsa2048"

+# fitImage Padding Algo
+FIT_PAD_ALG ?= "pkcs-1.5"
+
 #
 # Emit the fitImage ITS header
 #
@@ -250,6 +253,7 @@ fitimage_emit_section_config() {

     conf_csum="${FIT_HASH_ALG}"
     conf_sign_algo="${FIT_SIGN_ALG}"
+    conf_padding_algo="${FIT_PAD_ALG}"
     if [ "${UBOOT_SIGN_ENABLE}" = "1" ] ; then
         conf_sign_keyname="${UBOOT_SIGN_KEYNAME}"
     fi
@@ -333,6 +337,7 @@ EOF
                 signature-1 {
                         algo = "${conf_csum},${conf_sign_algo}";
                         key-name-hint = "${conf_sign_keyname}";
+                        padding = "${conf_padding_algo}";
                         ${sign_line}
                 };
 EOF
```
meta/classes/kernel-yocto.bbclass:

```diff
@@ -269,6 +269,8 @@ do_kernel_metadata() {
         bbnote "KERNEL_FEATURES: $KERNEL_FEATURES_FINAL"
         bbnote "Final scc/cfg list: $sccs_defconfig $bsp_definition $sccs $KERNEL_FEATURES_FINAL"
     fi
+
+    set -e
 }

 do_patch() {
@@ -298,6 +300,8 @@ do_patch() {
             fi
         done
     fi
+
+    set -e
 }

 do_kernel_checkout() {
@@ -356,6 +360,8 @@ do_kernel_checkout() {
         git commit -q -m "baseline commit: creating repo for ${PN}-${PV}"
         git clean -d -f
     fi
+
+    set -e
 }
 do_kernel_checkout[dirs] = "${S}"
@@ -523,6 +529,8 @@ do_validate_branches() {
             kgit-s2q --clean
         fi
     fi
+
+    set -e
 }

 OE_TERMINAL_EXPORTS += "KBUILD_OUTPUT"
```
meta/classes/license.bbclass:

```diff
@@ -91,17 +91,17 @@ def copy_license_files(lic_files_paths, destdir):
                 os.link(src, dst)
             except OSError as err:
                 if err.errno == errno.EXDEV:
-                    # Copy license files if hard-link is not possible even if st_dev is the
+                    # Copy license files if hardlink is not possible even if st_dev is the
                     # same on source and destination (docker container with device-mapper?)
                     canlink = False
                 else:
                     raise
-            # Only chown if we did hardling, and, we're running under pseudo
+            # Only chown if we did hardlink and we're running under pseudo
             if canlink and os.environ.get('PSEUDO_DISABLED') == '0':
                 os.chown(dst,0,0)
         if not canlink:
-            begin_idx = int(beginline)-1 if beginline is not None else None
-            end_idx = int(endline) if endline is not None else None
+            begin_idx = max(0, int(beginline) - 1) if beginline is not None else None
+            end_idx = max(0, int(endline)) if endline is not None else None
             if begin_idx is None and end_idx is None:
                 shutil.copyfile(src, dst)
             else:
```
meta/classes/rootfs-postcommands.bbclass:

```diff
@@ -267,9 +267,10 @@ python write_image_manifest () {
     if os.path.exists(manifest_name) and link_name:
         manifest_link = deploy_dir + "/" + link_name + ".manifest"
-        if os.path.lexists(manifest_link):
-            os.remove(manifest_link)
-        os.symlink(os.path.basename(manifest_name), manifest_link)
+        if manifest_link != manifest_name:
+            if os.path.lexists(manifest_link):
+                os.remove(manifest_link)
+            os.symlink(os.path.basename(manifest_name), manifest_link)
 }

 # Can be used to create /etc/timestamp during image construction to give a reasonably
@@ -304,7 +305,7 @@ rootfs_trim_schemas () {
 }

 rootfs_check_host_user_contaminated () {
-    contaminated="${WORKDIR}/host-user-contaminated.txt"
+    contaminated="${S}/host-user-contaminated.txt"
     HOST_USER_UID="$(PSEUDO_UNLOAD=1 id -u)"
     HOST_USER_GID="$(PSEUDO_UNLOAD=1 id -g)"
@@ -339,9 +340,10 @@ python write_image_test_data() {
     if os.path.exists(testdata_name) and link_name:
         testdata_link = os.path.join(deploy_dir, "%s.testdata.json" % link_name)
-        if os.path.lexists(testdata_link):
-            os.remove(testdata_link)
-        os.symlink(os.path.basename(testdata_name), testdata_link)
+        if testdata_link != testdata_name:
+            if os.path.lexists(testdata_link):
+                os.remove(testdata_link)
+            os.symlink(os.path.basename(testdata_name), testdata_link)
 }
 write_image_test_data[vardepsexclude] += "TOPDIR"
```
meta/conf/distro/include/cve-extra-exclusions.inc:

```diff
@@ -53,24 +53,23 @@ CVE-2015-4778 CVE-2015-4779 CVE-2015-4780 CVE-2015-4781 CVE-2015-4782 CVE-2015-4
 CVE-2015-4785 CVE-2015-4786 CVE-2015-4787 CVE-2015-4788 CVE-2015-4789 CVE-2015-4790 CVE-2016-0682 \
 CVE-2016-0689 CVE-2016-0692 CVE-2016-0694 CVE-2016-3418 CVE-2020-2981"

-#### CPE update pending ####
-
-# groff:groff-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2000-0803
-# Appears it was fixed in https://git.savannah.gnu.org/cgit/groff.git/commit/?id=07f95f1674217275ed4612f1dcaa95a88435c6a7
-# so from 1.17 onwards. Reported to the database for update by RP 2021/5/9. Update accepted 2021/5/10.
-#CVE_CHECK_WHITELIST += "CVE-2000-0803"
-
-
 #### Upstream still working on ####

 # qemu:qemu-native:qemu-system-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2021-20255
 # There was a proposed patch https://lists.gnu.org/archive/html/qemu-devel/2021-02/msg06098.html
-# however qemu maintainers are sure the patch is incorrect and should not be applied.
+# qemu maintainers say the patch is incorrect and should not be applied
+# Ignore from OE's perspectivee as the issue is of low impact, at worst sitting in an infinite loop rather than exploitable
 CVE_CHECK_WHITELIST += "CVE-2021-20255"

-# wget https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2021-31879
-# https://mail.gnu.org/archive/html/bug-wget/2021-02/msg00002.html
-# No response upstream as of 2021/5/12
+# qemu:qemu-native:qemu-system-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-12067
+# There was a proposed patch but rejected by upstream qemu. It is unclear if the issue can
+# still be reproduced or where exactly any bug is.
+# Ignore from OE's perspective as we'll pick up any fix when upstream accepts one.
+CVE_CHECK_WHITELIST += "CVE-2019-12067"
+
+# nasm:nasm-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2020-18974
+# It is a fuzzing related buffer overflow. It is of low impact since most devices
+# wouldn't expose an assembler. The upstream is inactive and there is little to be
+# done about the bug, ignore from an OE perspective.
+CVE_CHECK_WHITELIST += "CVE-2020-18974"
```
@@ -7,9 +7,9 @@
#

UNINATIVE_MAXGLIBCVERSION = "2.35"
-UNINATIVE_VERSION = "3.5"
+UNINATIVE_VERSION = "3.6"

UNINATIVE_URL ?= "http://downloads.yoctoproject.org/releases/uninative/${UNINATIVE_VERSION}/"
-UNINATIVE_CHECKSUM[aarch64] ?= "6de0771bd21e0fcb5e80388e5b561a8023b24083bcbf46e056a089982aff75d7"
-UNINATIVE_CHECKSUM[i686] ?= "8c8745becbfa1c341bae839c7eab56ddf17ce36c303bcd73d3b2f2f788b631c2"
-UNINATIVE_CHECKSUM[x86_64] ?= "e8047a5748e6f266165da141eb6d08b23674f30e477b0e5505b6403d50fbc4b2"
+UNINATIVE_CHECKSUM[aarch64] ?= "d64831cf2792c8e470c2e42230660e1a8e5de56a579cdd59978791f663c2f3ed"
+UNINATIVE_CHECKSUM[i686] ?= "2f0ee9b66b1bb2c85e2b592fb3c9c7f5d77399fa638d74961330cdb8de34ca3b"
+UNINATIVE_CHECKSUM[x86_64] ?= "9bfc4c970495b3716b2f9e52c4df9f968c02463a9a95000f6657fbc3fde1f098"
@@ -63,3 +63,112 @@ def _cmpkey(release, patch_l, pre_l, pre_v):
    else:
        _pre = float(pre_v) if pre_v else float('-inf')
    return _release, _patch, _pre

def cve_check_merge_jsons(output, data):
    """
    Merge the data in the "package" property of data into the main output file.
    """
    if output["version"] != data["version"]:
        bb.error("Version mismatch when merging JSON outputs")
        return

    for product in output["package"]:
        if product["name"] == data["package"][0]["name"]:
            bb.error("Error adding the same package twice")
            return

    output["package"].append(data["package"][0])

def update_symlinks(target_path, link_path):
    """
    Update a symbolic link link_path to point to target_path.
    Remove and recreate the link if it exists and differs.
    """
    if link_path != target_path and os.path.exists(target_path):
        if os.path.exists(os.path.realpath(link_path)):
            os.remove(link_path)
        os.symlink(os.path.basename(target_path), link_path)

def get_patched_cves(d):
    """
    Get patches that solve CVEs using the "CVE: " tag.
    """

    import re
    import oe.patch

    pn = d.getVar("PN")
    cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")

    # Matches the last "CVE-YYYY-ID" in the file name, also if written
    # in lowercase. Possible to have multiple CVE IDs in a single
    # file name, but only the last one will be detected from the file name.
    # However, patch file contents addressing multiple CVE IDs are supported
    # (cve_match regular expression)

    cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")

    patched_cves = set()
    bb.debug(2, "Looking for patches that solve CVEs for %s" % pn)
    for url in oe.patch.src_patches(d):
        patch_file = bb.fetch.decodeurl(url)[2]

        # Remote compressed patches may not be unpacked, so silently ignore them
        if not os.path.isfile(patch_file):
            bb.warn("%s does not exist, cannot extract CVE list" % patch_file)
            continue

        # Check patch file name for CVE ID
        fname_match = cve_file_name_match.search(patch_file)
        if fname_match:
            cve = fname_match.group(1).upper()
            patched_cves.add(cve)
            bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))

        with open(patch_file, "r", encoding="utf-8") as f:
            try:
                patch_text = f.read()
            except UnicodeDecodeError:
                bb.debug(1, "Failed to read patch %s using UTF-8 encoding"
                        " trying with iso8859-1" %  patch_file)
                f.close()
                with open(patch_file, "r", encoding="iso8859-1") as f:
                    patch_text = f.read()

        # Search for one or more "CVE: " lines
        text_match = False
        for match in cve_match.finditer(patch_text):
            # Get only the CVEs without the "CVE: " tag
            cves = patch_text[match.start()+5:match.end()]
            for cve in cves.split():
                bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
                patched_cves.add(cve)
                text_match = True

        if not fname_match and not text_match:
            bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)

    return patched_cves


def get_cpe_ids(cve_product, version):
    """
    Get list of CPE identifiers for the given product and version
    """

    version = version.split("+git")[0]

    cpe_ids = []
    for product in cve_product.split():
        # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not,
        # use wildcard for vendor.
        if ":" in product:
            vendor, product = product.split(":", 1)
        else:
            vendor = "*"

        cpe_id = f'cpe:2.3:a:{vendor}:{product}:{version}:*:*:*:*:*:*:*'
        cpe_ids.append(cpe_id)

    return cpe_ids
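A quick illustration of the helpers this hunk adds (hypothetical session; the printed values follow from the code above, not from a real NVD lookup):

    from oe.cve_check import get_cpe_ids

    # "+git..." version suffixes are stripped, vendor defaults to the wildcard
    print(get_cpe_ids("openssl", "1.1.1k+gitAUTOINC"))
    # ['cpe:2.3:a:*:openssl:1.1.1k:*:*:*:*:*:*:*']

    # a "vendor:product" CVE_PRODUCT keeps its vendor part
    print(get_cpe_ids("gnu:wget", "1.20.3"))
    # ['cpe:2.3:a:gnu:wget:1.20.3:*:*:*:*:*:*:*']

get_patched_cves() feeds cve-check from two sources: the patch file name (only the last CVE-YYYY-NNNN in the name is detected) and any "CVE:" tag lines inside the patch text, so a single patch can mark several CVEs as patched.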
@@ -611,12 +611,13 @@ class PackageManager(object, metaclass=ABCMeta):
                        "'%s' returned %d:\n%s" %
                        (' '.join(cmd), e.returncode, e.output.decode("utf-8")))

-        target_arch = self.d.getVar('TARGET_ARCH')
-        localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
-        if os.path.exists(localedir) and os.listdir(localedir):
-            generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
-            # And now delete the binary locales
-            self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)
+        if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
+            target_arch = self.d.getVar('TARGET_ARCH')
+            localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
+            if os.path.exists(localedir) and os.listdir(localedir):
+                generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
+                # And now delete the binary locales
+                self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)

    def deploy_dir_lock(self):
        if self.deploy_dir is None:
@@ -321,7 +321,9 @@ class Rootfs(object, metaclass=ABCMeta):
        if not os.path.exists(kernel_abi_ver_file):
            bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)

-        kernel_ver = open(kernel_abi_ver_file).read().strip(' \n')
+        with open(kernel_abi_ver_file) as f:
+            kernel_ver = f.read().strip(' \n')

        versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)

        bb.utils.mkdirhier(versioned_modules_dir)
@@ -23,7 +23,7 @@ class ScpTest(OERuntimeTestCase):
        os.remove(cls.tmp_path)

    @OETestDepends(['ssh.SSHTest.test_ssh'])
-    @OEHasPackage(['openssh-scp', 'dropbear'])
+    @OEHasPackage(['openssh-scp'])
    def test_scp_file(self):
        dst = '/tmp/test_scp_file'


@@ -17,7 +17,7 @@ class EpoxyTest(OESDKTestCase):
    """
    def setUp(self):
        if not (self.tc.hasHostPackage("nativesdk-meson")):
-            raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain Meson")
+            raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson")

    def test_epoxy(self):
        with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
@@ -1,9 +1,13 @@
-from oe.cve_check import Version
+import json
+import os
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake, get_bb_vars

class CVECheck(OESelftestTestCase):

    def test_version_compare(self):
+        from oe.cve_check import Version

        result = Version("100") > Version("99")
        self.assertTrue( result, msg="Failed to compare version '100' > '99'")
        result = Version("2.3.1") > Version("2.2.3")
@@ -42,3 +46,156 @@ class CVECheck(OESelftestTestCase):
        self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
        result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch")
        self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")


    def test_recipe_report_json(self):
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")

        try:
            os.remove(summary_json)
            os.remove(recipe_json)
        except FileNotFoundError:
            pass

        bitbake("m4-native -c cve_check")

        def check_m4_json(filename):
            with open(filename) as f:
                report = json.load(f)
            self.assertEqual(report["version"], "1")
            self.assertEqual(len(report["package"]), 1)
            package = report["package"][0]
            self.assertEqual(package["name"], "m4-native")
            found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
            self.assertIn("CVE-2008-1687", found_cves)
            self.assertEqual(found_cves["CVE-2008-1687"], "Patched")

        self.assertExists(summary_json)
        check_m4_json(summary_json)
        self.assertExists(recipe_json)
        check_m4_json(recipe_json)


    def test_image_json(self):
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        print(report_json)
        try:
            os.remove(report_json)
        except FileNotFoundError:
            pass

        bitbake("core-image-minimal-initramfs")
        self.assertExists(report_json)

        # Check that the summary report lists at least one package
        with open(report_json) as f:
            report = json.load(f)
        self.assertEqual(report["version"], "1")
        self.assertGreater(len(report["package"]), 1)

        # Check that a random recipe wrote a recipe report to deploy/cve/
        recipename = report["package"][0]["name"]
        recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
        self.assertExists(recipe_report)
        with open(recipe_report) as f:
            report = json.load(f)
        self.assertEqual(report["version"], "1")
        self.assertEqual(len(report["package"]), 1)
        self.assertEqual(report["package"][0]["name"], recipename)


    def test_recipe_report_json_unpatched(self):
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
CVE_CHECK_REPORT_PATCHED = "0"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")

        try:
            os.remove(summary_json)
            os.remove(recipe_json)
        except FileNotFoundError:
            pass

        bitbake("m4-native -c cve_check")

        def check_m4_json(filename):
            with open(filename) as f:
                report = json.load(f)
            self.assertEqual(report["version"], "1")
            self.assertEqual(len(report["package"]), 1)
            package = report["package"][0]
            self.assertEqual(package["name"], "m4-native")
            # m4 had only Patched CVEs, so the issues array will be empty
            self.assertEqual(package["issue"], [])

        self.assertExists(summary_json)
        check_m4_json(summary_json)
        self.assertExists(recipe_json)
        check_m4_json(recipe_json)


    def test_recipe_report_json_ignored(self):
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
CVE_CHECK_REPORT_PATCHED = "1"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")

        try:
            os.remove(summary_json)
            os.remove(recipe_json)
        except FileNotFoundError:
            pass

        bitbake("logrotate -c cve_check")

        def check_m4_json(filename):
            with open(filename) as f:
                report = json.load(f)
            self.assertEqual(report["version"], "1")
            self.assertEqual(len(report["package"]), 1)
            package = report["package"][0]
            self.assertEqual(package["name"], "logrotate")
            found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
            # m4 CVE should not be in logrotate
            self.assertNotIn("CVE-2008-1687", found_cves)
            # logrotate has both Patched and Ignored CVEs
            self.assertIn("CVE-2011-1098", found_cves)
            self.assertEqual(found_cves["CVE-2011-1098"], "Patched")
            self.assertIn("CVE-2011-1548", found_cves)
            self.assertEqual(found_cves["CVE-2011-1548"], "Ignored")
            self.assertIn("CVE-2011-1549", found_cves)
            self.assertEqual(found_cves["CVE-2011-1549"], "Ignored")
            self.assertIn("CVE-2011-1550", found_cves)
            self.assertEqual(found_cves["CVE-2011-1550"], "Ignored")

        self.assertExists(summary_json)
        check_m4_json(summary_json)
        self.assertExists(recipe_json)
        check_m4_json(recipe_json)
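The assertions above pin down the JSON schema these reports use. A hand-written example matching what the tests expect (illustrative only, showing just the fields the tests assert; real files land in ${CVE_CHECK_SUMMARY_DIR} after "bitbake m4-native -c cve_check"):

    report = {
        "version": "1",
        "package": [
            {
                "name": "m4-native",
                "issue": [
                    # with CVE_CHECK_REPORT_PATCHED = "0" the Patched/Ignored
                    # entries are dropped and this list may be empty
                    {"id": "CVE-2008-1687", "status": "Patched"},
                ],
            }
        ],
    }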
@@ -133,7 +133,8 @@ class OEListPackageconfigTests(OEScriptTests):
    def check_endlines(self, results, expected_endlines):
        for line in results.output.splitlines():
            for el in expected_endlines:
-                if line.split() == el.split():
+                if line and line.split()[0] == el.split()[0] and \
+                        ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
                    expected_endlines.remove(el)
                    break
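The old comparison required an exact, order-sensitive match, so an expected line whose PACKAGECONFIG flags print in a different order never matched. The new check compares the leading package name and then the sorted token sets; roughly (standalone demo, not test code):

    line = "tcpdump libpcap openssl"
    el = "tcpdump openssl libpcap"

    print(line.split() == el.split())   # False - old, order-sensitive check

    same = line and line.split()[0] == el.split()[0] and \
           ' '.join(sorted(el.split())) in ' '.join(sorted(line.split()))
    print(bool(same))                   # True - new, order-insensitive check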
@@ -175,6 +175,8 @@ class TestImage(OESelftestTestCase):
        if "DISPLAY" not in os.environ:
            self.skipTest("virgl gtk test must be run inside a X session")
        distro = oe.lsb.distro_identifier()
+        if distro and distro == 'almalinux-8.6':
+            self.skipTest('virgl isn\'t working with Alma 8')
        if distro and distro == 'debian-8':
            self.skipTest('virgl isn\'t working with Debian 8')
        if distro and distro == 'centos-7':
@@ -185,6 +187,8 @@ class TestImage(OESelftestTestCase):
            self.skipTest('virgl isn\'t working with Fedora 34')
        if distro and distro == 'fedora-35':
            self.skipTest('virgl isn\'t working with Fedora 35')
+        if distro and distro == 'fedora-36':
+            self.skipTest('virgl isn\'t working with Fedora 36')
        if distro and distro == 'opensuseleap-15.0':
            self.skipTest('virgl isn\'t working with Opensuse 15.0')
@@ -7,7 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=6626bb1e20189cfa95f2c508ba286393"

COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"

-SRC_URI = "git://github.com/rhinstaller/efivar.git;branch=master;protocol=https \
+SRC_URI = "git://github.com/rhinstaller/efivar.git;branch=main;protocol=https \
           file://determinism.patch \
           file://no-werror.patch"
SRCREV = "c1d6b10e1ed4ba2be07f385eae5bceb694478a10"
178
meta/recipes-bsp/grub/files/CVE-2021-3695.patch
Normal file
@@ -0,0 +1,178 @@
From 0693d672abcf720419f86c56bda6428c540e2bb1 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Wed, 20 Jul 2022 10:01:35 +0530
Subject: [PATCH] CVE-2021-3695

Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=e623866d9286410156e8b9d2c82d6253a1b22d08]
CVE: CVE-2021-3695
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

video/readers/png: Drop greyscale support to fix heap out-of-bounds write

A 16-bit greyscale PNG without alpha is processed in the following loop:

      for (i = 0; i < (data->image_width * data->image_height);
           i++, d1 += 4, d2 += 2)
        {
          d1[R3] = d2[1];
          d1[G3] = d2[1];
          d1[B3] = d2[1];
        }

The increment of d1 is wrong. d1 is incremented by 4 bytes per iteration,
but there are only 3 bytes allocated for storage. This means that image
data will overwrite somewhat-attacker-controlled parts of memory - 3 bytes
out of every 4 following the end of the image.

This has existed since greyscale support was added in 2013 in commit
3ccf16dff98f (grub-core/video/readers/png.c: Support grayscale).

Saving starfield.png as a 16-bit greyscale image without alpha in the gimp
and attempting to load it causes grub-emu to crash - I don't think this code
has ever worked.

Delete all PNG greyscale support.

Fixes: CVE-2021-3695

Signed-off-by: Daniel Axtens <dja@axtens.net>
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
---
 grub-core/video/readers/png.c | 89 ++++-------------------------------
 1 file changed, 8 insertions(+), 81 deletions(-)

diff --git a/grub-core/video/readers/png.c b/grub-core/video/readers/png.c
index 0157ff7..db4a9d4 100644
--- a/grub-core/video/readers/png.c
+++ b/grub-core/video/readers/png.c
@@ -100,7 +100,7 @@ struct grub_png_data

  unsigned image_width, image_height;
  int bpp, is_16bit;
-  int raw_bytes, is_gray, is_alpha, is_palette;
+  int raw_bytes, is_alpha, is_palette;
  int row_bytes, color_bits;
  grub_uint8_t *image_data;

@@ -280,13 +280,13 @@ grub_png_decode_image_header (struct grub_png_data *data)
    data->bpp = 3;
  else
    {
-      data->is_gray = 1;
-      data->bpp = 1;
+      return grub_error (GRUB_ERR_BAD_FILE_TYPE,
+                         "png: color type not supported");
    }

  if ((color_bits != 8) && (color_bits != 16)
      && (color_bits != 4
-          || !(data->is_gray || data->is_palette)))
+          || !data->is_palette))
    return grub_error (GRUB_ERR_BAD_FILE_TYPE,
                       "png: bit depth must be 8 or 16");

@@ -315,7 +315,7 @@ grub_png_decode_image_header (struct grub_png_data *data)
    }

 #ifndef GRUB_CPU_WORDS_BIGENDIAN
-  if (data->is_16bit || data->is_gray || data->is_palette)
+  if (data->is_16bit || data->is_palette)
 #endif
    {
      data->image_data = grub_calloc (data->image_height, data->row_bytes);
@@ -859,27 +859,8 @@ grub_png_convert_image (struct grub_png_data *data)
      int shift;
      int mask = (1 << data->color_bits) - 1;
      unsigned j;
-      if (data->is_gray)
-        {
-          /* Generic formula is
-             (0xff * i) / ((1U << data->color_bits) - 1)
-             but for allowed bit depth of 1, 2 and 4 it's
-             equivalent to
-             (0xff / ((1U << data->color_bits) - 1)) * i
-             Precompute the multipliers to avoid division.
-          */
-
-          const grub_uint8_t multipliers[5] = { 0xff, 0xff, 0x55, 0x24, 0x11 };
-          for (i = 0; i < (1U << data->color_bits); i++)
-            {
-              grub_uint8_t col = multipliers[data->color_bits] * i;
-              palette[i][0] = col;
-              palette[i][1] = col;
-              palette[i][2] = col;
-            }
-        }
-      else
-        grub_memcpy (palette, data->palette, 3 << data->color_bits);
+
+      grub_memcpy (palette, data->palette, 3 << data->color_bits);
      d1c = d1;
      d2c = d2;
      for (j = 0; j < data->image_height; j++, d1c += data->image_width * 3,
@@ -917,61 +898,7 @@ grub_png_convert_image (struct grub_png_data *data)
      return;
    }

-  if (data->is_gray)
-    {
-      switch (data->bpp)
-        {
-        case 4:
-          /* 16-bit gray with alpha.  */
-          for (i = 0; i < (data->image_width * data->image_height);
-               i++, d1 += 4, d2 += 4)
-            {
-              d1[R4] = d2[3];
-              d1[G4] = d2[3];
-              d1[B4] = d2[3];
-              d1[A4] = d2[1];
-            }
-          break;
-        case 2:
-          if (data->is_16bit)
-            /* 16-bit gray without alpha.  */
-            {
-              for (i = 0; i < (data->image_width * data->image_height);
-                   i++, d1 += 4, d2 += 2)
-                {
-                  d1[R3] = d2[1];
-                  d1[G3] = d2[1];
-                  d1[B3] = d2[1];
-                }
-            }
-          else
-            /* 8-bit gray with alpha.  */
-            {
-              for (i = 0; i < (data->image_width * data->image_height);
-                   i++, d1 += 4, d2 += 2)
-                {
-                  d1[R4] = d2[1];
-                  d1[G4] = d2[1];
-                  d1[B4] = d2[1];
-                  d1[A4] = d2[0];
-                }
-            }
-          break;
-          /* 8-bit gray without alpha.  */
-        case 1:
-          for (i = 0; i < (data->image_width * data->image_height);
-               i++, d1 += 3, d2++)
-            {
-              d1[R3] = d2[0];
-              d1[G3] = d2[0];
-              d1[B3] = d2[0];
-            }
-          break;
-        }
-      return;
-    }
-
-  {
+  {
    /* Only copy the upper 8 bit.  */
 #ifndef GRUB_CPU_WORDS_BIGENDIAN
  for (i = 0; i < (data->image_width * data->image_height * data->bpp >> 1);
--
2.25.1
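The overflow the commit message describes is plain stride arithmetic: the output buffer holds 3 bytes per pixel while d1 advances 4 bytes per pixel. A rough Python model (not grub code) of the bad loop:

    width, height = 4, 4
    out = bytearray(width * height * 3)        # 48 bytes allocated
    for i in range(width * height):
        d1 = i * 4                             # buggy stride; i * 3 would fit
        for off in (0, 1, 2):                  # the R3/G3/B3 stores
            if d1 + off >= len(out):
                print("pixel %d writes at offset %d, past the %d-byte buffer"
                      % (i, d1 + off, len(out)))
                break

From pixel 12 of 16 onwards every store lands past the end of the allocation, matching the "3 bytes out of every 4 following the end of the image" description.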
46
meta/recipes-bsp/grub/files/CVE-2021-3696.patch
Normal file
@@ -0,0 +1,46 @@
From b18ce59d6496a9313d75f9497a0efac61dcf4191 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Wed, 20 Jul 2022 10:05:42 +0530
Subject: [PATCH] CVE-2021-3696

Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=210245129c932dc9e1c2748d9d35524fb95b5042]
CVE: CVE-2021-3696
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

video/readers/png: Avoid heap OOB R/W inserting huff table items

In fuzzing we observed crashes where a code would attempt to be inserted
into a huffman table before the start, leading to a set of heap OOB reads
and writes as table entries with negative indices were shifted around and
the new code written in.

Catch the case where we would underflow the array and bail.

Fixes: CVE-2021-3696
Signed-off-by: Daniel Axtens <dja@axtens.net>
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
---
 grub-core/video/readers/png.c | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/grub-core/video/readers/png.c b/grub-core/video/readers/png.c
index 36b3f10..3c05951 100644
--- a/grub-core/video/readers/png.c
+++ b/grub-core/video/readers/png.c
@@ -416,6 +416,13 @@ grub_png_insert_huff_item (struct huff_table *ht, int code, int len)
  for (i = len; i < ht->max_length; i++)
    n += ht->maxval[i];

+  if (n > ht->num_values)
+    {
+      grub_error (GRUB_ERR_BAD_FILE_TYPE,
+                  "png: out of range inserting huffman table item");
+      return;
+    }
+
  for (i = 0; i < n; i++)
    ht->values[ht->num_values - i] = ht->values[ht->num_values - i - 1];

--
2.25.1
82
meta/recipes-bsp/grub/files/CVE-2021-3697.patch
Normal file
@@ -0,0 +1,82 @@
From 4de9de9d14f4ac27229e45514627534e32cc4406 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Tue, 19 Jul 2022 11:13:02 +0530
Subject: [PATCH] CVE-2021-3697

Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=22a3f97d39f6a10b08ad7fd1cc47c4dcd10413f6]
CVE: CVE-2021-3697
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

video/readers/jpeg: Block int underflow -> wild pointer write

Certain 1 px wide images caused a wild pointer write in
grub_jpeg_ycrcb_to_rgb(). This was caused because in grub_jpeg_decode_data(),
we have the following loop:

for (; data->r1 < nr1 && (!data->dri || rst);
     data->r1++, data->bitmap_ptr += (vb * data->image_width - hb * nc1) * 3)

We did not check if vb * width >= hb * nc1.

On a 64-bit platform, if that turns out to be negative, it will underflow,
be interpreted as unsigned 64-bit, then be added to the 64-bit pointer, so
we see data->bitmap_ptr jump, e.g.:

0x6180_0000_0480 to
0x6181_0000_0498
     ^
     ~--- carry has occurred and this pointer is now far away from
any object.

On a 32-bit platform, it will decrement the pointer, creating a pointer
that won't crash but will overwrite random data.

Catch the underflow and error out.

Fixes: CVE-2021-3697

Signed-off-by: Daniel Axtens <dja@axtens.net>
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
---
 grub-core/video/readers/jpeg.c | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/grub-core/video/readers/jpeg.c b/grub-core/video/readers/jpeg.c
index 31359a4..545a60b 100644
--- a/grub-core/video/readers/jpeg.c
+++ b/grub-core/video/readers/jpeg.c
@@ -23,6 +23,7 @@
 #include <grub/mm.h>
 #include <grub/misc.h>
 #include <grub/bufio.h>
+#include <grub/safemath.h>

 GRUB_MOD_LICENSE ("GPLv3+");

@@ -617,6 +618,7 @@ static grub_err_t
 grub_jpeg_decode_data (struct grub_jpeg_data *data)
 {
  unsigned c1, vb, hb, nr1, nc1;
+  unsigned stride_a, stride_b, stride;
  int rst = data->dri;

  vb = 8 << data->log_vs;
@@ -624,8 +626,14 @@ grub_jpeg_decode_data (struct grub_jpeg_data *data)
  nr1 = (data->image_height + vb - 1) >> (3 + data->log_vs);
  nc1 = (data->image_width + hb - 1) >> (3 + data->log_hs);

+  if (grub_mul(vb, data->image_width, &stride_a) ||
+      grub_mul(hb, nc1, &stride_b) ||
+      grub_sub(stride_a, stride_b, &stride))
+    return grub_error (GRUB_ERR_BAD_FILE_TYPE,
+                       "jpeg: cannot decode image with these dimensions");
+
  for (; data->r1 < nr1 && (!data->dri || rst);
-       data->r1++, data->bitmap_ptr += (vb * data->image_width - hb * nc1) * 3)
+       data->r1++, data->bitmap_ptr += stride * 3)
    for (c1 = 0; c1 < nc1 && (!data->dri || rst);
         c1++, rst--, data->bitmap_ptr += hb * 3)
      {
--
2.25.1
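The underflow is easy to reproduce numerically. For a 1 px wide image the per-row stride vb * image_width - hb * nc1 goes negative, and in unsigned 64-bit arithmetic that wraps to a huge offset (quick model, not grub code):

    MASK64 = 2**64 - 1
    vb, hb, image_width, nc1 = 8, 16, 1, 1      # a 1 px wide input image
    stride = (vb * image_width - hb * nc1) & MASK64
    print(hex((stride * 3) & MASK64))           # 0xffffffffffffffe8 - far jump

The fix routes the same products and difference through grub_mul()/grub_sub(), which report the wrap instead of silently producing the wild pointer.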
32
meta/recipes-bsp/grub/files/CVE-2021-3981.patch
Normal file
@@ -0,0 +1,32 @@
From 67740c43c9326956ea5cd6be77f813b5499a56a5 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Mon, 27 Jun 2022 10:15:29 +0530
Subject: [PATCH] CVE-2021-3981

Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/grub.git/diff/util/grub-mkconfig.in?id=0adec29674561034771c13e446069b41ef41e4d4]
CVE: CVE-2021-3981
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
---
 util/grub-mkconfig.in | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/util/grub-mkconfig.in b/util/grub-mkconfig.in
index 9f477ff..ead94a6 100644
--- a/util/grub-mkconfig.in
+++ b/util/grub-mkconfig.in
@@ -287,7 +287,11 @@ and /etc/grub.d/* files or please file a bug report with
    exit 1
  else
    # none of the children aborted with error, install the new grub.cfg
-    mv -f ${grub_cfg}.new ${grub_cfg}
+    oldumask=$(umask)
+    umask 077
+    cat ${grub_cfg}.new > ${grub_cfg}
+    umask $oldumask
+    rm -f ${grub_cfg}.new
  fi
fi
--
2.25.1
60
meta/recipes-bsp/grub/files/CVE-2022-28733.patch
Normal file
@@ -0,0 +1,60 @@
From 415fb5eb83cbd3b5cfc25ac1290f2de4fe3d231c Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Mon, 1 Aug 2022 10:48:34 +0530
Subject: [PATCH] CVE-2022-28733

Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=3e4817538de828319ba6d59ced2fbb9b5ca13287]
CVE: CVE-2022-28733
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

net/ip: Do IP fragment maths safely

We can receive packets with invalid IP fragmentation information. This
can lead to rsm->total_len underflowing and becoming very large.

Then, in grub_netbuff_alloc(), we add to this very large number, which can
cause it to overflow and wrap back around to a small positive number.
The allocation then succeeds, but the resulting buffer is too small and
subsequent operations can write past the end of the buffer.

Catch the underflow here.

Fixes: CVE-2022-28733

Signed-off-by: Daniel Axtens <dja@axtens.net>
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
---
 grub-core/net/ip.c | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/grub-core/net/ip.c b/grub-core/net/ip.c
index ea5edf8..74e4e8b 100644
--- a/grub-core/net/ip.c
+++ b/grub-core/net/ip.c
@@ -25,6 +25,7 @@
 #include <grub/net/netbuff.h>
 #include <grub/mm.h>
 #include <grub/priority_queue.h>
+#include <grub/safemath.h>
 #include <grub/time.h>

 struct iphdr {
@@ -512,7 +513,14 @@ grub_net_recv_ip4_packets (struct grub_net_buff *nb,
    {
      rsm->total_len = (8 * (grub_be_to_cpu16 (iph->frags) & OFFSET_MASK)
                        + (nb->tail - nb->data));
-      rsm->total_len -= ((iph->verhdrlen & 0xf) * sizeof (grub_uint32_t));
+
+      if (grub_sub (rsm->total_len, (iph->verhdrlen & 0xf) * sizeof (grub_uint32_t),
+                    &rsm->total_len))
+        {
+          grub_dprintf ("net", "IP reassembly size underflow\n");
+          return GRUB_ERR_NONE;
+        }
+
      rsm->asm_netbuff = grub_netbuff_alloc (rsm->total_len);
      if (!rsm->asm_netbuff)
        {
--
2.25.1
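grub_sub() here is an overflow-checked subtraction: it returns true when the unsigned result would wrap, letting the caller bail out before the undersized allocation. The same idea in Python (illustrative, not the grub implementation):

    def checked_sub_u32(a, b):
        # Mimic grub_sub() on a 32-bit unsigned type: flag the wrap.
        res = a - b
        return res < 0, res & 0xFFFFFFFF

    total_len = 20                  # from a forged fragment header
    header_len = 15 * 4             # (iph->verhdrlen & 0xf) * sizeof(grub_uint32_t)
    wrapped, res = checked_sub_u32(total_len, header_len)
    print(wrapped, res)             # True 4294967256 - the allocation would be bogus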
67
meta/recipes-bsp/grub/files/CVE-2022-28734.patch
Normal file
@@ -0,0 +1,67 @@
From f03f09c2a07eae7f3a4646e33a406ae2689afb9e Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Mon, 1 Aug 2022 10:59:41 +0530
Subject: [PATCH] CVE-2022-28734

Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=b26b4c08e7119281ff30d0fb4a6169bd2afa8fe4]
CVE: CVE-2022-28734
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

net/http: Fix OOB write for split http headers

GRUB has special code for handling an http header that is split
across two packets.

The code tracks the end of line by looking for a "\n" byte. The
code for split headers has always advanced the pointer just past the
end of the line, whereas the code that handles unsplit headers does
not advance the pointer. This extra advance causes the length to be
one greater, which breaks an assumption in parse_line(), leading to
it writing a NUL byte one byte past the end of the buffer where we
reconstruct the line from the two packets.

It's conceivable that an attacker controlled set of packets could
cause this to zero out the first byte of the "next" pointer of the
grub_mm_region structure following the current_line buffer.

Do not advance the pointer in the split header case.

Fixes: CVE-2022-28734
---
 grub-core/net/http.c | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/grub-core/net/http.c b/grub-core/net/http.c
index 5aa4ad3..a220d21 100644
--- a/grub-core/net/http.c
+++ b/grub-core/net/http.c
@@ -68,7 +68,15 @@ parse_line (grub_file_t file, http_data_t data, char *ptr, grub_size_t len)
  char *end = ptr + len;
  while (end > ptr && *(end - 1) == '\r')
    end--;
+
+  /* LF without CR. */
+  if (end == ptr + len)
+    {
+      data->errmsg = grub_strdup (_("invalid HTTP header - LF without CR"));
+      return GRUB_ERR_NONE;
+    }
+
  *end = 0;
+
  /* Trailing CRLF. */
  if (data->in_chunk_len == 1)
    {
@@ -190,9 +198,7 @@ http_receive (grub_net_tcp_socket_t sock __attribute__ ((unused)),
      int have_line = 1;
      char *t;
      ptr = grub_memchr (nb->data, '\n', nb->tail - nb->data);
-      if (ptr)
-        ptr++;
-      else
+      if (ptr == NULL)
        {
          have_line = 0;
          ptr = (char *) nb->tail;
--
2.25.1
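The off-by-one is visible with a one-line model: the split-header path advanced the pointer past the '\n', so the length handed to parse_line() was one byte larger than in the unsplit path, and the NUL terminator written at buf[len] landed just past the reassembled line buffer (illustrative Python):

    packet = b"Content-Length: 42\r\n"
    nl = packet.index(b"\n")

    len_unsplit = nl          # unsplit path: pointer left at the '\n'
    len_split = nl + 1        # old split path: pointer stepped past it
    print(len_unsplit, len_split)   # 19 20 - one byte too many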
275
meta/recipes-bsp/grub/files/CVE-2022-28736.patch
Normal file
@@ -0,0 +1,275 @@
From 431a111c60095fc973d83fe9209f26f29ce78784 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Mon, 1 Aug 2022 11:17:17 +0530
Subject: [PATCH] CVE-2022-28736

Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=grub.git;a=commit;h=04c86e0bb7b58fc2f913f798cdb18934933e532d]
CVE: CVE-2022-28736
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

loader/efi/chainloader: Use grub_loader_set_ex()

This ports the EFI chainloader to use grub_loader_set_ex() in order to fix
a use-after-free bug that occurs when grub_cmd_chainloader() is executed
more than once before a boot attempt is performed.

Fixes: CVE-2022-28736

Signed-off-by: Chris Coulson <chris.coulson@canonical.com>
Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
---
 grub-core/commands/boot.c          | 66 ++++++++++++++++++++++++++----
 grub-core/loader/efi/chainloader.c | 46 +++++++++++----------
 include/grub/loader.h              |  5 +++
 3 files changed, 87 insertions(+), 30 deletions(-)

diff --git a/grub-core/commands/boot.c b/grub-core/commands/boot.c
index bbca81e..6151478 100644
--- a/grub-core/commands/boot.c
+++ b/grub-core/commands/boot.c
@@ -27,10 +27,20 @@

 GRUB_MOD_LICENSE ("GPLv3+");

-static grub_err_t (*grub_loader_boot_func) (void);
-static grub_err_t (*grub_loader_unload_func) (void);
+static grub_err_t (*grub_loader_boot_func) (void *context);
+static grub_err_t (*grub_loader_unload_func) (void *context);
+static void *grub_loader_context;
 static int grub_loader_flags;

+struct grub_simple_loader_hooks
+{
+  grub_err_t (*boot) (void);
+  grub_err_t (*unload) (void);
+};
+
+/* Don't heap allocate this to avoid making grub_loader_set() fallible. */
+static struct grub_simple_loader_hooks simple_loader_hooks;
+
 struct grub_preboot
 {
  grub_err_t (*preboot_func) (int);
@@ -44,6 +54,29 @@ static int grub_loader_loaded;
 static struct grub_preboot *preboots_head = 0,
  *preboots_tail = 0;

+static grub_err_t
+grub_simple_boot_hook (void *context)
+{
+  struct grub_simple_loader_hooks *hooks;
+
+  hooks = (struct grub_simple_loader_hooks *) context;
+  return hooks->boot ();
+}
+
+static grub_err_t
+grub_simple_unload_hook (void *context)
+{
+  struct grub_simple_loader_hooks *hooks;
+  grub_err_t ret;
+
+  hooks = (struct grub_simple_loader_hooks *) context;
+
+  ret = hooks->unload ();
+  grub_memset (hooks, 0, sizeof (*hooks));
+
+  return ret;
+}
+
 int
 grub_loader_is_loaded (void)
 {
@@ -110,28 +143,45 @@ grub_loader_unregister_preboot_hook (struct grub_preboot *hnd)
 }

 void
-grub_loader_set (grub_err_t (*boot) (void),
-                 grub_err_t (*unload) (void),
-                 int flags)
+grub_loader_set_ex (grub_err_t (*boot) (void *context),
+                    grub_err_t (*unload) (void *context),
+                    void *context,
+                    int flags)
 {
  if (grub_loader_loaded && grub_loader_unload_func)
-    grub_loader_unload_func ();
+    grub_loader_unload_func (grub_loader_context);

  grub_loader_boot_func = boot;
  grub_loader_unload_func = unload;
+  grub_loader_context = context;
  grub_loader_flags = flags;

  grub_loader_loaded = 1;
 }

+void
+grub_loader_set (grub_err_t (*boot) (void),
+                 grub_err_t (*unload) (void),
+                 int flags)
+{
+  grub_loader_set_ex (grub_simple_boot_hook,
+                      grub_simple_unload_hook,
+                      &simple_loader_hooks,
+                      flags);
+
+  simple_loader_hooks.boot = boot;
+  simple_loader_hooks.unload = unload;
+}
+
 void
 grub_loader_unset(void)
 {
  if (grub_loader_loaded && grub_loader_unload_func)
-    grub_loader_unload_func ();
+    grub_loader_unload_func (grub_loader_context);

  grub_loader_boot_func = 0;
  grub_loader_unload_func = 0;
+  grub_loader_context = 0;

  grub_loader_loaded = 0;
 }
@@ -158,7 +208,7 @@ grub_loader_boot (void)
          return err;
        }
    }
-  err = (grub_loader_boot_func) ();
+  err = (grub_loader_boot_func) (grub_loader_context);

  for (cur = preboots_tail; cur; cur = cur->prev)
    if (! err)
diff --git a/grub-core/loader/efi/chainloader.c b/grub-core/loader/efi/chainloader.c
index a8d7b91..93a028a 100644
--- a/grub-core/loader/efi/chainloader.c
+++ b/grub-core/loader/efi/chainloader.c
@@ -44,33 +44,28 @@ GRUB_MOD_LICENSE ("GPLv3+");

 static grub_dl_t my_mod;

-static grub_efi_physical_address_t address;
-static grub_efi_uintn_t pages;
-static grub_efi_device_path_t *file_path;
-static grub_efi_handle_t image_handle;
-static grub_efi_char16_t *cmdline;
-
 static grub_err_t
-grub_chainloader_unload (void)
+grub_chainloader_unload (void *context)
 {
+  grub_efi_handle_t image_handle = (grub_efi_handle_t) context;
+  grub_efi_loaded_image_t *loaded_image;
  grub_efi_boot_services_t *b;

+  loaded_image = grub_efi_get_loaded_image (image_handle);
+  if (loaded_image != NULL)
+    grub_free (loaded_image->load_options);
+
  b = grub_efi_system_table->boot_services;
  efi_call_1 (b->unload_image, image_handle);
-  efi_call_2 (b->free_pages, address, pages);
-
-  grub_free (file_path);
-  grub_free (cmdline);
-  cmdline = 0;
-  file_path = 0;

  grub_dl_unref (my_mod);
  return GRUB_ERR_NONE;
 }

 static grub_err_t
-grub_chainloader_boot (void)
+grub_chainloader_boot (void *context)
 {
+  grub_efi_handle_t image_handle = (grub_efi_handle_t) context;
  grub_efi_boot_services_t *b;
  grub_efi_status_t status;
  grub_efi_uintn_t exit_data_size;
@@ -139,7 +134,7 @@ make_file_path (grub_efi_device_path_t *dp, const char *filename)
  char *dir_start;
  char *dir_end;
  grub_size_t size;
-  grub_efi_device_path_t *d;
+  grub_efi_device_path_t *d, *file_path;

  dir_start = grub_strchr (filename, ')');
  if (! dir_start)
@@ -215,11 +210,15 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),
  grub_efi_status_t status;
  grub_efi_boot_services_t *b;
  grub_device_t dev = 0;
-  grub_efi_device_path_t *dp = 0;
+  grub_efi_device_path_t *dp = NULL, *file_path = NULL;
  grub_efi_loaded_image_t *loaded_image;
  char *filename;
  void *boot_image = 0;
  grub_efi_handle_t dev_handle = 0;
+  grub_efi_physical_address_t address = 0;
+  grub_efi_uintn_t pages = 0;
+  grub_efi_char16_t *cmdline = NULL;
+  grub_efi_handle_t image_handle = NULL;

  if (argc == 0)
    return grub_error (GRUB_ERR_BAD_ARGUMENT, N_("filename expected"));
@@ -227,11 +226,6 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),

  grub_dl_ref (my_mod);

-  /* Initialize some global variables. */
-  address = 0;
-  image_handle = 0;
-  file_path = 0;
-
  b = grub_efi_system_table->boot_services;

  file = grub_file_open (filename, GRUB_FILE_TYPE_EFI_CHAINLOADED_IMAGE);
@@ -401,7 +395,11 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),
  grub_file_close (file);
  grub_device_close (dev);

-  grub_loader_set (grub_chainloader_boot, grub_chainloader_unload, 0);
+  /* We're finished with the source image buffer and file path now. */
+  efi_call_2 (b->free_pages, address, pages);
+  grub_free (file_path);
+
+  grub_loader_set_ex (grub_chainloader_boot, grub_chainloader_unload, image_handle, 0);
  return 0;

 fail:
@@ -412,11 +410,15 @@ grub_cmd_chainloader (grub_command_t cmd __attribute__ ((unused)),
  if (file)
    grub_file_close (file);

+  grub_free (cmdline);
  grub_free (file_path);

  if (address)
    efi_call_2 (b->free_pages, address, pages);

+  if (image_handle != NULL)
+    efi_call_1 (b->unload_image, image_handle);
+
  grub_dl_unref (my_mod);

  return grub_errno;
diff --git a/include/grub/loader.h b/include/grub/loader.h
index 7f82a49..3071a50 100644
--- a/include/grub/loader.h
+++ b/include/grub/loader.h
@@ -39,6 +39,11 @@ void EXPORT_FUNC (grub_loader_set) (grub_err_t (*boot) (void),
                                    grub_err_t (*unload) (void),
                                    int flags);

+void EXPORT_FUNC (grub_loader_set_ex) (grub_err_t (*boot) (void *context),
+                                       grub_err_t (*unload) (void *context),
+                                       void *context,
+                                       int flags);
+
 /* Unset current loader, if any.  */
 void EXPORT_FUNC (grub_loader_unset) (void);
--
2.25.1
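The shape of this fix is a classic callback-with-context refactor: boot/unload state moves out of file-scope globals and travels with the registered hooks, so registering a second chainloader cleanly tears down the first instead of reusing stale handles. A rough Python analogue of grub_loader_set_ex() (sketch only, not grub code):

    class Loader:
        def __init__(self):
            self._boot = self._unload = self._context = None

        def set_ex(self, boot, unload, context):
            if self._unload:                 # unload any previous image first
                self._unload(self._context)
            self._boot, self._unload, self._context = boot, unload, context

        def boot(self):
            return self._boot(self._context)

    loader = Loader()
    loader.set_ex(lambda h: print("boot", h), lambda h: print("unload", h), "image-1")
    loader.set_ex(lambda h: print("boot", h), lambda h: print("unload", h), "image-2")
    # the second set_ex printed "unload image-1"
    loader.boot()                            # prints "boot image-2"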
@@ -95,6 +95,13 @@ SRC_URI = "${GNU_MIRROR}/grub/grub-${PV}.tar.gz \
           file://0044-script-execute-Fix-NULL-dereference-in-grub_script_e.patch \
           file://0045-commands-ls-Require-device_name-is-not-NULL-before-p.patch \
           file://0046-script-execute-Avoid-crash-when-using-outside-a-func.patch \
+           file://CVE-2021-3981.patch \
+           file://CVE-2021-3695.patch \
+           file://CVE-2021-3696.patch \
+           file://CVE-2021-3697.patch \
+           file://CVE-2022-28733.patch \
+           file://CVE-2022-28734.patch \
+           file://CVE-2022-28736.patch \
           "
SRC_URI[md5sum] = "5ce674ca6b2612d8939b9e6abed32934"
SRC_URI[sha256sum] = "f10c85ae3e204dbaec39ae22fa3c5e99f0665417e91c2cb49b7e5031658ba6ea"
@@ -5,8 +5,8 @@ SECTION = "network"
LICENSE = "PD"
LIC_FILES_CHKSUM = "file://COPYING;md5=87964579b2a8ece4bc6744d2dc9a8b04"

-SRCREV = "4cbb44a9fe26aa6f0b28beb79f9488b37c097b5e"
-PV = "20220315"
+SRCREV = "3d5c8d0f7e0264768a2c000d0fd4b4d4a991e041"
+PV = "20220511"
PE = "1"

SRC_URI = "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=main"
@@ -2,21 +2,22 @@ SUMMARY = "Linux NFC daemon"
DESCRIPTION = "A daemon for the Linux Near Field Communication stack"
HOMEPAGE = "http://01.org/linux-nfc"
LICENSE = "GPLv2"
-LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
-                    file://src/near.h;beginline=1;endline=20;md5=358e4deefef251a4761e1ffacc965d13 \
-                    "

DEPENDS = "dbus glib-2.0 libnl"

-SRC_URI = "${KERNELORG_MIRROR}/linux/network/nfc/${BP}.tar.xz \
+SRC_URI = "git://git.kernel.org/pub/scm/network/nfc/neard.git;protocol=git;branch=master \
           file://neard.in \
           file://Makefile.am-fix-parallel-issue.patch \
           file://Makefile.am-do-not-ship-version.h.patch \
           file://0001-Add-header-dependency-to-nciattach.o.patch \
           "
-SRC_URI[md5sum] = "5c691fb7872856dc0d909c298bc8cb41"
-SRC_URI[sha256sum] = "eae3b11c541a988ec11ca94b7deab01080cd5b58cfef3ced6ceac9b6e6e65b36"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
+                    file://src/near.h;beginline=1;endline=20;md5=358e4deefef251a4761e1ffacc965d13 \
+                    "
+SRCREV = "949795024f7625420e93e288c56e194cb9a3e74a"

S = "${WORKDIR}/git"

inherit autotools pkgconfig systemd update-rc.d
@@ -60,6 +60,13 @@ CVE_CHECK_WHITELIST += "CVE-2008-3844"
# https://ubuntu.com/security/CVE-2016-20012
CVE_CHECK_WHITELIST += "CVE-2016-20012"

+# As per debian, the issue is fixed by a feature called "agent restriction" in openssh 8.9
+# Urgency is unimportant as per debian, hence this CVE is whitelisted.
+# https://security-tracker.debian.org/tracker/CVE-2021-36368
+# https://bugzilla.mindrot.org/show_bug.cgi?id=3316#c2
+# https://docs.ssh-mitm.at/trivialauth.html
+CVE_CHECK_WHITELIST += "CVE-2021-36368"
+
PAM_SRC_URI = "file://sshd"

inherit manpages useradd update-rc.d update-alternatives systemd
@@ -183,12 +190,17 @@ FILES_${PN}-sftp-server = "${libexecdir}/sftp-server"
FILES_${PN}-misc = "${bindir}/ssh* ${libexecdir}/ssh*"
FILES_${PN}-keygen = "${bindir}/ssh-keygen"

-RDEPENDS_${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen"
+RDEPENDS_${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen ${PN}-sftp-server"
RDEPENDS_${PN}-sshd += "${PN}-keygen ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-keyinit pam-plugin-loginuid', '', d)}"
RRECOMMENDS_${PN}-sshd_append_class-target = "\
    ${@bb.utils.filter('PACKAGECONFIG', 'rng-tools', d)} \
"

+# break dependency on base package for -dev package
+# otherwise SDK fails to build as the main openssh and dropbear packages
+# conflict with each other
+RDEPENDS:${PN}-dev = ""
+
# gdb would make attach-ptrace test pass rather than skip but not worth the build dependencies
RDEPENDS_${PN}-ptest += "${PN}-sftp ${PN}-misc ${PN}-sftp-server make sed sudo coreutils"
@@ -24,7 +24,7 @@ SRC_URI_append_class-nativesdk = " \
           file://environment.d-openssl.sh \
           "

-SRC_URI[sha256sum] = "40dceb51a4f6a5275bde0e6bf20ef4b91bfc32ed57c0552e2e8e15463372b17a"
+SRC_URI[sha256sum] = "d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca"

inherit lib_package multilib_header multilib_script ptest
MULTILIB_SCRIPTS = "${PN}-bin:${bindir}/c_rehash"
@@ -348,7 +348,7 @@ do_install_ptest () {
    # These access the internet which is not guaranteed to work on machines running the tests
    rm -rf ${D}${PTEST_PATH}/testsuite/wget
    sort ${B}/.config > ${D}${PTEST_PATH}/.config
-    ln -s /bin/busybox ${D}${PTEST_PATH}/busybox
+    ln -s ${base_bindir}/busybox ${D}${PTEST_PATH}/busybox
}

inherit update-alternatives
@@ -0,0 +1,38 @@
From c7e181fdf58c392e06ab805e2c044c3e57d5445a Mon Sep 17 00:00:00 2001
From: Ariadne Conill <ariadne@dereferenced.org>
Date: Sun, 3 Apr 2022 12:14:33 +0000
Subject: [PATCH] libbb: sockaddr2str: ensure only printable characters are
 returned for the hostname part

CVE: CVE-2022-28391
Upstream-Status: Pending
Signed-off-by: Ariadne Conill <ariadne@dereferenced.org>
Signed-off-by: Steve Sakoman <steve@sakoman.com>
---
 libbb/xconnect.c | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/libbb/xconnect.c b/libbb/xconnect.c
index eb2871cb1..b5520bb21 100644
--- a/libbb/xconnect.c
+++ b/libbb/xconnect.c
@@ -501,8 +501,9 @@ static char* FAST_FUNC sockaddr2str(const struct sockaddr *sa, int flags)
	);
  if (rc)
    return NULL;
+	/* ensure host contains only printable characters */
  if (flags & IGNORE_PORT)
-		return xstrdup(host);
+		return xstrdup(printable_string(host));
 #if ENABLE_FEATURE_IPV6
  if (sa->sa_family == AF_INET6) {
    if (strchr(host, ':')) /* heh, it's not a resolved hostname */
@@ -513,7 +514,7 @@ static char* FAST_FUNC sockaddr2str(const struct sockaddr *sa, int flags)
 #endif
  /* For now we don't support anything else, so it has to be INET */
  /*if (sa->sa_family == AF_INET)*/
-	return xasprintf("%s:%s", host, serv);
+	return xasprintf("%s:%s", printable_string(host), serv);
  /*return xstrdup(host);*/
 }
@@ -0,0 +1,64 @@
From f8ad7c331b25ba90fd296b37c443b4114cb196e2 Mon Sep 17 00:00:00 2001
From: Ariadne Conill <ariadne@dereferenced.org>
Date: Sun, 3 Apr 2022 12:16:45 +0000
Subject: [PATCH] nslookup: sanitize all printed strings with printable_string

Otherwise, terminal sequences can be injected, which enables various terminal injection
attacks from DNS results.

MJ: One chunk wasn't applicable on 1.31.1 version, because parsing of
SRV records was added only in newer 1.32.0 with:
commit 6b4960155e94076bf25518e4e268a7a5f849308e
Author: Jo-Philipp Wich <jo@mein.io>
Date: Thu Jun 27 17:27:29 2019 +0200

    nslookup: implement support for SRV records

CVE: CVE-2022-28391
Upstream-Status: Pending
Signed-off-by: Ariadne Conill <ariadne@dereferenced.org>
Signed-off-by: Steve Sakoman <steve@sakoman.com>
---
 networking/nslookup.c | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/networking/nslookup.c b/networking/nslookup.c
index 24e09d4f0..89b9c8a13 100644
--- a/networking/nslookup.c
+++ b/networking/nslookup.c
@@ -404,7 +404,7 @@ static int parse_reply(const unsigned char *msg, size_t len)
        //printf("Unable to uncompress domain: %s\n", strerror(errno));
        return -1;
      }
-      printf(format, ns_rr_name(rr), dname);
+      printf(format, ns_rr_name(rr), printable_string(dname));
      break;

    case ns_t_mx:
@@ -419,7 +419,7 @@ static int parse_reply(const unsigned char *msg, size_t len)
        //printf("Cannot uncompress MX domain: %s\n", strerror(errno));
        return -1;
      }
-      printf("%s\tmail exchanger = %d %s\n", ns_rr_name(rr), n, dname);
+      printf("%s\tmail exchanger = %d %s\n", ns_rr_name(rr), n, printable_string(dname));
      break;

    case ns_t_txt:
@@ -431,7 +431,7 @@ static int parse_reply(const unsigned char *msg, size_t len)
      if (n > 0) {
        memset(dname, 0, sizeof(dname));
        memcpy(dname, ns_rr_rdata(rr) + 1, n);
-        printf("%s\ttext = \"%s\"\n", ns_rr_name(rr), dname);
+        printf("%s\ttext = \"%s\"\n", ns_rr_name(rr), printable_string(dname));
      }
      break;

@@ -461,7 +461,7 @@ static int parse_reply(const unsigned char *msg, size_t len)
        return -1;
      }

-      printf("\tmail addr = %s\n", dname);
+      printf("\tmail addr = %s\n", printable_string(dname));
      cp += n;

      printf("\tserial = %lu\n", ns_get32(cp));
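Both patches route attacker-supplied DNS strings through busybox's printable_string(), which replaces non-printable bytes so escape sequences cannot reach the terminal. A rough Python equivalent of the filtering (illustrative; busybox substitutes '?'):

    def printable_string(s: bytes) -> str:
        return ''.join(c if c.isprintable() else '?'
                       for c in s.decode('latin-1'))

    evil = b"mail.example.com\x1b]0;pwned\x07"   # OSC title-change injection
    print(printable_string(evil))                # mail.example.com?]0;pwned?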
@@ -55,6 +55,8 @@ SRC_URI = "https://busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \
           file://CVE-2021-42374.patch \
           file://CVE-2021-42376.patch \
           file://CVE-2021-423xx-awk.patch \
+           file://0001-libbb-sockaddr2str-ensure-only-printable-characters-.patch \
+           file://0002-nslookup-sanitize-all-printed-strings-with-printable.patch \
           "
SRC_URI_append_libc-musl = " file://musl.cfg "
@@ -12,6 +12,11 @@ DEPENDS = "zlib virtual/crypt"
RPROVIDES_${PN} = "ssh sshd"
RCONFLICTS_${PN} = "openssh-sshd openssh"

+# break dependency on base package for -dev package
+# otherwise SDK fails to build as the main openssh and dropbear packages
+# conflict with each other
+RDEPENDS:${PN}-dev = ""
+
DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"

SRC_URI = "http://matt.ucc.asn.au/dropbear/releases/dropbear-${PV}.tar.bz2 \
@@ -24,7 +24,7 @@ IMAGE_FSTYPES = "wic.vmdk"

inherit core-image setuptools3

-SRCREV ?= "8a7fd5f633a2b72185501d4c4a8a51ed1fc7cea1"
+SRCREV ?= "23322786e02469c08e3db007043da1091bf0f466"
SRC_URI = "git://git.yoctoproject.org/poky;branch=dunfell \
           file://Yocto_Build_Appliance.vmx \
           file://Yocto_Build_Appliance.vmxf \
@@ -14,6 +14,15 @@ finish_run() {

		info "Switching root to '$ROOTFS_DIR'..."

+		debug "Moving basic mounts onto rootfs"
+		for dir in `awk '/\/dev.* \/run\/media/{print $2}' /proc/mounts`; do
+			# Parse any OCT or HEX encoded chars such as spaces
+			# in the mount points to actual ASCII chars
+			dir=`printf $dir`
+			mkdir -p "${ROOTFS_DIR}/media/${dir##*/}"
+			mount -n --move "$dir" "${ROOTFS_DIR}/media/${dir##*/}"
+		done
+
		debug "Moving /dev, /proc and /sys onto rootfs..."
		mount --move /dev $ROOTFS_DIR/dev
		mount --move /proc $ROOTFS_DIR/proc
@@ -129,7 +129,7 @@ do_install () {
	update-rc.d -r ${D} rmnologin.sh start 99 2 3 4 5 .
	update-rc.d -r ${D} sendsigs start 20 0 6 .
	update-rc.d -r ${D} urandom start 38 S 0 6 .
-	update-rc.d -r ${D} umountnfs.sh start 31 0 1 6 .
+	update-rc.d -r ${D} umountnfs.sh stop 31 0 1 6 .
	update-rc.d -r ${D} umountfs start 40 0 6 .
	update-rc.d -r ${D} reboot start 90 6 .
	update-rc.d -r ${D} halt start 90 0 .
@@ -0,0 +1,813 @@
From b5125000917810731bc28055c0445d571121f80e Mon Sep 17 00:00:00 2001
From: Nick Wellnhofer <wellnhofer@aevum.de>
Date: Thu, 21 Apr 2022 00:45:58 +0200
Subject: [PATCH] Port gentest.py to Python 3

Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/libxml2/-/commit/343fc1421cdae097fa6c4cffeb1a065a40be6bbb]

* fixes:

make[1]: 'testReader' is up to date.
  File "../libxml2-2.9.10/gentest.py", line 11
    print "libxml2 python bindings not available, skipping testapi.c generation"
          ^
SyntaxError: Missing parentheses in call to 'print'. Did you mean print("libxml2 python bindings not available, skipping testapi.c generation")?
make[1]: [Makefile:2078: testapi.c] Error 1 (ignored)

...

make[1]: 'testReader' is up to date.
  File "../libxml2-2.9.10/gentest.py", line 271
    return 1
    ^
TabError: inconsistent use of tabs and spaces in indentation
make[1]: [Makefile:2078: testapi.c] Error 1 (ignored)

...

aarch64-oe-linux-gcc: error: testapi.c: No such file or directory
aarch64-oe-linux-gcc: fatal error: no input files
compilation terminated.
make[1]: *** [Makefile:1275: testapi.o] Error 1

But there is still a bit of a mystery why it worked before, because check-am
calls gentest.py with $(PYTHON), so it ignores the shebang in the script,
and libxml2 is using python3native (through python3targetconfig.bbclass),
so it runs something like:

libxml2/2.9.10-r0/recipe-sysroot-native/usr/bin/python3-native/python3 gentest.py

But that still fails (now without SyntaxError) with:
libxml2 python bindings not available, skipping testapi.c generation

because we don't have a dependency on libxml2-native (to provide the libxml2
python bindings from python3native) and the exported PYTHON_SITE_PACKAGES
might be useless (e.g. /usr/lib/python3.8/site-packages on Ubuntu-22.10,
which uses python 3.10 and has no site-packages with libxml2)

Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
---
 gentest.py | 421 ++++++++++++++++++++++++++---------------------------
 1 file changed, 209 insertions(+), 212 deletions(-)

diff --git a/gentest.py b/gentest.py
index b763300..0756706 100755
--- a/gentest.py
+++ b/gentest.py
@@ -8,7 +8,7 @@ import string
try:
import libxml2
except:
- print "libxml2 python bindings not available, skipping testapi.c generation"
+ print("libxml2 python bindings not available, skipping testapi.c generation")
sys.exit(0)

if len(sys.argv) > 1:
@@ -227,7 +227,7 @@ extra_post_call = {
if (old != NULL) {
xmlUnlinkNode(old);
xmlFreeNode(old) ; old = NULL ; }
- ret_val = NULL;""",
+\t ret_val = NULL;""",
"xmlTextMerge":
"""if ((first != NULL) && (first->type != XML_TEXT_NODE)) {
xmlUnlinkNode(second);
@@ -236,7 +236,7 @@ extra_post_call = {
"""if ((ret_val != NULL) && (ret_val != ncname) &&
(ret_val != prefix) && (ret_val != memory))
xmlFree(ret_val);
- ret_val = NULL;""",
+\t ret_val = NULL;""",
"xmlNewDocElementContent":
"""xmlFreeDocElementContent(doc, ret_val); ret_val = NULL;""",
"xmlDictReference": "xmlDictFree(dict);",
@@ -268,29 +268,29 @@ modules = []
def is_skipped_module(name):
for mod in skipped_modules:
if mod == name:
- return 1
+ return 1
return 0

def is_skipped_function(name):
for fun in skipped_functions:
if fun == name:
- return 1
+ return 1
# Do not test destructors
- if string.find(name, 'Free') != -1:
+ if name.find('Free') != -1:
return 1
return 0

def is_skipped_memcheck(name):
for fun in skipped_memcheck:
if fun == name:
- return 1
+ return 1
return 0

missing_types = {}
def add_missing_type(name, func):
try:
list = missing_types[name]
- list.append(func)
+ list.append(func)
except:
missing_types[name] = [func]

@@ -310,7 +310,7 @@ def add_missing_functions(name, module):
missing_functions_nr = missing_functions_nr + 1
try:
list = missing_functions[module]
- list.append(name)
+ list.append(name)
except:
missing_functions[module] = [name]

@@ -319,45 +319,45 @@ def add_missing_functions(name, module):
#

def type_convert(str, name, info, module, function, pos):
-# res = string.replace(str, " ", " ")
-# res = string.replace(str, " ", " ")
-# res = string.replace(str, " ", " ")
- res = string.replace(str, " *", "_ptr")
-# res = string.replace(str, "*", "_ptr")
- res = string.replace(res, " ", "_")
+# res = str.replace(" ", " ")
+# res = str.replace(" ", " ")
+# res = str.replace(" ", " ")
+ res = str.replace(" *", "_ptr")
+# res = str.replace("*", "_ptr")
+ res = res.replace(" ", "_")
if res == 'const_char_ptr':
- if string.find(name, "file") != -1 or \
- string.find(name, "uri") != -1 or \
- string.find(name, "URI") != -1 or \
- string.find(info, "filename") != -1 or \
- string.find(info, "URI") != -1 or \
- string.find(info, "URL") != -1:
- if string.find(function, "Save") != -1 or \
- string.find(function, "Create") != -1 or \
- string.find(function, "Write") != -1 or \
- string.find(function, "Fetch") != -1:
- return('fileoutput')
- return('filepath')
+ if name.find("file") != -1 or \
+ name.find("uri") != -1 or \
+ name.find("URI") != -1 or \
+ info.find("filename") != -1 or \
+ info.find("URI") != -1 or \
+ info.find("URL") != -1:
+ if function.find("Save") != -1 or \
+ function.find("Create") != -1 or \
+ function.find("Write") != -1 or \
+ function.find("Fetch") != -1:
+ return('fileoutput')
+ return('filepath')
if res == 'void_ptr':
if module == 'nanoftp' and name == 'ctx':
- return('xmlNanoFTPCtxtPtr')
+ return('xmlNanoFTPCtxtPtr')
if function == 'xmlNanoFTPNewCtxt' or \
- function == 'xmlNanoFTPConnectTo' or \
- function == 'xmlNanoFTPOpen':
- return('xmlNanoFTPCtxtPtr')
+ function == 'xmlNanoFTPConnectTo' or \
+ function == 'xmlNanoFTPOpen':
+ return('xmlNanoFTPCtxtPtr')
if module == 'nanohttp' and name == 'ctx':
- return('xmlNanoHTTPCtxtPtr')
- if function == 'xmlNanoHTTPMethod' or \
- function == 'xmlNanoHTTPMethodRedir' or \
- function == 'xmlNanoHTTPOpen' or \
- function == 'xmlNanoHTTPOpenRedir':
- return('xmlNanoHTTPCtxtPtr');
+ return('xmlNanoHTTPCtxtPtr')
+ if function == 'xmlNanoHTTPMethod' or \
+ function == 'xmlNanoHTTPMethodRedir' or \
+ function == 'xmlNanoHTTPOpen' or \
+ function == 'xmlNanoHTTPOpenRedir':
+ return('xmlNanoHTTPCtxtPtr');
if function == 'xmlIOHTTPOpen':
- return('xmlNanoHTTPCtxtPtr')
- if string.find(name, "data") != -1:
- return('userdata')
- if string.find(name, "user") != -1:
- return('userdata')
+ return('xmlNanoHTTPCtxtPtr')
+ if name.find("data") != -1:
+ return('userdata')
+ if name.find("user") != -1:
+ return('userdata')
if res == 'xmlDoc_ptr':
res = 'xmlDocPtr'
if res == 'xmlNode_ptr':
@@ -366,18 +366,18 @@ def type_convert(str, name, info, module, function, pos):
res = 'xmlDictPtr'
if res == 'xmlNodePtr' and pos != 0:
if (function == 'xmlAddChild' and pos == 2) or \
- (function == 'xmlAddChildList' and pos == 2) or \
+ (function == 'xmlAddChildList' and pos == 2) or \
(function == 'xmlAddNextSibling' and pos == 2) or \
(function == 'xmlAddSibling' and pos == 2) or \
(function == 'xmlDocSetRootElement' and pos == 2) or \
(function == 'xmlReplaceNode' and pos == 2) or \
(function == 'xmlTextMerge') or \
- (function == 'xmlAddPrevSibling' and pos == 2):
- return('xmlNodePtr_in');
+ (function == 'xmlAddPrevSibling' and pos == 2):
+ return('xmlNodePtr_in');
if res == 'const xmlBufferPtr':
res = 'xmlBufferPtr'
if res == 'xmlChar_ptr' and name == 'name' and \
- string.find(function, "EatName") != -1:
+ function.find("EatName") != -1:
return('eaten_name')
if res == 'void_ptr*':
res = 'void_ptr_ptr'
@@ -393,7 +393,7 @@ def type_convert(str, name, info, module, function, pos):
res = 'debug_FILE_ptr';
if res == 'int' and name == 'options':
if module == 'parser' or module == 'xmlreader':
- res = 'parseroptions'
+ res = 'parseroptions'

return res

@@ -402,28 +402,28 @@ known_param_types = []
def is_known_param_type(name):
for type in known_param_types:
if type == name:
- return 1
+ return 1
return name[-3:] == 'Ptr' or name[-4:] == '_ptr'

def generate_param_type(name, rtype):
global test
for type in known_param_types:
if type == name:
- return
+ return
for type in generated_param_types:
if type == name:
- return
+ return

if name[-3:] == 'Ptr' or name[-4:] == '_ptr':
if rtype[0:6] == 'const ':
- crtype = rtype[6:]
- else:
- crtype = rtype
+ crtype = rtype[6:]
+ else:
+ crtype = rtype

define = 0
- if modules_defines.has_key(module):
- test.write("#ifdef %s\n" % (modules_defines[module]))
- define = 1
+ if module in modules_defines:
+ test.write("#ifdef %s\n" % (modules_defines[module]))
+ define = 1
test.write("""
#define gen_nb_%s 1
static %s gen_%s(int no ATTRIBUTE_UNUSED, int nr ATTRIBUTE_UNUSED) {
@@ -433,7 +433,7 @@ static void des_%s(int no ATTRIBUTE_UNUSED, %s val ATTRIBUTE_UNUSED, int nr ATTR
}
""" % (name, crtype, name, name, rtype))
if define == 1:
- test.write("#endif\n\n")
+ test.write("#endif\n\n")
add_generated_param_type(name)

#
@@ -445,7 +445,7 @@ known_return_types = []
def is_known_return_type(name):
for type in known_return_types:
if type == name:
- return 1
+ return 1
return 0

#
@@ -471,7 +471,7 @@ def compare_and_save():
try:
os.system("rm testapi.c; mv testapi.c.new testapi.c")
except:
- os.system("mv testapi.c.new testapi.c")
+ os.system("mv testapi.c.new testapi.c")
print("Updated testapi.c")
else:
print("Generated testapi.c is identical")
@@ -481,17 +481,17 @@ while line != "":
if line == "/* CUT HERE: everything below that line is generated */\n":
break;
if line[0:15] == "#define gen_nb_":
- type = string.split(line[15:])[0]
- known_param_types.append(type)
+ type = line[15:].split()[0]
+ known_param_types.append(type)
if line[0:19] == "static void desret_":
- type = string.split(line[19:], '(')[0]
- known_return_types.append(type)
+ type = line[19:].split('(')[0]
+ known_return_types.append(type)
test.write(line)
line = input.readline()
input.close()

if line == "":
- print "Could not find the CUT marker in testapi.c skipping generation"
+ print("Could not find the CUT marker in testapi.c skipping generation")
test.close()
sys.exit(0)

@@ -505,7 +505,7 @@ test.write("/* CUT HERE: everything below that line is generated */\n")
#
doc = libxml2.readFile(srcPref + 'doc/libxml2-api.xml', None, 0)
if doc == None:
- print "Failed to load doc/libxml2-api.xml"
+ print("Failed to load doc/libxml2-api.xml")
sys.exit(1)
ctxt = doc.xpathNewContext()

@@ -519,9 +519,9 @@ for arg in args:
mod = arg.xpathEval('string(../@file)')
func = arg.xpathEval('string(../@name)')
if (mod not in skipped_modules) and (func not in skipped_functions):
- type = arg.xpathEval('string(@type)')
- if not argtypes.has_key(type):
- argtypes[type] = func
+ type = arg.xpathEval('string(@type)')
+ if type not in argtypes:
+ argtypes[type] = func

# similarly for return types
rettypes = {}
@@ -531,8 +531,8 @@ for ret in rets:
func = ret.xpathEval('string(../@name)')
if (mod not in skipped_modules) and (func not in skipped_functions):
type = ret.xpathEval('string(@type)')
- if not rettypes.has_key(type):
- rettypes[type] = func
+ if type not in rettypes:
+ rettypes[type] = func

#
# Generate constructors and return type handling for all enums
@@ -549,49 +549,49 @@ for enum in enums:
continue;
define = 0

- if argtypes.has_key(name) and is_known_param_type(name) == 0:
- values = ctxt.xpathEval("/api/symbols/enum[@type='%s']" % name)
- i = 0
- vals = []
- for value in values:
- vname = value.xpathEval('string(@name)')
- if vname == None:
- continue;
- i = i + 1
- if i >= 5:
- break;
- vals.append(vname)
- if vals == []:
- print "Didn't find any value for enum %s" % (name)
- continue
- if modules_defines.has_key(module):
- test.write("#ifdef %s\n" % (modules_defines[module]))
- define = 1
- test.write("#define gen_nb_%s %d\n" % (name, len(vals)))
- test.write("""static %s gen_%s(int no, int nr ATTRIBUTE_UNUSED) {\n""" %
- (name, name))
- i = 1
- for value in vals:
- test.write(" if (no == %d) return(%s);\n" % (i, value))
- i = i + 1
- test.write(""" return(0);
+ if (name in argtypes) and is_known_param_type(name) == 0:
+ values = ctxt.xpathEval("/api/symbols/enum[@type='%s']" % name)
+ i = 0
+ vals = []
+ for value in values:
+ vname = value.xpathEval('string(@name)')
+ if vname == None:
+ continue;
+ i = i + 1
+ if i >= 5:
+ break;
+ vals.append(vname)
+ if vals == []:
+ print("Didn't find any value for enum %s" % (name))
+ continue
+ if module in modules_defines:
+ test.write("#ifdef %s\n" % (modules_defines[module]))
+ define = 1
+ test.write("#define gen_nb_%s %d\n" % (name, len(vals)))
+ test.write("""static %s gen_%s(int no, int nr ATTRIBUTE_UNUSED) {\n""" %
+ (name, name))
+ i = 1
+ for value in vals:
+ test.write(" if (no == %d) return(%s);\n" % (i, value))
+ i = i + 1
+ test.write(""" return(0);
}

static void des_%s(int no ATTRIBUTE_UNUSED, %s val ATTRIBUTE_UNUSED, int nr ATTRIBUTE_UNUSED) {
}

""" % (name, name));
- known_param_types.append(name)
+ known_param_types.append(name)

if (is_known_return_type(name) == 0) and (name in rettypes):
- if define == 0 and modules_defines.has_key(module):
- test.write("#ifdef %s\n" % (modules_defines[module]))
- define = 1
+ if define == 0 and (module in modules_defines):
+ test.write("#ifdef %s\n" % (modules_defines[module]))
+ define = 1
test.write("""static void desret_%s(%s val ATTRIBUTE_UNUSED) {
}

""" % (name, name))
- known_return_types.append(name)
+ known_return_types.append(name)
if define == 1:
test.write("#endif\n\n")

@@ -615,9 +615,9 @@ for file in headers:
# do not test deprecated APIs
#
desc = file.xpathEval('string(description)')
- if string.find(desc, 'DEPRECATED') != -1:
- print "Skipping deprecated interface %s" % name
- continue;
+ if desc.find('DEPRECATED') != -1:
+ print("Skipping deprecated interface %s" % name)
+ continue;

test.write("#include <libxml/%s.h>\n" % name)
modules.append(name)
@@ -679,7 +679,7 @@ def generate_test(module, node):
# and store the informations for the generation
#
try:
- args = node.xpathEval("arg")
+ args = node.xpathEval("arg")
except:
args = []
t_args = []
@@ -687,37 +687,37 @@ def generate_test(module, node):
for arg in args:
n = n + 1
rtype = arg.xpathEval("string(@type)")
- if rtype == 'void':
- break;
- info = arg.xpathEval("string(@info)")
- nam = arg.xpathEval("string(@name)")
+ if rtype == 'void':
+ break;
+ info = arg.xpathEval("string(@info)")
+ nam = arg.xpathEval("string(@name)")
type = type_convert(rtype, nam, info, module, name, n)
- if is_known_param_type(type) == 0:
- add_missing_type(type, name);
- no_gen = 1
+ if is_known_param_type(type) == 0:
+ add_missing_type(type, name);
+ no_gen = 1
if (type[-3:] == 'Ptr' or type[-4:] == '_ptr') and \
- rtype[0:6] == 'const ':
- crtype = rtype[6:]
- else:
- crtype = rtype
- t_args.append((nam, type, rtype, crtype, info))
+ rtype[0:6] == 'const ':
+ crtype = rtype[6:]
+ else:
+ crtype = rtype
+ t_args.append((nam, type, rtype, crtype, info))

try:
- rets = node.xpathEval("return")
+ rets = node.xpathEval("return")
except:
rets = []
t_ret = None
for ret in rets:
rtype = ret.xpathEval("string(@type)")
- info = ret.xpathEval("string(@info)")
+ info = ret.xpathEval("string(@info)")
type = type_convert(rtype, 'return', info, module, name, 0)
- if rtype == 'void':
- break
- if is_known_return_type(type) == 0:
- add_missing_type(type, name);
- no_gen = 1
- t_ret = (type, rtype, info)
- break
+ if rtype == 'void':
+ break
+ if is_known_return_type(type) == 0:
+ add_missing_type(type, name);
+ no_gen = 1
+ t_ret = (type, rtype, info)
+ break

if no_gen == 0:
for t_arg in t_args:
@@ -733,7 +733,7 @@ test_%s(void) {

if no_gen == 1:
add_missing_functions(name, module)
- test.write("""
+ test.write("""
/* missing type support */
return(test_ret);
}
@@ -742,22 +742,22 @@ test_%s(void) {
return

try:
- conds = node.xpathEval("cond")
- for cond in conds:
- test.write("#if %s\n" % (cond.get_content()))
- nb_cond = nb_cond + 1
+ conds = node.xpathEval("cond")
+ for cond in conds:
+ test.write("#if %s\n" % (cond.get_content()))
+ nb_cond = nb_cond + 1
except:
pass

define = 0
- if function_defines.has_key(name):
+ if name in function_defines:
test.write("#ifdef %s\n" % (function_defines[name]))
- define = 1
+ define = 1

# Declare the memory usage counter
no_mem = is_skipped_memcheck(name)
if no_mem == 0:
- test.write(" int mem_base;\n");
+ test.write(" int mem_base;\n");

# Declare the return value
if t_ret != None:
@@ -766,29 +766,29 @@ test_%s(void) {
# Declare the arguments
for arg in t_args:
(nam, type, rtype, crtype, info) = arg;
- # add declaration
- test.write(" %s %s; /* %s */\n" % (crtype, nam, info))
- test.write(" int n_%s;\n" % (nam))
+ # add declaration
+ test.write(" %s %s; /* %s */\n" % (crtype, nam, info))
+ test.write(" int n_%s;\n" % (nam))
test.write("\n")

# Cascade loop on of each argument list of values
for arg in t_args:
(nam, type, rtype, crtype, info) = arg;
- #
- test.write(" for (n_%s = 0;n_%s < gen_nb_%s;n_%s++) {\n" % (
- nam, nam, type, nam))
+ #
+ test.write(" for (n_%s = 0;n_%s < gen_nb_%s;n_%s++) {\n" % (
+ nam, nam, type, nam))

# log the memory usage
if no_mem == 0:
- test.write(" mem_base = xmlMemBlocks();\n");
+ test.write(" mem_base = xmlMemBlocks();\n");

# prepare the call
i = 0;
for arg in t_args:
(nam, type, rtype, crtype, info) = arg;
- #
- test.write(" %s = gen_%s(n_%s, %d);\n" % (nam, type, nam, i))
- i = i + 1;
+ #
+ test.write(" %s = gen_%s(n_%s, %d);\n" % (nam, type, nam, i))
+ i = i + 1;

# add checks to avoid out-of-bounds array access
i = 0;
@@ -797,7 +797,7 @@ test_%s(void) {
# assume that "size", "len", and "start" parameters apply to either
# the nearest preceding or following char pointer
if type == "int" and (nam == "size" or nam == "len" or nam == "start"):
- for j in range(i - 1, -1, -1) + range(i + 1, len(t_args)):
+ for j in (*range(i - 1, -1, -1), *range(i + 1, len(t_args))):
(bnam, btype) = t_args[j][:2]
if btype == "const_char_ptr" or btype == "const_xmlChar_ptr":
test.write(
@@ -806,42 +806,42 @@ test_%s(void) {
" continue;\n"
% (bnam, nam, bnam))
break
- i = i + 1;
+ i = i + 1;

# do the call, and clanup the result
- if extra_pre_call.has_key(name):
- test.write(" %s\n"% (extra_pre_call[name]))
+ if name in extra_pre_call:
+ test.write(" %s\n"% (extra_pre_call[name]))
if t_ret != None:
- test.write("\n ret_val = %s(" % (name))
- need = 0
- for arg in t_args:
- (nam, type, rtype, crtype, info) = arg
- if need:
- test.write(", ")
- else:
- need = 1
- if rtype != crtype:
- test.write("(%s)" % rtype)
- test.write("%s" % nam);
- test.write(");\n")
- if extra_post_call.has_key(name):
- test.write(" %s\n"% (extra_post_call[name]))
- test.write(" desret_%s(ret_val);\n" % t_ret[0])
+ test.write("\n ret_val = %s(" % (name))
+ need = 0
+ for arg in t_args:
+ (nam, type, rtype, crtype, info) = arg
+ if need:
+ test.write(", ")
+ else:
+ need = 1
+ if rtype != crtype:
+ test.write("(%s)" % rtype)
+ test.write("%s" % nam);
+ test.write(");\n")
+ if name in extra_post_call:
+ test.write(" %s\n"% (extra_post_call[name]))
+ test.write(" desret_%s(ret_val);\n" % t_ret[0])
else:
- test.write("\n %s(" % (name));
- need = 0;
- for arg in t_args:
- (nam, type, rtype, crtype, info) = arg;
- if need:
- test.write(", ")
- else:
- need = 1
- if rtype != crtype:
- test.write("(%s)" % rtype)
- test.write("%s" % nam)
- test.write(");\n")
- if extra_post_call.has_key(name):
- test.write(" %s\n"% (extra_post_call[name]))
+ test.write("\n %s(" % (name));
+ need = 0;
+ for arg in t_args:
+ (nam, type, rtype, crtype, info) = arg;
+ if need:
+ test.write(", ")
+ else:
+ need = 1
+ if rtype != crtype:
+ test.write("(%s)" % rtype)
+ test.write("%s" % nam)
+ test.write(");\n")
+ if name in extra_post_call:
+ test.write(" %s\n"% (extra_post_call[name]))

test.write(" call_tests++;\n");

@@ -849,32 +849,32 @@ test_%s(void) {
i = 0;
for arg in t_args:
(nam, type, rtype, crtype, info) = arg;
- # This is a hack to prevent generating a destructor for the
- # 'input' argument in xmlTextReaderSetup. There should be
- # a better, more generic way to do this!
- if string.find(info, 'destroy') == -1:
- test.write(" des_%s(n_%s, " % (type, nam))
- if rtype != crtype:
- test.write("(%s)" % rtype)
- test.write("%s, %d);\n" % (nam, i))
- i = i + 1;
+ # This is a hack to prevent generating a destructor for the
+ # 'input' argument in xmlTextReaderSetup. There should be
+ # a better, more generic way to do this!
+ if info.find('destroy') == -1:
+ test.write(" des_%s(n_%s, " % (type, nam))
+ if rtype != crtype:
+ test.write("(%s)" % rtype)
+ test.write("%s, %d);\n" % (nam, i))
+ i = i + 1;

test.write(" xmlResetLastError();\n");
# Check the memory usage
if no_mem == 0:
- test.write(""" if (mem_base != xmlMemBlocks()) {
+ test.write(""" if (mem_base != xmlMemBlocks()) {
printf("Leak of %%d blocks found in %s",
- xmlMemBlocks() - mem_base);
- test_ret++;
+\t xmlMemBlocks() - mem_base);
+\t test_ret++;
""" % (name));
- for arg in t_args:
- (nam, type, rtype, crtype, info) = arg;
- test.write(""" printf(" %%d", n_%s);\n""" % (nam))
- test.write(""" printf("\\n");\n""")
- test.write(" }\n")
+ for arg in t_args:
+ (nam, type, rtype, crtype, info) = arg;
+ test.write(""" printf(" %%d", n_%s);\n""" % (nam))
+ test.write(""" printf("\\n");\n""")
+ test.write(" }\n")

for arg in t_args:
- test.write(" }\n")
+ test.write(" }\n")

test.write(" function_tests++;\n")
#
@@ -882,7 +882,7 @@ test_%s(void) {
#
while nb_cond > 0:
test.write("#endif\n")
- nb_cond = nb_cond -1
+ nb_cond = nb_cond -1
if define == 1:
test.write("#endif\n")

@@ -900,10 +900,10 @@ test_%s(void) {
for module in modules:
# gather all the functions exported by that module
try:
- functions = ctxt.xpathEval("/api/symbols/function[@file='%s']" % (module))
+ functions = ctxt.xpathEval("/api/symbols/function[@file='%s']" % (module))
except:
- print "Failed to gather functions from module %s" % (module)
- continue;
+ print("Failed to gather functions from module %s" % (module))
+ continue;

# iterate over all functions in the module generating the test
i = 0
@@ -923,14 +923,14 @@ test_%s(void) {
# iterate over all functions in the module generating the call
for function in functions:
name = function.xpathEval('string(@name)')
- if is_skipped_function(name):
- continue
- test.write(" test_ret += test_%s();\n" % (name))
+ if is_skipped_function(name):
+ continue
+ test.write(" test_ret += test_%s();\n" % (name))

# footer
test.write("""
if (test_ret != 0)
- printf("Module %s: %%d errors\\n", test_ret);
+\tprintf("Module %s: %%d errors\\n", test_ret);
return(test_ret);
}
""" % (module))
@@ -948,7 +948,7 @@ test.write(""" return(0);
}
""");

-print "Generated test for %d modules and %d functions" %(len(modules), nb_tests)
+print("Generated test for %d modules and %d functions" %(len(modules), nb_tests))

compare_and_save()

@@ -960,11 +960,8 @@ for missing in missing_types.keys():
n = len(missing_types[missing])
missing_list.append((n, missing))

-def compare_missing(a, b):
- return b[0] - a[0]
-
-missing_list.sort(compare_missing)
-print "Missing support for %d functions and %d types see missing.lst" % (missing_functions_nr, len(missing_list))
+missing_list.sort(key=lambda a: a[0])
+print("Missing support for %d functions and %d types see missing.lst" % (missing_functions_nr, len(missing_list)))
lst = open("missing.lst", "w")
lst.write("Missing support for %d types" % (len(missing_list)))
lst.write("\n")
@@ -974,9 +971,9 @@ for miss in missing_list:
for n in missing_types[miss[1]]:
i = i + 1
if i > 5:
- lst.write(" ...")
- break
- lst.write(" %s" % (n))
+ lst.write(" ...")
+ break
+ lst.write(" %s" % (n))
lst.write("\n")
lst.write("\n")
lst.write("\n")

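One detail of the port above worth calling out: Python 3 removed cmp-function sorting, and the backported replacement (`missing_list.sort(key=lambda a: a[0])`) sorts ascending, whereas the old `compare_missing` (`b[0] - a[0]`) sorted descending; this only affects the ordering of the missing.lst report. A minimal sketch of an order-preserving migration, with hypothetical data:

    # Python 2 style:  missing_list.sort(lambda a, b: b[0] - a[0])   # descending
    missing_list = [(1, 'xmlDictPtr'), (7, 'xmlNodePtr'), (3, 'xmlDocPtr')]
    missing_list.sort(key=lambda item: item[0], reverse=True)
    print(missing_list)  # [(7, 'xmlNodePtr'), (3, 'xmlDocPtr'), (1, 'xmlDictPtr')]
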
@@ -0,0 +1,53 @@
From b07251215ef48c70c6e56f7351406c47cfca4d5b Mon Sep 17 00:00:00 2001
From: Nick Wellnhofer <wellnhofer@aevum.de>
Date: Fri, 10 Jan 2020 15:55:07 +0100
Subject: [PATCH] Fix integer overflow in xmlBufferResize

Found by OSS-Fuzz.

CVE: CVE-2022-29824

Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/libxml2/-/commit/b07251215ef48c70c6e56f7351406c47cfca4d5b]

Signed-off-by: Riyaz Ahmed Khan <Riyaz.Khan@kpit.com>

---
 tree.c | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/tree.c b/tree.c
index 0d7fc98c..f43f6de1 100644
--- a/tree.c
+++ b/tree.c
@@ -7424,12 +7424,17 @@ xmlBufferResize(xmlBufferPtr buf, unsigned int size)
if (size < buf->size)
return 1;

+ if (size > UINT_MAX - 10) {
+ xmlTreeErrMemory("growing buffer");
+ return 0;
+ }
+
/* figure out new size */
switch (buf->alloc){
case XML_BUFFER_ALLOC_IO:
case XML_BUFFER_ALLOC_DOUBLEIT:
/*take care of empty case*/
- newSize = (buf->size ? buf->size*2 : size + 10);
+ newSize = (buf->size ? buf->size : size + 10);
while (size > newSize) {
if (newSize > UINT_MAX / 2) {
xmlTreeErrMemory("growing buffer");
@@ -7445,7 +7450,7 @@ xmlBufferResize(xmlBufferPtr buf, unsigned int size)
if (buf->use < BASE_BUFFER_SIZE)
newSize = size;
else {
- newSize = buf->size * 2;
+ newSize = buf->size;
while (size > newSize) {
if (newSize > UINT_MAX / 2) {
xmlTreeErrMemory("growing buffer");
--
GitLab

meta/recipes-core/libxml/libxml2/CVE-2022-29824.patch (new file, 348 lines)
@@ -0,0 +1,348 @@
From 2554a2408e09f13652049e5ffb0d26196b02ebab Mon Sep 17 00:00:00 2001
From: Nick Wellnhofer <wellnhofer@aevum.de>
Date: Tue, 8 Mar 2022 20:10:02 +0100
Subject: [PATCH] [CVE-2022-29824] Fix integer overflows in xmlBuf and
 xmlBuffer

In several places, the code handling string buffers didn't check for
integer overflow or used wrong types for buffer sizes. This could
result in out-of-bounds writes or other memory errors when working on
large, multi-gigabyte buffers.

Thanks to Felix Wilhelm for the report.

CVE: CVE-2022-29824

Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/libxml2/-/commit/2554a2408e09f13652049e5ffb0d26196b02ebab]

Signed-off-by: Riyaz Ahmed Khan <Riyaz.Khan@kpit.com>

---
 buf.c | 86 +++++++++++++++++++++++-----------------------------------
 tree.c | 72 ++++++++++++++++++------------------------------
 2 files changed, 61 insertions(+), 97 deletions(-)

diff --git a/buf.c b/buf.c
index 24368d37..40a5ee06 100644
--- a/buf.c
+++ b/buf.c
@@ -30,6 +30,10 @@
#include <libxml/parserInternals.h> /* for XML_MAX_TEXT_LENGTH */
#include "buf.h"

+#ifndef SIZE_MAX
+#define SIZE_MAX ((size_t) -1)
+#endif
+
#define WITH_BUFFER_COMPAT

/**
@@ -156,6 +160,8 @@ xmlBufPtr
xmlBufCreateSize(size_t size) {
xmlBufPtr ret;

+ if (size == SIZE_MAX)
+ return(NULL);
ret = (xmlBufPtr) xmlMalloc(sizeof(xmlBuf));
if (ret == NULL) {
xmlBufMemoryError(NULL, "creating buffer");
@@ -166,8 +172,8 @@ xmlBufCreateSize(size_t size) {
ret->error = 0;
ret->buffer = NULL;
ret->alloc = xmlBufferAllocScheme;
- ret->size = (size ? size+2 : 0); /* +1 for ending null */
- ret->compat_size = (int) ret->size;
+ ret->size = (size ? size + 1 : 0); /* +1 for ending null */
+ ret->compat_size = (ret->size > INT_MAX ? INT_MAX : ret->size);
if (ret->size){
ret->content = (xmlChar *) xmlMallocAtomic(ret->size * sizeof(xmlChar));
if (ret->content == NULL) {
@@ -442,23 +448,17 @@ xmlBufGrowInternal(xmlBufPtr buf, size_t len) {
CHECK_COMPAT(buf)

if (buf->alloc == XML_BUFFER_ALLOC_IMMUTABLE) return(0);
- if (buf->use + len < buf->size)
+ if (len < buf->size - buf->use)
return(buf->size - buf->use);
+ if (len > SIZE_MAX - buf->use)
+ return(0);

- /*
- * Windows has a BIG problem on realloc timing, so we try to double
- * the buffer size (if that's enough) (bug 146697)
- * Apparently BSD too, and it's probably best for linux too
- * On an embedded system this may be something to change
- */
-#if 1
- if (buf->size > (size_t) len)
- size = buf->size * 2;
- else
- size = buf->use + len + 100;
-#else
- size = buf->use + len + 100;
-#endif
+ if (buf->size > (size_t) len) {
+ size = buf->size > SIZE_MAX / 2 ? SIZE_MAX : buf->size * 2;
+ } else {
+ size = buf->use + len;
+ size = size > SIZE_MAX - 100 ? SIZE_MAX : size + 100;
+ }

if (buf->alloc == XML_BUFFER_ALLOC_BOUNDED) {
/*
@@ -744,7 +744,7 @@ xmlBufIsEmpty(const xmlBufPtr buf)
int
xmlBufResize(xmlBufPtr buf, size_t size)
{
- unsigned int newSize;
+ size_t newSize;
xmlChar* rebuf = NULL;
size_t start_buf;

@@ -772,9 +772,13 @@ xmlBufResize(xmlBufPtr buf, size_t size)
case XML_BUFFER_ALLOC_IO:
case XML_BUFFER_ALLOC_DOUBLEIT:
/*take care of empty case*/
- newSize = (buf->size ? buf->size*2 : size + 10);
+ if (buf->size == 0) {
+ newSize = (size > SIZE_MAX - 10 ? SIZE_MAX : size + 10);
+ } else {
+ newSize = buf->size;
+ }
while (size > newSize) {
- if (newSize > UINT_MAX / 2) {
+ if (newSize > SIZE_MAX / 2) {
xmlBufMemoryError(buf, "growing buffer");
return 0;
}
@@ -782,15 +786,15 @@ xmlBufResize(xmlBufPtr buf, size_t size)
}
break;
case XML_BUFFER_ALLOC_EXACT:
- newSize = size+10;
+ newSize = (size > SIZE_MAX - 10 ? SIZE_MAX : size + 10);
break;
case XML_BUFFER_ALLOC_HYBRID:
if (buf->use < BASE_BUFFER_SIZE)
newSize = size;
else {
- newSize = buf->size * 2;
+ newSize = buf->size;
while (size > newSize) {
- if (newSize > UINT_MAX / 2) {
+ if (newSize > SIZE_MAX / 2) {
xmlBufMemoryError(buf, "growing buffer");
return 0;
}
@@ -800,7 +804,7 @@ xmlBufResize(xmlBufPtr buf, size_t size)
break;

default:
- newSize = size+10;
+ newSize = (size > SIZE_MAX - 10 ? SIZE_MAX : size + 10);
break;
}

@@ -866,7 +870,7 @@ xmlBufResize(xmlBufPtr buf, size_t size)
*/
int
xmlBufAdd(xmlBufPtr buf, const xmlChar *str, int len) {
- unsigned int needSize;
+ size_t needSize;

if ((str == NULL) || (buf == NULL) || (buf->error))
return -1;
@@ -888,8 +892,10 @@ xmlBufAdd(xmlBufPtr buf, const xmlChar *str, int len) {
if (len < 0) return -1;
if (len == 0) return 0;

- needSize = buf->use + len + 2;
- if (needSize > buf->size){
+ if ((size_t) len >= buf->size - buf->use) {
+ if ((size_t) len >= SIZE_MAX - buf->use)
+ return(-1);
+ needSize = buf->use + len + 1;
if (buf->alloc == XML_BUFFER_ALLOC_BOUNDED) {
/*
* Used to provide parsing limits
@@ -1025,31 +1031,7 @@ xmlBufCat(xmlBufPtr buf, const xmlChar *str) {
*/
int
xmlBufCCat(xmlBufPtr buf, const char *str) {
- const char *cur;
-
- if ((buf == NULL) || (buf->error))
- return(-1);
- CHECK_COMPAT(buf)
- if (buf->alloc == XML_BUFFER_ALLOC_IMMUTABLE) return -1;
- if (str == NULL) {
-#ifdef DEBUG_BUFFER
- xmlGenericError(xmlGenericErrorContext,
- "xmlBufCCat: str == NULL\n");
-#endif
- return -1;
- }
- for (cur = str;*cur != 0;cur++) {
- if (buf->use + 10 >= buf->size) {
- if (!xmlBufResize(buf, buf->use+10)){
- xmlBufMemoryError(buf, "growing buffer");
- return XML_ERR_NO_MEMORY;
- }
- }
- buf->content[buf->use++] = *cur;
- }
- buf->content[buf->use] = 0;
- UPDATE_COMPAT(buf)
- return 0;
+ return xmlBufCat(buf, (const xmlChar *) str);
}

/**
diff --git a/tree.c b/tree.c
index 9d94aa42..86afb7d6 100644
--- a/tree.c
+++ b/tree.c
@@ -7104,6 +7104,8 @@ xmlBufferPtr
xmlBufferCreateSize(size_t size) {
xmlBufferPtr ret;

+ if (size >= UINT_MAX)
+ return(NULL);
ret = (xmlBufferPtr) xmlMalloc(sizeof(xmlBuffer));
if (ret == NULL) {
xmlTreeErrMemory("creating buffer");
@@ -7111,7 +7113,7 @@ xmlBufferCreateSize(size_t size) {
}
ret->use = 0;
ret->alloc = xmlBufferAllocScheme;
- ret->size = (size ? size+2 : 0); /* +1 for ending null */
+ ret->size = (size ? size + 1 : 0); /* +1 for ending null */
if (ret->size){
ret->content = (xmlChar *) xmlMallocAtomic(ret->size * sizeof(xmlChar));
if (ret->content == NULL) {
@@ -7171,6 +7173,8 @@ xmlBufferCreateStatic(void *mem, size_t size) {

if ((mem == NULL) || (size == 0))
return(NULL);
+ if (size > UINT_MAX)
+ return(NULL);

ret = (xmlBufferPtr) xmlMalloc(sizeof(xmlBuffer));
if (ret == NULL) {
@@ -7318,28 +7322,23 @@ xmlBufferShrink(xmlBufferPtr buf, unsigned int len) {
*/
int
xmlBufferGrow(xmlBufferPtr buf, unsigned int len) {
- int size;
+ unsigned int size;
xmlChar *newbuf;

if (buf == NULL) return(-1);

if (buf->alloc == XML_BUFFER_ALLOC_IMMUTABLE) return(0);
- if (len + buf->use < buf->size) return(0);
+ if (len < buf->size - buf->use)
+ return(0);
+ if (len > UINT_MAX - buf->use)
+ return(-1);

- /*
- * Windows has a BIG problem on realloc timing, so we try to double
- * the buffer size (if that's enough) (bug 146697)
- * Apparently BSD too, and it's probably best for linux too
- * On an embedded system this may be something to change
- */
-#if 1
- if (buf->size > len)
- size = buf->size * 2;
- else
- size = buf->use + len + 100;
-#else
- size = buf->use + len + 100;
-#endif
+ if (buf->size > (size_t) len) {
+ size = buf->size > UINT_MAX / 2 ? UINT_MAX : buf->size * 2;
+ } else {
+ size = buf->use + len;
+ size = size > UINT_MAX - 100 ? UINT_MAX : size + 100;
+ }

if ((buf->alloc == XML_BUFFER_ALLOC_IO) && (buf->contentIO != NULL)) {
size_t start_buf = buf->content - buf->contentIO;
@@ -7466,7 +7465,10 @@ xmlBufferResize(xmlBufferPtr buf, unsigned int size)
case XML_BUFFER_ALLOC_IO:
case XML_BUFFER_ALLOC_DOUBLEIT:
/*take care of empty case*/
- newSize = (buf->size ? buf->size : size + 10);
+ if (buf->size == 0)
+ newSize = (size > UINT_MAX - 10 ? UINT_MAX : size + 10);
+ else
+ newSize = buf->size;
while (size > newSize) {
if (newSize > UINT_MAX / 2) {
xmlTreeErrMemory("growing buffer");
@@ -7476,7 +7478,7 @@ xmlBufferResize(xmlBufferPtr buf, unsigned int size)
}
break;
case XML_BUFFER_ALLOC_EXACT:
- newSize = size+10;
+ newSize = (size > UINT_MAX - 10 ? UINT_MAX : size + 10);;
break;
case XML_BUFFER_ALLOC_HYBRID:
if (buf->use < BASE_BUFFER_SIZE)
@@ -7494,7 +7496,7 @@ xmlBufferResize(xmlBufferPtr buf, unsigned int size)
break;

default:
- newSize = size+10;
+ newSize = (size > UINT_MAX - 10 ? UINT_MAX : size + 10);;
break;
}

@@ -7580,8 +7582,10 @@ xmlBufferAdd(xmlBufferPtr buf, const xmlChar *str, int len) {
if (len < 0) return -1;
if (len == 0) return 0;

- needSize = buf->use + len + 2;
- if (needSize > buf->size){
+ if ((unsigned) len >= buf->size - buf->use) {
+ if ((unsigned) len >= UINT_MAX - buf->use)
+ return XML_ERR_NO_MEMORY;
+ needSize = buf->use + len + 1;
if (!xmlBufferResize(buf, needSize)){
xmlTreeErrMemory("growing buffer");
return XML_ERR_NO_MEMORY;
@@ -7694,29 +7698,7 @@ xmlBufferCat(xmlBufferPtr buf, const xmlChar *str) {
*/
int
xmlBufferCCat(xmlBufferPtr buf, const char *str) {
- const char *cur;
-
- if (buf == NULL)
- return(-1);
- if (buf->alloc == XML_BUFFER_ALLOC_IMMUTABLE) return -1;
- if (str == NULL) {
-#ifdef DEBUG_BUFFER
- xmlGenericError(xmlGenericErrorContext,
- "xmlBufferCCat: str == NULL\n");
-#endif
- return -1;
- }
- for (cur = str;*cur != 0;cur++) {
- if (buf->use + 10 >= buf->size) {
- if (!xmlBufferResize(buf, buf->use+10)){
- xmlTreeErrMemory("growing buffer");
- return XML_ERR_NO_MEMORY;
- }
- }
- buf->content[buf->use++] = *cur;
- }
- buf->content[buf->use] = 0;
- return 0;
+ return xmlBufferCat(buf, (const xmlChar *) str);
}

/**
--
GitLab

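The bug class both libxml2 patches address is the same: size arithmetic such as `buf->use + len + 2` is done in a fixed-width unsigned type, so a very large `len` wraps around and the subsequent "is the buffer big enough?" check passes even though it should fail. A quick sketch of the wraparound in Python, masking to 32 bits to mimic a 32-bit C `unsigned int` (values hypothetical):

    M = 0xFFFFFFFF                    # UINT_MAX for a 32-bit unsigned int
    use, size = 10, 100               # current buffer state
    length = M - 5                    # attacker-supplied length
    need = (use + length + 2) & M     # wraps around to 6
    print(need > size)                # False -> resize skipped, later write overflows

The fixed code rejects `len` up front (`len > SIZE_MAX - buf->use`) and saturates size computations instead of letting them wrap.
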
@@ -30,6 +30,9 @@ SRC_URI += "http://www.w3.org/XML/Test/xmlts20080827.tar.gz;subdir=${BP};name=te
file://CVE-2021-3541.patch \
file://CVE-2022-23308.patch \
file://CVE-2022-23308-fix-regression.patch \
+file://CVE-2022-29824-dependent.patch \
+file://CVE-2022-29824.patch \
+file://0001-Port-gentest.py-to-Python-3.patch \
"

SRC_URI[archive.sha256sum] = "593b7b751dd18c2d6abcd0c4bcb29efc203d0b4373a6df98e3a455ea74ae2813"
@@ -87,6 +90,16 @@ do_configure_prepend () {
}

do_compile_ptest() {
+# Make sure that testapi.c is newer than gentests.py, because
+# with reproducible builds, they will both get e.g. Jan 1 1970
+# modification time from SOURCE_DATE_EPOCH and then check-am
+# might try to rebuild_testapi, which will fail even with
+# 0001-Port-gentest.py-to-Python-3.patch, because it needs
+# libxml2 module (libxml2-native dependency and correctly
+# set PYTHON_SITE_PACKAGES), it's easier to
+# just rely on pre-generated testapi.c from the release
+touch ${S}/testapi.c
+
oe_runmake check-am
}

@@ -12,6 +12,11 @@ deltask do_compile
deltask do_install
deltask do_populate_sysroot

+# CVE database update interval, in seconds. By default: once a day (24*60*60).
+# Use 0 to force the update
+# Use a negative value to skip the update
+CVE_DB_UPDATE_INTERVAL ?= "86400"
+
python () {
if not bb.data.inherits_class("cve-check", d):
raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
@@ -42,11 +47,17 @@ python do_fetch() {
if os.path.exists(db_file):
os.remove(db_file)

# Don't refresh the database more than once an hour
# The NVD database changes once a day, so no need to update more frequently
# Allow the user to force-update
try:
import time
-if time.time() - os.path.getmtime(db_file) < (60*60):
+update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL"))
+if update_interval < 0:
+bb.note("CVE database update skipped")
+return
+if time.time() - os.path.getmtime(db_file) < update_interval:
return

except OSError:
pass

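For reference, the new knob can be tuned from a user's configuration; a few hypothetical local.conf settings, following the comments in the hunk above:

    CVE_DB_UPDATE_INTERVAL = "0"        # force a database update on every do_fetch
    CVE_DB_UPDATE_INTERVAL = "-1"       # skip updates entirely (e.g. offline builds)
    CVE_DB_UPDATE_INTERVAL = "604800"   # allow the database to be up to a week old
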
meta/recipes-core/ncurses/files/CVE-2022-29458.patch (new file, 135 lines)
@@ -0,0 +1,135 @@
From 5f40697e37e195069f55528fc7a1d77e619ad104 Mon Sep 17 00:00:00 2001
From: Dan Tran <dantran@microsoft.com>
Date: Fri, 13 May 2022 13:28:41 -0700
Subject: [PATCH] ncurses 6.3 before patch 20220416 has an out-of-bounds read
 and segmentation violation in convert_strings in tinfo/read_entry.c in the
 terminfo library.

CVE: CVE-2022-29458
Upstream-Status: Backport
[https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1009870]

Signed-off-by: Gustavo Lima Chaves <gustavo.chaves@microsoft.com>
Signed-off-by: Dan Tran <dantran@microsoft.com>
---
 ncurses/tinfo/alloc_entry.c | 14 ++++++--------
 ncurses/tinfo/read_entry.c | 25 +++++++++++++++++++------
 2 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/ncurses/tinfo/alloc_entry.c b/ncurses/tinfo/alloc_entry.c
index 4bf7d6c8..b49ad6aa 100644
--- a/ncurses/tinfo/alloc_entry.c
+++ b/ncurses/tinfo/alloc_entry.c
@@ -48,13 +48,11 @@

#include <tic.h>

-MODULE_ID("$Id: alloc_entry.c,v 1.64 2020/02/02 23:34:34 tom Exp $")
+MODULE_ID("$Id: alloc_entry.c,v 1.69 2022/04/16 22:46:53 tom Exp $")

#define ABSENT_OFFSET -1
#define CANCELLED_OFFSET -2

-#define MAX_STRTAB 4096 /* documented maximum entry size */
-
static char *stringbuf; /* buffer for string capabilities */
static size_t next_free; /* next free character in stringbuf */

@@ -71,8 +69,8 @@ _nc_init_entry(ENTRY * const tp)
}
#endif

- if (stringbuf == 0)
- TYPE_MALLOC(char, (size_t) MAX_STRTAB, stringbuf);
+ if (stringbuf == NULL)
+ TYPE_MALLOC(char, (size_t) MAX_ENTRY_SIZE, stringbuf);

next_free = 0;

@@ -108,11 +106,11 @@ _nc_save_str(const char *const string)
* Cheat a little by making an empty string point to the end of the
* previous string.
*/
- if (next_free < MAX_STRTAB) {
+ if (next_free < MAX_ENTRY_SIZE) {
result = (stringbuf + next_free - 1);
}
- } else if (next_free + len < MAX_STRTAB) {
- _nc_STRCPY(&stringbuf[next_free], string, MAX_STRTAB);
+ } else if (next_free + len < MAX_ENTRY_SIZE) {
+ _nc_STRCPY(&stringbuf[next_free], string, MAX_ENTRY_SIZE);
DEBUG(7, ("Saved string %s", _nc_visbuf(string)));
DEBUG(7, ("at location %d", (int) next_free));
next_free += len;
diff --git a/ncurses/tinfo/read_entry.c b/ncurses/tinfo/read_entry.c
index 5b570b0f..23c2cebc 100644
--- a/ncurses/tinfo/read_entry.c
+++ b/ncurses/tinfo/read_entry.c
@@ -1,5 +1,5 @@
/****************************************************************************
- * Copyright 2018-2019,2020 Thomas E. Dickey *
+ * Copyright 2018-2021,2022 Thomas E. Dickey *
 * Copyright 1998-2016,2017 Free Software Foundation, Inc. *
 * *
 * Permission is hereby granted, free of charge, to any person obtaining a *
@@ -42,7 +42,7 @@

#include <tic.h>

-MODULE_ID("$Id: read_entry.c,v 1.157 2020/02/02 23:34:34 tom Exp $")
+MODULE_ID("$Id: read_entry.c,v 1.162 2022/04/16 21:00:00 tom Exp $")

#define TYPE_CALLOC(type,elts) typeCalloc(type, (unsigned)(elts))

@@ -145,6 +145,7 @@ convert_strings(char *buf, char **Strings, int count, int size, char *table)
{
int i;
char *p;
+ bool corrupt = FALSE;

for (i = 0; i < count; i++) {
if (IS_NEG1(buf + 2 * i)) {
@@ -154,8 +155,20 @@ convert_strings(char *buf, char **Strings, int count, int size, char *table)
} else if (MyNumber(buf + 2 * i) > size) {
Strings[i] = ABSENT_STRING;
} else {
- Strings[i] = (MyNumber(buf + 2 * i) + table);
- TR(TRACE_DATABASE, ("Strings[%d] = %s", i, _nc_visbuf(Strings[i])));
+ int nn = MyNumber(buf + 2 * i);
+ if (nn >= 0 && nn < size) {
+ Strings[i] = (nn + table);
+ TR(TRACE_DATABASE, ("Strings[%d] = %s", i,
+ _nc_visbuf(Strings[i])));
+ } else {
+ if (!corrupt) {
+ corrupt = TRUE;
+ TR(TRACE_DATABASE,
+ ("ignore out-of-range index %d to Strings[]", nn));
+ _nc_warning("corrupt data found in convert_strings");
+ }
+ Strings[i] = ABSENT_STRING;
+ }
}

/* make sure all strings are NUL terminated */
@@ -776,7 +789,7 @@ _nc_read_tic_entry(char *filename,
 * looking for compiled (binary) terminfo data.
 *
 * cgetent uses a two-level lookup. On the first it uses the given
- * name to return a record containing only the aliases for an entry.
+ * name to return a record containing only the aliases for an entry.
 * On the second (using that list of aliases as a key), it returns the
 * content of the terminal description. We expect second lookup to
 * return data beginning with the same set of aliases.
@@ -833,7 +846,7 @@ _nc_read_tic_entry(char *filename,
#endif /* NCURSES_USE_DATABASE */

/*
- * Find and read the compiled entry for a given terminal type, if it exists.
+ * Find and read the compiled entry for a given terminal type, if it exists.
 * We take pains here to make sure no combination of environment variables and
 * terminal type name can be used to overrun the file buffer.
 */
--
2.36.1

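The heart of the convert_strings() fix is a classic pattern: validate a serialized offset against the table bounds before turning it into a pointer. A condensed sketch of the idea (names borrowed from the patch, simplified):

    int nn = MyNumber(buf + 2 * i);      /* offset read from the terminfo file */
    if (nn >= 0 && nn < size)
        Strings[i] = table + nn;         /* in bounds: form the pointer */
    else
        Strings[i] = ABSENT_STRING;      /* out of range: treat as absent */
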
@@ -4,6 +4,7 @@ SRC_URI += "file://0001-tic-hang.patch \
file://0002-configure-reproducible.patch \
file://0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch \
file://CVE-2021-39537.patch \
+file://CVE-2022-29458.patch \
"
# commit id corresponds to the revision in package version
SRCREV = "a669013cd5e9d6434e5301348ea51baf306c93c4"

@@ -4,3 +4,4 @@ PR = "r1"
inherit packagegroup

RDEPENDS_${PN} = "dropbear"
+RRECOMMENDS_${PN} = "openssh-sftp-server"

@@ -11,6 +11,7 @@ import re
import sys

from collections import namedtuple
+from itertools import chain
from pathlib import Path

version = 1.0
@@ -25,12 +26,16 @@ locations = list()

class SystemdFile():
"""Class representing a single systemd configuration file"""
-def __init__(self, root, path):
+def __init__(self, root, path, instance_unit_name):
self.sections = dict()
self._parse(root, path)
dirname = os.path.basename(path.name) + ".d"
for location in locations:
-for path2 in sorted((root / location / "system" / dirname).glob("*.conf")):
+files = (root / location / "system" / dirname).glob("*.conf")
+if instance_unit_name:
+inst_dirname = instance_unit_name + ".d"
+files = chain(files, (root / location / "system" / inst_dirname).glob("*.conf"))
+for path2 in sorted(files):
self._parse(root, path2)

def _parse(self, root, path):
@@ -193,8 +198,11 @@ class SystemdUnit():
# if we're enabling an instance, first extract the actual instance
# then figure out what the template unit is
template = re.match(r"[^@]+@(?P<instance>[^\.]*)\.", self.unit)
+instance_unit_name = None
if template:
instance = template.group('instance')
+if instance != "":
+instance_unit_name = self.unit
unit = re.sub(r"@[^\.]*\.", "@.", self.unit, 1)
else:
instance = None
@@ -206,7 +214,7 @@ class SystemdUnit():
# ignore aliases
return

-config = SystemdFile(self.root, path)
+config = SystemdFile(self.root, path, instance_unit_name)
if instance == "":
try:
default_instance = config.get('Install', 'DefaultInstance')[0]

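A quick sketch of how the instance name is pulled apart by the regexes above (Python, hypothetical unit name):

    import re

    unit = "getty@tty1.service"                  # hypothetical instance unit
    m = re.match(r"[^@]+@(?P<instance>[^\.]*)\.", unit)
    print(m.group("instance"))                   # -> "tty1"
    print(re.sub(r"@[^\.]*\.", "@.", unit, 1))   # -> "getty@.service" (template)

With instance_unit_name carried through, drop-in fragments are now collected from both "getty@.service.d" and "getty@tty1.service.d".
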
@@ -1,6 +1,6 @@
[Unit]
Description=Bind mount volatile @where@
-DefaultDependencies=false
+DefaultDependencies=no
Before=local-fs.target
RequiresMountsFor=@whatparent@ @whereparent@
ConditionPathIsReadWrite=@whatparent@

meta/recipes-core/zlib/zlib/CVE-2022-37434.patch (new file, 44 lines)
@@ -0,0 +1,44 @@
From 8617d83d6939754ae3a04fc2d22daa18eeea2a43 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Wed, 17 Aug 2022 10:15:57 +0530
Subject: [PATCH] CVE-2022-37434

Upstream-Status: Backport [https://github.com/madler/zlib/commit/eff308af425b67093bab25f80f1ae950166bece1 & https://github.com/madler/zlib/commit/1eb7682f845ac9e9bf9ae35bbfb3bad5dacbd91d]
CVE: CVE-2022-37434
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

Fix a bug when getting a gzip header extra field with inflate().

If the extra field was larger than the space the user provided with
inflateGetHeader(), and if multiple calls of inflate() delivered
the extra header data, then there could be a buffer overflow of the
provided space. This commit assures that provided space is not
exceeded.

Fix extra field processing bug that dereferences NULL state->head.

The recent commit to fix a gzip header extra field processing bug
introduced the new bug fixed here.
---
 inflate.c | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/inflate.c b/inflate.c
index ac333e8..cd01857 100644
--- a/inflate.c
+++ b/inflate.c
@@ -759,8 +759,9 @@ int flush;
if (copy > have) copy = have;
if (copy) {
if (state->head != Z_NULL &&
- state->head->extra != Z_NULL) {
- len = state->head->extra_len - state->length;
+ state->head->extra != Z_NULL &&
+ (len = state->head->extra_len - state->length) <
+ state->head->extra_max) {
zmemcpy(state->head->extra + len, next,
len + copy > state->head->extra_max ?
state->head->extra_max - len : copy);
--
2.25.1

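For context, the vulnerable path is only reachable when the application asks zlib for gzip header fields via inflateGetHeader(). A minimal sketch of that calling pattern, assuming zlib's documented API (a hypothetical helper, not part of the patch):

    #include <string.h>
    #include <zlib.h>

    /* Sketch: request gzip header fields, bounding the extra-field buffer. */
    static int init_gzip_header(z_stream *strm, gz_header *head,
                                unsigned char *extra, unsigned extra_len)
    {
        memset(strm, 0, sizeof(*strm));
        if (inflateInit2(strm, 15 + 16) != Z_OK)  /* 15+16: expect a gzip wrapper */
            return -1;
        memset(head, 0, sizeof(*head));
        head->extra = extra;
        head->extra_max = extra_len;   /* the bound the fixed inflate() now honours */
        return inflateGetHeader(strm, head);  /* fields are filled during inflate() */
    }
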
@@ -10,6 +10,7 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/libpng/${BPN}/${PV}/${BPN}-${PV}.tar.xz \
file://ldflags-tests.patch \
file://CVE-2018-25032.patch \
file://run-ptest \
+file://CVE-2022-37434.patch \
"
UPSTREAM_CHECK_URI = "http://zlib.net/"

@@ -18,5 +18,5 @@ SRC_URI_append_class-native = " \
file://tweak-options-require-tar-1.27.patch \
"

-SRC_URI[md5sum] = "60f57c5494e6dfa177504d47bfa0e383"
-SRC_URI[sha256sum] = "4c27fededf620c0aa522fff1a48577ba08144445341257502e7730f2b1a296e8"
+SRC_URI[md5sum] = "9d170c8baa1aa36b09698c909f304508"
+SRC_URI[sha256sum] = "2632c00b0cf0ea19ed7bd6700e6ec5faca93f0045af629d356dc03ad74ae6f10"
@@ -0,0 +1,42 @@
From a66071ed6a0d1fa666d22dcb78fa6fcb3bf22df3 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Fri, 27 May 2022 14:01:50 +0530
Subject: [PATCH] CVE-2022-1304

Upstream-Status: Backport [https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/commit/?h=maint&id=ab51d587bb9b229b1fade1afd02e1574c1ba5c76]
CVE: CVE-2022-1304
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

---
 lib/ext2fs/extent.c | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/lib/ext2fs/extent.c b/lib/ext2fs/extent.c
index ac3dbfec9..a1b1905cd 100644
--- a/lib/ext2fs/extent.c
+++ b/lib/ext2fs/extent.c
@@ -495,6 +495,10 @@ retry:
ext2fs_le16_to_cpu(eh->eh_entries);
newpath->max_entries = ext2fs_le16_to_cpu(eh->eh_max);

+ /* Make sure there is at least one extent present */
+ if (newpath->left <= 0)
+ return EXT2_ET_EXTENT_NO_DOWN;
+
if (path->left > 0) {
ix++;
newpath->end_blk = ext2fs_le32_to_cpu(ix->ei_block);
@@ -1630,6 +1634,10 @@ errcode_t ext2fs_extent_delete(ext2_extent_handle_t handle, int flags)

cp = path->curr;

+ /* Sanity check before memmove() */
+ if (path->left < 0)
+ return EXT2_ET_EXTENT_LEAF_BAD;
+
if (path->left) {
memmove(cp, cp + sizeof(struct ext3_extent_idx),
path->left * sizeof(struct ext3_extent_idx));
--
2.25.1

@@ -6,6 +6,7 @@ SRC_URI += "file://remove.ldconfig.call.patch \
             file://mkdir_p.patch \
             file://0001-configure.ac-correct-AM_GNU_GETTEXT.patch \
             file://0001-intl-do-not-try-to-use-gettext-defines-that-no-longe.patch \
+            file://CVE-2022-1304.patch \
             "
 
 SRC_URI_append_class-native = " file://e2fsprogs-fix-missing-check-for-permission-denied.patch \
@@ -53,6 +54,7 @@ do_install () {
 	oe_multilib_header ext2fs/ext2_types.h
 	install -d ${D}${base_bindir}
 	mv ${D}${bindir}/chattr ${D}${base_bindir}/chattr.e2fsprogs
+	mv ${D}${bindir}/lsattr ${D}${base_bindir}/lsattr.e2fsprogs
 
 	install -v -m 755 ${S}/contrib/populate-extfs.sh ${D}${base_sbindir}/
 
@@ -101,10 +103,12 @@ FILES_libe2p = "${base_libdir}/libe2p.so.*"
 FILES_libext2fs = "${libdir}/e2initrd_helper ${base_libdir}/libext2fs.so.*"
 FILES_${PN}-dev += "${datadir}/*/*.awk ${datadir}/*/*.sed ${base_libdir}/*.so ${bindir}/compile_et ${bindir}/mk_cmds"
 
-ALTERNATIVE_${PN} = "chattr"
+ALTERNATIVE_${PN} = "chattr lsattr"
 ALTERNATIVE_PRIORITY = "100"
 ALTERNATIVE_LINK_NAME[chattr] = "${base_bindir}/chattr"
 ALTERNATIVE_TARGET[chattr] = "${base_bindir}/chattr.e2fsprogs"
+ALTERNATIVE_LINK_NAME[lsattr] = "${base_bindir}/lsattr"
+ALTERNATIVE_TARGET[lsattr] = "${base_bindir}/lsattr.e2fsprogs"
 
 ALTERNATIVE_${PN}-doc = "fsck.8"
 ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8"
@@ -100,7 +100,7 @@ BINV = "${PV}"
 #S = "${WORKDIR}/gcc-${PV}"
 S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/gcc-${PV}"
 
-B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
+B ?= "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
 
 target_includedir ?= "${includedir}"
 target_libdir ?= "${libdir}"
@@ -18,6 +18,7 @@ INHIBIT_DEFAULT_DEPS = "1"
 DEPENDS = ""
 PACKAGES = ""
 
+B = "${WORKDIR}/build"
 
 # This needs to be Python to avoid lots of shell variables becoming dependencies.
 python do_preconfigure () {
@@ -22,7 +22,7 @@ CVE_PRODUCT = "git-scm:git"
 # This is about a manpage not mentioning --mirror may "leak" information
 # in mirrored git repos. Most OE users wouldn't build the docs and
 # we don't see this as a major issue for our general users/usecases.
-CVE_CHECK_IGNORE += "CVE-2022-24975"
+CVE_CHECK_WHITELIST += "CVE-2022-24975"
 
 PACKAGECONFIG ??= ""
 PACKAGECONFIG[cvsserver] = ""
@@ -22,6 +22,9 @@ SRC_URI += "\
     file://CVE-2021-38297.patch \
     file://CVE-2022-23806.patch \
     file://CVE-2022-23772.patch \
+    file://CVE-2021-44717.patch \
+    file://CVE-2022-24675.patch \
+    file://CVE-2021-31525.patch \
 "
 
 SRC_URI_append_libc-musl = " file://0009-ld-replace-glibc-dynamic-linker-with-musl.patch"
meta/recipes-devtools/go/go-1.14/CVE-2021-31525.patch (new file, 38 lines)
@@ -0,0 +1,38 @@
From efb465ada003d23353a91ef930be408eb575dba6 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Thu, 16 Jun 2022 17:40:12 +0530
Subject: [PATCH] CVE-2021-31525

Upstream-Status: Backport [https://github.com/argoheyard/lang-net/commit/701957006ef151feb43f86aa99c8a1f474f69282]
CVE: CVE-2021-31525
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

---
 src/vendor/golang.org/x/net/http/httpguts/httplex.go | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/vendor/golang.org/x/net/http/httpguts/httplex.go b/src/vendor/golang.org/x/net/http/httpguts/httplex.go
index e7de24e..c79aa73 100644
--- a/src/vendor/golang.org/x/net/http/httpguts/httplex.go
+++ b/src/vendor/golang.org/x/net/http/httpguts/httplex.go
@@ -137,11 +137,13 @@ func trimOWS(x string) string {
 // contains token amongst its comma-separated tokens, ASCII
 // case-insensitively.
 func headerValueContainsToken(v string, token string) bool {
-	v = trimOWS(v)
-	if comma := strings.IndexByte(v, ','); comma != -1 {
-		return tokenEqual(trimOWS(v[:comma]), token) || headerValueContainsToken(v[comma+1:], token)
+	for comma := strings.IndexByte(v, ','); comma != -1; comma = strings.IndexByte(v, ',') {
+		if tokenEqual(trimOWS(v[:comma]), token) {
+			return true
+		}
+		v = v[comma+1:]
 	}
-	return tokenEqual(v, token)
+	return tokenEqual(trimOWS(v), token)
 }
 
 // lowerASCII returns the ASCII lowercase version of b.
--
2.25.1
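The essence of the fix above: the vulnerable code recursed once per comma, so a header value made of many commas could exhaust the stack, while the patched code walks the commas in a loop with constant stack usage. The same before/after idea rendered as a standalone C sketch (token comparison reduced to a plain string compare for brevity; the function name is illustrative, not from the patch):

/* Iterative scan over comma-separated tokens, mirroring the fixed Go
 * loop: one pass, constant stack, trailing segment checked last. */
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

static bool value_contains_token(const char *v, const char *token)
{
    const char *comma;
    size_t tlen = strlen(token);

    for (comma = strchr(v, ','); comma != NULL; comma = strchr(v, ',')) {
        if ((size_t)(comma - v) == tlen && strncmp(v, token, tlen) == 0)
            return true;      /* segment before this comma matched */
        v = comma + 1;        /* advance; no recursion, no stack growth */
    }
    return strcmp(v, token) == 0;   /* final segment after the last comma */
}

int main(void)
{
    printf("%d\n", value_contains_token("gzip,chunked,close", "chunked")); /* 1 */
    printf("%d\n", value_contains_token("gzip,chunked", "deflate"));       /* 0 */
    return 0;
}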
meta/recipes-devtools/go/go-1.14/CVE-2021-44717.patch (new file, 83 lines)
@@ -0,0 +1,83 @@
From 9171c664e7af479aa26bc72f2e7cf4e69d8e0a6f Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Fri, 17 Jun 2022 10:22:47 +0530
Subject: [PATCH] CVE-2021-44717

Upstream-Status: Backport [https://github.com/golang/go/commit/44a3fb49]
CVE: CVE-2021-44717
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

syscall: fix ForkLock spurious close(0) on pipe failure

Pipe (and therefore forkLockPipe) does not make any guarantees
about the state of p after a failed Pipe(p). Avoid that assumption
and the too-clever goto, so that we don't accidentally Close a real fd
if the failed pipe leaves p[0] or p[1] set >= 0.

Updates #50057
Fixes CVE-2021-44717

Change-Id: Iff8e19a6efbba0c73cc8b13ecfae381c87600bb4
Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1291270
Reviewed-by: Ian Lance Taylor <iant@google.com>
Reviewed-on: https://go-review.googlesource.com/c/go/+/370514
Trust: Filippo Valsorda <filippo@golang.org>
Run-TryBot: Filippo Valsorda <filippo@golang.org>
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: Alex Rakoczy <alex@golang.org>
---
 src/syscall/exec_unix.go | 20 ++++++--------------
 1 file changed, 6 insertions(+), 14 deletions(-)

diff --git a/src/syscall/exec_unix.go b/src/syscall/exec_unix.go
index b3798b6..b73782c 100644
--- a/src/syscall/exec_unix.go
+++ b/src/syscall/exec_unix.go
@@ -151,9 +151,6 @@ func forkExec(argv0 string, argv []string, attr *ProcAttr) (pid int, err error)
 		sys = &zeroSysProcAttr
 	}
 
-	p[0] = -1
-	p[1] = -1
-
 	// Convert args to C form.
 	argv0p, err := BytePtrFromString(argv0)
 	if err != nil {
@@ -194,14 +191,17 @@ func forkExec(argv0 string, argv []string, attr *ProcAttr) (pid int, err error)
 
 	// Allocate child status pipe close on exec.
 	if err = forkExecPipe(p[:]); err != nil {
-		goto error
+		ForkLock.Unlock()
+		return 0, err
 	}
 
 	// Kick off child.
 	pid, err1 = forkAndExecInChild(argv0p, argvp, envvp, chroot, dir, attr, sys, p[1])
 	if err1 != 0 {
-		err = Errno(err1)
-		goto error
+		Close(p[0])
+		Close(p[1])
+		ForkLock.Unlock()
+		return 0, Errno(err1)
 	}
 	ForkLock.Unlock()
 
@@ -228,14 +228,6 @@ func forkExec(argv0 string, argv []string, attr *ProcAttr) (pid int, err error)
 
 	// Read got EOF, so pipe closed on exec, so exec succeeded.
 	return pid, nil
-
-error:
-	if p[0] >= 0 {
-		Close(p[0])
-		Close(p[1])
-	}
-	ForkLock.Unlock()
-	return 0, err
 }
 
 // Combination of fork and exec, careful to be thread safe.
--
2.25.1
meta/recipes-devtools/go/go-1.14/CVE-2022-24675.patch (new file, 271 lines)
@@ -0,0 +1,271 @@
From 1eb931d60a24501a9668e5cb4647593e19115507 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Fri, 17 Jun 2022 12:22:53 +0530
Subject: [PATCH] CVE-2022-24675

Upstream-Status: Backport [https://go-review.googlesource.com/c/go/+/399816/]
CVE: CVE-2022-24675
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
---
 src/encoding/pem/pem.go | 174 +++++++++++++++--------------
 src/encoding/pem/pem_test.go | 28 +++++-
 2 files changed, 101 insertions(+), 101 deletions(-)

diff --git a/src/encoding/pem/pem.go b/src/encoding/pem/pem.go
index a7272da..1bee1c1 100644
--- a/src/encoding/pem/pem.go
+++ b/src/encoding/pem/pem.go
@@ -87,123 +87,97 @@ func Decode(data []byte) (p *Block, rest []byte) {
 	// pemStart begins with a newline. However, at the very beginning of
 	// the byte array, we'll accept the start string without it.
 	rest = data
-	if bytes.HasPrefix(data, pemStart[1:]) {
-		rest = rest[len(pemStart)-1 : len(data)]
-	} else if i := bytes.Index(data, pemStart); i >= 0 {
-		rest = rest[i+len(pemStart) : len(data)]
-	} else {
-		return nil, data
-	}
-
-	typeLine, rest := getLine(rest)
-	if !bytes.HasSuffix(typeLine, pemEndOfLine) {
-		return decodeError(data, rest)
-	}
-	typeLine = typeLine[0 : len(typeLine)-len(pemEndOfLine)]
-
-	p = &Block{
-		Headers: make(map[string]string),
-		Type:    string(typeLine),
-	}
-
 	for {
-		// This loop terminates because getLine's second result is
-		// always smaller than its argument.
-		if len(rest) == 0 {
+		if bytes.HasPrefix(rest, pemStart[1:]) {
+			rest = rest[len(pemStart)-1:]
+		} else if i := bytes.Index(rest, pemStart); i >= 0 {
+			rest = rest[i+len(pemStart) : len(rest)]
+		} else {
 			return nil, data
 		}
-		line, next := getLine(rest)
 
-		i := bytes.IndexByte(line, ':')
-		if i == -1 {
-			break
+		var typeLine []byte
+		typeLine, rest = getLine(rest)
+		if !bytes.HasSuffix(typeLine, pemEndOfLine) {
+			continue
 		}
+		typeLine = typeLine[0 : len(typeLine)-len(pemEndOfLine)]
 
-		// TODO(agl): need to cope with values that spread across lines.
-		key, val := line[:i], line[i+1:]
-		key = bytes.TrimSpace(key)
-		val = bytes.TrimSpace(val)
-		p.Headers[string(key)] = string(val)
-		rest = next
-	}
+		p = &Block{
+			Headers: make(map[string]string),
+			Type:    string(typeLine),
+		}
 
-	var endIndex, endTrailerIndex int
+		for {
+			// This loop terminates because getLine's second result is
+			// always smaller than its argument.
+			if len(rest) == 0 {
+				return nil, data
+			}
+			line, next := getLine(rest)
 
-	// If there were no headers, the END line might occur
-	// immediately, without a leading newline.
-	if len(p.Headers) == 0 && bytes.HasPrefix(rest, pemEnd[1:]) {
-		endIndex = 0
-		endTrailerIndex = len(pemEnd) - 1
-	} else {
-		endIndex = bytes.Index(rest, pemEnd)
-		endTrailerIndex = endIndex + len(pemEnd)
-	}
+			i := bytes.IndexByte(line, ':')
+			if i == -1 {
+				break
+			}
 
-	if endIndex < 0 {
-		return decodeError(data, rest)
-	}
+			// TODO(agl): need to cope with values that spread across lines.
+			key, val := line[:i], line[i+1:]
+			key = bytes.TrimSpace(key)
+			val = bytes.TrimSpace(val)
+			p.Headers[string(key)] = string(val)
+			rest = next
+		}
 
-	// After the "-----" of the ending line, there should be the same type
-	// and then a final five dashes.
-	endTrailer := rest[endTrailerIndex:]
-	endTrailerLen := len(typeLine) + len(pemEndOfLine)
-	if len(endTrailer) < endTrailerLen {
-		return decodeError(data, rest)
-	}
+		var endIndex, endTrailerIndex int
 
-	restOfEndLine := endTrailer[endTrailerLen:]
-	endTrailer = endTrailer[:endTrailerLen]
-	if !bytes.HasPrefix(endTrailer, typeLine) ||
-		!bytes.HasSuffix(endTrailer, pemEndOfLine) {
-		return decodeError(data, rest)
-	}
+		// If there were no headers, the END line might occur
+		// immediately, without a leading newline.
+		if len(p.Headers) == 0 && bytes.HasPrefix(rest, pemEnd[1:]) {
+			endIndex = 0
+			endTrailerIndex = len(pemEnd) - 1
+		} else {
+			endIndex = bytes.Index(rest, pemEnd)
+			endTrailerIndex = endIndex + len(pemEnd)
+		}
 
-	// The line must end with only whitespace.
-	if s, _ := getLine(restOfEndLine); len(s) != 0 {
-		return decodeError(data, rest)
-	}
+		if endIndex < 0 {
+			continue
+		}
 
-	base64Data := removeSpacesAndTabs(rest[:endIndex])
-	p.Bytes = make([]byte, base64.StdEncoding.DecodedLen(len(base64Data)))
-	n, err := base64.StdEncoding.Decode(p.Bytes, base64Data)
-	if err != nil {
-		return decodeError(data, rest)
-	}
-	p.Bytes = p.Bytes[:n]
+		// After the "-----" of the ending line, there should be the same type
+		// and then a final five dashes.
+		endTrailer := rest[endTrailerIndex:]
+		endTrailerLen := len(typeLine) + len(pemEndOfLine)
+		if len(endTrailer) < endTrailerLen {
+			continue
+		}
+
+		restOfEndLine := endTrailer[endTrailerLen:]
+		endTrailer = endTrailer[:endTrailerLen]
+		if !bytes.HasPrefix(endTrailer, typeLine) ||
+			!bytes.HasSuffix(endTrailer, pemEndOfLine) {
+			continue
+		}
 
-	// the -1 is because we might have only matched pemEnd without the
-	// leading newline if the PEM block was empty.
-	_, rest = getLine(rest[endIndex+len(pemEnd)-1:])
+		// The line must end with only whitespace.
+		if s, _ := getLine(restOfEndLine); len(s) != 0 {
+			continue
+		}
 
-	return
-}
+		base64Data := removeSpacesAndTabs(rest[:endIndex])
+		p.Bytes = make([]byte, base64.StdEncoding.DecodedLen(len(base64Data)))
+		n, err := base64.StdEncoding.Decode(p.Bytes, base64Data)
+		if err != nil {
+			continue
+		}
+		p.Bytes = p.Bytes[:n]
 
-func decodeError(data, rest []byte) (*Block, []byte) {
-	// If we get here then we have rejected a likely looking, but
-	// ultimately invalid PEM block. We need to start over from a new
-	// position. We have consumed the preamble line and will have consumed
-	// any lines which could be header lines. However, a valid preamble
-	// line is not a valid header line, therefore we cannot have consumed
-	// the preamble line for the any subsequent block. Thus, we will always
-	// find any valid block, no matter what bytes precede it.
-	//
-	// For example, if the input is
-	//
-	//    -----BEGIN MALFORMED BLOCK-----
-	//    junk that may look like header lines
-	//    or data lines, but no END line
-	//
-	//    -----BEGIN ACTUAL BLOCK-----
-	//    realdata
-	//    -----END ACTUAL BLOCK-----
-	//
-	// we've failed to parse using the first BEGIN line
-	// and now will try again, using the second BEGIN line.
-	p, rest := Decode(rest)
-	if p == nil {
-		rest = data
+		// the -1 is because we might have only matched pemEnd without the
+		// leading newline if the PEM block was empty.
+		_, rest = getLine(rest[endIndex+len(pemEnd)-1:])
+		return p, rest
 	}
-	return p, rest
 }
 
 const pemLineLength = 64
diff --git a/src/encoding/pem/pem_test.go b/src/encoding/pem/pem_test.go
index 8515b46..4485581 100644
--- a/src/encoding/pem/pem_test.go
+++ b/src/encoding/pem/pem_test.go
@@ -107,6 +107,12 @@ const pemMissingEndingSpace = `
 dGVzdA==
 -----ENDBAR-----`
 
+const pemMissingEndLine = `
+-----BEGIN FOO-----
+Header: 1`
+
+var pemRepeatingBegin = strings.Repeat("-----BEGIN \n", 10)
+
 var badPEMTests = []struct {
 	name  string
 	input string
@@ -131,14 +137,34 @@ var badPEMTests = []struct {
 		"missing ending space",
 		pemMissingEndingSpace,
 	},
+	{
+		"repeating begin",
+		pemRepeatingBegin,
+	},
+	{
+		"missing end line",
+		pemMissingEndLine,
+	},
 }
 
 func TestBadDecode(t *testing.T) {
 	for _, test := range badPEMTests {
-		result, _ := Decode([]byte(test.input))
+		result, rest := Decode([]byte(test.input))
 		if result != nil {
 			t.Errorf("unexpected success while parsing %q", test.name)
 		}
+		if string(rest) != test.input {
+			t.Errorf("unexpected rest: %q; want = %q", rest, test.input)
+		}
 	}
 }
+
+func TestCVE202224675(t *testing.T) {
+	// Prior to CVE-2022-24675, this input would cause a stack overflow.
+	input := []byte(strings.Repeat("-----BEGIN \n", 10000000))
+	result, rest := Decode(input)
+	if result != nil || !reflect.DeepEqual(rest, input) {
+		t.Errorf("Encode of %#v decoded as %#v", input, rest)
+	}
+}
--
2.25.1
@@ -37,6 +37,7 @@ EXTRA_CPAN_BUILD_FLAGS = "--create_packlist=0"
 
 do_install_append () {
     rm -rf ${D}${docdir}/perl/html
+    sed -i "s:^#!.*:#!/usr/bin/env perl:" ${D}${bindir}/config_data
 }
 
 do_install_ptest() {
meta/recipes-devtools/python/python3-pip/CVE-2021-3572.patch (new file, 48 lines)
@@ -0,0 +1,48 @@
From c4fd13410b9a219f77fc30775d4a0ac9f69725bd Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Thu, 16 Jun 2022 09:52:43 +0530
Subject: [PATCH] CVE-2021-3572

Upstream-Status: Backport [https://github.com/pypa/pip/commit/e46bdda9711392fec0c45c1175bae6db847cb30b]
CVE: CVE-2021-3572
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
---
 news/9827.bugfix.rst | 3 +++
 src/pip/_internal/vcs/git.py | 10 ++++++++--
 2 files changed, 11 insertions(+), 2 deletions(-)
 create mode 100644 news/9827.bugfix.rst

diff --git a/news/9827.bugfix.rst b/news/9827.bugfix.rst
new file mode 100644
index 0000000..e0d27c3
--- /dev/null
+++ b/news/9827.bugfix.rst
@@ -0,0 +1,3 @@
+**SECURITY**: Stop splitting on unicode separators in git references,
+which could be maliciously used to install a different revision on the
+repository.
diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py
index 7483303..1b895f6 100644
--- a/src/pip/_internal/vcs/git.py
+++ b/src/pip/_internal/vcs/git.py
@@ -137,9 +137,15 @@ class Git(VersionControl):
         output = cls.run_command(['show-ref', rev], cwd=dest,
                                  show_stdout=False, on_returncode='ignore')
         refs = {}
-        for line in output.strip().splitlines():
+        # NOTE: We do not use splitlines here since that would split on other
+        # unicode separators, which can be maliciously used to install a
+        # different revision.
+        for line in output.strip().split("\n"):
+            line = line.rstrip("\r")
+            if not line:
+                continue
             try:
-                sha, ref = line.split()
+                ref_sha, ref_name = line.split(" ", maxsplit=2)
             except ValueError:
                 # Include the offending line to simplify troubleshooting if
                 # this error ever occurs.
--
2.25.1
@@ -6,6 +6,7 @@ LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=8ba06d529c955048e5ddd7c45459eb2e"
 
 DEPENDS += "python3 python3-setuptools-native"
 
+SRC_URI = "file://CVE-2021-3572.patch "
 SRC_URI[md5sum] = "7d42ba49b809604f0df3d55df1c3fd86"
 SRC_URI[sha256sum] = "7db0c8ea4c7ea51c8049640e8e6e7fde949de672bfa4949920675563a5a6967f"
@@ -57,6 +57,9 @@ CVE_CHECK_WHITELIST += "CVE-2019-18348"
 
 # This is windows only issue.
 CVE_CHECK_WHITELIST += "CVE-2020-15523 CVE-2022-26488"
+# The mailcap module is insecure by design, so this can't be fixed in a meaningful way.
+# The module will be removed in the future and flaws documented.
+CVE_CHECK_WHITELIST += "CVE-2015-20107"
 
 PYTHON_MAJMIN = "3.8"
@@ -98,6 +98,8 @@ SRC_URI = "https://download.qemu.org/${BPN}-${PV}.tar.xz \
            file://CVE-2020-13253_4.patch \
            file://CVE-2020-13253_5.patch \
            file://CVE-2020-13791.patch \
+           file://CVE-2022-35414.patch \
+           file://CVE-2020-27821.patch \
            "
 UPSTREAM_CHECK_REGEX = "qemu-(?P<pver>\d+(\.\d+)+)\.tar"
@@ -254,6 +256,7 @@ PACKAGECONFIG[xkbcommon] = "--enable-xkbcommon,--disable-xkbcommon,libxkbcommon"
 PACKAGECONFIG[libudev] = "--enable-libudev,--disable-libudev,eudev"
 PACKAGECONFIG[libxml2] = "--enable-libxml2,--disable-libxml2,libxml2"
 PACKAGECONFIG[seccomp] = "--enable-seccomp,--disable-seccomp,libseccomp"
+PACKAGECONFIG[capstone] = "--enable-capstone,--disable-capstone"
 
 INSANE_SKIP_${PN} = "arch"
meta/recipes-devtools/qemu/qemu/CVE-2020-27821.patch (new file, 73 lines)
@@ -0,0 +1,73 @@
From 15222d4636d742f3395fd211fad0cd7e36d9f43e Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Tue, 16 Aug 2022 10:07:01 +0530
Subject: [PATCH] CVE-2020-27821

Upstream-Status: Backport [https://git.qemu.org/?p=qemu.git;a=commit;h=4bfb024bc76973d40a359476dc0291f46e435442]
CVE: CVE-2020-27821
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>

memory: clamp cached translation in case it points to an MMIO region

In using the address_space_translate_internal API, address_space_cache_init
forgot one piece of advice that can be found in the code for
address_space_translate_internal:

    /* MMIO registers can be expected to perform full-width accesses based only
     * on their address, without considering adjacent registers that could
     * decode to completely different MemoryRegions. When such registers
     * exist (e.g. I/O ports 0xcf8 and 0xcf9 on most PC chipsets), MMIO
     * regions overlap wildly. For this reason we cannot clamp the accesses
     * here.
     *
     * If the length is small (as is the case for address_space_ldl/stl),
     * everything works fine. If the incoming length is large, however,
     * the caller really has to do the clamping through memory_access_size.
     */

address_space_cache_init is exactly one such case where "the incoming length
is large", therefore we need to clamp the resulting length---not to
memory_access_size though, since we are not doing an access yet, but to
the size of the resulting section. This ensures that subsequent accesses
to the cached MemoryRegionSection will be in range.

With this patch, the enclosed testcase notices that the used ring does
not fit into the MSI-X table and prints a "qemu-system-x86_64: Cannot map used"
error.

Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
---
 exec.c | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/exec.c b/exec.c
index 2d6add46..1360051a 100644
--- a/exec.c
+++ b/exec.c
@@ -3632,6 +3632,7 @@ int64_t address_space_cache_init(MemoryRegionCache *cache,
     AddressSpaceDispatch *d;
     hwaddr l;
     MemoryRegion *mr;
+    Int128 diff;
 
     assert(len > 0);
 
@@ -3640,6 +3641,15 @@ int64_t address_space_cache_init(MemoryRegionCache *cache,
     d = flatview_to_dispatch(cache->fv);
     cache->mrs = *address_space_translate_internal(d, addr, &cache->xlat, &l, true);
 
+    /*
+     * cache->xlat is now relative to cache->mrs.mr, not to the section itself.
+     * Take that into account to compute how many bytes are there between
+     * cache->xlat and the end of the section.
+     */
+    diff = int128_sub(cache->mrs.size,
+                      int128_make64(cache->xlat - cache->mrs.offset_within_region));
+    l = int128_get64(int128_min(diff, int128_make64(l)));
+
     mr = cache->mrs.mr;
     memory_region_ref(mr);
     if (memory_access_is_direct(mr, is_write)) {
--
2.25.1
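The clamp added above reduces to one computation: how many bytes remain in the section past the translated offset. A standalone sketch of that arithmetic with plain 64-bit integers (QEMU itself uses the Int128 helpers shown in the patch; the function name and values here are illustrative):

#include <stdint.h>
#include <stdio.h>

/* Bytes available between the translated offset and the end of the
 * section: the cached length must never exceed this. */
static uint64_t clamp_cached_len(uint64_t requested_len,
                                 uint64_t section_size,
                                 uint64_t xlat,
                                 uint64_t offset_within_region)
{
    uint64_t remaining = section_size - (xlat - offset_within_region);
    return requested_len < remaining ? requested_len : remaining;
}

int main(void)
{
    /* A 0x1000-byte section, translation lands 0xf00 bytes in: a
     * 0x800-byte request is clamped to the 0x100 bytes that remain. */
    printf("0x%llx\n",
           (unsigned long long)clamp_cached_len(0x800, 0x1000, 0xf00, 0));
    return 0;
}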
meta/recipes-devtools/qemu/qemu/CVE-2022-35414.patch (new file, 53 lines)
@@ -0,0 +1,53 @@
From 09a07b5b39c87423df9e8f6574c19a14d36beac5 Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Wed, 27 Jul 2022 10:34:12 +0530
Subject: [PATCH] CVE-2022-35414

Upstream-Status: Backport [https://github.com/qemu/qemu/commit/418ade7849ce7641c0f7333718caf5091a02fd4c]
CVE: CVE-2022-35414
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
---
 exec.c | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/exec.c b/exec.c
index 43c70ffb..2d6add46 100644
--- a/exec.c
+++ b/exec.c
@@ -685,7 +685,7 @@ static void tcg_iommu_free_notifier_list(CPUState *cpu)
 
 /* Called from RCU critical section */
 MemoryRegionSection *
-address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
+address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr orig_addr,
                                   hwaddr *xlat, hwaddr *plen,
                                   MemTxAttrs attrs, int *prot)
 {
@@ -694,6 +694,7 @@ address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
     IOMMUMemoryRegionClass *imrc;
     IOMMUTLBEntry iotlb;
     int iommu_idx;
+    hwaddr addr = orig_addr;
     AddressSpaceDispatch *d = atomic_rcu_read(&cpu->cpu_ases[asidx].memory_dispatch);
 
     for (;;) {
@@ -737,6 +738,16 @@ address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
     return section;
 
 translate_fail:
+    /*
+     * We should be given a page-aligned address -- certainly
+     * tlb_set_page_with_attrs() does so. The page offset of xlat
+     * is used to index sections[], and PHYS_SECTION_UNASSIGNED = 0.
+     * The page portion of xlat will be logged by memory_region_access_valid()
+     * when this memory access is rejected, so use the original untranslated
+     * physical address.
+     */
+    assert((orig_addr & ~TARGET_PAGE_MASK) == 0);
+    *xlat = orig_addr;
     return &d->map.sections[PHYS_SECTION_UNASSIGNED];
 }
 #endif
--
2.25.1
@@ -9,8 +9,12 @@ SRC_URI += " \
            file://0001-template-Makefile.in-do-not-write-host-cross-cc-item.patch \
            "
 
-SRC_URI[md5sum] = "ede247b56fb862f1f67f9471189b04d4"
-SRC_URI[sha256sum] = "2755b900a21235b443bb16dadd9032f784d4a88f143d852bc5d154f22b8781f1"
+SRC_URI[md5sum] = "f972fb0cce662966bec10d5c5f32d042"
+SRC_URI[sha256sum] = "e7203b0cc09442ed2c08936d483f8ac140ec1c72e37bb5c401646b7866cb5d10"
+
+# CVE-2021-28966 is Windows specific and not affects Linux OS
+# https://security-tracker.debian.org/tracker/CVE-2021-28966
+CVE_CHECK_WHITELIST += "CVE-2021-28966"
 
 PACKAGECONFIG ??= ""
 PACKAGECONFIG += "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
@@ -13,6 +13,7 @@ SRC_URI = "https://github.com/apple/cups/releases/download/v${PV}/${BP}-source.t
            file://0002-don-t-try-to-run-generated-binaries.patch \
            file://0003-cups_1.4.6.bb-Fix-build-on-ppc64.patch \
            file://0004-cups-fix-multilib-install-file-conflicts.patch\
+           file://CVE-2022-26691.patch \
            "
 
 UPSTREAM_CHECK_URI = "https://github.com/apple/cups/releases"
@@ -119,4 +120,4 @@ cups_sysroot_preprocess () {
 
 # -25317 concerns /var/log/cups having lp ownership. Our /var/log/cups is
 # root:root, so this doesn't apply.
-CVE_CHECK_WHITELIST += "CVE-2021-25317"
+CVE_CHECK_WHITELIST += "CVE-2021-25317"
meta/recipes-extended/cups/cups/CVE-2022-26691.patch (new file, 33 lines)
@@ -0,0 +1,33 @@
From de4f8c196106033e4c372dce3e91b9d42b0b9444 Mon Sep 17 00:00:00 2001
From: Zdenek Dohnal <zdohnal@redhat.com>
Date: Thu, 26 May 2022 06:27:04 +0200
Subject: [PATCH] scheduler/cert.c: Fix string comparison (fixes
 CVE-2022-26691)

The previous algorithm didn't expect the strings can have a different
length, so one string can be a substring of the other and such substring
was reported as equal to the longer string.

CVE: CVE-2022-26691
Upstream-Status: Backport [https://github.com/OpenPrinting/cups/commit/de4f8c196106033e4c372dce3e91b9d42b0b9444]
Signed-off-by: Steve Sakoman

---
diff --git a/scheduler/cert.c b/scheduler/cert.c
index b268bf1b2..9b65b96c9 100644
--- a/scheduler/cert.c
+++ b/scheduler/cert.c
@@ -434,5 +434,12 @@ ctcompare(const char *a, /* I - First string */
     b ++;
   }
 
-  return (result);
+  /*
+   * The while loop finishes when *a == '\0' or *b == '\0'
+   * so after the while loop either both *a and *b == '\0',
+   * or one points inside a string, so when we apply logical OR on *a,
+   * *b and result, we get a non-zero return value if the compared strings don't match.
+   */
+
+  return (result | *a | *b);
 }
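The one-line change above is easy to miss: ctcompare() walks both strings only while both are non-NUL, so a proper prefix used to fall out of the loop with result still 0 and compare as equal. Folding the final characters into the return value closes that hole. A standalone sketch of the same idea (not the cupsd source; the function name is illustrative):

/* Accumulate differences instead of returning early, then fold in the
 * final characters so a proper prefix ("sec" vs "secret") cannot match. */
#include <stdio.h>

static int ct_strcmp(const char *a, const char *b)
{
    int result = 0;

    while (*a && *b) {
        result |= *a ^ *b;   /* remember any mismatch, keep scanning */
        a++;
        b++;
    }
    /* If the lengths differ, exactly one of *a / *b is non-zero here. */
    return result | *a | *b; /* 0 only when both strings matched fully */
}

int main(void)
{
    printf("%d\n", ct_strcmp("secret", "secret"));        /* 0: equal      */
    printf("%d\n", ct_strcmp("secret", "sec") != 0);      /* 1: prefix caught */
    return 0;
}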
meta/recipes-extended/libtirpc/libtirpc/CVE-2021-46828.patch (new file, 155 lines)
@@ -0,0 +1,155 @@
From 48309e7cb230fc539c3edab0b3363f8ce973194f Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Thu, 28 Jul 2022 09:11:04 +0530
Subject: [PATCH] CVE-2021-46828

Upstream-Status: Backport [http://git.linux-nfs.org/?p=steved/libtirpc.git;a=commit;h=86529758570cef4c73fb9b9c4104fdc510f701ed]
CVE: CVE-2021-46828
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
---
 src/svc.c | 17 +++++++++++++-
 src/svc_vc.c | 62 +++++++++++++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 77 insertions(+), 2 deletions(-)

diff --git a/src/svc.c b/src/svc.c
index 6db164b..3a8709f 100644
--- a/src/svc.c
+++ b/src/svc.c
@@ -57,7 +57,7 @@
 
 #define max(a, b) (a > b ? a : b)
 
-static SVCXPRT **__svc_xports;
+SVCXPRT **__svc_xports;
 int __svc_maxrec;
 
 /*
@@ -194,6 +194,21 @@ __xprt_do_unregister (xprt, dolock)
   rwlock_unlock (&svc_fd_lock);
 }
 
+int
+svc_open_fds()
+{
+	int ix;
+	int nfds = 0;
+
+	rwlock_rdlock (&svc_fd_lock);
+	for (ix = 0; ix < svc_max_pollfd; ++ix) {
+		if (svc_pollfd[ix].fd != -1)
+			nfds++;
+	}
+	rwlock_unlock (&svc_fd_lock);
+	return (nfds);
+}
+
 /*
  * Add a service program to the callout list.
  * The dispatch routine will be called when a rpc request for this
diff --git a/src/svc_vc.c b/src/svc_vc.c
index c23cd36..1729963 100644
--- a/src/svc_vc.c
+++ b/src/svc_vc.c
@@ -64,6 +64,8 @@
 
 
 extern rwlock_t svc_fd_lock;
+extern SVCXPRT **__svc_xports;
+extern int svc_open_fds();
 
 static SVCXPRT *makefd_xprt(int, u_int, u_int);
 static bool_t rendezvous_request(SVCXPRT *, struct rpc_msg *);
@@ -82,6 +84,7 @@ static void svc_vc_ops(SVCXPRT *);
 static bool_t svc_vc_control(SVCXPRT *xprt, const u_int rq, void *in);
 static bool_t svc_vc_rendezvous_control (SVCXPRT *xprt, const u_int rq,
 				   void *in);
+static int __svc_destroy_idle(int timeout);
 
 struct cf_rendezvous { /* kept in xprt->xp_p1 for rendezvouser */
 	u_int sendsize;
@@ -312,13 +315,14 @@ done:
 	return (xprt);
 }
 
+
 /*ARGSUSED*/
 static bool_t
 rendezvous_request(xprt, msg)
 	SVCXPRT *xprt;
 	struct rpc_msg *msg;
 {
-	int sock, flags;
+	int sock, flags, nfds, cnt;
 	struct cf_rendezvous *r;
 	struct cf_conn *cd;
 	struct sockaddr_storage addr;
@@ -378,6 +382,16 @@ again:
 
 	gettimeofday(&cd->last_recv_time, NULL);
 
+	nfds = svc_open_fds();
+	if (nfds >= (_rpc_dtablesize() / 5) * 4) {
+		/* destroy idle connections */
+		cnt = __svc_destroy_idle(15);
+		if (cnt == 0) {
+			/* destroy least active */
+			__svc_destroy_idle(0);
+		}
+	}
+
 	return (FALSE); /* there is never an rpc msg to be processed */
 }
 
@@ -819,3 +833,49 @@ __svc_clean_idle(fd_set *fds, int timeout, bool_t cleanblock)
 {
 	return FALSE;
 }
+
+static int
+__svc_destroy_idle(int timeout)
+{
+	int i, ncleaned = 0;
+	SVCXPRT *xprt, *least_active;
+	struct timeval tv, tdiff, tmax;
+	struct cf_conn *cd;
+
+	gettimeofday(&tv, NULL);
+	tmax.tv_sec = tmax.tv_usec = 0;
+	least_active = NULL;
+	rwlock_wrlock(&svc_fd_lock);
+
+	for (i = 0; i <= svc_max_pollfd; i++) {
+		if (svc_pollfd[i].fd == -1)
+			continue;
+		xprt = __svc_xports[i];
+		if (xprt == NULL || xprt->xp_ops == NULL ||
+			xprt->xp_ops->xp_recv != svc_vc_recv)
+			continue;
+		cd = (struct cf_conn *)xprt->xp_p1;
+		if (!cd->nonblock)
+			continue;
+		if (timeout == 0) {
+			timersub(&tv, &cd->last_recv_time, &tdiff);
+			if (timercmp(&tdiff, &tmax, >)) {
+				tmax = tdiff;
+				least_active = xprt;
+			}
+			continue;
+		}
+		if (tv.tv_sec - cd->last_recv_time.tv_sec > timeout) {
+			__xprt_unregister_unlocked(xprt);
+			__svc_vc_dodestroy(xprt);
+			ncleaned++;
+		}
+	}
+	if (timeout == 0 && least_active != NULL) {
+		__xprt_unregister_unlocked(least_active);
+		__svc_vc_dodestroy(least_active);
+		ncleaned++;
+	}
+	rwlock_unlock(&svc_fd_lock);
+	return (ncleaned);
+}
--
2.25.1
@@ -9,7 +9,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=f835cce8852481e4b2bbbdd23b5e47f3 \
 
 PROVIDES = "virtual/librpc"
 
-SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2"
+SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2 \
+           file://CVE-2021-46828.patch \
+           "
 UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/libtirpc/files/libtirpc/"
 UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)/"
 SRC_URI[md5sum] = "b25f9cc18bfad50f7c446c77f4ae00bb"
meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch (new file, 67 lines)
@@ -0,0 +1,67 @@
From 731d698377dbd1f5b1b90efeb8094602ed59fc40 Mon Sep 17 00:00:00 2001
From: Nils Bars <nils.bars@t-online.de>
Date: Mon, 17 Jan 2022 16:53:16 +0000
Subject: [PATCH] Fix null pointer dereference and use of uninitialized data

This fixes a bug that causes use of uninitialized heap data if `readbuf` fails
to read as many bytes as indicated by the extra field length attribute.
Furthermore, this fixes a null pointer dereference if an archive contains an
`EF_UNIPATH` extra field but does not have a filename set.
---
 fileio.c  | 5 ++++-
 process.c | 6 +++++-
 2 files changed, 9 insertions(+), 2 deletions(-)
---

Patch from:
https://bugs.launchpad.net/ubuntu/+source/unzip/+bug/1957077
https://launchpadlibrarian.net/580782282/0001-Fix-null-pointer-dereference-and-use-of-uninitialized-data.patch
Regenerated to apply without offsets.

CVE: CVE-2021-4217

Upstream-Status: Pending [infozip upstream inactive]

Signed-off-by: Joe Slater <joe.slater@windriver.com>


diff --git a/fileio.c b/fileio.c
index 14460f3..1dc319e 100644
--- a/fileio.c
+++ b/fileio.c
@@ -2301,8 +2301,11 @@ int do_string(__G__ length, option) /* return PK-type error code */
             seek_zipf(__G__ G.cur_zipfile_bufstart - G.extra_bytes +
                       (G.inptr-G.inbuf) + length);
         } else {
-            if (readbuf(__G__ (char *)G.extra_field, length) == 0)
+            unsigned bytes_read = readbuf(__G__ (char *)G.extra_field, length);
+            if (bytes_read == 0)
                 return PK_EOF;
+            if (bytes_read != length)
+                return PK_ERR;
             /* Looks like here is where extra fields are read */
             if (getZip64Data(__G__ G.extra_field, length) != PK_COOL)
             {
diff --git a/process.c b/process.c
index 5f8f6c6..de843a5 100644
--- a/process.c
+++ b/process.c
@@ -2058,10 +2058,14 @@ int getUnicodeData(__G__ ef_buf, ef_len)
         G.unipath_checksum = makelong(offset + ef_buf);
         offset += 4;
 
+        if (!G.filename_full) {
+          /* Check if we have a unicode extra section but no filename set */
+          return PK_ERR;
+        }
+
         /*
          * Compute 32-bit crc
          */
-
         chksum = crc32(chksum, (uch *)(G.filename_full),
                        strlen(G.filename_full));
 
--
2.32.0
meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch (new file, 39 lines)
@@ -0,0 +1,39 @@
https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1010355

CVE: CVE-2022-0529
Upstream-Status: Inactive-Upstream [need a new release]

diff --git a/process.c b/process.c
index d2a846e..99b9c7b 100644
--- a/process.c
+++ b/process.c
@@ -2507,13 +2507,15 @@ char *wide_to_local_string(wide_string, escape_all)
     char buf[9];
     char *buffer = NULL;
     char *local_string = NULL;
+    size_t buffer_size;
 
     for (wsize = 0; wide_string[wsize]; wsize++) ;
 
     if (max_bytes < MAX_ESCAPE_BYTES)
         max_bytes = MAX_ESCAPE_BYTES;
 
-    if ((buffer = (char *)malloc(wsize * max_bytes + 1)) == NULL) {
+    buffer_size = wsize * max_bytes + 1;
+    if ((buffer = (char *)malloc(buffer_size)) == NULL) {
         return NULL;
     }
 
@@ -2552,7 +2554,11 @@ char *wide_to_local_string(wide_string, escape_all)
             /* no MB for this wide */
             /* use escape for wide character */
             char *escape_string = wide_to_escape_string(wide_string[i]);
-            strcat(buffer, escape_string);
+            size_t buffer_len = strlen(buffer);
+            size_t escape_string_len = strlen(escape_string);
+            if (buffer_len + escape_string_len + 1 > buffer_size)
+                escape_string_len = buffer_size - buffer_len - 1;
+            strncat(buffer, escape_string, escape_string_len);
             free(escape_string);
         }
     }
meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch (new file, 33 lines)
@@ -0,0 +1,33 @@
https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1010355

CVE: CVE-2022-0530
Upstream-Status: Inactive-Upstream [need a new release]

diff --git a/fileio.c b/fileio.c
index 6290824..77e4b5f 100644
--- a/fileio.c
+++ b/fileio.c
@@ -2361,6 +2361,9 @@ int do_string(__G__ length, option) /* return PK-type error code */
             /* convert UTF-8 to local character set */
             fn = utf8_to_local_string(G.unipath_filename,
                                       G.unicode_escape_all);
+            if (fn == NULL)
+                return PK_ERR;
+
             /* make sure filename is short enough */
             if (strlen(fn) >= FILNAMSIZ) {
                 fn[FILNAMSIZ - 1] = '\0';
diff --git a/process.c b/process.c
index d2a846e..715bc0f 100644
--- a/process.c
+++ b/process.c
@@ -2605,6 +2605,8 @@ char *utf8_to_local_string(utf8_string, escape_all)
     int escape_all;
 {
     zwchar *wide = utf8_to_wide_string(utf8_string);
+    if (wide == NULL)
+        return NULL;
     char *loc = wide_to_local_string(wide, escape_all);
     free(wide);
     return loc;
@@ -26,6 +26,9 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/infozip/UnZip%206.x%20%28latest%29/UnZip%206.0/
            file://CVE-2019-13232_p1.patch \
            file://CVE-2019-13232_p2.patch \
            file://CVE-2019-13232_p3.patch \
+           file://CVE-2021-4217.patch \
+           file://CVE-2022-0529.patch \
+           file://CVE-2022-0530.patch \
            "
 UPSTREAM_VERSION_UNKNOWN = "1"
@@ -0,0 +1,61 @@
From bdf3a2630c02a63803309cf0ad4b274234c814ce Mon Sep 17 00:00:00 2001
From: Hitendra Prajapati <hprajapati@mvista.com>
Date: Tue, 9 Aug 2022 09:45:42 +0530
Subject: [PATCH] CVE-2021-46829

Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/gdk-pixbuf/-/commit/5398f04d772f7f8baf5265715696ed88db0f0512]
CVE: CVE-2021-46829
Signed-off-by: Hitendra Prajapati <hprajapati@mvista.com>
---
 gdk-pixbuf/io-gif-animation.c | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/gdk-pixbuf/io-gif-animation.c b/gdk-pixbuf/io-gif-animation.c
index d742963..9544391 100644
--- a/gdk-pixbuf/io-gif-animation.c
+++ b/gdk-pixbuf/io-gif-animation.c
@@ -364,7 +364,7 @@ composite_frame (GdkPixbufGifAnim *anim, GdkPixbufFrame *frame)
         for (i = 0; i < n_indexes; i++) {
                 guint8 index = index_buffer[i];
                 guint x, y;
-                int offset;
+                gsize offset;
 
                 if (index == frame->transparent_index)
                         continue;
@@ -374,11 +374,13 @@ composite_frame (GdkPixbufGifAnim *anim, GdkPixbufFrame *frame)
                 if (x >= anim->width || y >= anim->height)
                         continue;
 
-                offset = y * gdk_pixbuf_get_rowstride (anim->last_frame_data) + x * 4;
-                pixels[offset + 0] = frame->color_map[index * 3 + 0];
-                pixels[offset + 1] = frame->color_map[index * 3 + 1];
-                pixels[offset + 2] = frame->color_map[index * 3 + 2];
-                pixels[offset + 3] = 255;
+                if (g_size_checked_mul (&offset, gdk_pixbuf_get_rowstride (anim->last_frame_data), y) &&
+                    g_size_checked_add (&offset, offset, x * 4)) {
+                        pixels[offset + 0] = frame->color_map[index * 3 + 0];
+                        pixels[offset + 1] = frame->color_map[index * 3 + 1];
+                        pixels[offset + 2] = frame->color_map[index * 3 + 2];
+                        pixels[offset + 3] = 255;
+                }
         }
 
 out:
@@ -443,8 +445,11 @@ gdk_pixbuf_gif_anim_iter_get_pixbuf (GdkPixbufAnimationIter *anim_iter)
                 x_end = MIN (anim->last_frame->x_offset + anim->last_frame->width, anim->width);
                 y_end = MIN (anim->last_frame->y_offset + anim->last_frame->height, anim->height);
                 for (y = anim->last_frame->y_offset; y < y_end; y++) {
-                        guchar *line = pixels + y * gdk_pixbuf_get_rowstride (anim->last_frame_data) + anim->last_frame->x_offset * 4;
-                        memset (line, 0, (x_end - anim->last_frame->x_offset) * 4);
+                        gsize offset;
+                        if (g_size_checked_mul (&offset, gdk_pixbuf_get_rowstride (anim->last_frame_data), y) &&
+                            g_size_checked_add (&offset, offset, anim->last_frame->x_offset * 4)) {
+                                memset (pixels + offset, 0, (x_end - anim->last_frame->x_offset) * 4);
+                        }
                 }
                 break;
         case GDK_PIXBUF_FRAME_REVERT:
--
2.25.1
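The patch above replaces unchecked offset arithmetic, which can wrap for crafted GIF dimensions, with GLib's g_size_checked_mul()/g_size_checked_add() and skips the pixel write whenever the computation would overflow. The same pattern sketched with the GCC/Clang overflow builtins instead of GLib, with illustrative values and function name:

/* Overflow-checked offset = rowstride * y + x * 4; returns 0 (and writes
 * nothing) if either step would wrap, mirroring the patched behaviour. */
#include <stddef.h>
#include <stdio.h>

static int checked_offset(size_t rowstride, size_t y, size_t x4, size_t *out)
{
    size_t off;
    if (__builtin_mul_overflow(rowstride, y, &off))  /* off = rowstride * y */
        return 0;                                    /* would wrap: skip write */
    if (__builtin_add_overflow(off, x4, &off))       /* off += x * 4 */
        return 0;
    *out = off;
    return 1;
}

int main(void)
{
    size_t off;
    if (checked_offset(4096, 10, 40, &off))
        printf("offset = %zu\n", off);               /* 41000 */
    if (!checked_offset((size_t)-1, 2, 0, &off))
        puts("overflow detected, write skipped");
    return 0;
}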
@@ -26,6 +26,7 @@ SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz \
            file://missing-test-data.patch \
            file://CVE-2020-29385.patch \
            file://CVE-2021-20240.patch \
+           file://CVE-2021-46829.patch \
            "
 
 SRC_URI_append_class-target = " \
meta/recipes-graphics/freetype/freetype/CVE-2022-27404.patch (new file, 33 lines)
@@ -0,0 +1,33 @@
From 53dfdcd8198d2b3201a23c4bad9190519ba918db Mon Sep 17 00:00:00 2001
From: Werner Lemberg <wl@gnu.org>
Date: Thu, 17 Mar 2022 19:24:16 +0100
Subject: [PATCH] [sfnt] Avoid invalid face index.

Fixes #1138.

* src/sfnt/sfobjs.c (sfnt_init_face), src/sfnt/sfwoff2.c (woff2_open_font):
Check `face_index` before decrementing.

CVE: CVE-2022-27404
Upstream-Status: Backport [https://gitlab.freedesktop.org/freetype/freetype/-/commit/53dfdcd8198d2b3201a23c4bad9190519ba918db.patch]
Comment: Removed second hunk as sfwoff2.c file is not part of current v2.10.1 code
Signed-off-by: Ranjitsinh Rathod <ranjitsinh.rathod@kpit.com>
---
 src/sfnt/sfobjs.c | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sfnt/sfobjs.c b/src/sfnt/sfobjs.c
index f9d4d3858..9771c35df 100644
--- a/src/sfnt/sfobjs.c
+++ b/src/sfnt/sfobjs.c
@@ -566,7 +566,7 @@
     face_index = FT_ABS( face_instance_index ) & 0xFFFF;
 
     /* value -(N+1) requests information on index N */
-    if ( face_instance_index < 0 )
+    if ( face_instance_index < 0 && face_index > 0 )
      face_index--;
 
    if ( face_index >= face->ttc_header.count )
--
GitLab
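The guard above matters because of how face_instance_index is encoded: a negative value -(N+1) asks for information about face N, and the low 16 bits of its absolute value form the face index. When those bits are already zero (e.g. -0x10000), the old unconditional decrement produced an index of -1 that then slipped past the ttc_header.count check. A standalone sketch of the encoding and the fixed guard (illustrative, not the FreeType source):

#include <stdio.h>
#include <stdlib.h>

/* -(N+1) requests info on face N; only decrement when the masked
 * index is actually positive, as the patched check requires. */
static long face_index_from_instance(long face_instance_index)
{
    long face_index = labs(face_instance_index) & 0xFFFF;

    if (face_instance_index < 0 && face_index > 0)
        face_index--;
    return face_index;
}

int main(void)
{
    printf("%ld\n", face_index_from_instance(-2));      /* 1: info on face 1 */
    printf("%ld\n", face_index_from_instance(-65536));  /* masked to 0; guard stops the underflow */
    return 0;
}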
Some files were not shown because too many files have changed in this diff.