mirror of
https://git.yoctoproject.org/poky
synced 2026-02-09 02:03:04 +01:00
There were a couple of problems with the handling of precompiled locales. - it gathered the list of locales from the directories - this breaks due to the naming mismatch, e.g. en_US.UTF-8 vs en_US.utf8. - it retained its hardcoded assumption that the non-suffixed locale (en_US, as opposed to en_US.*) is UTF-8, while the others are otherwise. Hardcoding this is both inflexible and just plain wrong for some toolchains. It's most common in desktop distros for 'en_US' to be non-utf8, and 'en_US.UTF-8' to be utf8, and this is the case in some external toolchains as well. The code now uses the SUPPORTED file to hold the knowledge it needs. This file not only holds the list of locales to generate, but also maps the locale names to the charsets they correspond to. The code now uses this to assemble its charset map, falling back to the '.' suffix as charset when the locale is not in the map. For precompiled, it now uses the locale->charset knowledge it has, thereby allowing non-utf8 non-suffixed locale names, whereas for non-precompiled, it reverts to the previous assumption, renaming the utf8 locale and forcibly suffixing the others. So, a person maintaining an external toolchain recipe is responsible for ensuring that the SUPPORTED file they provide matches up with the compiled locales in the toolchain, if they want to utilize precompiled locales. I believe in the long term the compiled case should do the same thing precompiled does, and use SUPPORTED or a similar mechanism to encode the knowledge, and if people want all the non-suffixed names to be utf8, they can change that file to do so. This would avoid the hardcoded assumption in the code, as well as consolidating the behavior between the compiled and precompiled cases. (From OE-Core rev: 3f36058923ccda25a3dd85046542e65b6034c09e) Signed-off-by: Christopher Larson <kergoth@gmail.com> Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
383 lines
13 KiB
Plaintext
383 lines
13 KiB
Plaintext
#
|
|
# This class knows how to package up [e]glibc. It's shared since prebuilt binary toolchains
# may need packaging and it's pointless to duplicate this code.
|
|
#
|
|
# Caller should set GLIBC_INTERNAL_USE_BINARY_LOCALE to one of:
|
|
# "compile" - Use QEMU to generate the binary locale files
|
|
# "precompiled" - The binary locale files are pregenerated and already present
|
|
# "ondevice" - The device will build the locale files upon first boot through the postinst
|
|
|
|
# Default: build locale archives on the device at first boot (postinst).
# The anonymous python below switches this to "compile" when the target
# architecture is covered by BINARY_LOCALE_ARCHES.
GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
|
|
|
|
# Parse-time setup: decide whether binary locale generation is enabled for
# this recipe/arch, add the needed native tool to DEPENDS, and derive
# PACKAGE_NO_GCONV from DISTRO_FEATURES.
python __anonymous () {
    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True)

    pn = d.getVar("PN", True)
    if pn.endswith("-initial"):
        # Bootstrap (-initial) recipes never generate binary locales.
        enabled = False

    if enabled and int(enabled):
        import re

        target_arch = d.getVar("TARGET_ARCH", True)
        binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or ""
        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or ""

        # Use split() rather than split(" "): the latter produces an empty
        # pattern when BINARY_LOCALE_ARCHES is unset/empty (or contains runs
        # of whitespace), and re.compile("").match(arch) matches *any* arch,
        # which would wrongly force compile mode for every target.
        for regexp in binary_arches.split():
            r = re.compile(regexp)

            if r.match(target_arch):
                depends = d.getVar("DEPENDS", True)
                if use_cross_localedef == "1" :
                    depends = "%s cross-localedef-native" % depends
                else:
                    # Fall back to running the target localedef under qemu.
                    depends = "%s qemu-native" % depends
                d.setVar("DEPENDS", depends)
                d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
                break

    distro_features = (d.getVar('DISTRO_FEATURES', True) or '').split()

    # try to fix disable charsets/locales/locale-code compile fail
    if 'libc-charsets' in distro_features and 'libc-locales' in distro_features and 'libc-locale-code' in distro_features:
        d.setVar('PACKAGE_NO_GCONV', '0')
    else:
        d.setVar('PACKAGE_NO_GCONV', '1')
}
|
|
|
|
# Expose "<arch>-<os>" as an override so variables in this class (and
# recipes using it) can be specialized per target tuple.
OVERRIDES_append = ":${TARGET_ARCH}-${TARGET_OS}"
|
|
|
|
do_configure_prepend() {
	# Make the ldd script use plain /bin/sh instead of bash, which may
	# not be present on the target image.
	[ ! -e ${S}/elf/ldd.bash.in ] || \
		sed -e "s#@BASH@#/bin/sh#" -i ${S}/elf/ldd.bash.in
}
|
|
|
|
|
|
|
|
# indentation removed on purpose
# NOTE: this function body is used as a Python %-format template by
# output_locale_source() in package_do_split_gconvs; its three "%s" slots
# are filled with (locale name, charmap, locale name).  For that reason any
# comment added *inside* the body must not contain percent characters.
locale_base_postinst() {
#!/bin/sh

# Must run on the device itself; bail out when run against an offline
# root (offline rootfs construction sets D).
if [ "x$D" != "x" ]; then
exit 1
fi

# Rebuild the locale archive in a scratch dir, then move it into place so
# the live archive is replaced atomically.
rm -rf ${TMP_LOCALE}
mkdir -p ${TMP_LOCALE}
if [ -f ${libdir}/locale/locale-archive ]; then
cp ${libdir}/locale/locale-archive ${TMP_LOCALE}/
fi
localedef --inputfile=${datadir}/i18n/locales/%s --charmap=%s --prefix=/tmp/locale %s
mkdir -p ${libdir}/locale/
mv ${TMP_LOCALE}/locale-archive ${libdir}/locale/
rm -rf ${TMP_LOCALE}
}
|
|
|
|
# indentation removed on purpose
# Same %-format template convention as locale_base_postinst: formatted with
# (locale name, charmap, locale name); comments inside the body must not
# contain percent characters.
# NOTE(review): the --inputfile path here lacks the i18n/ component that the
# postinst uses (${datadir}/locales vs ${datadir}/i18n/locales).  Looks like
# a latent inconsistency, though --delete-from-archive may never read the
# input file; verify before changing.
locale_base_postrm() {
#!/bin/sh

# Remove this locale from the archive via a scratch copy, then move the
# updated archive back into place.
rm -rf ${TMP_LOCALE}
mkdir -p ${TMP_LOCALE}
if [ -f ${libdir}/locale/locale-archive ]; then
cp ${libdir}/locale/locale-archive ${TMP_LOCALE}/
fi
localedef --delete-from-archive --inputfile=${datadir}/locales/%s --charmap=%s --prefix=/tmp/locale %s
mv ${TMP_LOCALE}/locale-archive ${libdir}/locale/
rm -rf ${TMP_LOCALE}
}
|
|
|
|
|
|
# Scratch location used by the postinst/postrm fragments above.  localedef is
# invoked with --prefix=/tmp/locale, so the archive it writes ends up at
# /tmp/locale${libdir}/locale/locale-archive, i.e. exactly ${TMP_LOCALE}.
TMP_LOCALE="/tmp/locale${libdir}/locale"
# Root of the tree locale files are taken from; defaults to the packaging
# staging dir, but external toolchain recipes may point it elsewhere.
LOCALETREESRC ?= "${PKGD}"
|
|
|
|
# Assemble a minimal chroot-like tree under ${WORKDIR}/locale-tree with the
# i18n sources (charmaps + locale definitions), the target C library and the
# target localedef binary, so binary locales can be generated from it (under
# qemu, or with cross-localedef taking the tree as --prefix).
do_prep_locale_tree() {
	treedir=${WORKDIR}/locale-tree
	rm -rf $treedir
	mkdir -p $treedir/${base_bindir} $treedir/${base_libdir} $treedir/${datadir} $treedir/${libdir}/locale
	# tar pipe preserves permissions (-p) while relocating the i18n tree.
	tar -cf - -C ${LOCALETREESRC}${datadir} -ps i18n | tar -xf - -C $treedir/${datadir}
	# unzip to avoid parsing errors
	for i in $treedir/${datadir}/i18n/charmaps/*gz; do
		gunzip $i
	done
	tar -cf - -C ${LOCALETREESRC}${base_libdir} -ps . | tar -xf - -C $treedir/${base_libdir}
	# qemu-run localedef may need the native libgcc_s alongside the target libs.
	# NOTE(review): the test path uses .../lib while the copy below uses
	# .../${base_libdir}, and "[ -f glob ]" misbehaves when the glob expands
	# to more than one file - confirm on multilib/native setups.
	if [ -f ${STAGING_DIR_NATIVE}${prefix_native}/lib/libgcc_s.* ]; then
		tar -cf - -C ${STAGING_DIR_NATIVE}/${prefix_native}/${base_libdir} -ps libgcc_s.* | tar -xf - -C $treedir/${base_libdir}
	fi
	install -m 0755 ${LOCALETREESRC}${bindir}/localedef $treedir/${base_bindir}
}
|
|
|
|
# Copy the freshly generated binary locale archives out of the locale tree
# into ${PKGD} so they are picked up by packaging (preserving permissions
# via the tar pipe).
do_collect_bins_from_locale_tree() {
	treedir=${WORKDIR}/locale-tree

	mkdir -p ${PKGD}${libdir}
	tar -cf - -C $treedir/${libdir} -ps locale | tar -xf - -C ${PKGD}${libdir}
}
|
|
|
|
inherit qemu
|
|
|
|
# Split [e]glibc's gconv modules, charmaps and locale source data into
# per-item packages, then create per-locale "locale-base-*" packages.
# Depending on GLIBC_INTERNAL_USE_BINARY_LOCALE, binary locale data is
# cross-generated here ("compile"), packaged as shipped ("precompiled"),
# or generated on the device via postinst (anything else, i.e. "ondevice").
python package_do_split_gconvs () {
    import os, re
    # PACKAGE_NO_GCONV is derived from DISTRO_FEATURES by the anonymous
    # python at the top of this class.
    if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
        bb.note("package requested not splitting gconvs")
        return

    if not d.getVar('PACKAGES', True):
        return

    bpn = d.getVar('BPN', True)
    libdir = d.getVar('libdir', True)
    if not libdir:
        bb.error("libdir not defined")
        return
    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.error("datadir not defined")
        return

    gconv_libdir = base_path_join(libdir, "gconv")
    charmap_dir = base_path_join(datadir, "i18n", "charmaps")
    locales_dir = base_path_join(datadir, "i18n", "locales")
    binary_locales_dir = base_path_join(libdir, "locale")

    # do_split_packages hook: scan a gconv module file for 'copy'/'include'
    # directives and turn each referenced module into an RDEPENDS on the
    # corresponding <bpn>-gconv-* package.
    def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "r")
        c_re = re.compile('^copy "(.*)"')
        i_re = re.compile('^include "(\w+)".*')
        for l in f.readlines():
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name('%s-gconv-%s' % (bpn, m.group(1)))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            # Alternative libcs (e.g. eglibc) still RPROVIDE the glibc-named
            # packages so dependent recipes keep working.
            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
        description='gconv module for character set %s', hook=calc_gconv_deps, \
        extra_depends=bpn+'-gconv')

    # Same scheme for charmaps (the gzipped files under i18n/charmaps).
    def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "r")
        c_re = re.compile('^copy "(.*)"')
        i_re = re.compile('^include "(\w+)".*')
        for l in f.readlines():
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name('%s-charmap-%s' % (bpn, m.group(1)))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
        description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')

    # And for locale source definitions (files under i18n/locales).
    def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "r")
        c_re = re.compile('^copy "(.*)"')
        i_re = re.compile('^include "(\w+)".*')
        for l in f.readlines():
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name(bpn+'-localedata-%s' % m.group(1))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
        description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
    # NOTE(review): getVar without the expand flag here, unlike the rest of
    # this function - presumably intentional to keep PACKAGES unexpanded,
    # but confirm against the bitbake version in use.
    d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')

    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)

    # Matches "base.suffix" locale names, e.g. en_US.UTF-8.
    dot_re = re.compile("(.*)\.(.*)")

    # Read in supported locales and associated encodings
    supported = {}
    with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
        for line in f.readlines():
            try:
                locale, charset = line.rstrip().split()
            except ValueError:
                # Skip blank/malformed lines rather than aborting.
                continue
            supported[locale] = charset

    # GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales
    to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
    if not to_generate or to_generate == 'all':
        to_generate = supported.keys()
    else:
        to_generate = to_generate.split()
        # Any explicitly requested locale missing from SUPPORTED gets its
        # charset guessed from the '.' suffix, defaulting to UTF-8.
        for locale in to_generate:
            if locale not in supported:
                if '.' in locale:
                    charset = locale.split('.')[1]
                else:
                    charset = 'UTF-8'
                bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset))
                supported[locale] = charset

    # "ondevice" path: wire up the postinst/postrm templates so the locale
    # is compiled on the target at first boot.
    def output_locale_source(name, pkgname, locale, encoding):
        d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
            (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)))
        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
            % (locale, encoding, locale))
        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
            (locale, encoding, locale))

    # Depend on the binary locale data package, using libc's on-disk naming
    # convention (lower-cased charset suffix with dashes stripped,
    # e.g. en_US.UTF-8 -> en_us.utf8).
    def output_locale_binary_rdepends(name, pkgname, locale, encoding):
        m = re.match("(.*)\.(.*)", name)
        if m:
            libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
        else:
            libc_name = name
        d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
            % (d.getVar('MLPREFIX') + bpn, libc_name)))

    # Map of output path -> shell command; turned into a Makefile below so
    # locale generation can run in parallel.
    commands = {}

    # "compile" path: queue a localedef invocation (cross-localedef or the
    # target localedef under qemu) that writes into the locale tree.
    def output_locale_binary(name, pkgname, locale, encoding):
        treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
        ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
        path = d.getVar("PATH", True)
        i18npath = base_path_join(treedir, datadir, "i18n")
        gconvpath = base_path_join(treedir, "iconvdata")
        outputpath = base_path_join(treedir, libdir, "locale")

        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
        if use_cross_localedef == "1":
            target_arch = d.getVar('TARGET_ARCH', True)
            # Endianness/alignment flags cross-localedef needs per target arch.
            locale_arch_options = { \
                "arm":     " --uint32-align=4 --little-endian ", \
                "powerpc": " --uint32-align=4 --big-endian ", \
                "powerpc64": " --uint32-align=4 --big-endian ", \
                "mips":    " --uint32-align=4 --big-endian ", \
                "mipsel":  " --uint32-align=4 --little-endian ", \
                "i586":    " --uint32-align=4 --little-endian ", \
                "i686":    " --uint32-align=4 --little-endian ", \
                "x86_64":  " --uint32-align=4 --little-endian " }

            if target_arch in locale_arch_options:
                localedef_opts = locale_arch_options[target_arch]
            else:
                bb.error("locale_arch_options not found for target_arch=" + target_arch)
                raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options")

            localedef_opts += " --force --old-style --no-archive --prefix=%s \
                --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
                % (treedir, treedir, datadir, locale, encoding, outputpath, name)

            cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
                (path, i18npath, gconvpath, localedef_opts)
        else: # earlier slower qemu way
            qemu = qemu_target_binary(d)
            localedef_opts = "--force --old-style --no-archive --prefix=%s \
                --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
                % (treedir, datadir, locale, encoding, name)

            # Per-package-arch qemu options take precedence over the generic ones.
            qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
            if not qemu_options:
                qemu_options = d.getVar('QEMU_OPTIONS', True)

            cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
                -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
                (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)

        commands["%s/%s" % (outputpath, name)] = cmd

        bb.note("generating locale %s (%s)" % (locale, encoding))

    # Create the locale-base-<name> metapackage and dispatch to the right
    # generation strategy for the configured use_bin mode.
    def output_locale(name, locale, encoding):
        pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
        d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
        rprovides = ' virtual-locale-%s' % legitimize_package_name(name)
        m = re.match("(.*)_(.*)", name)
        if m:
            # Also provide the language-only virtual, e.g. virtual-locale-en.
            rprovides += ' virtual-locale-%s' % m.group(1)
        d.setVar('RPROVIDES_%s' % pkgname, rprovides)

        if use_bin == "compile":
            output_locale_binary_rdepends(name, pkgname, locale, encoding)
            output_locale_binary(name, pkgname, locale, encoding)
        elif use_bin == "precompiled":
            output_locale_binary_rdepends(name, pkgname, locale, encoding)
        else:
            output_locale_source(name, pkgname, locale, encoding)

    if use_bin == "compile":
        bb.note("preparing tree for binary locale generation")
        bb.build.exec_func("do_prep_locale_tree", d)

    utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
    # NOTE(review): 'encodings' appears unused below - likely leftover.
    encodings = {}
    for locale in to_generate:
        charset = supported[locale]
        if utf8_only and charset != 'UTF-8':
            continue

        m = dot_re.match(locale)
        if m:
            base = m.group(1)
        else:
            base = locale

        # Precompiled locales are kept as is, obeying SUPPORTED, while
        # others are adjusted, ensuring that the non-suffixed locales
        # are utf-8, while the suffixed are not.
        if use_bin == "precompiled":
            output_locale(locale, base, charset)
        else:
            if charset == 'UTF-8':
                output_locale(base, base, charset)
            else:
                output_locale('%s.%s' % (base, charset), base, charset)

    if use_bin == "compile":
        # Emit one Makefile target per locale so oe_runmake can generate
        # them in parallel (PARALLEL_MAKE).
        makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
        m = open(makefile, "w")
        m.write("all: %s\n\n" % " ".join(commands.keys()))
        for cmd in commands:
            m.write(cmd + ":\n")
            # Makefile recipe lines must start with a tab.
            m.write("\t" + commands[cmd] + "\n\n")
        m.close()
        d.setVar("B", os.path.dirname(makefile))
        d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}")
        bb.note("Executing binary locale generation makefile")
        bb.build.exec_func("oe_runmake", d)
        bb.note("collecting binary locales from locale tree")
        bb.build.exec_func("do_collect_bins_from_locale_tree", d)
        do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
            output_pattern=bpn+'-binary-localedata-%s', \
            description='binary locale definition for %s', extra_depends='', allow_dirs=True)
    elif use_bin == "precompiled":
        do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
            output_pattern=bpn+'-binary-localedata-%s', \
            description='binary locale definition for %s', extra_depends='', allow_dirs=True)
    else:
        bb.note("generation of binary locales disabled. this may break i18n!")
}
|
|
|
|
# We want to do this indirection so that we can safely 'return'
# from the called function even though we're prepending
# (a bare 'return' directly in populate_packages_prepend would abort the
# whole populate_packages function, not just this prepended fragment).
python populate_packages_prepend () {
    bb.build.exec_func('package_do_split_gconvs', d)
}
|
|
|