mirror of
https://git.yoctoproject.org/poky
synced 2026-04-28 06:32:34 +02:00
Rename /openembedded/ -> /meta/
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@530 311d38ba-8fff-0310-9ca6-ca027cbcb966
This commit is contained in:
182
meta/classes/autotools.bbclass
Normal file
182
meta/classes/autotools.bbclass
Normal file
@@ -0,0 +1,182 @@
|
||||
inherit base
|
||||
|
||||
def autotools_dep_prepend(d):
    """Compute the build-time dependency string prepended to DEPENDS.

    Returns an empty string when INHIBIT_AUTOTOOLS_DEPS is set; otherwise
    the native autotools (and, unless the recipe is libtool itself,
    libtool-native) plus gnu-config-native.
    """
    import bb

    if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
        return ''

    pn = bb.data.getVar('PN', d, 1)

    # autoconf/automake cannot depend on themselves.
    if pn in ['autoconf-native', 'automake-native']:
        return ''

    deps = 'autoconf-native automake-native '

    # libtool recipes must not depend on libtool-native (circular).
    if pn not in ['libtool', 'libtool-native', 'libtool-cross']:
        deps = deps + 'libtool-native '

    return deps + 'gnu-config-native '
|
||||
|
||||
EXTRA_OEMAKE = ""
|
||||
DEPENDS_prepend = "${@autotools_dep_prepend(d)}"
|
||||
acpaths = "default"
|
||||
EXTRA_AUTORECONF = "--exclude=autopoint"
|
||||
|
||||
def autotools_set_crosscompiling(d):
    """Return configure arguments marking a cross build ("" for native)."""
    import bb
    if bb.data.inherits_class('native', d):
        return ""
    return " cross_compiling=yes"
|
||||
|
||||
# EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}"
|
||||
|
||||
# Run the package's configure script with the standard OE cross-compile
# and directory-layout arguments.  Recipe-specific options come from
# EXTRA_OECONF; any arguments given to this function are appended
# verbatim.  Fails fatally if configure is missing or exits non-zero.
oe_runconf () {
	if [ -x ${S}/configure ] ; then
		cfgcmd="${S}/configure \
		--build=${BUILD_SYS} \
		--host=${HOST_SYS} \
		--target=${TARGET_SYS} \
		--prefix=${prefix} \
		--exec_prefix=${exec_prefix} \
		--bindir=${bindir} \
		--sbindir=${sbindir} \
		--libexecdir=${libexecdir} \
		--datadir=${datadir} \
		--sysconfdir=${sysconfdir} \
		--sharedstatedir=${sharedstatedir} \
		--localstatedir=${localstatedir} \
		--libdir=${libdir} \
		--includedir=${includedir} \
		--oldincludedir=${oldincludedir} \
		--infodir=${infodir} \
		--mandir=${mandir} \
		${EXTRA_OECONF} \
		$@"
		oenote "Running $cfgcmd..."
		$cfgcmd || oefatal "oe_runconf failed"
	else
		oefatal "no configure script found"
	fi
}
|
||||
|
||||
# Regenerate the autotools machinery (aclocal include path discovery,
# gettext/intltool setup, then autoreconf) before configuring, so stale
# configure scripts and outdated config.sub/config.guess cannot break the
# build.  Finally runs oe_runconf if a configure script exists.
autotools_do_configure() {
	case ${PN} in
	autoconf*)
		# autoconf/automake themselves must not be autoreconf'd.
		;;
	automake*)
		;;
	*)
		# WARNING: gross hack follows:
		# An autotools built package generally needs these scripts, however only
		# automake or libtoolize actually install the current versions of them.
		# This is a problem in builds that do not use libtool or automake, in the case
		# where we -need- the latest version of these scripts. e.g. running a build
		# for a package whose autotools are old, on an x86_64 machine, which the old
		# config.sub does not support. Work around this by installing them manually
		# regardless.
		# Remove every pre-generated configure so autoreconf must rebuild it.
		( for ac in `find ${S} -name configure.in -o -name configure.ac`; do
				rm -f `dirname $ac`/configure
			done )
		if [ -e ${S}/configure.in -o -e ${S}/configure.ac ]; then
			olddir=`pwd`
			cd ${S}
			# "default" means: discover every directory (two levels deep)
			# containing .m4 files and pass each as an aclocal -I path.
			if [ x"${acpaths}" = xdefault ]; then
				acpaths=
				for i in `find ${S} -maxdepth 2 -name \*.m4|grep -v 'aclocal.m4'| \
					grep -v 'acinclude.m4' | sed -e 's,\(.*/\).*$,\1,'|sort -u`; do
					acpaths="$acpaths -I $i"
				done
			else
				acpaths="${acpaths}"
			fi
			# Major.minor of the host automake, used for the versioned
			# staging aclocal directory (e.g. aclocal-1.9).
			AUTOV=`automake --version |head -n 1 |sed "s/.* //;s/\.[0-9]\+$//"`
			automake --version
			echo "AUTOV is $AUTOV"
			install -d ${STAGING_DIR}/${HOST_SYS}/share/aclocal
			install -d ${STAGING_DIR}/${HOST_SYS}/share/aclocal-$AUTOV
			acpaths="$acpaths -I ${STAGING_DIR}/${HOST_SYS}/share/aclocal-$AUTOV -I ${STAGING_DIR}/${HOST_SYS}/share/aclocal"
			# autoreconf is too shy to overwrite aclocal.m4 if it doesn't look
			# like it was auto-generated. Work around this by blowing it away
			# by hand, unless the package specifically asked not to run aclocal.
			if ! echo ${EXTRA_AUTORECONF} | grep -q "aclocal"; then
				rm -f aclocal.m4
			fi
			if [ -e configure.in ]; then
				CONFIGURE_AC=configure.in
			else
				CONFIGURE_AC=configure.ac
			fi
			# glib-style gettext needs glib-gettextize unless the
			# configure.ac was already converted (detected via sed.*POTFILES).
			if grep "^AM_GLIB_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then
				if grep "sed.*POTFILES" $CONFIGURE_AC >/dev/null; then
					: do nothing -- we still have an old unmodified configure.ac
				else
					oenote Executing glib-gettextize --force --copy
					echo "no" | glib-gettextize --force --copy
				fi
			fi
			if grep "^AC_PROG_INTLTOOL" $CONFIGURE_AC >/dev/null; then
				oenote Executing intltoolize --copy --force --automake
				intltoolize --copy --force --automake
			fi
			oenote Executing autoreconf --verbose --install --force ${EXTRA_AUTORECONF} $acpaths
			mkdir -p m4
			autoreconf -Wcross --verbose --install --force ${EXTRA_AUTORECONF} $acpaths || oefatal "autoreconf execution failed."
			cd $olddir
		fi
		;;
	esac
	if [ -e ${S}/configure ]; then
		oe_runconf
	else
		oenote "nothing to configure"
	fi
}
|
||||
|
||||
# Default install: "make DESTDIR=${D} install" via oe_runmake.
# DESTDIR is single-quoted so make (not the shell) expands it.
autotools_do_install() {
	oe_runmake 'DESTDIR=${D}' install
}
|
||||
|
||||
STAGE_TEMP="${WORKDIR}/temp-staging"
|
||||
|
||||
# Stage the package's headers into STAGING_INCDIR by performing a scratch
# "install" into STAGE_TEMP and copying ${includedir} across.
# Set INHIBIT_AUTO_STAGE_INCLUDES = "1" to skip this.
autotools_stage_includes() {
	if [ "${INHIBIT_AUTO_STAGE_INCLUDES}" != "1" ]
	then
		rm -rf ${STAGE_TEMP}
		mkdir -p ${STAGE_TEMP}
		# Use oe_runmake (not bare make) so EXTRA_OEMAKE is honoured and a
		# failed install aborts the task, consistent with autotools_stage_all.
		oe_runmake DESTDIR="${STAGE_TEMP}" install
		# -f so re-staging over already-staged files cannot fail, matching
		# the cp flags used by autotools_stage_all.
		cp -fpPR ${STAGE_TEMP}/${includedir}/* ${STAGING_INCDIR}
		rm -rf ${STAGE_TEMP}
	fi
}
|
||||
|
||||
# Stage headers, libraries and aclocal macros by doing a scratch install
# into STAGE_TEMP and copying the relevant trees into the staging area.
# Set INHIBIT_AUTO_STAGE = "1" to skip staging entirely.
autotools_stage_all() {
	if [ "${INHIBIT_AUTO_STAGE}" = "1" ]
	then
		return
	fi
	rm -rf ${STAGE_TEMP}
	mkdir -p ${STAGE_TEMP}
	oe_runmake DESTDIR="${STAGE_TEMP}" install
	if [ -d ${STAGE_TEMP}/${includedir} ]; then
		cp -fpPR ${STAGE_TEMP}/${includedir}/* ${STAGING_INCDIR}
	fi
	if [ -d ${STAGE_TEMP}/${libdir} ]
	then
		for i in ${STAGE_TEMP}/${libdir}/*.la
		do
			# If the glob did not match, "$i" is the literal pattern and
			# not a file: no libtool archives exist, so fall back to a
			# plain recursive copy of the whole libdir and stop.
			if [ ! -f "$i" ]; then
				cp -fpPR ${STAGE_TEMP}/${libdir}/* ${STAGING_LIBDIR}
				break
			fi
			# Libtool-built library: let oe_libinstall stage the shared
			# objects and fix up the .la for staging.
			oe_libinstall -so $(basename $i .la) ${STAGING_LIBDIR}
		done
	fi
	if [ -d ${STAGE_TEMP}/${datadir}/aclocal ]; then
		install -d ${STAGING_DATADIR}/aclocal
		cp -fpPR ${STAGE_TEMP}/${datadir}/aclocal/* ${STAGING_DATADIR}/aclocal
	fi
	rm -rf ${STAGE_TEMP}
}
|
||||
|
||||
EXPORT_FUNCTIONS do_configure do_install
|
||||
793
meta/classes/base.bbclass
Normal file
793
meta/classes/base.bbclass
Normal file
@@ -0,0 +1,793 @@
|
||||
PATCHES_DIR="${S}"
|
||||
|
||||
def base_dep_prepend(d):
	"""Compute the default toolchain dependencies prepended to DEPENDS.

	Adds PATCH_DEPENDS (unless the recipe provides it itself) and, for
	cross builds that have not set INHIBIT_DEFAULT_DEPS, the virtual
	compiler and libc providers.
	"""
	import bb;
	#
	# Ideally this will check a flag so we will operate properly in
	# the case where host == build == target, for now we don't work in
	# that case though.
	#
	deps = ""

	# INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or not
	# we need that built is the responsibility of the patch function / class, not
	# the application.
	patchdeps = bb.data.getVar("PATCH_DEPENDS", d, 1)
	if patchdeps and not patchdeps in bb.data.getVar("PROVIDES", d, 1):
		deps = patchdeps

	if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
		# Only a cross build (host != build) needs the cross toolchain.
		if (bb.data.getVar('HOST_SYS', d, 1) !=
	            bb.data.getVar('BUILD_SYS', d, 1)):
			deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
	return deps
|
||||
|
||||
def base_read_file(filename):
	"""Return the stripped contents of *filename*, or "" if unreadable.

	Read errors are swallowed deliberately: the RDEPENDS handling may
	probe files that legitimately do not exist. :M:
	"""
	try:
		f = open(filename, "r")
	except IOError:
		return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
	# Close the handle even if read() fails (the original leaked it).
	try:
		return f.read().strip()
	finally:
		f.close()
|
||||
|
||||
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return *truevalue* if datastore *variable* equals *checkvalue*,
    otherwise *falsevalue*."""
    import bb
    current = bb.data.getVar(variable, d, 1)
    if current != checkvalue:
        return falsevalue
    return truevalue
|
||||
|
||||
DEPENDS_prepend="${@base_dep_prepend(d)} "
|
||||
|
||||
def base_set_filespath(path, d):
    """Set FILESPATH in *d* by crossing each directory in *path* with every
    OVERRIDES element (plus the trailing "" terminator, which yields the
    bare directory itself).

    Returns None; the result is stored via bb.data.setVar.
    """
    import os, bb
    # OVERRIDES is loop-invariant: fetch and prepare it once instead of
    # re-reading the datastore on every path element.
    overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
    filespath = []
    for p in path:
        for o in overrides.split(":"):
            filespath.append(os.path.join(p, o))
    bb.data.setVar("FILESPATH", ":".join(filespath), d)
|
||||
|
||||
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
|
||||
|
||||
def oe_filter(f, str, d):
    """Return the whitespace-separated words of *str* whose start matches
    the regex *f*, rejoined with single spaces.  *d* is unused (kept for
    the metadata calling convention)."""
    import re
    kept = [word for word in str.split() if re.match(f, word)]
    return " ".join(kept)
|
||||
|
||||
def oe_filter_out(f, str, d):
    """Return the whitespace-separated words of *str* whose start does NOT
    match the regex *f*, rejoined with single spaces.  *d* is unused (kept
    for the metadata calling convention)."""
    import re
    kept = [word for word in str.split() if not re.match(f, word)]
    return " ".join(kept)
|
||||
|
||||
# Abort the current task with a fatal error message.
die() {
	oefatal "$*"
}
|
||||
|
||||
# Print an informational "NOTE:" message to stdout.
oenote() {
	echo "NOTE:" "$*"
}
|
||||
|
||||
# Print a non-fatal "WARNING:" message to stdout.
oewarn() {
	echo "WARNING:" "$*"
}
|
||||
|
||||
# Print a "FATAL:" message and terminate the task with exit status 1.
oefatal() {
	echo "FATAL:" "$*"
	exit 1
}
|
||||
|
||||
# Print a "DEBUG:" message when the OEDEBUG level is at least $1.
# Usage: oedebug <level> "message..."
oedebug() {
	test $# -ge 2 || {
		echo "Usage: oedebug level \"message\""
		exit 1
	}

	# OEDEBUG defaults to 0 (debug output disabled).
	test ${OEDEBUG:-0} -ge $1 && {
		shift
		echo "DEBUG:" $*
	}
}
|
||||
|
||||
# Run ${MAKE} (defaulting to "make") with EXTRA_OEMAKE plus any given
# arguments, echoing the command first and dying on failure.
oe_runmake() {
	if [ x"$MAKE" = x ]; then MAKE=make; fi
	oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}
|
||||
|
||||
oe_soinstall() {
|
||||
# Purpose: Install shared library file and
|
||||
# create the necessary links
|
||||
# Example:
|
||||
#
|
||||
# oe_
|
||||
#
|
||||
#oenote installing shared library $1 to $2
|
||||
#
|
||||
libname=`basename $1`
|
||||
install -m 755 $1 $2/$libname
|
||||
sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
|
||||
solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
|
||||
ln -sf $libname $2/$sonamelink
|
||||
ln -sf $libname $2/$solink
|
||||
}
|
||||
|
||||
oe_libinstall() {
|
||||
# Purpose: Install a library, in all its forms
|
||||
# Example
|
||||
#
|
||||
# oe_libinstall libltdl ${STAGING_LIBDIR}/
|
||||
# oe_libinstall -C src/libblah libblah ${D}/${libdir}/
|
||||
dir=""
|
||||
libtool=""
|
||||
silent=""
|
||||
require_static=""
|
||||
require_shared=""
|
||||
staging_install=""
|
||||
while [ "$#" -gt 0 ]; do
|
||||
case "$1" in
|
||||
-C)
|
||||
shift
|
||||
dir="$1"
|
||||
;;
|
||||
-s)
|
||||
silent=1
|
||||
;;
|
||||
-a)
|
||||
require_static=1
|
||||
;;
|
||||
-so)
|
||||
require_shared=1
|
||||
;;
|
||||
-*)
|
||||
oefatal "oe_libinstall: unknown option: $1"
|
||||
;;
|
||||
*)
|
||||
break;
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
libname="$1"
|
||||
shift
|
||||
destpath="$1"
|
||||
if [ -z "$destpath" ]; then
|
||||
oefatal "oe_libinstall: no destination path specified"
|
||||
fi
|
||||
if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
|
||||
then
|
||||
staging_install=1
|
||||
fi
|
||||
|
||||
__runcmd () {
|
||||
if [ -z "$silent" ]; then
|
||||
echo >&2 "oe_libinstall: $*"
|
||||
fi
|
||||
$*
|
||||
}
|
||||
|
||||
if [ -z "$dir" ]; then
|
||||
dir=`pwd`
|
||||
fi
|
||||
dotlai=$libname.lai
|
||||
dir=$dir`(cd $dir; find -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
|
||||
olddir=`pwd`
|
||||
__runcmd cd $dir
|
||||
|
||||
lafile=$libname.la
|
||||
if [ -f "$lafile" ]; then
|
||||
# libtool archive
|
||||
eval `cat $lafile|grep "^library_names="`
|
||||
libtool=1
|
||||
else
|
||||
library_names="$libname.so* $libname.dll.a"
|
||||
fi
|
||||
|
||||
__runcmd install -d $destpath/
|
||||
dota=$libname.a
|
||||
if [ -f "$dota" -o -n "$require_static" ]; then
|
||||
__runcmd install -m 0644 $dota $destpath/
|
||||
fi
|
||||
if [ -f "$dotlai" -a -n "$libtool" ]; then
|
||||
if test -n "$staging_install"
|
||||
then
|
||||
# stop libtool using the final directory name for libraries
|
||||
# in staging:
|
||||
__runcmd rm -f $destpath/$libname.la
|
||||
__runcmd sed -e 's/^installed=yes$/installed=no/' -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' $dotlai >$destpath/$libname.la
|
||||
else
|
||||
__runcmd install -m 0644 $dotlai $destpath/$libname.la
|
||||
fi
|
||||
fi
|
||||
|
||||
for name in $library_names; do
|
||||
files=`eval echo $name`
|
||||
for f in $files; do
|
||||
if [ ! -e "$f" ]; then
|
||||
if [ -n "$libtool" ]; then
|
||||
oefatal "oe_libinstall: $dir/$f not found."
|
||||
fi
|
||||
elif [ -L "$f" ]; then
|
||||
__runcmd cp -P "$f" $destpath/
|
||||
elif [ ! -L "$f" ]; then
|
||||
libfile="$f"
|
||||
__runcmd install -m 0755 $libfile $destpath/
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
if [ -z "$libfile" ]; then
|
||||
if [ -n "$require_shared" ]; then
|
||||
oefatal "oe_libinstall: unable to locate shared library"
|
||||
fi
|
||||
elif [ -z "$libtool" ]; then
|
||||
# special case hack for non-libtool .so.#.#.# links
|
||||
baselibfile=`basename "$libfile"`
|
||||
if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
|
||||
sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
|
||||
solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
|
||||
if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
|
||||
__runcmd ln -sf $baselibfile $destpath/$sonamelink
|
||||
fi
|
||||
__runcmd ln -sf $baselibfile $destpath/$solink
|
||||
fi
|
||||
fi
|
||||
|
||||
__runcmd cd "$olddir"
|
||||
}
|
||||
|
||||
oe_machinstall() {
|
||||
# Purpose: Install machine dependent files, if available
|
||||
# If not available, check if there is a default
|
||||
# If no default, just touch the destination
|
||||
# Example:
|
||||
# $1 $2 $3 $4
|
||||
# oe_machinstall -m 0644 fstab ${D}/etc/fstab
|
||||
#
|
||||
# TODO: Check argument number?
|
||||
#
|
||||
filename=`basename $3`
|
||||
dirname=`dirname $3`
|
||||
|
||||
for o in `echo ${OVERRIDES} | tr ':' ' '`; do
|
||||
if [ -e $dirname/$o/$filename ]; then
|
||||
oenote $dirname/$o/$filename present, installing to $4
|
||||
install $1 $2 $dirname/$o/$filename $4
|
||||
return
|
||||
fi
|
||||
done
|
||||
# oenote overrides specific file NOT present, trying default=$3...
|
||||
if [ -e $3 ]; then
|
||||
oenote $3 present, installing to $4
|
||||
install $1 $2 $3 $4
|
||||
else
|
||||
oenote $3 NOT present, touching empty $4
|
||||
touch $4
|
||||
fi
|
||||
}
|
||||
|
||||
addtask showdata
|
||||
do_showdata[nostamp] = "1"
|
||||
python do_showdata() {
|
||||
import sys
|
||||
# emit variables and shell functions
|
||||
bb.data.emit_env(sys.__stdout__, d, True)
|
||||
# emit the metadata which isnt valid shell
|
||||
for e in d.keys():
|
||||
if bb.data.getVarFlag(e, 'python', d):
|
||||
sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
|
||||
}
|
||||
|
||||
addtask listtasks
|
||||
do_listtasks[nostamp] = "1"
|
||||
python do_listtasks() {
|
||||
import sys
|
||||
# emit variables and shell functions
|
||||
#bb.data.emit_env(sys.__stdout__, d)
|
||||
# emit the metadata which isnt valid shell
|
||||
for e in d.keys():
|
||||
if bb.data.getVarFlag(e, 'task', d):
|
||||
sys.__stdout__.write("%s\n" % e)
|
||||
}
|
||||
|
||||
addtask clean
|
||||
do_clean[dirs] = "${TOPDIR}"
|
||||
do_clean[nostamp] = "1"
|
||||
do_clean[bbdepcmd] = ""
|
||||
python base_do_clean() {
	"""clear the build and temp directories"""
	dir = bb.data.expand("${WORKDIR}", d)
	# Guard against WORKDIR expanding to nothing sensible before rm -rf.
	if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + dir)
	os.system('rm -rf ' + dir)

	# Also remove this recipe's stamp files so tasks will re-run.
	dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + dir)
	os.system('rm -f '+ dir)
}
|
||||
|
||||
addtask mrproper
|
||||
do_mrproper[dirs] = "${TOPDIR}"
|
||||
do_mrproper[nostamp] = "1"
|
||||
do_mrproper[bbdepcmd] = ""
|
||||
python base_do_mrproper() {
	"""clear downloaded sources, build and temp directories"""
	dir = bb.data.expand("${DL_DIR}", d)
	# Guard against DL_DIR expanding to the filesystem root.  The original
	# constructed the FuncFailed without raising it, making the check a
	# no-op; raise it, as the sibling base_do_clean does.
	if dir == '/': raise bb.build.FuncFailed("wrong DATADIR")
	bb.debug(2, "removing " + dir)
	os.system('rm -rf ' + dir)
	# mrproper implies clean.
	bb.build.exec_task('do_clean', d)
}
|
||||
|
||||
addtask fetch
|
||||
do_fetch[dirs] = "${DL_DIR}"
|
||||
do_fetch[nostamp] = "1"
|
||||
python base_do_fetch() {
|
||||
import sys
|
||||
|
||||
localdata = bb.data.createCopy(d)
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
src_uri = bb.data.getVar('SRC_URI', localdata, 1)
|
||||
if not src_uri:
|
||||
return 1
|
||||
|
||||
try:
|
||||
bb.fetch.init(src_uri.split(),d)
|
||||
except bb.fetch.NoMethodError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("No method: %s" % value)
|
||||
|
||||
try:
|
||||
bb.fetch.go(localdata)
|
||||
except bb.fetch.MissingParameterError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("Missing parameters: %s" % value)
|
||||
except bb.fetch.FetchError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("Fetch failed: %s" % value)
|
||||
}
|
||||
|
||||
def oe_unpack_file(file, data, url = None):
|
||||
import bb, os
|
||||
if not url:
|
||||
url = "file://%s" % file
|
||||
dots = file.split(".")
|
||||
if dots[-1] in ['gz', 'bz2', 'Z']:
|
||||
efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
|
||||
else:
|
||||
efile = file
|
||||
cmd = None
|
||||
if file.endswith('.tar'):
|
||||
cmd = 'tar x --no-same-owner -f %s' % file
|
||||
elif file.endswith('.tgz') or file.endswith('.tar.gz'):
|
||||
cmd = 'tar xz --no-same-owner -f %s' % file
|
||||
elif file.endswith('.tbz') or file.endswith('.tar.bz2'):
|
||||
cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
|
||||
elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
|
||||
cmd = 'gzip -dc %s > %s' % (file, efile)
|
||||
elif file.endswith('.bz2'):
|
||||
cmd = 'bzip2 -dc %s > %s' % (file, efile)
|
||||
elif file.endswith('.zip'):
|
||||
cmd = 'unzip -q'
|
||||
(type, host, path, user, pswd, parm) = bb.decodeurl(url)
|
||||
if 'dos' in parm:
|
||||
cmd = '%s -a' % cmd
|
||||
cmd = '%s %s' % (cmd, file)
|
||||
elif os.path.isdir(file):
|
||||
filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
|
||||
destdir = "."
|
||||
if file[0:len(filesdir)] == filesdir:
|
||||
destdir = file[len(filesdir):file.rfind('/')]
|
||||
destdir = destdir.strip('/')
|
||||
if len(destdir) < 1:
|
||||
destdir = "."
|
||||
elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
|
||||
os.makedirs("%s/%s" % (os.getcwd(), destdir))
|
||||
cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
|
||||
else:
|
||||
(type, host, path, user, pswd, parm) = bb.decodeurl(url)
|
||||
if not 'patch' in parm:
|
||||
# The "destdir" handling was specifically done for FILESPATH
|
||||
# items. So, only do so for file:// entries.
|
||||
if type == "file":
|
||||
destdir = bb.decodeurl(url)[1] or "."
|
||||
else:
|
||||
destdir = "."
|
||||
bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
|
||||
cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
|
||||
if not cmd:
|
||||
return True
|
||||
|
||||
|
||||
dest = os.path.join(os.getcwd(), os.path.basename(file))
|
||||
if os.path.exists(dest):
|
||||
if os.path.samefile(file, dest):
|
||||
return True
|
||||
|
||||
cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
|
||||
bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
|
||||
ret = os.system(cmd)
|
||||
return ret == 0
|
||||
|
||||
addtask unpack after do_fetch
|
||||
do_unpack[dirs] = "${WORKDIR}"
|
||||
python base_do_unpack() {
|
||||
import re, os
|
||||
|
||||
localdata = bb.data.createCopy(d)
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
src_uri = bb.data.getVar('SRC_URI', localdata)
|
||||
if not src_uri:
|
||||
return
|
||||
src_uri = bb.data.expand(src_uri, localdata)
|
||||
for url in src_uri.split():
|
||||
try:
|
||||
local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
|
||||
except bb.MalformedUrl, e:
|
||||
raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
|
||||
# dont need any parameters for extraction, strip them off
|
||||
local = re.sub(';.*$', '', local)
|
||||
local = os.path.realpath(local)
|
||||
ret = oe_unpack_file(local, localdata, url)
|
||||
if not ret:
|
||||
raise bb.build.FuncFailed()
|
||||
}
|
||||
|
||||
addtask patch after do_unpack
|
||||
do_patch[dirs] = "${WORKDIR}"
|
||||
python base_do_patch() {
|
||||
import re
|
||||
import bb.fetch
|
||||
|
||||
src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
|
||||
if not src_uri:
|
||||
return
|
||||
|
||||
patchcleancmd = bb.data.getVar('PATCHCLEANCMD', d, 1)
|
||||
if patchcleancmd:
|
||||
bb.data.setVar("do_patchcleancmd", patchcleancmd, d)
|
||||
bb.data.setVarFlag("do_patchcleancmd", "func", 1, d)
|
||||
bb.build.exec_func("do_patchcleancmd", d)
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
for url in src_uri:
|
||||
|
||||
(type, host, path, user, pswd, parm) = bb.decodeurl(url)
|
||||
if not "patch" in parm:
|
||||
continue
|
||||
|
||||
bb.fetch.init([url],d)
|
||||
url = bb.encodeurl((type, host, path, user, pswd, []))
|
||||
local = os.path.join('/', bb.fetch.localpath(url, d))
|
||||
|
||||
# did it need to be unpacked?
|
||||
dots = os.path.basename(local).split(".")
|
||||
if dots[-1] in ['gz', 'bz2', 'Z']:
|
||||
unpacked = os.path.join(bb.data.getVar('WORKDIR', d),'.'.join(dots[0:-1]))
|
||||
else:
|
||||
unpacked = local
|
||||
unpacked = bb.data.expand(unpacked, d)
|
||||
|
||||
if "pnum" in parm:
|
||||
pnum = parm["pnum"]
|
||||
else:
|
||||
pnum = "1"
|
||||
|
||||
if "pname" in parm:
|
||||
pname = parm["pname"]
|
||||
else:
|
||||
pname = os.path.basename(unpacked)
|
||||
|
||||
if "mindate" in parm:
|
||||
mindate = parm["mindate"]
|
||||
else:
|
||||
mindate = 0
|
||||
|
||||
if "maxdate" in parm:
|
||||
maxdate = parm["maxdate"]
|
||||
else:
|
||||
maxdate = "20711226"
|
||||
|
||||
pn = bb.data.getVar('PN', d, 1)
|
||||
srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
|
||||
|
||||
if not srcdate:
|
||||
srcdate = bb.data.getVar('SRCDATE', d, 1)
|
||||
|
||||
if srcdate == "now":
|
||||
srcdate = bb.data.getVar('DATE', d, 1)
|
||||
|
||||
if (maxdate < srcdate) or (mindate > srcdate):
|
||||
if (maxdate < srcdate):
|
||||
bb.note("Patch '%s' is outdated" % pname)
|
||||
|
||||
if (mindate > srcdate):
|
||||
bb.note("Patch '%s' is predated" % pname)
|
||||
|
||||
continue
|
||||
|
||||
bb.note("Applying patch '%s'" % pname)
|
||||
bb.data.setVar("do_patchcmd", bb.data.getVar("PATCHCMD", d, 1) % (pnum, pname, unpacked), d)
|
||||
bb.data.setVarFlag("do_patchcmd", "func", 1, d)
|
||||
bb.data.setVarFlag("do_patchcmd", "dirs", "${WORKDIR} ${S}", d)
|
||||
bb.build.exec_func("do_patchcmd", d)
|
||||
}
|
||||
|
||||
|
||||
addhandler base_eventhandler
|
||||
python base_eventhandler() {
|
||||
from bb import note, error, data
|
||||
from bb.event import Handled, NotHandled, getName
|
||||
import os
|
||||
|
||||
messages = {}
|
||||
messages["Completed"] = "completed"
|
||||
messages["Succeeded"] = "completed"
|
||||
messages["Started"] = "started"
|
||||
messages["Failed"] = "failed"
|
||||
|
||||
name = getName(e)
|
||||
msg = ""
|
||||
if name.startswith("Pkg"):
|
||||
msg += "package %s: " % data.getVar("P", e.data, 1)
|
||||
msg += messages.get(name[3:]) or name[3:]
|
||||
elif name.startswith("Task"):
|
||||
msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
|
||||
msg += messages.get(name[4:]) or name[4:]
|
||||
elif name.startswith("Build"):
|
||||
msg += "build %s: " % e.name
|
||||
msg += messages.get(name[5:]) or name[5:]
|
||||
elif name == "UnsatisfiedDep":
|
||||
msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
|
||||
note(msg)
|
||||
|
||||
if name.startswith("BuildStarted"):
|
||||
bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
|
||||
path_to_bbfiles = bb.data.getVar( 'BBFILES', e.data, 1 )
|
||||
path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
|
||||
monotone_revision = "<unknown>"
|
||||
try:
|
||||
monotone_revision = file( "%s/MT/revision" % path_to_packages ).read().strip()
|
||||
except IOError:
|
||||
pass
|
||||
bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
|
||||
statusvars = ['BB_VERSION', 'OE_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
|
||||
statuslines = ["%-14s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
|
||||
statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
|
||||
print statusmsg
|
||||
|
||||
needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
|
||||
pesteruser = []
|
||||
for v in needed_vars:
|
||||
val = bb.data.getVar(v, e.data, 1)
|
||||
if not val or val == 'INVALID':
|
||||
pesteruser.append(v)
|
||||
if pesteruser:
|
||||
bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
|
||||
|
||||
if not data in e.__dict__:
|
||||
return NotHandled
|
||||
|
||||
log = data.getVar("EVENTLOG", e.data, 1)
|
||||
if log:
|
||||
logfile = file(log, "a")
|
||||
logfile.write("%s\n" % msg)
|
||||
logfile.close()
|
||||
|
||||
return NotHandled
|
||||
}
|
||||
|
||||
addtask configure after do_unpack do_patch
|
||||
do_configure[dirs] = "${S} ${B}"
|
||||
do_configure[bbdepcmd] = "do_populate_staging"
|
||||
# Default configure: a no-op; classes such as autotools.bbclass override it.
base_do_configure() {
	:
}
|
||||
|
||||
addtask compile after do_configure
|
||||
do_compile[dirs] = "${S} ${B}"
|
||||
do_compile[bbdepcmd] = "do_populate_staging"
|
||||
# Default compile: run make (via oe_runmake) when a Makefile is present,
# otherwise report that there is nothing to do.
base_do_compile() {
	if [ -e Makefile -o -e makefile ]; then
		oe_runmake || die "make failed"
	else
		oenote "nothing to compile"
	fi
}
|
||||
|
||||
|
||||
addtask stage after do_compile
|
||||
# Default stage: a no-op; recipes/classes that need staging override it.
base_do_stage () {
	:
}
|
||||
|
||||
do_populate_staging[dirs] = "${STAGING_DIR}/${TARGET_SYS}/bin ${STAGING_DIR}/${TARGET_SYS}/lib \
|
||||
${STAGING_DIR}/${TARGET_SYS}/include \
|
||||
${STAGING_DIR}/${BUILD_SYS}/bin ${STAGING_DIR}/${BUILD_SYS}/lib \
|
||||
${STAGING_DIR}/${BUILD_SYS}/include \
|
||||
${STAGING_DATADIR} \
|
||||
${S} ${B}"
|
||||
|
||||
addtask populate_staging after do_compile
|
||||
|
||||
# Staging is delegated to whatever do_stage implementation the recipe
# or its inherited classes provide.
python do_populate_staging () {
	bb.build.exec_func('do_stage', d)
}
|
||||
|
||||
addtask install after do_compile
|
||||
do_install[dirs] = "${S} ${B}"
|
||||
|
||||
# Default install: a no-op; classes such as autotools.bbclass override it.
base_do_install() {
	:
}
|
||||
|
||||
# Default package: a no-op; packaging classes override it.
base_do_package() {
	:
}
|
||||
|
||||
addtask build after do_populate_staging
|
||||
do_build = ""
|
||||
do_build[func] = "1"
|
||||
|
||||
# Functions that update metadata based on files outputted
|
||||
# during the build process.
|
||||
|
||||
SHLIBS = ""
|
||||
RDEPENDS_prepend = " ${SHLIBS}"
|
||||
|
||||
def explode_deps(s):
    """Split a dependency string into a list of items, keeping any
    parenthesised version constraint (e.g. "(>= 1.0)") attached to the
    item that precedes it."""
    result = []
    in_constraint = False
    group = []
    for token in s.split():
        if token[0] == '(':
            in_constraint = True
            group = []
        if not in_constraint:
            result.append(token)
        else:
            group.append(token)
            if token.endswith(')'):
                in_constraint = False
                result[-1] += ' ' + ' '.join(group)
    return result
|
||||
|
||||
python read_shlibdeps () {
|
||||
packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
|
||||
for pkg in packages:
|
||||
rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
|
||||
shlibsfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".shlibdeps", d)
|
||||
if os.access(shlibsfile, os.R_OK):
|
||||
fd = file(shlibsfile)
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
for l in lines:
|
||||
rdepends.append(l.rstrip())
|
||||
pcfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".pcdeps", d)
|
||||
if os.access(pcfile, os.R_OK):
|
||||
fd = file(pcfile)
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
for l in lines:
|
||||
rdepends.append(l.rstrip())
|
||||
bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
|
||||
}
|
||||
|
||||
python read_subpackage_metadata () {
|
||||
import re
|
||||
|
||||
def decode(str):
|
||||
import codecs
|
||||
c = codecs.getdecoder("string_escape")
|
||||
return c(str)[0]
|
||||
|
||||
data_file = bb.data.expand("${WORKDIR}/install/${PN}.package", d)
|
||||
if os.access(data_file, os.R_OK):
|
||||
f = file(data_file, 'r')
|
||||
lines = f.readlines()
|
||||
f.close()
|
||||
r = re.compile("([^:]+):\s*(.*)")
|
||||
for l in lines:
|
||||
m = r.match(l)
|
||||
if m:
|
||||
bb.data.setVar(m.group(1), decode(m.group(2)), d)
|
||||
}
|
||||
|
||||
python __anonymous () {
|
||||
import exceptions
|
||||
need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
|
||||
if need_host:
|
||||
import re
|
||||
this_host = bb.data.getVar('HOST_SYS', d, 1)
|
||||
if not re.match(need_host, this_host):
|
||||
raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
|
||||
|
||||
need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
|
||||
if need_machine:
|
||||
import re
|
||||
this_machine = bb.data.getVar('MACHINE', d, 1)
|
||||
if not re.match(need_machine, this_machine):
|
||||
raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
|
||||
|
||||
pn = bb.data.getVar('PN', d, 1)
|
||||
|
||||
srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
|
||||
if srcdate != None:
|
||||
bb.data.setVar('SRCDATE', srcdate, d)
|
||||
|
||||
use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
|
||||
if use_nls != None:
|
||||
bb.data.setVar('USE_NLS', use_nls, d)
|
||||
}
|
||||
|
||||
python () {
|
||||
import bb, os
|
||||
mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
|
||||
old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
|
||||
if (old_arch == mach_arch):
|
||||
# Nothing to do
|
||||
return
|
||||
if (bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1) == '0'):
|
||||
return
|
||||
paths = []
|
||||
for p in [ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ]:
|
||||
paths.append(bb.data.expand(os.path.join(p, mach_arch), d))
|
||||
for s in bb.data.getVar('SRC_URI', d, 1).split():
|
||||
local = bb.data.expand(bb.fetch.localpath(s, d), d)
|
||||
for mp in paths:
|
||||
if local.startswith(mp):
|
||||
# bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
|
||||
bb.data.setVar('PACKAGE_ARCH', mach_arch, d)
|
||||
return
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_patch do_populate_pkgs do_stage
|
||||
|
||||
MIRRORS[func] = "0"
|
||||
MIRRORS () {
|
||||
${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
|
||||
${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
|
||||
${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
|
||||
${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
|
||||
${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
|
||||
${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
|
||||
ftp://ftp.kernel.org/pub http://www.kernel.org/pub
|
||||
ftp://ftp.kernel.org/pub ftp://ftp.us.kernel.org/pub
|
||||
ftp://ftp.kernel.org/pub ftp://ftp.uk.kernel.org/pub
|
||||
ftp://ftp.kernel.org/pub ftp://ftp.hk.kernel.org/pub
|
||||
ftp://ftp.kernel.org/pub ftp://ftp.au.kernel.org/pub
|
||||
ftp://ftp.kernel.org/pub ftp://ftp.jp.kernel.org/pub
|
||||
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
|
||||
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
|
||||
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
|
||||
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
|
||||
|
||||
ftp://.*/.*/ http://www.oesources.org/source/current/
|
||||
http://.*/.*/ http://www.oesources.org/source/current/
|
||||
}
|
||||
|
||||
20
meta/classes/base_srpm.bbclass
Normal file
20
meta/classes/base_srpm.bbclass
Normal file
@@ -0,0 +1,20 @@
|
||||
inherit base package rpm_core
|
||||
|
||||
SPECFILE="${RPMBUILDPATH}/SPECS/${PN}.spec"
|
||||
|
||||
base_srpm_do_unpack() {
|
||||
test -e ${SRPMFILE} || die "Source rpm \"${SRPMFILE}\"does not exist"
|
||||
if ! test -e ${SPECFILE}; then
|
||||
${RPM} -i ${SRPMFILE}
|
||||
fi
|
||||
test -e ${SPECFILE} || die "Spec file \"${SPECFILE}\" does not exist"
|
||||
${RPMBUILD} -bp ${SPECFILE}
|
||||
}
|
||||
|
||||
base_srpm_do_compile() {
|
||||
${RPMBUILD} -bc ${SPECFILE}
|
||||
}
|
||||
|
||||
base_srpm_do_install() {
|
||||
${RPMBUILD} -bi ${SPECFILE}
|
||||
}
|
||||
36
meta/classes/binconfig.bbclass
Normal file
36
meta/classes/binconfig.bbclass
Normal file
@@ -0,0 +1,36 @@
|
||||
inherit base
|
||||
|
||||
# The namespaces can clash here hence the two step replace
|
||||
def get_binconfig_mangle(d):
|
||||
import bb.data
|
||||
s = "-e ''"
|
||||
if not bb.data.inherits_class('native', d):
|
||||
s += " -e 's:=${libdir}:=OELIBDIR:;'"
|
||||
s += " -e 's:=${includedir}:=OEINCDIR:;'"
|
||||
s += " -e 's:=${datadir}:=OEDATADIR:'"
|
||||
s += " -e 's:=${prefix}:=OEPREFIX:'"
|
||||
s += " -e 's:=${exec_prefix}:=OEEXECPREFIX:'"
|
||||
s += " -e 's:-L${libdir}:-LOELIBDIR:;'"
|
||||
s += " -e 's:-I${includedir}:-IOEINCDIR:;'"
|
||||
s += " -e 's:OELIBDIR:${STAGING_LIBDIR}:;'"
|
||||
s += " -e 's:OEINCDIR:${STAGING_INCDIR}:;'"
|
||||
s += " -e 's:OEDATADIR:${STAGING_DATADIR}:'"
|
||||
s += " -e 's:OEPREFIX:${STAGING_LIBDIR}/..:'"
|
||||
s += " -e 's:OEEXECPREFIX:${STAGING_LIBDIR}/..:'"
|
||||
return s
|
||||
|
||||
# Native package configurations go in ${BINDIR}/<name>-config-native to prevent a collision with cross packages
|
||||
def is_native(d):
|
||||
import bb.data
|
||||
return ["","-native"][bb.data.inherits_class('native', d)]
|
||||
|
||||
BINCONFIG_GLOB ?= "*-config"
|
||||
|
||||
do_stage_append() {
|
||||
for config in `find ${S} -name '${BINCONFIG_GLOB}'`; do
|
||||
configname=`basename $config`${@is_native(d)}
|
||||
install -d ${STAGING_BINDIR}
|
||||
cat $config | sed ${@get_binconfig_mangle(d)} > ${STAGING_BINDIR}/$configname
|
||||
chmod u+x ${STAGING_BINDIR}/$configname
|
||||
done
|
||||
}
|
||||
11
meta/classes/ccache.inc
Normal file
11
meta/classes/ccache.inc
Normal file
@@ -0,0 +1,11 @@
|
||||
# Make ccache use a TMPDIR specific ccache directory if using the crosscompiler,
|
||||
# since it isn't likely to be useful with any other toolchain than the one we just
|
||||
# built, and would otherwise push more useful things out of the default cache.
|
||||
|
||||
CCACHE_DIR_TARGET = "${TMPDIR}/ccache"
|
||||
|
||||
python () {
|
||||
if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
|
||||
bb.data.setVar('CCACHE_DIR', '${CCACHE_DIR_TARGET}', d)
|
||||
bb.data.setVarFlag('CCACHE_DIR', 'export', '1', d)
|
||||
}
|
||||
21
meta/classes/ccdv.bbclass
Normal file
21
meta/classes/ccdv.bbclass
Normal file
@@ -0,0 +1,21 @@
|
||||
python () {
|
||||
if bb.data.getVar('PN', d, 1) in ['ccdv-native']:
|
||||
if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d, 1):
|
||||
bb.data.setVar("DEPENDS", '%s %s' % ("ccdv-native", bb.data.getVar("DEPENDS", d, 1) or ""), d)
|
||||
bb.data.setVar("CC", '%s %s' % ("ccdv", bb.data.getVar("CC", d, 1) or ""), d)
|
||||
bb.data.setVar("BUILD_CC", '%s %s' % ("ccdv", bb.data.getVar("BUILD_CC", d, 1) or ""), d)
|
||||
bb.data.setVar("CCLD", '%s %s' % ("ccdv", bb.data.getVar("CCLD", d, 1) or ""), d)
|
||||
}
|
||||
|
||||
def quiet_libtool(bb,d):
|
||||
deps = (bb.data.getVar('DEPENDS', d, 1) or "").split()
|
||||
if 'libtool-cross' in deps:
|
||||
return "'LIBTOOL=${STAGING_BINDIR}/${HOST_SYS}-libtool --silent'"
|
||||
elif 'libtool-native' in deps:
|
||||
return "'LIBTOOL=${B}/${HOST_SYS}-libtool --silent'"
|
||||
else:
|
||||
return ""
|
||||
|
||||
CCDV = "ccdv"
|
||||
EXTRA_OEMAKE_append = " ${@quiet_libtool(bb,d)}"
|
||||
MAKE += "-s"
|
||||
8
meta/classes/cml1.bbclass
Normal file
8
meta/classes/cml1.bbclass
Normal file
@@ -0,0 +1,8 @@
|
||||
cml1_do_configure() {
|
||||
set -e
|
||||
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
|
||||
oe_runmake oldconfig
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_configure
|
||||
addtask configure after do_unpack do_patch before do_compile
|
||||
20
meta/classes/cpan.bbclass
Normal file
20
meta/classes/cpan.bbclass
Normal file
@@ -0,0 +1,20 @@
|
||||
FILES_${PN} += '${libdir}/perl5'
|
||||
|
||||
cpan_do_configure () {
|
||||
perl Makefile.PL
|
||||
if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
|
||||
. ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
|
||||
sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
|
||||
mv Makefile.new Makefile
|
||||
fi
|
||||
}
|
||||
|
||||
cpan_do_compile () {
|
||||
oe_runmake PASTHRU_INC="${CFLAGS}"
|
||||
}
|
||||
|
||||
cpan_do_install () {
|
||||
oe_runmake install_vendor
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_configure do_compile do_install
|
||||
55
meta/classes/cross.bbclass
Normal file
55
meta/classes/cross.bbclass
Normal file
@@ -0,0 +1,55 @@
|
||||
# Cross packages are built indirectly via dependency,
|
||||
# no need for them to be a direct target of 'world'
|
||||
EXCLUDE_FROM_WORLD = "1"
|
||||
|
||||
PACKAGES = ""
|
||||
|
||||
HOST_ARCH = "${BUILD_ARCH}"
|
||||
HOST_VENDOR = "${BUILD_VENDOR}"
|
||||
HOST_OS = "${BUILD_OS}"
|
||||
HOST_PREFIX = "${BUILD_PREFIX}"
|
||||
HOST_CC_ARCH = "${BUILD_CC_ARCH}"
|
||||
|
||||
CPPFLAGS = "${BUILD_CPPFLAGS}"
|
||||
CFLAGS = "${BUILD_CFLAGS}"
|
||||
CXXFLAGS = "${BUILD_CFLAGS}"
|
||||
LDFLAGS = "${BUILD_LDFLAGS}"
|
||||
LDFLAGS_build-darwin = "-L${STAGING_DIR}/${BUILD_SYS}/lib "
|
||||
|
||||
# Overrides for paths
|
||||
|
||||
# Path prefixes
|
||||
base_prefix = "${exec_prefix}"
|
||||
prefix = "${CROSS_DIR}"
|
||||
exec_prefix = "${prefix}"
|
||||
|
||||
# Base paths
|
||||
base_bindir = "${base_prefix}/bin"
|
||||
base_sbindir = "${base_prefix}/bin"
|
||||
base_libdir = "${base_prefix}/lib"
|
||||
|
||||
# Architecture independent paths
|
||||
datadir = "${prefix}/share"
|
||||
sysconfdir = "${prefix}/etc"
|
||||
sharedstatedir = "${prefix}/com"
|
||||
localstatedir = "${prefix}/var"
|
||||
infodir = "${datadir}/info"
|
||||
mandir = "${datadir}/man"
|
||||
docdir = "${datadir}/doc"
|
||||
servicedir = "${prefix}/srv"
|
||||
|
||||
# Architecture dependent paths
|
||||
bindir = "${exec_prefix}/bin"
|
||||
sbindir = "${exec_prefix}/bin"
|
||||
libexecdir = "${exec_prefix}/libexec"
|
||||
libdir = "${exec_prefix}/lib"
|
||||
includedir = "${exec_prefix}/include"
|
||||
oldincludedir = "${exec_prefix}/include"
|
||||
|
||||
do_stage () {
|
||||
oe_runmake install
|
||||
}
|
||||
|
||||
do_install () {
|
||||
:
|
||||
}
|
||||
101
meta/classes/debian.bbclass
Normal file
101
meta/classes/debian.bbclass
Normal file
@@ -0,0 +1,101 @@
|
||||
STAGING_PKGMAPS_DIR = "${STAGING_DIR}/pkgmaps/debian"
|
||||
|
||||
# Debain package renaming only occurs when a package is built
|
||||
# We therefore have to make sure we build all runtime packages
|
||||
# before building the current package to make the packages runtime
|
||||
# depends are correct
|
||||
BUILD_ALL_DEPS = "1"
|
||||
|
||||
python debian_package_name_hook () {
|
||||
import glob, copy, stat, errno, re
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
packages = bb.data.getVar('PACKAGES', d, 1)
|
||||
|
||||
def socrunch(s):
|
||||
s = s.lower().replace('_', '-')
|
||||
m = re.match("^(.*)(.)\.so\.(.*)$", s)
|
||||
if m is None:
|
||||
return None
|
||||
if m.group(2) in '0123456789':
|
||||
bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
|
||||
else:
|
||||
bin = m.group(1) + m.group(2) + m.group(3)
|
||||
dev = m.group(1) + m.group(2)
|
||||
return (bin, dev)
|
||||
|
||||
def isexec(path):
|
||||
try:
|
||||
s = os.stat(path)
|
||||
except (os.error, AttributeError):
|
||||
return 0
|
||||
return (s[stat.ST_MODE] & stat.S_IEXEC)
|
||||
|
||||
def auto_libname(packages, orig_pkg):
|
||||
bin_re = re.compile(".*/s?bin$")
|
||||
lib_re = re.compile(".*/lib$")
|
||||
so_re = re.compile("lib.*\.so")
|
||||
sonames = []
|
||||
has_bins = 0
|
||||
has_libs = 0
|
||||
pkg_dir = os.path.join(workdir, "install", orig_pkg)
|
||||
for root, dirs, files in os.walk(pkg_dir):
|
||||
if bin_re.match(root) and files:
|
||||
has_bins = 1
|
||||
if lib_re.match(root) and files:
|
||||
has_libs = 1
|
||||
for f in files:
|
||||
if so_re.match(f):
|
||||
fp = os.path.join(root, f)
|
||||
cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + fp + " 2>/dev/null"
|
||||
fd = os.popen(cmd)
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
for l in lines:
|
||||
m = re.match("\s+SONAME\s+([^\s]*)", l)
|
||||
if m and not m.group(1) in sonames:
|
||||
sonames.append(m.group(1))
|
||||
|
||||
bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
|
||||
soname = None
|
||||
if len(sonames) == 1:
|
||||
soname = sonames[0]
|
||||
elif len(sonames) > 1:
|
||||
lead = bb.data.getVar('LEAD_SONAME', d, 1)
|
||||
if lead:
|
||||
r = re.compile(lead)
|
||||
filtered = []
|
||||
for s in sonames:
|
||||
if r.match(s):
|
||||
filtered.append(s)
|
||||
if len(filtered) == 1:
|
||||
soname = filtered[0]
|
||||
elif len(filtered) > 1:
|
||||
bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
|
||||
else:
|
||||
bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
|
||||
else:
|
||||
bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))
|
||||
|
||||
if has_libs and not has_bins and soname:
|
||||
soname_result = socrunch(soname)
|
||||
if soname_result:
|
||||
(pkgname, devname) = soname_result
|
||||
for pkg in packages.split():
|
||||
if (bb.data.getVar('PKG_' + pkg, d) or bb.data.getVar('DEBIAN_NOAUTONAME_' + pkg, d)):
|
||||
continue
|
||||
if pkg == orig_pkg:
|
||||
newpkg = pkgname
|
||||
else:
|
||||
newpkg = pkg.replace(orig_pkg, devname, 1)
|
||||
if newpkg != pkg:
|
||||
bb.data.setVar('PKG_' + pkg, newpkg, d)
|
||||
|
||||
for pkg in (bb.data.getVar('AUTO_LIBNAME_PKGS', d, 1) or "").split():
|
||||
auto_libname(packages, pkg)
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS package_name_hook
|
||||
|
||||
DEBIAN_NAMES = 1
|
||||
|
||||
14
meta/classes/distutils-base.bbclass
Normal file
14
meta/classes/distutils-base.bbclass
Normal file
@@ -0,0 +1,14 @@
|
||||
EXTRA_OEMAKE = ""
|
||||
DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
|
||||
RDEPENDS += "python-core"
|
||||
|
||||
def python_dir(d):
|
||||
import os, bb
|
||||
staging_incdir = bb.data.getVar( "STAGING_INCDIR", d, 1 )
|
||||
if os.path.exists( "%s/python2.3" % staging_incdir ): return "python2.3"
|
||||
if os.path.exists( "%s/python2.4" % staging_incdir ): return "python2.4"
|
||||
raise "No Python in STAGING_INCDIR. Forgot to build python-native ?"
|
||||
|
||||
PYTHON_DIR = "${@python_dir(d)}"
|
||||
FILES_${PN} = "${bindir} ${libdir} ${libdir}/${PYTHON_DIR}"
|
||||
|
||||
15
meta/classes/distutils.bbclass
Normal file
15
meta/classes/distutils.bbclass
Normal file
@@ -0,0 +1,15 @@
|
||||
inherit distutils-base
|
||||
|
||||
distutils_do_compile() {
|
||||
BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
|
||||
${STAGING_BINDIR}/python setup.py build || \
|
||||
oefatal "python setup.py build execution failed."
|
||||
}
|
||||
|
||||
distutils_do_install() {
|
||||
BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
|
||||
${STAGING_BINDIR}/python setup.py install --prefix=${D}/${prefix} --install-data=${D}/${datadir} || \
|
||||
oefatal "python setup.py install execution failed."
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_compile do_install
|
||||
37
meta/classes/e.bbclass
Normal file
37
meta/classes/e.bbclass
Normal file
@@ -0,0 +1,37 @@
|
||||
MAINTAINER = "Justin Patrin <papercrane@reversefold.com>"
|
||||
HOMEPAGE = "http://www.enlightenment.org"
|
||||
SECTION = "e/apps"
|
||||
|
||||
inherit autotools pkgconfig binconfig
|
||||
|
||||
do_prepsources () {
|
||||
make clean distclean || true
|
||||
}
|
||||
addtask prepsources after do_fetch before do_unpack
|
||||
|
||||
def binconfig_suffix(d):
|
||||
import bb
|
||||
return ["","-native"][bb.data.inherits_class('native', d)]
|
||||
|
||||
export CURL_CONFIG = "${STAGING_BINDIR}/curl-config${@binconfig_suffix(d)}"
|
||||
export EDB_CONFIG = "${STAGING_BINDIR}/edb-config${@binconfig_suffix(d)}"
|
||||
export EET_CONFIG = "${STAGING_BINDIR}/eet-config${@binconfig_suffix(d)}"
|
||||
export EVAS_CONFIG = "${STAGING_BINDIR}/evas-config${@binconfig_suffix(d)}"
|
||||
export ECORE_CONFIG = "${STAGING_BINDIR}/ecore-config${@binconfig_suffix(d)}"
|
||||
export EMBRYO_CONFIG = "${STAGING_BINDIR}/embryo-config${@binconfig_suffix(d)}"
|
||||
export ENGRAVE_CONFIG = "${STAGING_BINDIR}/engrave-config${@binconfig_suffix(d)}"
|
||||
export ENLIGHTENMENT_CONFIG = "${STAGING_BINDIR}/enlightenment-config${@binconfig_suffix(d)}"
|
||||
export EPSILON_CONFIG = "${STAGING_BINDIR}/epsilon-config${@binconfig_suffix(d)}"
|
||||
export EPEG_CONFIG = "${STAGING_BINDIR}/epeg-config${@binconfig_suffix(d)}"
|
||||
export ESMART_CONFIG = "${STAGING_BINDIR}/esmart-config${@binconfig_suffix(d)}"
|
||||
export FREETYPE_CONFIG = "${STAGING_BINDIR}/freetype-config${@binconfig_suffix(d)}"
|
||||
export IMLIB2_CONFIG = "${STAGING_BINDIR}/imlib2-config${@binconfig_suffix(d)}"
|
||||
|
||||
do_compile_prepend() {
|
||||
find ${S} -name Makefile | xargs sed -i 's:/usr/include:${STAGING_INCDIR}:'
|
||||
find ${S} -name Makefile | xargs sed -i 's:/usr/X11R6/include:${STAGING_INCDIR}:'
|
||||
}
|
||||
|
||||
PACKAGES = "${PN} ${PN}-themes"
|
||||
FILES_${PN} = "${libdir}/lib*.so*"
|
||||
FILES_${PN}-themes = "${datadir}/${PN}/themes ${datadir}/${PN}/data ${datadir}/${PN}/fonts ${datadir}/${PN}/pointers ${datadir}/${PN}/images ${datadir}/${PN}/users ${datadir}/${PN}/images ${datadir}/${PN}/styles"
|
||||
49
meta/classes/efl.bbclass
Normal file
49
meta/classes/efl.bbclass
Normal file
@@ -0,0 +1,49 @@
|
||||
inherit e
|
||||
|
||||
SECTION = "e/libs"
|
||||
|
||||
SRCNAME = "${@bb.data.getVar('PN', d, 1).replace('-native', '')}"
|
||||
SRC_URI = "${E_URI}/${SRCNAME}-${PV}.tar.gz"
|
||||
S = "${WORKDIR}/${SRCNAME}-${PV}"
|
||||
|
||||
INHIBIT_AUTO_STAGE_INCLUDES = "1"
|
||||
INHIBIT_NATIVE_STAGE_INSTALL = "1"
|
||||
|
||||
libdirectory = "src/lib"
|
||||
libraries = "lib${SRCNAME}"
|
||||
headers = "${@bb.data.getVar('SRCNAME',d,1).capitalize()}.h"
|
||||
|
||||
do_stage_append () {
|
||||
for i in ${libraries}
|
||||
do
|
||||
oe_libinstall -C ${libdirectory} $i ${STAGING_LIBDIR}
|
||||
done
|
||||
for i in ${headers}
|
||||
do
|
||||
install -m 0644 ${libdirectory}/$i ${STAGING_INCDIR}
|
||||
done
|
||||
|
||||
# Install binaries automatically for native builds
|
||||
if [ "${@binconfig_suffix(d)}" = "-native" ]
|
||||
then
|
||||
|
||||
# Most EFL binaries start with the package name
|
||||
for i in src/bin/${SRCNAME}*
|
||||
do
|
||||
if [ -x $i -a -f $i ]
|
||||
then
|
||||
|
||||
# Don't install anything with an extension (.so, etc)
|
||||
if echo $i | grep -v \\.
|
||||
then
|
||||
${HOST_SYS}-libtool --mode=install install -m 0755 $i ${STAGING_BINDIR}
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
PACKAGES = "${PN} ${PN}-themes ${PN}-dev ${PN}-examples"
|
||||
FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a"
|
||||
FILES_${PN}-examples = "${bindir} ${datadir}"
|
||||
|
||||
5
meta/classes/flow-lossage.bbclass
Normal file
5
meta/classes/flow-lossage.bbclass
Normal file
@@ -0,0 +1,5 @@
|
||||
# gcc-3.4 blows up in gtktext with -frename-registers on arm-linux
|
||||
python () {
|
||||
cflags = (bb.data.getVar('CFLAGS', d, 1) or '').replace('-frename-registers', '')
|
||||
bb.data.setVar('CFLAGS', cflags, d)
|
||||
}
|
||||
59
meta/classes/gconf.bbclass
Normal file
59
meta/classes/gconf.bbclass
Normal file
@@ -0,0 +1,59 @@
|
||||
DEPENDS += "gconf"
|
||||
|
||||
gconf_postinst() {
|
||||
if [ "$1" = configure ]; then
|
||||
if [ "x$D" != "x" ]; then
|
||||
exit 1
|
||||
fi
|
||||
SCHEMA_LOCATION=/etc/gconf/schemas
|
||||
for SCHEMA in ${SCHEMA_FILES}; do
|
||||
if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
|
||||
HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
|
||||
gconftool-2 \
|
||||
--makefile-install-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
|
||||
fi
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
gconf_prerm() {
|
||||
if [ "$1" = remove ] || [ "$1" = upgrade ]; then
|
||||
SCHEMA_LOCATION=/etc/gconf/schemas
|
||||
for SCHEMA in ${SCHEMA_FILES}; do
|
||||
if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
|
||||
HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
|
||||
gconftool-2 \
|
||||
--makefile-uninstall-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
|
||||
fi
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
python populate_packages_append () {
|
||||
import os.path, re
|
||||
packages = bb.data.getVar('PACKAGES', d, 1).split()
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
|
||||
for pkg in packages:
|
||||
schema_dir = '%s/install/%s/etc/gconf/schemas' % (workdir, pkg)
|
||||
schemas = []
|
||||
schema_re = re.compile(".*\.schemas$")
|
||||
if os.path.exists(schema_dir):
|
||||
for f in os.listdir(schema_dir):
|
||||
if schema_re.match(f):
|
||||
schemas.append(f)
|
||||
if schemas != []:
|
||||
bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
|
||||
bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
|
||||
postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
|
||||
if not postinst:
|
||||
postinst = '#!/bin/sh\n'
|
||||
postinst += bb.data.getVar('gconf_postinst', d, 1)
|
||||
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
|
||||
prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1)
|
||||
if not prerm:
|
||||
prerm = '#!/bin/sh\n'
|
||||
prerm += bb.data.getVar('gconf_prerm', d, 1)
|
||||
bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
|
||||
|
||||
}
|
||||
12
meta/classes/gettext.bbclass
Normal file
12
meta/classes/gettext.bbclass
Normal file
@@ -0,0 +1,12 @@
|
||||
python () {
|
||||
# Remove the NLS bits if USE_NLS is no.
|
||||
if bb.data.getVar('USE_NLS', d, 1) == 'no':
|
||||
cfg = oe_filter_out('^--(dis|en)able-nls$', bb.data.getVar('EXTRA_OECONF', d, 1) or "", d)
|
||||
cfg += " --disable-nls"
|
||||
depends = bb.data.getVar('DEPENDS', d, 1) or ""
|
||||
bb.data.setVar('DEPENDS', oe_filter_out('^(virtual/libiconv|virtual/libintl)$', depends, d), d)
|
||||
bb.data.setVar('EXTRA_OECONF', cfg, d)
|
||||
}
|
||||
|
||||
DEPENDS =+ "gettext-native"
|
||||
EXTRA_OECONF += "--enable-nls"
|
||||
20
meta/classes/gnome.bbclass
Normal file
20
meta/classes/gnome.bbclass
Normal file
@@ -0,0 +1,20 @@
|
||||
def gnome_verdir(v):
|
||||
import re
|
||||
m = re.match("([0-9]+)\.([0-9]+)\..*", v)
|
||||
return "%s.%s" % (m.group(1), m.group(2))
|
||||
|
||||
SECTION ?= "x11/gnome"
|
||||
SRC_URI = "${GNOME_MIRROR}/${PN}/${@gnome_verdir("${PV}")}/${PN}-${PV}.tar.bz2"
|
||||
|
||||
DEPENDS += "gnome-common"
|
||||
|
||||
FILES_${PN} += "${datadir}/application-registry ${datadir}/mime-info \
|
||||
${datadir}/gnome-2.0"
|
||||
|
||||
inherit autotools pkgconfig gconf
|
||||
|
||||
EXTRA_AUTORECONF += "-I ${STAGING_DIR}/${HOST_SYS}/share/aclocal/gnome2-macros"
|
||||
|
||||
gnome_stage_includes() {
|
||||
autotools_stage_includes
|
||||
}
|
||||
17
meta/classes/gpe.bbclass
Normal file
17
meta/classes/gpe.bbclass
Normal file
@@ -0,0 +1,17 @@
|
||||
DEPENDS_prepend = "coreutils-native virtual/libintl intltool-native "
|
||||
GPE_TARBALL_SUFFIX ?= "gz"
|
||||
SRC_URI = "${GPE_MIRROR}/${PN}-${PV}.tar.${GPE_TARBALL_SUFFIX}"
|
||||
FILES_${PN} += "${datadir}/gpe ${datadir}/application-registry"
|
||||
MAINTAINER ?= "GPE Team <gpe@handhelds.org>"
|
||||
|
||||
inherit gettext
|
||||
|
||||
gpe_do_compile() {
|
||||
oe_runmake PREFIX=${prefix}
|
||||
}
|
||||
|
||||
gpe_do_install() {
|
||||
oe_runmake PREFIX=${prefix} DESTDIR=${D} install
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_compile do_install
|
||||
38
meta/classes/gtk-icon-cache.bbclass
Normal file
38
meta/classes/gtk-icon-cache.bbclass
Normal file
@@ -0,0 +1,38 @@
|
||||
FILES_${PN} += "${datadir}/icons/hicolor"
|
||||
|
||||
gtk-icon-cache_postinst() {
|
||||
if [ "x$D" != "x" ]; then
|
||||
exit 1
|
||||
fi
|
||||
gtk-update-icon-cache -q /usr/share/icons/hicolor
|
||||
}
|
||||
|
||||
gtk-icon-cache_postrm() {
|
||||
gtk-update-icon-cache -q /usr/share/icons/hicolor
|
||||
}
|
||||
|
||||
python populate_packages_append () {
|
||||
import os.path
|
||||
packages = bb.data.getVar('PACKAGES', d, 1).split()
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
|
||||
for pkg in packages:
|
||||
icon_dir = '%s/install/%s/%s/icons/hicolor' % (workdir, pkg, bb.data.getVar('datadir', d, 1))
|
||||
if not os.path.exists(icon_dir):
|
||||
continue
|
||||
|
||||
bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)
|
||||
|
||||
postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
|
||||
if not postinst:
|
||||
postinst = '#!/bin/sh\n'
|
||||
postinst += bb.data.getVar('gtk-icon-cache_postinst', d, 1)
|
||||
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
|
||||
|
||||
postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1)
|
||||
if not postrm:
|
||||
postrm = '#!/bin/sh\n'
|
||||
postrm += bb.data.getVar('gtk-icon-cache_postrm', d, 1)
|
||||
bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
|
||||
}
|
||||
|
||||
156
meta/classes/icecc.bbclass
Normal file
156
meta/classes/icecc.bbclass
Normal file
@@ -0,0 +1,156 @@
|
||||
# IceCream distributed compiling support
|
||||
#
|
||||
# We need to create a tar.bz2 of our toolchain and set
|
||||
# ICECC_VERSION, ICECC_CXX and ICEC_CC
|
||||
#
|
||||
|
||||
def create_env(bb,d):
|
||||
"""
|
||||
Create a tar.bz of the current toolchain
|
||||
"""
|
||||
|
||||
# Constin native-native compilation no environment needed if
|
||||
# host prefix is empty (let us duplicate the query for ease)
|
||||
prefix = bb.data.expand('${HOST_PREFIX}', d)
|
||||
if len(prefix) == 0:
|
||||
return ""
|
||||
|
||||
import tarfile
|
||||
import socket
|
||||
import time
|
||||
import os
|
||||
ice_dir = bb.data.expand('${CROSS_DIR}', d)
|
||||
prefix = bb.data.expand('${HOST_PREFIX}' , d)
|
||||
distro = bb.data.expand('${DISTRO}', d)
|
||||
target_sys = bb.data.expand('${TARGET_SYS}', d)
|
||||
#float = bb.data.getVar('${TARGET_FPU}', d)
|
||||
float = "anyfloat"
|
||||
name = socket.gethostname()
|
||||
|
||||
try:
|
||||
os.stat(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2')
|
||||
os.stat(ice_dir + '/' + target_sys + '/bin/g++')
|
||||
except:
|
||||
return ""
|
||||
|
||||
VERSION = '3.4.3'
|
||||
cross_name = prefix + distro + target_sys + float +VERSION+ name
|
||||
tar_file = ice_dir + '/ice/' + cross_name + '.tar.bz2'
|
||||
|
||||
try:
|
||||
os.stat(tar_file)
|
||||
return tar_file
|
||||
except:
|
||||
try:
|
||||
os.makedirs(ice_dir+'/ice')
|
||||
except:
|
||||
pass
|
||||
|
||||
# FIXME find out the version of the compiler
|
||||
tar = tarfile.open(tar_file, 'w:bz2')
|
||||
tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
|
||||
target_sys + 'cross/lib/ld-linux.so.2')
|
||||
tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
|
||||
target_sys + 'cross/lib/ld-2.3.3.so')
|
||||
tar.add(ice_dir + '/' + target_sys + '/lib/libc-2.3.3.so',
|
||||
target_sys + 'cross/lib/libc-2.3.3.so')
|
||||
tar.add(ice_dir + '/' + target_sys + '/lib/libc.so.6',
|
||||
target_sys + 'cross/lib/libc.so.6')
|
||||
tar.add(ice_dir + '/' + target_sys + '/bin/gcc',
|
||||
target_sys + 'cross/usr/bin/gcc')
|
||||
tar.add(ice_dir + '/' + target_sys + '/bin/g++',
|
||||
target_sys + 'cross/usr/bin/g++')
|
||||
tar.add(ice_dir + '/' + target_sys + '/bin/as',
|
||||
target_sys + 'cross/usr/bin/as')
|
||||
tar.add(ice_dir + '/lib/gcc/' + target_sys +'/'+ VERSION + '/specs',
|
||||
target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/specs')
|
||||
tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1',
|
||||
target_sys + 'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1')
|
||||
tar.add(ice_dir + '/libexec/gcc/arm-linux/' + VERSION + '/cc1plus',
|
||||
target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1plus')
|
||||
tar.close()
|
||||
return tar_file
|
||||
|
||||
|
||||
def create_path(compilers, type, bb, d):
|
||||
"""
|
||||
Create Symlinks for the icecc in the staging directory
|
||||
"""
|
||||
import os
|
||||
|
||||
staging = bb.data.expand('${STAGING_DIR}', d) + "/ice/" + type
|
||||
icecc = bb.data.getVar('ICECC_PATH', d)
|
||||
|
||||
# Create the dir if necessary
|
||||
try:
|
||||
os.stat(staging)
|
||||
except:
|
||||
os.makedirs(staging)
|
||||
|
||||
|
||||
for compiler in compilers:
|
||||
gcc_path = staging + "/" + compiler
|
||||
try:
|
||||
os.stat(gcc_path)
|
||||
except:
|
||||
os.symlink(icecc, gcc_path)
|
||||
|
||||
return staging + ":"
|
||||
|
||||
|
||||
def use_icc_version(bb,d):
|
||||
# Constin native native
|
||||
prefix = bb.data.expand('${HOST_PREFIX}', d)
|
||||
if len(prefix) == 0:
|
||||
return "no"
|
||||
|
||||
|
||||
native = bb.data.expand('${PN}', d)
|
||||
blacklist = [ "-cross", "-native" ]
|
||||
|
||||
for black in blacklist:
|
||||
if black in native:
|
||||
return "no"
|
||||
|
||||
return "yes"
|
||||
|
||||
def icc_path(bb,d,compile):
|
||||
native = bb.data.expand('${PN}', d)
|
||||
blacklist = [ "ulibc", "glibc", "ncurses" ]
|
||||
for black in blacklist:
|
||||
if black in native:
|
||||
return ""
|
||||
|
||||
if "-native" in native:
|
||||
compile = False
|
||||
if "-cross" in native:
|
||||
compile = False
|
||||
|
||||
prefix = bb.data.expand('${HOST_PREFIX}', d)
|
||||
if compile and len(prefix) != 0:
|
||||
return create_path( [prefix+"gcc", prefix+"g++"], "cross", bb, d )
|
||||
elif not compile or len(prefix) == 0:
|
||||
return create_path( ["gcc", "g++"], "native", bb, d)
|
||||
|
||||
|
||||
def icc_version(bb,d):
|
||||
return create_env(bb,d)
|
||||
|
||||
|
||||
#
|
||||
# set the IceCream environment variables
|
||||
do_configure_prepend() {
|
||||
export PATH=${@icc_path(bb,d,False)}$PATH
|
||||
export ICECC_CC="gcc"
|
||||
export ICECC_CXX="g++"
|
||||
}
|
||||
|
||||
do_compile_prepend() {
|
||||
export PATH=${@icc_path(bb,d,True)}$PATH
|
||||
export ICECC_CC="${HOST_PREFIX}gcc"
|
||||
export ICECC_CXX="${HOST_PREFIX}g++"
|
||||
|
||||
if [ "${@use_icc_version(bb,d)}" = "yes" ]; then
|
||||
export ICECC_VERSION="${@icc_version(bb,d)}"
|
||||
fi
|
||||
}
|
||||
76
meta/classes/image_ipk.bbclass
Normal file
76
meta/classes/image_ipk.bbclass
Normal file
@@ -0,0 +1,76 @@
|
||||
inherit rootfs_ipk
|
||||
|
||||
# We need to follow RDEPENDS and RRECOMMENDS for images
|
||||
BUILD_ALL_DEPS = "1"
|
||||
|
||||
# Images are generally built explicitly, do not need to be part of world.
|
||||
EXCLUDE_FROM_WORLD = "1"
|
||||
|
||||
USE_DEVFS ?= "0"
|
||||
|
||||
DEPENDS += "makedevs-native"
|
||||
PACKAGE_ARCH = "${MACHINE_ARCH}"
|
||||
|
||||
def get_image_deps(d):
|
||||
import bb
|
||||
str = ""
|
||||
for type in (bb.data.getVar('IMAGE_FSTYPES', d, 1) or "").split():
|
||||
deps = bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or ""
|
||||
if deps:
|
||||
str += " %s" % deps
|
||||
return str
|
||||
|
||||
DEPENDS += "${@get_image_deps(d)}"
|
||||
|
||||
IMAGE_DEVICE_TABLE ?= "${@bb.which(bb.data.getVar('BBPATH', d, 1), 'files/device_table-minimal.txt')}"
|
||||
IMAGE_POSTPROCESS_COMMAND ?= ""
|
||||
|
||||
# Must call real_do_rootfs() from inside here, rather than as a separate
|
||||
# task, so that we have a single fakeroot context for the whole process.
|
||||
fakeroot do_rootfs () {
|
||||
set -x
|
||||
rm -rf ${IMAGE_ROOTFS}
|
||||
|
||||
if [ "${USE_DEVFS}" != "1" ]; then
|
||||
mkdir -p ${IMAGE_ROOTFS}/dev
|
||||
makedevs -r ${IMAGE_ROOTFS} -D ${IMAGE_DEVICE_TABLE}
|
||||
fi
|
||||
|
||||
real_do_rootfs
|
||||
|
||||
insert_feed_uris
|
||||
|
||||
rm -f ${IMAGE_ROOTFS}${libdir}/ipkg/lists/oe
|
||||
|
||||
${IMAGE_PREPROCESS_COMMAND}
|
||||
|
||||
export TOPDIR=${TOPDIR}
|
||||
|
||||
for type in ${IMAGE_FSTYPES}; do
|
||||
if test -z "$FAKEROOTKEY"; then
|
||||
fakeroot -i ${TMPDIR}/fakedb.image bbimage -t $type -e ${FILE}
|
||||
else
|
||||
bbimage -n "${IMAGE_NAME}" -t "$type" -e "${FILE}"
|
||||
fi
|
||||
done
|
||||
|
||||
${IMAGE_POSTPROCESS_COMMAND}
|
||||
}
|
||||
|
||||
insert_feed_uris () {
|
||||
|
||||
echo "Building feeds for [${DISTRO}].."
|
||||
|
||||
for line in ${FEED_URIS}
|
||||
do
|
||||
# strip leading and trailing spaces/tabs, then split into name and uri
|
||||
line_clean="`echo "$line"|sed 's/^[ \t]*//;s/[ \t]*$//'`"
|
||||
feed_name="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\1/p'`"
|
||||
feed_uri="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\2/p'`"
|
||||
|
||||
echo "Added $feed_name feed with URL $feed_uri"
|
||||
|
||||
# insert new feed-sources
|
||||
echo "src/gz $feed_name $feed_uri" >> ${IMAGE_ROOTFS}/etc/ipkg/${feed_name}-feed.conf
|
||||
done
|
||||
}
|
||||
26
meta/classes/kernel-arch.bbclass
Normal file
26
meta/classes/kernel-arch.bbclass
Normal file
@@ -0,0 +1,26 @@
|
||||
#
|
||||
# set the ARCH environment variable for kernel compilation (including
|
||||
# modules). return value must match one of the architecture directories
|
||||
# in the kernel source "arch" directory
|
||||
#
|
||||
|
||||
valid_archs = "alpha cris ia64 m68knommu ppc sh \
|
||||
sparc64 x86_64 arm h8300 m32r mips \
|
||||
ppc64 sh64 um arm26 i386 m68k \
|
||||
parisc s390 sparc v850"
|
||||
|
||||
def map_kernel_arch(a, d):
|
||||
import bb, re
|
||||
|
||||
valid_archs = bb.data.getVar('valid_archs', d, 1).split()
|
||||
|
||||
if re.match('(i.86|athlon)$', a): return 'i386'
|
||||
elif re.match('arm26$', a): return 'arm26'
|
||||
elif re.match('armeb$', a): return 'arm'
|
||||
elif re.match('powerpc$', a): return 'ppc'
|
||||
elif re.match('mipsel$', a): return 'mips'
|
||||
elif a in valid_archs: return a
|
||||
else:
|
||||
bb.error("cannot map '%s' to a linux kernel architecture" % a)
|
||||
|
||||
export ARCH = "${@map_kernel_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}"
|
||||
435
meta/classes/kernel.bbclass
Normal file
435
meta/classes/kernel.bbclass
Normal file
@@ -0,0 +1,435 @@
|
||||
inherit module_strip
|
||||
|
||||
PROVIDES += "virtual/kernel"
|
||||
DEPENDS += "virtual/${TARGET_PREFIX}depmod-${@get_kernelmajorversion('${PV}')} virtual/${TARGET_PREFIX}gcc${KERNEL_CCSUFFIX} update-modules"
|
||||
|
||||
inherit kernel-arch
|
||||
|
||||
PACKAGES_DYNAMIC += "kernel-module-*"
|
||||
PACKAGES_DYNAMIC += "kernel-image-*"
|
||||
|
||||
export OS = "${TARGET_OS}"
|
||||
export CROSS_COMPILE = "${TARGET_PREFIX}"
|
||||
KERNEL_IMAGETYPE = "zImage"
|
||||
|
||||
KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}"
|
||||
|
||||
# [jbowler 20051109] ${PV}${KERNEL_LOCALVERSION} is used throughout this
|
||||
# .bbclass to (apparently) find the full 'uname -r' kernel version, this
|
||||
# should be the same as UTS_RELEASE or (in this file) KERNEL_VERSION:
|
||||
# KERNELRELEASE=$(VERSION).$(PATCHLEVEL).$(SUBLEVEL)$(EXTRAVERSION)$(LOCALVERSION)
|
||||
# but since this is not certain this overridable setting is used here:
|
||||
KERNEL_RELEASE ?= "${PV}${KERNEL_LOCALVERSION}"
|
||||
|
||||
KERNEL_CCSUFFIX ?= ""
|
||||
KERNEL_LDSUFFIX ?= ""
|
||||
|
||||
# Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
|
||||
# specific options necessary for building the kernel and modules.
|
||||
#FIXME: should be this: TARGET_CC_KERNEL_ARCH ?= "${TARGET_CC_ARCH}"
|
||||
TARGET_CC_KERNEL_ARCH ?= ""
|
||||
HOST_CC_KERNEL_ARCH ?= "${TARGET_CC_KERNEL_ARCH}"
|
||||
TARGET_LD_KERNEL_ARCH ?= ""
|
||||
HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"
|
||||
|
||||
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX} ${HOST_CC_KERNEL_ARCH}"
|
||||
KERNEL_LD = "${LD}${KERNEL_LDSUFFIX} ${HOST_LD_KERNEL_ARCH}"
|
||||
|
||||
KERNEL_OUTPUT = "arch/${ARCH}/boot/${KERNEL_IMAGETYPE}"
|
||||
KERNEL_IMAGEDEST = "boot"
|
||||
|
||||
#
|
||||
# configuration
|
||||
#
|
||||
export CMDLINE_CONSOLE = "console=${@bb.data.getVar("KERNEL_CONSOLE",d,1) or "ttyS0"}"
|
||||
|
||||
# parse kernel ABI version out of <linux/version.h>
|
||||
def get_kernelversion(p):
	"""Parse the UTS_RELEASE string out of a kernel <linux/version.h>.

	Returns the release string (e.g. "2.6.11") or None when the file
	cannot be opened or contains no UTS_RELEASE define.
	"""
	import re
	try:
		header = open(p, 'r')
	except IOError:
		return None
	contents = header.readlines()
	header.close()
	uts = re.compile("#define UTS_RELEASE \"(.*)\"")
	for text in contents:
		found = uts.match(text)
		if found:
			return found.group(1)
	return None
|
||||
|
||||
def get_kernelmajorversion(p):
	"""Return the leading "major.minor" part of a kernel version string.

	Returns None when *p* does not begin with two dot-separated numbers.
	"""
	import re
	found = re.match("([0-9]+\.[0-9]+).*", p)
	if found:
		return found.group(1)
	return None
|
||||
|
||||
KERNEL_VERSION = "${@get_kernelversion('${S}/include/linux/version.h')}"
|
||||
KERNEL_MAJOR_VERSION = "${@get_kernelmajorversion('${KERNEL_VERSION}')}"
|
||||
|
||||
KERNEL_LOCALVERSION ?= ""
|
||||
|
||||
# kernels are generally machine specific
|
||||
PACKAGE_ARCH = "${MACHINE_ARCH}"
|
||||
|
||||
# Build the kernel image (and modules, when configured) with the cross
# toolchain selected via KERNEL_CC/KERNEL_LD.
kernel_do_compile() {
	# The kernel build must not inherit target userspace build flags.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake include/linux/version.h CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	# Pre-2.6 trees need an explicit dependency pass ("make dep").
	if [ "${KERNEL_MAJOR_VERSION}" != "2.6" ]; then
		oe_runmake dep CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	fi
	oe_runmake ${KERNEL_IMAGETYPE} CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	# Only build modules when the .config enables them.
	if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
		oe_runmake modules CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	else
		oenote "no modules to compile"
	fi
}
|
||||
|
||||
# Populate ${STAGING_KERNEL_DIR} with the headers, configuration,
# System.map, image and build scripts that external module builds
# (see module-base.bbclass) read back later.
kernel_do_stage() {
	ASMDIR=`readlink include/asm`

	# Copy the architecture-specific headers and recreate the asm symlink.
	mkdir -p ${STAGING_KERNEL_DIR}/include/$ASMDIR
	cp -fR include/$ASMDIR/* ${STAGING_KERNEL_DIR}/include/$ASMDIR/
	rm -f $ASMDIR ${STAGING_KERNEL_DIR}/include/asm
	ln -sf $ASMDIR ${STAGING_KERNEL_DIR}/include/asm

	mkdir -p ${STAGING_KERNEL_DIR}/include/asm-generic
	cp -fR include/asm-generic/* ${STAGING_KERNEL_DIR}/include/asm-generic/

	mkdir -p ${STAGING_KERNEL_DIR}/include/linux
	cp -fR include/linux/* ${STAGING_KERNEL_DIR}/include/linux/

	mkdir -p ${STAGING_KERNEL_DIR}/include/net
	cp -fR include/net/* ${STAGING_KERNEL_DIR}/include/net/

	mkdir -p ${STAGING_KERNEL_DIR}/include/pcmcia
	cp -fR include/pcmcia/* ${STAGING_KERNEL_DIR}/include/pcmcia/

	# Sound headers are optional in some trees.
	if [ -d include/sound ]; then
		mkdir -p ${STAGING_KERNEL_DIR}/include/sound
		cp -fR include/sound/* ${STAGING_KERNEL_DIR}/include/sound/
	fi

	if [ -d drivers/sound ]; then
		# 2.4 alsa needs some headers from this directory
		mkdir -p ${STAGING_KERNEL_DIR}/include/drivers/sound
		cp -fR drivers/sound/*.h ${STAGING_KERNEL_DIR}/include/drivers/sound/
	fi

	# Record the kernel configuration and build parameters so module
	# recipes can read them back (kernel-abiversion, kernel-source,
	# kernel-ccsuffix, kernel-ldsuffix are consumed by module-base.bbclass).
	install -m 0644 .config ${STAGING_KERNEL_DIR}/config-${KERNEL_RELEASE}
	ln -sf config-${KERNEL_RELEASE} ${STAGING_KERNEL_DIR}/.config
	ln -sf config-${KERNEL_RELEASE} ${STAGING_KERNEL_DIR}/kernel-config
	echo "${KERNEL_VERSION}" >${STAGING_KERNEL_DIR}/kernel-abiversion
	echo "${S}" >${STAGING_KERNEL_DIR}/kernel-source
	echo "${KERNEL_CCSUFFIX}" >${STAGING_KERNEL_DIR}/kernel-ccsuffix
	echo "${KERNEL_LDSUFFIX}" >${STAGING_KERNEL_DIR}/kernel-ldsuffix
	[ -e Rules.make ] && install -m 0644 Rules.make ${STAGING_KERNEL_DIR}/
	[ -e Makefile ] && install -m 0644 Makefile ${STAGING_KERNEL_DIR}/

	# Check if arch/${ARCH}/Makefile exists and install it
	if [ -e arch/${ARCH}/Makefile ]; then
		install -d ${STAGING_KERNEL_DIR}/arch/${ARCH}
		install -m 0644 arch/${ARCH}/Makefile ${STAGING_KERNEL_DIR}/arch/${ARCH}
	fi
	cp -fR include/config* ${STAGING_KERNEL_DIR}/include/
	install -m 0644 ${KERNEL_OUTPUT} ${STAGING_KERNEL_DIR}/${KERNEL_IMAGETYPE}
	install -m 0644 System.map ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE}
	# Module.symvers only exists on 2.6-style trees.
	[ -e Module.symvers ] && install -m 0644 Module.symvers ${STAGING_KERNEL_DIR}/

	cp -fR scripts ${STAGING_KERNEL_DIR}/
}
|
||||
|
||||
# Install the kernel image, System.map, config and (when configured)
# the modules into ${D}; also refresh the staged copy of the build
# scripts.
kernel_do_install() {
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
		# DEPMOD=echo keeps the kernel build from running the host's
		# depmod against the cross-built modules.
		oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" modules_install
	else
		oenote "no modules to install"
	fi

	install -d ${D}/${KERNEL_IMAGEDEST}
	install -d ${D}/boot
	install -m 0644 ${KERNEL_OUTPUT} ${D}/${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${KERNEL_RELEASE}
	install -m 0644 System.map ${D}/boot/System.map-${KERNEL_RELEASE}
	install -m 0644 .config ${D}/boot/config-${KERNEL_RELEASE}
	# Directory for the autoload/modconf fragments written during packaging.
	install -d ${D}/etc/modutils

	# Check if scripts/genksyms exists and if so, build it
	if [ -e scripts/genksyms/ ]; then
		oe_runmake SUBDIRS="scripts/genksyms"
	fi

	install -d ${STAGING_KERNEL_DIR}
	cp -fR scripts ${STAGING_KERNEL_DIR}/
}
|
||||
|
||||
# Refresh the kernel configuration, accepting the default answer for
# any newly-introduced config options.
kernel_do_configure() {
	yes '' | oe_runmake oldconfig
}
|
||||
|
||||
# Postinst for the 'kernel' package: register the versioned image with
# update-alternatives so /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}
# points at it (never fail the install).
pkg_postinst_kernel () {
	update-alternatives --install /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE} ${KERNEL_IMAGETYPE} /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${KERNEL_RELEASE} ${KERNEL_PRIORITY} || true
}
|
||||
|
||||
# Postrm for the 'kernel' package: drop this image from the
# update-alternatives set (never fail the removal).
pkg_postrm_kernel () {
	update-alternatives --remove ${KERNEL_IMAGETYPE} /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${KERNEL_RELEASE} || true
}
|
||||
|
||||
inherit cml1
|
||||
|
||||
EXPORT_FUNCTIONS do_compile do_install do_stage do_configure
|
||||
|
||||
PACKAGES = "kernel kernel-image kernel-dev"
|
||||
FILES = ""
|
||||
FILES_kernel-image = "/boot/${KERNEL_IMAGETYPE}*"
|
||||
FILES_kernel-dev = "/boot/System.map* /boot/config*"
|
||||
RDEPENDS_kernel = "kernel-image-${KERNEL_VERSION}"
|
||||
PKG_kernel-image = "kernel-image-${KERNEL_VERSION}"
|
||||
ALLOW_EMPTY_kernel = "1"
|
||||
ALLOW_EMPTY_kernel-image = "1"
|
||||
|
||||
# Postinst for kernel-image: make sure the module directory exists and
# regenerate the module dependency files.  $D is non-empty when
# installing into an offline root, in which case the versioned cross
# depmod is used with the staged System.map.
pkg_postinst_kernel-image () {
	if [ ! -e "$D/lib/modules/${KERNEL_RELEASE}" ]; then
		mkdir -p $D/lib/modules/${KERNEL_RELEASE}
	fi
	if [ -n "$D" ]; then
		${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
	else
		depmod -A
	fi
}
|
||||
|
||||
# Shared postinst for the split-out kernel-module-* packages: refresh
# module dependencies; update-modules only runs on the target ($D empty).
pkg_postinst_modules () {
	if [ -n "$D" ]; then
		${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
	else
		depmod -A
		update-modules || true
	fi
}
|
||||
|
||||
# Shared postrm for the split-out kernel-module-* packages.
pkg_postrm_modules () {
	update-modules || true
}
|
||||
|
||||
# Fragment appended to a module package's postinst when autoloading is
# requested; '%s' is substituted with the autoload value by
# frob_metadata in populate_packages_prepend.  Only modprobe on the
# target (skip when $D marks an offline install).
autoload_postinst_fragment() {
	if [ x"$D" = "x" ]; then
		modprobe %s || true
	fi
}
|
||||
|
||||
# autoload defaults (alphabetically sorted)
|
||||
module_autoload_hidp = "hidp"
|
||||
module_autoload_ipv6 = "ipv6"
|
||||
module_autoload_ipsec = "ipsec"
|
||||
module_autoload_ircomm-tty = "ircomm-tty"
|
||||
module_autoload_rfcomm = "rfcomm"
|
||||
module_autoload_sa1100-rtc = "sa1100-rtc"
|
||||
|
||||
# alias defaults (alphabetically sorted)
|
||||
module_conf_af_packet = "alias net-pf-17 af_packet"
|
||||
module_conf_bluez = "alias net-pf-31 bluez"
|
||||
module_conf_bnep = "alias bt-proto-4 bnep"
|
||||
module_conf_hci_uart = "alias tty-ldisc-15 hci_uart"
|
||||
module_conf_l2cap = "alias bt-proto-0 l2cap"
|
||||
module_conf_sco = "alias bt-proto-2 sco"
|
||||
module_conf_rfcomm = "alias bt-proto-3 rfcomm"
|
||||
|
||||
# Packaging hook: split /lib/modules into one kernel-module-* package
# per module, wire up RDEPENDS between modules from depmod output, and
# create the kernel-modules meta package(s).
python populate_packages_prepend () {
	def extract_modinfo(file):
		# Dump the .modinfo ELF section of a module into a temp file and
		# parse its key=value pairs into a dict.
		import os, re
		# NOTE(review): os.tmpnam is race-prone (and removed in Python 3);
		# a mkstemp-based approach would be safer - confirm before changing.
		tmpfile = os.tmpnam()
		cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile)
		os.system(cmd)
		f = open(tmpfile)
		# .modinfo entries are NUL-separated strings.
		l = f.read().split("\000")
		f.close()
		os.unlink(tmpfile)
		exp = re.compile("([^=]+)=(.*)")
		vals = {}
		for i in l:
			m = exp.match(i)
			if not m:
				continue
			vals[m.group(1)] = m.group(2)
		return vals

	def parse_depmod():
		# Run the cross depmod in dry-run mode (-n) against ${D} and parse
		# its makefile-style output into {module: [dependencies]}.
		import os, re

		dvar = bb.data.getVar('D', d, 1)
		if not dvar:
			bb.error("D not defined")
			return

		kernelver = bb.data.getVar('KERNEL_RELEASE', d, 1)
		# Strip a trailing ".x"/"+x" component from -hh kernel versions.
		kernelver_stripped = kernelver
		m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
		if m:
			kernelver_stripped = m.group(1)
		path = bb.data.getVar("PATH", d, 1)
		host_prefix = bb.data.getVar("HOST_PREFIX", d, 1) or ""
		major_version = bb.data.getVar('KERNEL_MAJOR_VERSION', d, 1)

		cmd = "PATH=\"%s\" %sdepmod-%s -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, major_version, dvar, dvar, kernelver, kernelver_stripped)
		f = os.popen(cmd, 'r')

		deps = {}
		# pattern0: any dependency line; pattern1: single-line deps;
		# pattern2/3/4: backslash-continued multi-line deps.
		pattern0 = "^(.*\.k?o):..*$"
		pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$"
		pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$"
		pattern3 = "^\t(.*\.k?o)\s*\\\$"
		pattern4 = "^\t(.*\.k?o)\s*$"

		line = f.readline()
		while line:
			if not re.match(pattern0, line):
				line = f.readline()
				continue
			m1 = re.match(pattern1, line)
			if m1:
				deps[m1.group(1)] = m1.group(2).split()
			else:
				m2 = re.match(pattern2, line)
				if m2:
					deps[m2.group(1)] = m2.group(2).split()
					line = f.readline()
					m3 = re.match(pattern3, line)
					# Collect all continuation lines.
					while m3:
						deps[m2.group(1)].extend(m3.group(1).split())
						line = f.readline()
						m3 = re.match(pattern3, line)
					# NOTE(review): the final line is assumed to match
					# pattern4; if depmod emits anything else here, m4 is
					# None and this raises - confirm against depmod output.
					m4 = re.match(pattern4, line)
					deps[m2.group(1)].extend(m4.group(1).split())
			line = f.readline()
		f.close()
		return deps

	def get_dependencies(file, pattern, format):
		# Map a module's depmod dependency list to package names via
		# *pattern*/*format* (e.g. 'kernel-module-%s').
		file = file.replace(bb.data.getVar('D', d, 1) or '', '', 1)

		if module_deps.has_key(file):
			import os.path, re
			dependencies = []
			for i in module_deps[file]:
				m = re.match(pattern, os.path.basename(i))
				if not m:
					continue
				on = legitimize_package_name(m.group(1))
				dependency_pkg = format % on
				# With parallel-installable modules, packages carry the
				# kernel major version suffix (see end of this function body).
				v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
				if v == "1":
					kv = bb.data.getVar("KERNEL_MAJOR_VERSION", d, 1)
					dependency_pkg = "%s-%s" % (dependency_pkg, kv)
				dependencies.append(dependency_pkg)
			return dependencies
		return []

	def frob_metadata(file, pkg, pattern, format, basename):
		# Per-package hook for do_split_packages: emit modutils fragments,
		# fill in DESCRIPTION and RDEPENDS from modinfo/depmod data.
		import re
		vals = extract_modinfo(file)

		dvar = bb.data.getVar('D', d, 1)

		# If autoloading is requested, output /etc/modutils/<name> and append
		# appropriate modprobe commands to the postinst
		autoload = bb.data.getVar('module_autoload_%s' % basename, d, 1)
		if autoload:
			name = '%s/etc/modutils/%s' % (dvar, basename)
			f = open(name, 'w')
			for m in autoload.split():
				f.write('%s\n' % m)
			f.close()
			postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1)
			if not postinst:
				bb.fatal("pkg_postinst_%s not defined" % pkg)
			postinst += bb.data.getVar('autoload_postinst_fragment', d, 1) % autoload
			bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

		# Write out any modconf fragment
		modconf = bb.data.getVar('module_conf_%s' % basename, d, 1)
		if modconf:
			name = '%s/etc/modutils/%s.conf' % (dvar, basename)
			f = open(name, 'w')
			f.write("%s\n" % modconf)
			f.close()

		files = bb.data.getVar('FILES_%s' % pkg, d, 1)
		files = "%s /etc/modutils/%s /etc/modutils/%s.conf" % (files, basename, basename)
		bb.data.setVar('FILES_%s' % pkg, files, d)

		if vals.has_key("description"):
			old_desc = bb.data.getVar('DESCRIPTION_' + pkg, d, 1) or ""
			bb.data.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"], d)

		rdepends_str = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
		if rdepends_str:
			rdepends = rdepends_str.split()
		else:
			rdepends = []
		rdepends.extend(get_dependencies(file, pattern, format))
		bb.data.setVar('RDEPENDS_' + pkg, ' '.join(rdepends), d)

	module_deps = parse_depmod()
	module_regex = '^(.*)\.k?o$'
	module_pattern = 'kernel-module-%s'

	postinst = bb.data.getVar('pkg_postinst_modules', d, 1)
	postrm = bb.data.getVar('pkg_postrm_modules', d, 1)
	do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-image-%s' % bb.data.getVar("KERNEL_VERSION", d, 1))

	# Build the empty 'kernel-modules' meta package which RDEPENDS on
	# every generated module package not blacklisted below.
	import re, os
	metapkg = "kernel-modules"
	bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
	bb.data.setVar('FILES_' + metapkg, "", d)
	# Exclude non-module packages and modules that are mere dependencies
	# of other modules (those found in depmod's dependency lists).
	blacklist = [ 'kernel-dev', 'kernel-image' ]
	for l in module_deps.values():
		for i in l:
			pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
			blacklist.append(pkg)
	metapkg_rdepends = []
	packages = bb.data.getVar('PACKAGES', d, 1).split()
	for pkg in packages[1:]:
		if not pkg in blacklist and not pkg in metapkg_rdepends:
			metapkg_rdepends.append(pkg)
	bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
	bb.data.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package', d)
	packages.append(metapkg)
	bb.data.setVar('PACKAGES', ' '.join(packages), d)

	# With PARALLEL_INSTALL_MODULES, rename each kernel-module-* package
	# to carry the kernel major version (old name kept via RPROVIDES)
	# and add a versioned kernel-modules-<kv> meta package.
	v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
	if v == "1":
		kv = bb.data.getVar("KERNEL_MAJOR_VERSION", d, 1)
		packages = bb.data.getVar("PACKAGES", d, 1).split()
		module_re = re.compile("^kernel-module-")

		newmetapkg = "kernel-modules-%s" % kv
		bb.data.setVar('ALLOW_EMPTY_' + newmetapkg, "1", d)
		bb.data.setVar('FILES_' + newmetapkg, "", d)

		newmetapkg_rdepends = []

		for p in packages:
			if not module_re.match(p):
				continue
			pkg = bb.data.getVar("PKG_%s" % p, d, 1) or p
			newpkg = "%s-%s" % (pkg, kv)
			bb.data.setVar("PKG_%s" % p, newpkg, d)
			rprovides = bb.data.getVar("RPROVIDES_%s" % p, d, 1)
			if rprovides:
				rprovides = "%s %s" % (rprovides, pkg)
			else:
				rprovides = pkg
			bb.data.setVar("RPROVIDES_%s" % p, rprovides, d)
			newmetapkg_rdepends.append(newpkg)

		bb.data.setVar('RDEPENDS_' + newmetapkg, ' '.join(newmetapkg_rdepends), d)
		bb.data.setVar('DESCRIPTION_' + newmetapkg, 'Kernel modules meta package', d)
		packages.append(newmetapkg)
		bb.data.setVar('PACKAGES', ' '.join(packages), d)
}
|
||||
9
meta/classes/lib_package.bbclass
Normal file
9
meta/classes/lib_package.bbclass
Normal file
@@ -0,0 +1,9 @@
|
||||
PACKAGES = "${PN} ${PN}-dev ${PN}-doc ${PN}-bin"
|
||||
|
||||
FILES_${PN} = "${libexecdir} ${libdir}/lib*.so.* \
|
||||
${sysconfdir} ${sharedstatedir} ${localstatedir} \
|
||||
/lib/*.so* ${datadir}/${PN} ${libdir}/${PN}"
|
||||
FILES_${PN}-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \
|
||||
${libdir}/*.a ${libdir}/pkgconfig /lib/*.a /lib/*.o \
|
||||
${datadir}/aclocal ${bindir}/*-config"
|
||||
FILES_${PN}-bin = "${bindir} ${sbindir} /bin /sbin"
|
||||
19
meta/classes/linux_modules.bbclass
Normal file
19
meta/classes/linux_modules.bbclass
Normal file
@@ -0,0 +1,19 @@
|
||||
def get_kernelmajorversion(p):
	"""Extract the "<major>.<minor>" prefix from a kernel version string.

	Returns None when *p* does not start with two dot-separated numbers.
	"""
	import re
	m = re.compile("([0-9]+\.[0-9]+).*").match(p)
	if m:
		return m.group(1)
	return None
|
||||
|
||||
def linux_module_packages(s, d):
	"""Map a space-separated list of module names to kernel-module-*
	package names.

	When PARALLEL_INSTALL_MODULES is enabled and a staged
	kernel-abiversion file exists, the kernel major version is appended
	to each package name.
	"""
	import bb, os.path
	suffix = ""
	if (bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) == "1"):
		file = bb.data.expand('${STAGING_KERNEL_DIR}/kernel-abiversion', d)
		if (os.path.exists(file)):
			suffix = "-%s" % (get_kernelmajorversion(base_read_file(file)))
	# Package names use '-' for '_' and '+' for '@', lowercased.
	names = []
	for module in s.split():
		sanitized = module.lower().replace('_', '-').replace('@', '+')
		names.append("kernel-module-%s%s" % (sanitized, suffix))
	return " ".join(names)
|
||||
|
||||
# that's all
|
||||
|
||||
80
meta/classes/manifest.bbclass
Normal file
80
meta/classes/manifest.bbclass
Normal file
@@ -0,0 +1,80 @@
|
||||
|
||||
# Load the file named by MANIFEST (if readable) and stash the parsed
# structure in the metadata store under 'manifest' for parse_manifest.
python read_manifest () {
	import sys, bb.manifest
	mfn = bb.data.getVar("MANIFEST", d, 1)
	if os.access(mfn, os.R_OK):
		# we have a manifest, so emit do_stage and do_populate_pkgs,
		# and stuff some additional bits of data into the metadata store
		mfile = file(mfn, "r")
		manifest = bb.manifest.parse(mfile, d)
		if not manifest:
			return

		bb.data.setVar('manifest', manifest, d)
}
|
||||
|
||||
# Turn the previously-read manifest into executable manifest_do_*
# functions and derive PACKAGES from the manifest entries.
python parse_manifest () {
	manifest = bb.data.getVar("manifest", d)
	if not manifest:
		return
	for func in ("do_populate_staging", "do_populate_pkgs"):
		value = bb.manifest.emit(func, manifest, d)
		if value:
			bb.data.setVar("manifest_" + func, value, d)
			# Mark the emitted code as a plain function (not python,
			# not fakeroot) so bb.build can execute it.
			bb.data.delVarFlag("manifest_" + func, "python", d)
			bb.data.delVarFlag("manifest_" + func, "fakeroot", d)
			bb.data.setVarFlag("manifest_" + func, "func", 1, d)
	# PACKAGES is the list of 'pkg' fields found in the manifest.
	packages = []
	for l in manifest:
		if "pkg" in l and l["pkg"] is not None:
			packages.append(l["pkg"])
	bb.data.setVar("PACKAGES", " ".join(packages), d)
}
|
||||
|
||||
# Parse-time hook: read and process the manifest as soon as the recipe
# is parsed, logging (rather than propagating) any non-interrupt error.
python __anonymous () {
	try:
		bb.build.exec_func('read_manifest', d)
		bb.build.exec_func('parse_manifest', d)
	except exceptions.KeyboardInterrupt:
		raise
	except Exception, e:
		bb.error("anonymous function: %s" % e)
		pass
}
|
||||
|
||||
#python do_populate_staging () {
|
||||
# if not bb.data.getVar('manifest', d):
|
||||
# bb.build.exec_func('do_emit_manifest', d)
|
||||
# if bb.data.getVar('do_stage', d):
|
||||
# bb.build.exec_func('do_stage', d)
|
||||
# else:
|
||||
# bb.build.exec_func('manifest_do_populate_staging', d)
|
||||
#}
|
||||
|
||||
#addtask populate_pkgs after do_compile
|
||||
#python do_populate_pkgs () {
|
||||
# if not bb.data.getVar('manifest', d):
|
||||
# bb.build.exec_func('do_emit_manifest', d)
|
||||
# bb.build.exec_func('manifest_do_populate_pkgs', d)
|
||||
# bb.build.exec_func('package_do_shlibs', d)
|
||||
#}
|
||||
|
||||
addtask emit_manifest
|
||||
# Generate a manifest: run do_install with the build/install wrapper
# scripts first on PATH (they record what gets installed), then read
# back and re-parse the result.
python do_emit_manifest () {
	# FIXME: emit a manifest here
	# 1) adjust PATH to hit the wrapper scripts
	wrappers = bb.which(bb.data.getVar("BBPATH", d, 1), 'build/install', 0)
	path = (bb.data.getVar('PATH', d, 1) or '').split(':')
	path.insert(0, os.path.dirname(wrappers))
	bb.data.setVar('PATH', ':'.join(path), d)
	# 2) exec_func("do_install", d)
	bb.build.exec_func('do_install', d)
	# 3) read in data collected by the wrappers
	bb.build.exec_func('read_manifest', d)
	# 4) mangle the manifest we just generated, get paths back into
	#    our variable form
	# 5) write it back out
	# 6) re-parse it to ensure the generated functions are proper
	bb.build.exec_func('parse_manifest', d)
}
|
||||
25
meta/classes/module-base.bbclass
Normal file
25
meta/classes/module-base.bbclass
Normal file
@@ -0,0 +1,25 @@
|
||||
inherit module_strip
|
||||
|
||||
inherit kernel-arch
|
||||
|
||||
export OS = "${TARGET_OS}"
|
||||
export CROSS_COMPILE = "${TARGET_PREFIX}"
|
||||
|
||||
export KERNEL_VERSION = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-abiversion')}"
|
||||
export KERNEL_SOURCE = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-source')}"
|
||||
KERNEL_OBJECT_SUFFIX = "${@[".o", ".ko"][base_read_file('${STAGING_KERNEL_DIR}/kernel-abiversion') > "2.6.0"]}"
|
||||
KERNEL_CCSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ccsuffix')}"
|
||||
KERNEL_LDSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ldsuffix')}"
|
||||
|
||||
# Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
|
||||
# specific options necessary for building the kernel and modules.
|
||||
TARGET_CC_KERNEL_ARCH ?= ""
|
||||
HOST_CC_KERNEL_ARCH ?= "${TARGET_CC_KERNEL_ARCH}"
|
||||
TARGET_LD_KERNEL_ARCH ?= ""
|
||||
HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"
|
||||
|
||||
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX} ${HOST_CC_KERNEL_ARCH}"
|
||||
KERNEL_LD = "${LD}${KERNEL_LDSUFFIX} ${HOST_LD_KERNEL_ARCH}"
|
||||
|
||||
# kernel modules are generally machine specific
|
||||
PACKAGE_ARCH = "${MACHINE_ARCH}"
|
||||
51
meta/classes/module.bbclass
Normal file
51
meta/classes/module.bbclass
Normal file
@@ -0,0 +1,51 @@
|
||||
RDEPENDS += "kernel (${KERNEL_VERSION})"
|
||||
DEPENDS += "virtual/kernel"
|
||||
|
||||
inherit module-base
|
||||
|
||||
# When PARALLEL_INSTALL_MODULES is enabled, rename every binary package
# to <pkg>-<KERNEL_VERSION> via PKG_* and keep the unversioned name as
# an RPROVIDES, so modules built for different kernels can coexist.
python populate_packages_prepend() {
	v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
	if v == "1":
		kv = bb.data.getVar("KERNEL_VERSION", d, 1)
		packages = bb.data.getVar("PACKAGES", d, 1)
		for p in packages.split():
			pkg = bb.data.getVar("PKG_%s" % p, d, 1) or p
			newpkg = "%s-%s" % (pkg, kv)
			bb.data.setVar("PKG_%s" % p, newpkg, d)
			rprovides = bb.data.getVar("RPROVIDES_%s" % p, d, 1)
			if rprovides:
				rprovides = "%s %s" % (rprovides, pkg)
			else:
				rprovides = pkg
			bb.data.setVar("RPROVIDES_%s" % p, rprovides, d)
}
|
||||
|
||||
# Build an out-of-tree kernel module against the staged kernel tree.
module_do_compile() {
	# Module builds must not inherit target userspace build flags.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake KERNEL_PATH=${STAGING_KERNEL_DIR}   \
		   KERNEL_SRC=${STAGING_KERNEL_DIR}    \
		   KERNEL_VERSION=${KERNEL_VERSION}    \
		   CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
		   ${MAKE_TARGETS}
}
|
||||
|
||||
# Install the module into ${D}; DEPMOD=echo keeps the kernel build from
# running the host's depmod against the cross-built modules.
module_do_install() {
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" CC="${KERNEL_CC}" LD="${KERNEL_LD}" modules_install
}
|
||||
|
||||
# Appended to the module package's postinst: depmod/update-modules can
# only run on the target, so an offline install ($D set) fails with
# exit 1 instead.
pkg_postinst_append () {
	if [ -n "$D" ]; then
		exit 1
	fi
	depmod -A
	update-modules || true
}
|
||||
|
||||
# Appended to the module package's postrm: refresh the module map.
pkg_postrm_append () {
	update-modules || true
}
|
||||
|
||||
EXPORT_FUNCTIONS do_compile do_install
|
||||
|
||||
FILES_${PN} = "/etc /lib/modules"
|
||||
18
meta/classes/module_strip.bbclass
Normal file
18
meta/classes/module_strip.bbclass
Normal file
@@ -0,0 +1,18 @@
|
||||
#DEPENDS_append = " module-strip"
|
||||
|
||||
# Strip debug information from every kernel module found under each
# package's install tree.
do_strip_modules () {
	for p in ${PACKAGES}; do
		if test -e ${WORKDIR}/install/$p/lib/modules; then
			modules="`find ${WORKDIR}/install/$p/lib/modules -name \*${KERNEL_OBJECT_SUFFIX}`"
			if [ -n "$modules" ]; then
				# -g removes only debugging symbols, leaving the
				# symbols the module loader needs.
				${STRIP} -v -g $modules
#				NM="${CROSS_DIR}/bin/${HOST_PREFIX}nm" OBJCOPY="${CROSS_DIR}/bin/${HOST_PREFIX}objcopy" strip_module $modules
			fi
		fi
	done
}
|
||||
|
||||
# Run module stripping after packaging unless the recipe opted out via
# INHIBIT_PACKAGE_STRIP.
python do_package_append () {
	if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1) != '1'):
		bb.build.exec_func('do_strip_modules', d)
}
|
||||
53
meta/classes/mozilla.bbclass
Normal file
53
meta/classes/mozilla.bbclass
Normal file
@@ -0,0 +1,53 @@
|
||||
SECTION = "x11/utils"
|
||||
DEPENDS += "gnu-config-native virtual/libintl xt libxi \
|
||||
zip-native gtk+ orbit2 libidl-native"
|
||||
LICENSE = "MPL NPL"
|
||||
SRC_URI += "file://mozconfig"
|
||||
|
||||
inherit gettext
|
||||
|
||||
EXTRA_OECONF = "--target=${TARGET_SYS} --host=${BUILD_SYS} \
|
||||
--build=${BUILD_SYS} --prefix=${prefix}"
|
||||
EXTRA_OEMAKE = "'HOST_LIBIDL_LIBS=${HOST_LIBIDL_LIBS}' \
|
||||
'HOST_LIBIDL_CFLAGS=${HOST_LIBIDL_CFLAGS}'"
|
||||
SELECTED_OPTIMIZATION = "-Os -fsigned-char -fno-strict-aliasing"
|
||||
|
||||
export CROSS_COMPILE = "1"
|
||||
export MOZCONFIG = "${WORKDIR}/mozconfig"
|
||||
export MOZ_OBJDIR = "${S}"
|
||||
|
||||
export CONFIGURE_ARGS = "${EXTRA_OECONF}"
|
||||
export HOST_LIBIDL_CFLAGS = "`${HOST_LIBIDL_CONFIG} --cflags`"
|
||||
export HOST_LIBIDL_LIBS = "`${HOST_LIBIDL_CONFIG} --libs`"
|
||||
export HOST_LIBIDL_CONFIG = "PKG_CONFIG_PATH=${STAGING_BINDIR}/../share/pkgconfig pkg-config libIDL-2.0"
|
||||
export HOST_CC = "${BUILD_CC}"
|
||||
export HOST_CXX = "${BUILD_CXX}"
|
||||
export HOST_CFLAGS = "${BUILD_CFLAGS}"
|
||||
export HOST_CXXFLAGS = "${BUILD_CXXFLAGS}"
|
||||
export HOST_LDFLAGS = "${BUILD_LDFLAGS}"
|
||||
export HOST_RANLIB = "${BUILD_RANLIB}"
|
||||
export HOST_AR = "${BUILD_AR}"
|
||||
|
||||
# Replace every config.guess/config.sub in the tree with the staged
# gnu-config copies, then let Mozilla's client.mk drive configure.
mozilla_do_configure() {
	(
	# Abort the subshell on the first failed install.
	set -e
	for cg in `find ${S} -name config.guess`; do
		install -m 0755 \
		${STAGING_BINDIR}/../share/gnu-config/config.guess \
		${STAGING_BINDIR}/../share/gnu-config/config.sub \
		`dirname $cg`/
	done
	)
	oe_runmake -f client.mk ${MOZ_OBJDIR}/Makefile \
		${MOZ_OBJDIR}/config.status
}
|
||||
|
||||
# Build everything through Mozilla's client.mk driver makefile.
mozilla_do_compile() {
	oe_runmake -f client.mk build_all
}
|
||||
|
||||
# Install into ${D}; both DESTDIR and destdir are passed so either
# spelling used by the makefiles takes effect.
mozilla_do_install() {
	oe_runmake DESTDIR="${D}" destdir="${D}" install
}
|
||||
|
||||
EXPORT_FUNCTIONS do_configure do_compile do_install
|
||||
22
meta/classes/multimachine.bbclass
Normal file
22
meta/classes/multimachine.bbclass
Normal file
@@ -0,0 +1,22 @@
|
||||
STAMP = "${TMPDIR}/stamps/${MULTIMACH_ARCH}-${HOST_OS}/${PF}"
|
||||
WORKDIR = "${TMPDIR}/work/${MULTIMACH_ARCH}-${HOST_OS}/${PF}"
|
||||
STAGING_KERNEL_DIR = "${STAGING_DIR}/${MULTIMACH_ARCH}-${HOST_OS}/kernel"
|
||||
|
||||
# Find any machine specific sub packages and if present, mark the
|
||||
# whole package as machine specific for multimachine purposes.
|
||||
python __anonymous () {
	# Derive MULTIMACH_ARCH (used above in STAMP/WORKDIR paths): if any
	# sub-package is marked with the machine architecture, treat the
	# whole recipe as machine specific.
	packages = bb.data.getVar('PACKAGES', d, 1).split()
	macharch = bb.data.getVar('MACHINE_ARCH', d, 1)
	multiarch = bb.data.getVar('PACKAGE_ARCH', d, 1)

	for pkg in packages:
		pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)

		# We could look for != PACKAGE_ARCH here but how to choose
		# if multiple differences are present?
		# Look through IPKG_ARCHS for the priority order?
		if pkgarch and pkgarch == macharch:
			multiarch = macharch

	bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
}
|
||||
95
meta/classes/native.bbclass
Normal file
95
meta/classes/native.bbclass
Normal file
@@ -0,0 +1,95 @@
|
||||
inherit base
|
||||
|
||||
# Native packages are built indirectly via dependency,
|
||||
# no need for them to be a direct target of 'world'
|
||||
EXCLUDE_FROM_WORLD = "1"
|
||||
|
||||
PACKAGES = ""
|
||||
PACKAGE_ARCH = "${BUILD_ARCH}"
|
||||
|
||||
# When this class has packaging enabled, setting
|
||||
# RPROVIDES becomes unnecessary.
|
||||
RPROVIDES = "${PN}"
|
||||
|
||||
# Need to resolve package RDEPENDS as well as DEPENDS
|
||||
BUILD_ALL_DEPS = "1"
|
||||
|
||||
# Break the circular dependency as a result of DEPENDS
|
||||
# in package.bbclass
|
||||
PACKAGE_DEPENDS = ""
|
||||
|
||||
TARGET_ARCH = "${BUILD_ARCH}"
|
||||
TARGET_OS = "${BUILD_OS}"
|
||||
TARGET_VENDOR = "${BUILD_VENDOR}"
|
||||
TARGET_PREFIX = "${BUILD_PREFIX}"
|
||||
TARGET_CC_ARCH = "${BUILD_CC_ARCH}"
|
||||
|
||||
HOST_ARCH = "${BUILD_ARCH}"
|
||||
HOST_OS = "${BUILD_OS}"
|
||||
HOST_VENDOR = "${BUILD_VENDOR}"
|
||||
HOST_PREFIX = "${BUILD_PREFIX}"
|
||||
HOST_CC_ARCH = "${BUILD_CC_ARCH}"
|
||||
|
||||
CPPFLAGS = "${BUILD_CPPFLAGS}"
|
||||
CFLAGS = "${BUILD_CFLAGS}"
|
||||
CXXFLAGS = "${BUILD_CFLAGS}"
|
||||
LDFLAGS = "${BUILD_LDFLAGS}"
|
||||
LDFLAGS_build-darwin = "-L${STAGING_DIR}/${BUILD_SYS}/lib "
|
||||
|
||||
|
||||
# set the compiler as well. It could have been set to something else
|
||||
export CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_ARCH}"
|
||||
export CXX = "${CCACHE}${HOST_PREFIX}g++ ${HOST_CC_ARCH}"
|
||||
export F77 = "${CCACHE}${HOST_PREFIX}g77 ${HOST_CC_ARCH}"
|
||||
export CPP = "${HOST_PREFIX}gcc -E"
|
||||
export LD = "${HOST_PREFIX}ld"
|
||||
export CCLD = "${CC}"
|
||||
export AR = "${HOST_PREFIX}ar"
|
||||
export AS = "${HOST_PREFIX}as"
|
||||
export RANLIB = "${HOST_PREFIX}ranlib"
|
||||
export STRIP = "${HOST_PREFIX}strip"
|
||||
|
||||
|
||||
# Path prefixes
|
||||
base_prefix = "${exec_prefix}"
|
||||
prefix = "${STAGING_DIR}"
|
||||
exec_prefix = "${STAGING_DIR}/${BUILD_ARCH}-${BUILD_OS}"
|
||||
|
||||
# Base paths
|
||||
base_bindir = "${base_prefix}/bin"
|
||||
base_sbindir = "${base_prefix}/bin"
|
||||
base_libdir = "${base_prefix}/lib"
|
||||
|
||||
# Architecture independent paths
|
||||
sysconfdir = "${prefix}/etc"
|
||||
sharedstatedir = "${prefix}/com"
|
||||
localstatedir = "${prefix}/var"
|
||||
infodir = "${datadir}/info"
|
||||
mandir = "${datadir}/man"
|
||||
docdir = "${datadir}/doc"
|
||||
servicedir = "${prefix}/srv"
|
||||
|
||||
# Architecture dependent paths
|
||||
bindir = "${exec_prefix}/bin"
|
||||
sbindir = "${exec_prefix}/bin"
|
||||
libexecdir = "${exec_prefix}/libexec"
|
||||
libdir = "${exec_prefix}/lib"
|
||||
includedir = "${exec_prefix}/include"
|
||||
oldincludedir = "${exec_prefix}/include"
|
||||
|
||||
# Datadir is made arch dependent here, primarily
|
||||
# for autoconf macros, and other things that
|
||||
# may be manipulated to handle crosscompilation
|
||||
# issues.
|
||||
datadir = "${exec_prefix}/share"
|
||||
|
||||
# Native recipes install directly into the staging prefix, so staging
# is just `make install` unless the recipe inhibits it.
do_stage () {
	if [ "${INHIBIT_NATIVE_STAGE_INSTALL}" != "1" ]
	then
		oe_runmake install
	fi
}
|
||||
|
||||
# Nothing to install for native packages; do_stage does the work.
do_install () {
	true
}
|
||||
18
meta/classes/nslu2-jffs2-image.bbclass
Normal file
18
meta/classes/nslu2-jffs2-image.bbclass
Normal file
@@ -0,0 +1,18 @@
|
||||
NSLU2_SLUGIMAGE_ARGS ?= ""
|
||||
|
||||
# Assemble a flashable NSLU2 firmware image with slugimage from the
# RedBoot/SysConf/Trailer blobs plus the kernel and JFFS2 rootfs.
nslu2_pack_image () {
	# Scratch directory for the image parts.
	install -d ${DEPLOY_DIR_IMAGE}/slug
	install -m 0644 ${STAGING_LIBDIR}/nslu2-binaries/RedBoot \
			${STAGING_LIBDIR}/nslu2-binaries/Trailer \
			${STAGING_LIBDIR}/nslu2-binaries/SysConf \
			${DEPLOY_DIR_IMAGE}/slug/
	install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-${IMAGE_BASENAME} ${DEPLOY_DIR_IMAGE}/slug/vmlinuz
	install -m 0644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.jffs2 ${DEPLOY_DIR_IMAGE}/slug/flashdisk.jffs2
	cd ${DEPLOY_DIR_IMAGE}/slug
	slugimage -p -b RedBoot -s SysConf -r Ramdisk:1,Flashdisk:flashdisk.jffs2 -t Trailer \
		-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.flashdisk.img ${NSLU2_SLUGIMAGE_ARGS}
	# Remove the scratch directory again.
	rm -rf ${DEPLOY_DIR_IMAGE}/slug
}
|
||||
|
||||
EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware'
|
||||
IMAGE_POSTPROCESS_COMMAND += "nslu2_pack_image; "
|
||||
4
meta/classes/nslu2-mirrors.bbclass
Normal file
4
meta/classes/nslu2-mirrors.bbclass
Normal file
@@ -0,0 +1,4 @@
|
||||
MIRRORS_append () {
|
||||
ftp://.*/.*/ http://sources.nslu2-linux.org/sources/
|
||||
http://.*/.*/ http://sources.nslu2-linux.org/sources/
|
||||
}
|
||||
18
meta/classes/nslu2-ramdisk-image.bbclass
Normal file
18
meta/classes/nslu2-ramdisk-image.bbclass
Normal file
@@ -0,0 +1,18 @@
|
||||
NSLU2_SLUGIMAGE_ARGS ?= ""
|
||||
|
||||
nslu2_pack_image () {
|
||||
install -d ${DEPLOY_DIR_IMAGE}/slug
|
||||
install -m 0644 ${STAGING_LIBDIR}/nslu2-binaries/RedBoot \
|
||||
${STAGING_LIBDIR}/nslu2-binaries/Trailer \
|
||||
${STAGING_LIBDIR}/nslu2-binaries/SysConf \
|
||||
${DEPLOY_DIR_IMAGE}/slug/
|
||||
install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-${IMAGE_BASENAME} ${DEPLOY_DIR_IMAGE}/slug/vmlinuz
|
||||
install -m 0644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ext2.gz ${DEPLOY_DIR_IMAGE}/slug/ramdisk.ext2.gz
|
||||
cd ${DEPLOY_DIR_IMAGE}/slug
|
||||
slugimage -p -b RedBoot -s SysConf -r Ramdisk:ramdisk.ext2.gz -t Trailer \
|
||||
-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.ramdisk.img ${NSLU2_SLUGIMAGE_ARGS}
|
||||
rm -rf ${DEPLOY_DIR_IMAGE}/slug
|
||||
}
|
||||
|
||||
EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware'
|
||||
IMAGE_POSTPROCESS_COMMAND += "nslu2_pack_image; "
|
||||
6
meta/classes/nylon-mirrors.bbclass
Normal file
6
meta/classes/nylon-mirrors.bbclass
Normal file
@@ -0,0 +1,6 @@
|
||||
MIRRORS_append () {
|
||||
ftp://.*/.*/ http://meshcube.org/nylon/stable/sources/
|
||||
http://.*/.*/ http://meshcube.org/nylon/stable/sources/
|
||||
ftp://.*/.*/ http://meshcube.org/nylon/unstable/sources/
|
||||
http://.*/.*/ http://meshcube.org/nylon/unstable/sources/
|
||||
}
|
||||
16
meta/classes/oebuildstamp.bbclass
Normal file
16
meta/classes/oebuildstamp.bbclass
Normal file
@@ -0,0 +1,16 @@
|
||||
#
|
||||
# Because base.oeclasses set the variable
|
||||
#
|
||||
# do_fetch[nostamp] = "1"
|
||||
# do_build[nostamp] = "1"
|
||||
#
|
||||
# for every build we're doing oemake calls all of the phases to check if
|
||||
# something new is to download. This class unset's this nostamp flag. This
|
||||
# makes a package "finished", once it's completely build.
|
||||
#
|
||||
# This means that the subsequent builds are faster, but when you change the
|
||||
# behaviour of the package, e.g. by adding INHERIT="package_ipk", you won't
|
||||
# get the ipk file except you delete the build stamp manually or all of them
|
||||
# with oebuild clean <oe-file>.
|
||||
|
||||
do_build[nostamp] = ""
|
||||
174
meta/classes/oelint.bbclass
Normal file
174
meta/classes/oelint.bbclass
Normal file
@@ -0,0 +1,174 @@
|
||||
addtask lint before do_fetch
|
||||
do_lint[nostamp] = 1
|
||||
python do_lint() {
|
||||
def testVar(var, explain=None):
|
||||
try:
|
||||
s = d[var]
|
||||
return s["content"]
|
||||
except KeyError:
|
||||
bb.error("%s is not set" % var)
|
||||
if explain: bb.note(explain)
|
||||
return None
|
||||
|
||||
|
||||
##############################
|
||||
# Test that DESCRIPTION exists
|
||||
#
|
||||
testVar("DESCRIPTION")
|
||||
|
||||
|
||||
##############################
|
||||
# Test that HOMEPAGE exists
|
||||
#
|
||||
s = testVar("HOMEPAGE")
|
||||
if s=="unknown":
|
||||
bb.error("HOMEPAGE is not set")
|
||||
elif not s.startswith("http://"):
|
||||
bb.error("HOMEPAGE doesn't start with http://")
|
||||
|
||||
|
||||
|
||||
##############################
|
||||
# Test for valid LICENSE
|
||||
#
|
||||
valid_licenses = {
|
||||
"GPL-2" : "GPLv2",
|
||||
"GPL LGPL FDL" : True,
|
||||
"GPL PSF" : True,
|
||||
"GPL/QPL" : True,
|
||||
"GPL" : True,
|
||||
"GPLv2" : True,
|
||||
"IBM" : True,
|
||||
"LGPL GPL" : True,
|
||||
"LGPL" : True,
|
||||
"MIT" : True,
|
||||
"OSL" : True,
|
||||
"Perl" : True,
|
||||
"Public Domain" : True,
|
||||
"QPL" : "GPL/QPL",
|
||||
}
|
||||
s = testVar("LICENSE")
|
||||
if s=="unknown":
|
||||
bb.error("LICENSE is not set")
|
||||
elif s.startswith("Vendor"):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
newlic = valid_licenses[s]
|
||||
if newlic == False:
|
||||
bb.note("LICENSE '%s' is not recommended" % s)
|
||||
elif newlic != True:
|
||||
bb.note("LICENSE '%s' is not recommended, better use '%s'" % (s, newsect))
|
||||
except:
|
||||
bb.note("LICENSE '%s' is not recommended" % s)
|
||||
|
||||
|
||||
##############################
|
||||
# Test for valid MAINTAINER
|
||||
#
|
||||
s = testVar("MAINTAINER")
|
||||
if s=="OpenEmbedded Team <oe@handhelds.org>":
|
||||
bb.error("explicit MAINTAINER is missing, using default")
|
||||
elif s and s.find("@") == -1:
|
||||
bb.error("You forgot to put an e-mail address into MAINTAINER")
|
||||
|
||||
|
||||
##############################
|
||||
# Test for valid SECTION
|
||||
#
|
||||
# if Correct section: True section name is valid
|
||||
# False section name is invalid, no suggestion
|
||||
# string section name is invalid, better name suggested
|
||||
#
|
||||
valid_sections = {
|
||||
# Current Section Correct section
|
||||
"apps" : True,
|
||||
"audio" : True,
|
||||
"base" : True,
|
||||
"console/games" : True,
|
||||
"console/net" : "console/network",
|
||||
"console/network" : True,
|
||||
"console/utils" : True,
|
||||
"devel" : True,
|
||||
"developing" : "devel",
|
||||
"devel/python" : True,
|
||||
"fonts" : True,
|
||||
"games" : True,
|
||||
"games/libs" : True,
|
||||
"gnome/base" : True,
|
||||
"gnome/libs" : True,
|
||||
"gpe" : True,
|
||||
"gpe/libs" : True,
|
||||
"gui" : False,
|
||||
"libc" : "libs",
|
||||
"libs" : True,
|
||||
"libs/net" : True,
|
||||
"multimedia" : True,
|
||||
"net" : "network",
|
||||
"NET" : "network",
|
||||
"network" : True,
|
||||
"opie/applets" : True,
|
||||
"opie/applications" : True,
|
||||
"opie/base" : True,
|
||||
"opie/codecs" : True,
|
||||
"opie/decorations" : True,
|
||||
"opie/fontfactories" : True,
|
||||
"opie/fonts" : True,
|
||||
"opie/games" : True,
|
||||
"opie/help" : True,
|
||||
"opie/inputmethods" : True,
|
||||
"opie/libs" : True,
|
||||
"opie/multimedia" : True,
|
||||
"opie/pim" : True,
|
||||
"opie/setting" : "opie/settings",
|
||||
"opie/settings" : True,
|
||||
"opie/Shell" : False,
|
||||
"opie/styles" : True,
|
||||
"opie/today" : True,
|
||||
"scientific" : True,
|
||||
"utils" : True,
|
||||
"x11" : True,
|
||||
"x11/libs" : True,
|
||||
"x11/wm" : True,
|
||||
}
|
||||
s = testVar("SECTION")
|
||||
if s:
|
||||
try:
|
||||
newsect = valid_sections[s]
|
||||
if newsect == False:
|
||||
bb.note("SECTION '%s' is not recommended" % s)
|
||||
elif newsect != True:
|
||||
bb.note("SECTION '%s' is not recommended, better use '%s'" % (s, newsect))
|
||||
except:
|
||||
bb.note("SECTION '%s' is not recommended" % s)
|
||||
|
||||
if not s.islower():
|
||||
bb.error("SECTION should only use lower case")
|
||||
|
||||
|
||||
|
||||
|
||||
##############################
|
||||
# Test for valid PRIORITY
|
||||
#
|
||||
valid_priorities = {
|
||||
"standard" : True,
|
||||
"required" : True,
|
||||
"optional" : True,
|
||||
"extra" : True,
|
||||
}
|
||||
s = testVar("PRIORITY")
|
||||
if s:
|
||||
try:
|
||||
newprio = valid_priorities[s]
|
||||
if newprio == False:
|
||||
bb.note("PRIORITY '%s' is not recommended" % s)
|
||||
elif newprio != True:
|
||||
bb.note("PRIORITY '%s' is not recommended, better use '%s'" % (s, newprio))
|
||||
except:
|
||||
bb.note("PRIORITY '%s' is not recommended" % s)
|
||||
|
||||
if not s.islower():
|
||||
bb.error("PRIORITY should only use lower case")
|
||||
|
||||
}
|
||||
105
meta/classes/opie.bbclass
Normal file
105
meta/classes/opie.bbclass
Normal file
@@ -0,0 +1,105 @@
|
||||
#
|
||||
# This oeclass takes care about some of the itchy details of installing parts
|
||||
# of Opie applications. Depending on quicklaunch or not, plugin or not, the
|
||||
# TARGET is either a shared object, a shared object with a link to quicklauncher,
|
||||
# or a usual binary.
|
||||
#
|
||||
# You have to provide two things: 1.) A proper SECTION field, and 2.) a proper APPNAME
|
||||
# Then opie.oeclass will:
|
||||
# * create the directory for the binary and install the binary file(s)
|
||||
# * for applications: create the directory for the .desktop and install the .desktop file
|
||||
# * for quicklauncher applications: create the startup symlink to the quicklauncher
|
||||
# You can override the automatic detection of APPTYPE, valid values are 'quicklaunch', 'binary', 'plugin'
|
||||
# You can override the default location of APPDESKTOP (<workdir>/apps/<section>/)
|
||||
#
|
||||
|
||||
inherit palmtop
|
||||
|
||||
# Note that when CVS changes to 1.2.2, the dash
|
||||
# should be removed from OPIE_CVS_PV to convert
|
||||
# to the standardised version format
|
||||
OPIE_CVS_PV = "1.2.1+cvs-${SRCDATE}"
|
||||
|
||||
DEPENDS_prepend = "${@["libopie2 ", ""][(bb.data.getVar('PN', d, 1) == 'libopie2')]}"
|
||||
|
||||
# to be consistent, put all targets into workdir
|
||||
# NOTE: leave one space at the end, other files are expecting that
|
||||
EXTRA_QMAKEVARS_POST += "DESTDIR=${S} "
|
||||
|
||||
# Opie standard TAG value
|
||||
TAG = "${@'v' + bb.data.getVar('PV',d,1).replace('.', '_')}"
|
||||
|
||||
# plan for later:
|
||||
# add common scopes for opie applications, see qmake-native/common.pro
|
||||
# qmake should care about all the details then. qmake can do that, i know it :)
|
||||
#
|
||||
|
||||
python opie_do_opie_install() {
|
||||
import os, shutil
|
||||
section = bb.data.getVar( "SECTION", d ).split( '/' )[1] or "Applications"
|
||||
section = section.title()
|
||||
if section in ( "Base", "Libs" ):
|
||||
bb.note( "Section = Base or Libs. Target won't be installed automatically." )
|
||||
return
|
||||
|
||||
# SECTION : BINDIR DESKTOPDIR
|
||||
dirmap = { "Applets" : ( "/plugins/applets", None ),
|
||||
"Applications" : ( "<BINDIR>", "/apps/Applications" ),
|
||||
"Multimedia" : ( "<BINDIR>", "/apps/Applications" ),
|
||||
"Games" : ( "<BINDIR>", "/apps/Games" ),
|
||||
"Settings" : ( "<BINDIR>", "/apps/Settings" ),
|
||||
"Pim" : ( "<BINDIR>", "/apps/1Pim" ),
|
||||
"Examples" : ( "<BINDIR>", "/apps/Examples" ),
|
||||
"Shell" : ( "/bin", "/apps/Opie-SH" ),
|
||||
"Codecs" : ( "/plugins/codecs", None ),
|
||||
"Decorations" : ( "/plugins/decorations", None ),
|
||||
"Inputmethods" : ( "/plugins/inputmethods", None ),
|
||||
"Fontfactories" : ( "/plugins/fontfactories", None ),
|
||||
"Security" : ( "/plugins/security", None ),
|
||||
"Styles" : ( "/plugins/styles", None ),
|
||||
"Today" : ( "/plugins/today", None ),
|
||||
"Datebook" : ( "/plugins/holidays", None ),
|
||||
"Networksettings" : ( "/plugins/networksettings", None ) }
|
||||
|
||||
if section not in dirmap:
|
||||
raise ValueError, "Unknown section '%s'. Valid sections are: %s" % ( section, dirmap.keys() )
|
||||
|
||||
bindir, desktopdir = dirmap[section]
|
||||
APPNAME = bb.data.getVar( "APPNAME", d, True ) or bb.data.getVar( "PN", d, True )
|
||||
APPTYPE = bb.data.getVar( "APPTYPE", d, True )
|
||||
if not APPTYPE:
|
||||
if bindir == "<BINDIR>":
|
||||
APPTYPE = "quicklaunch"
|
||||
else:
|
||||
APPTYPE = "plugin"
|
||||
|
||||
appmap = { "binary":"/bin", "quicklaunch":"/plugins/application" }
|
||||
if bindir == "<BINDIR>": bindir = appmap[APPTYPE]
|
||||
|
||||
bb.note( "Section='%s', bindir='%s', desktopdir='%s', name='%s', type='%s'" %
|
||||
( section, bindir, desktopdir, APPNAME, APPTYPE ) )
|
||||
|
||||
S = bb.data.getVar( "S", d, 1 )
|
||||
D = "%s/image" % bb.data.getVar( "WORKDIR", d, True )
|
||||
WORKDIR = bb.data.getVar( "WORKDIR", d, True )
|
||||
palmtopdir = bb.data.getVar( "palmtopdir", d )
|
||||
APPDESKTOP = bb.data.getVar( "APPDESKTOP", d, True ) or "%s/%s" % ( WORKDIR, desktopdir )
|
||||
|
||||
if desktopdir is not None:
|
||||
os.system( "install -d %s%s%s/" % ( D, palmtopdir, desktopdir ) )
|
||||
os.system( "install -m 0644 %s/%s.desktop %s%s%s/" % ( APPDESKTOP, APPNAME, D, palmtopdir, desktopdir ) )
|
||||
|
||||
os.system( "install -d %s%s%s/" % ( D, palmtopdir, bindir ) )
|
||||
|
||||
if APPTYPE == "binary":
|
||||
os.system( "install -m 0755 %s/%s %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
|
||||
elif APPTYPE == "quicklaunch":
|
||||
os.system( "install -m 0755 %s/lib%s.so %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
|
||||
os.system( "install -d %s%s/bin/" % ( D, palmtopdir ) )
|
||||
os.system( "ln -sf %s/bin/quicklauncher %s%s/bin/%s" % ( palmtopdir, D, palmtopdir, APPNAME ) )
|
||||
elif APPTYPE == "plugin":
|
||||
os.system( "install -m 0755 %s/lib%s.so %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_opie_install
|
||||
addtask opie_install after do_compile before do_populate_staging
|
||||
163
meta/classes/opie_i18n.bbclass
Normal file
163
meta/classes/opie_i18n.bbclass
Normal file
@@ -0,0 +1,163 @@
|
||||
# classes/opie_i18n.oeclass Matthias 'CoreDump' Hentges 16-10-2004
|
||||
#
|
||||
# Automatically builds i18n ipks for opie packages. It downloads opie-i18n from opie CVS
|
||||
# and tries to guess the name of the .ts file based on the package name:
|
||||
# ${PN}.ts, lib${PN}.ts and opie-${PN}.ts are all valid. The .ts "guessing" can be
|
||||
# disabled by setting I18N_FILES in the .oe file.
|
||||
#
|
||||
# Todo:
|
||||
#
|
||||
|
||||
I18N_STATS = "1"
|
||||
SRC_URI += "${HANDHELDS_CVS};module=opie/i18n"
|
||||
DEPENDS += "opie-i18n"
|
||||
|
||||
die () {
|
||||
echo -e "opie_18n: ERROR: $1"
|
||||
exit 1
|
||||
}
|
||||
|
||||
python do_build_opie_i18n_data() {
|
||||
|
||||
import os, bb, re
|
||||
workdir = bb.data.getVar("WORKDIR", d, 1)
|
||||
packages = bb.data.getVar("PACKAGES", d, 1)
|
||||
files = bb.data.getVar("FILES", d, 1)
|
||||
section = bb.data.getVar("SECTION", d, 1)
|
||||
pn = bb.data.getVar("PN", d, 1)
|
||||
rdepends = bb.data.getVar("RDEPENDS", d, 1)
|
||||
|
||||
if os.path.exists(workdir + "/PACKAGES.tmp"):
|
||||
fd = open(workdir + "/PACKAGES.tmp", 'r')
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
|
||||
bb.data.setVar('PACKAGES', " ".join(lines).lower() + " " + packages, d)
|
||||
|
||||
fd = open(workdir + "/FILES.tmp", 'r')
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
|
||||
for l in lines:
|
||||
x = re.split("\#", l)
|
||||
bb.data.setVar('FILES_%s' % x[0].lower(), " " + x[1].strip('\n'), d)
|
||||
bb.data.setVar('SECTION_%s' % x[0].lower(), "opie/translations", d)
|
||||
bb.data.setVar('RDEPENDS_%s' % x[0].lower(), pn, d)
|
||||
|
||||
bb.data.setVar('SECTION_%s' % pn, section, d)
|
||||
bb.data.setVar('RDEPENDS', rdepends, d)
|
||||
else:
|
||||
bb.note("No translations found for package " + pn)
|
||||
}
|
||||
|
||||
do_build_opie_i18n () {
|
||||
|
||||
cd "${WORKDIR}/i18n" || die "ERROR:\nCouldn't find Opies i18n sources in ${PN}/i18n\nMake sure that <inherit opie_i18n> or <inherit opie> is *below* <SRC_URIS =>!"
|
||||
|
||||
if test -z "${I18N_FILES}"
|
||||
then
|
||||
package_name="`echo "${PN}"| sed "s/^opie\-//"`"
|
||||
package_name2="`echo "${PN}"| sed "s/^opie\-//;s/\-//"`"
|
||||
test "$package_name" != "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts ${package_name2}.ts lib${package_name2}.ts opie-${package_name2}.ts"
|
||||
test "$package_name" = "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts"
|
||||
echo -e "I18N Datafiles: ${I18N_FILES} (auto-detected)\nYou can overide the auto-detection by setting I18N_FILES in your .oe file"
|
||||
else
|
||||
echo "I18N Datafiles: ${I18N_FILES} (provided by .bb)"
|
||||
fi
|
||||
|
||||
rm -f "${WORKDIR}/FILES.tmp" "${WORKDIR}/PACKAGES.tmp"
|
||||
|
||||
echo -e "\nFILES is set to [${FILES}]\n"
|
||||
|
||||
for file in ${I18N_FILES}
|
||||
do
|
||||
echo "Working on [$file]"
|
||||
for ts_file in `ls -1 */*.ts | egrep "/$file"`
|
||||
do
|
||||
echo -e "\tCompiling [$ts_file]"
|
||||
cd "${WORKDIR}/i18n/`dirname $ts_file`" || die "[${WORKDIR}/i18n/`dirname $ts_file`] not found"
|
||||
opie-lrelease "`basename $ts_file`" || die "lrelease failed! Make sure that <inherit opie_i18n> or <inherit opie> is *below* <DEPENDS =>!"
|
||||
|
||||
# $lang is the language as in de_DE, $lang_sane replaces "_" with "-"
|
||||
# to allow packaging as "_" is not allowed in a package name
|
||||
lang="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"`"
|
||||
lang_sane="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"|sed s/\_/\-/`"
|
||||
echo -e "\tPackaging [`basename $ts_file`] for language [$lang]"
|
||||
|
||||
install -d ${D}${palmtopdir}/i18n/$lang
|
||||
install -m 0644 ${WORKDIR}/i18n/$lang/.directory ${D}${palmtopdir}/i18n/$lang/
|
||||
install -m 0644 ${WORKDIR}/i18n/$lang/*.qm "${D}${palmtopdir}/i18n/$lang/"
|
||||
|
||||
# As it is not possible to modify OE vars from within a _shell_ function,
|
||||
# some major hacking was needed. These two files will be read by the python
|
||||
# function do_build_opie_i18n_data() which sets the variables FILES_* and
|
||||
# PACKAGES as needed.
|
||||
echo -n "${PN}-${lang_sane} " >> "${WORKDIR}/PACKAGES.tmp"
|
||||
echo -e "${PN}-${lang_sane}#${palmtopdir}/i18n/$lang" >> "${WORKDIR}/FILES.tmp"
|
||||
|
||||
ts_found_something=1
|
||||
done
|
||||
|
||||
if test "$ts_found_something" != 1
|
||||
then
|
||||
echo -e "\tNo translations found"
|
||||
else
|
||||
ts_found_something=""
|
||||
ts_found="$ts_found $file"
|
||||
fi
|
||||
|
||||
# Only used for debugging purposes
|
||||
test "${I18N_STATS}" = 1 && cd "${WORKDIR}/i18n"
|
||||
|
||||
echo -e "Completed [$file]\n\n"
|
||||
done
|
||||
|
||||
qt_dirs="apps bin etc lib pics plugins share sounds"
|
||||
|
||||
for dir in $qt_dirs
|
||||
do
|
||||
dir_="$dir_ ${palmtopdir}/$dir "
|
||||
done
|
||||
|
||||
|
||||
# If we don't adjust FILES to exclude the i18n directory, we will end up with
|
||||
# _lots_ of empty i18n/$lang directories in the original .ipk.
|
||||
if (echo "${FILES}" | egrep "${palmtopdir}/? |${palmtopdir}/?$") &>/dev/null
|
||||
then
|
||||
echo "NOTE: FILES was set to ${palmtopdir} which would include the i18n directory"
|
||||
echo -e "\n\nI'll remove ${palmtopdir} from FILES and replace it with all directories"
|
||||
echo "below QtPalmtop, except i18n ($qt_dirs). See classes/opie_i18n.oeclass for details"
|
||||
|
||||
# Removes /opt/QtPalmtop from FILES but keeps /opt/QtPalmtop/$some_dir
|
||||
FILES="`echo "$FILES"| sed "s#${palmtopdir}[/]\?\$\|${palmtopdir}[/]\? ##"`"
|
||||
|
||||
echo "${PN}#$FILES $dir_" >> "${WORKDIR}/FILES.tmp"
|
||||
fi
|
||||
|
||||
# This is the common case for OPIE apps which are installed by opie.oeclass magic
|
||||
if test -z "${FILES}"
|
||||
then
|
||||
echo "NOTE:"
|
||||
echo -e "Since FILES is empty, i'll add all directories below ${palmtopdir} to it,\nexcluding i18n: ( $qt_dirs )"
|
||||
echo "${PN}#$FILES $dir_" >> "${WORKDIR}/FILES.tmp"
|
||||
fi
|
||||
|
||||
if ! test -e "${WORKDIR}/PACKAGES.tmp" -a "${I18N_STATS}" = 1
|
||||
then
|
||||
echo "No translations for package [${PN}]" >> /tmp/oe-i18n-missing.log
|
||||
else
|
||||
echo "Using [$ts_found ] for package [${PN}]" >> /tmp/oe-i18n.log
|
||||
fi
|
||||
|
||||
# While this might not be very elegant, it safes a _ton_ of space (~30Mb) for
|
||||
# each opie package.
|
||||
for file in $(ls */*.ts | egrep -v "`echo "$ts_found"| sed "s/^\ //;s/\ /\|/"`")
|
||||
do
|
||||
rm "$file"
|
||||
done
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
addtask build_opie_i18n before do_compile
|
||||
addtask build_opie_i18n_data after do_build_opie_i18n before do_compile
|
||||
729
meta/classes/package.bbclass
Normal file
729
meta/classes/package.bbclass
Normal file
@@ -0,0 +1,729 @@
|
||||
def legitimize_package_name(s):
|
||||
return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
|
||||
|
||||
STAGING_PKGMAPS_DIR ?= "${STAGING_DIR}/pkgmaps"
|
||||
|
||||
def add_package_mapping (pkg, new_name, d):
|
||||
import bb, os
|
||||
|
||||
def encode(str):
|
||||
import codecs
|
||||
c = codecs.getencoder("string_escape")
|
||||
return c(str)[0]
|
||||
|
||||
pmap_dir = bb.data.getVar('STAGING_PKGMAPS_DIR', d, 1)
|
||||
|
||||
bb.mkdirhier(pmap_dir)
|
||||
|
||||
data_file = os.path.join(pmap_dir, pkg)
|
||||
|
||||
f = open(data_file, 'w')
|
||||
f.write("%s\n" % encode(new_name))
|
||||
f.close()
|
||||
|
||||
def get_package_mapping (pkg, d):
|
||||
import bb, os
|
||||
|
||||
def decode(str):
|
||||
import codecs
|
||||
c = codecs.getdecoder("string_escape")
|
||||
return c(str)[0]
|
||||
|
||||
data_file = bb.data.expand("${STAGING_PKGMAPS_DIR}/%s" % pkg, d)
|
||||
|
||||
if os.access(data_file, os.R_OK):
|
||||
f = file(data_file, 'r')
|
||||
lines = f.readlines()
|
||||
f.close()
|
||||
for l in lines:
|
||||
return decode(l).strip()
|
||||
return pkg
|
||||
|
||||
def runtime_mapping_rename (varname, d):
|
||||
import bb, os
|
||||
|
||||
#bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, 1)))
|
||||
|
||||
new_depends = []
|
||||
for depend in explode_deps(bb.data.getVar(varname, d, 1) or ""):
|
||||
# Have to be careful with any version component of the depend
|
||||
split_depend = depend.split(' (')
|
||||
new_depend = get_package_mapping(split_depend[0].strip(), d)
|
||||
if len(split_depend) > 1:
|
||||
new_depends.append("%s (%s" % (new_depend, split_depend[1]))
|
||||
else:
|
||||
new_depends.append(new_depend)
|
||||
|
||||
bb.data.setVar(varname, " ".join(new_depends) or None, d)
|
||||
|
||||
#bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, 1)))
|
||||
|
||||
python package_mapping_rename_hook () {
|
||||
runtime_mapping_rename("RDEPENDS", d)
|
||||
runtime_mapping_rename("RRECOMMENDS", d)
|
||||
runtime_mapping_rename("RSUGGESTS", d)
|
||||
runtime_mapping_rename("RPROVIDES", d)
|
||||
runtime_mapping_rename("RREPLACES", d)
|
||||
runtime_mapping_rename("RCONFLICTS", d)
|
||||
}
|
||||
|
||||
|
||||
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None):
|
||||
import os, os.path, bb
|
||||
|
||||
dvar = bb.data.getVar('D', d, 1)
|
||||
if not dvar:
|
||||
bb.error("D not defined")
|
||||
return
|
||||
|
||||
packages = bb.data.getVar('PACKAGES', d, 1).split()
|
||||
if not packages:
|
||||
# nothing to do
|
||||
return
|
||||
|
||||
if postinst:
|
||||
postinst = '#!/bin/sh\n' + postinst + '\n'
|
||||
if postrm:
|
||||
postrm = '#!/bin/sh\n' + postrm + '\n'
|
||||
if not recursive:
|
||||
objs = os.listdir(dvar + root)
|
||||
else:
|
||||
objs = []
|
||||
for walkroot, dirs, files in os.walk(dvar + root):
|
||||
for file in files:
|
||||
relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
|
||||
if relpath:
|
||||
objs.append(relpath)
|
||||
|
||||
if extra_depends == None:
|
||||
extra_depends = bb.data.getVar('PKG_' + packages[0], d, 1) or packages[0]
|
||||
|
||||
for o in objs:
|
||||
import re, stat
|
||||
if match_path:
|
||||
m = re.match(file_regex, o)
|
||||
else:
|
||||
m = re.match(file_regex, os.path.basename(o))
|
||||
|
||||
if not m:
|
||||
continue
|
||||
f = os.path.join(dvar + root, o)
|
||||
mode = os.lstat(f).st_mode
|
||||
if not (stat.S_ISREG(mode) or (allow_dirs and stat.S_ISDIR(mode))):
|
||||
continue
|
||||
on = legitimize_package_name(m.group(1))
|
||||
pkg = output_pattern % on
|
||||
if not pkg in packages:
|
||||
if prepend:
|
||||
packages = [pkg] + packages
|
||||
else:
|
||||
packages.append(pkg)
|
||||
the_files = [os.path.join(root, o)]
|
||||
if aux_files_pattern:
|
||||
if type(aux_files_pattern) is list:
|
||||
for fp in aux_files_pattern:
|
||||
the_files.append(fp % on)
|
||||
else:
|
||||
the_files.append(aux_files_pattern % on)
|
||||
if aux_files_pattern_verbatim:
|
||||
if type(aux_files_pattern_verbatim) is list:
|
||||
for fp in aux_files_pattern_verbatim:
|
||||
the_files.append(fp % m.group(1))
|
||||
else:
|
||||
the_files.append(aux_files_pattern_verbatim % m.group(1))
|
||||
bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
|
||||
if extra_depends != '':
|
||||
the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
|
||||
if the_depends:
|
||||
the_depends = '%s %s' % (the_depends, extra_depends)
|
||||
else:
|
||||
the_depends = extra_depends
|
||||
bb.data.setVar('RDEPENDS_' + pkg, the_depends, d)
|
||||
bb.data.setVar('DESCRIPTION_' + pkg, description % on, d)
|
||||
if postinst:
|
||||
bb.data.setVar('pkg_postinst_' + pkg, postinst, d)
|
||||
if postrm:
|
||||
bb.data.setVar('pkg_postrm_' + pkg, postrm, d)
|
||||
else:
|
||||
oldfiles = bb.data.getVar('FILES_' + pkg, d, 1)
|
||||
if not oldfiles:
|
||||
bb.fatal("Package '%s' exists but has no files" % pkg)
|
||||
bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
|
||||
if callable(hook):
|
||||
hook(f, pkg, file_regex, output_pattern, m.group(1))
|
||||
|
||||
bb.data.setVar('PACKAGES', ' '.join(packages), d)
|
||||
|
||||
# Function to strip a single file, called from RUNSTRIP below
|
||||
# A working 'file' (one which works on the target architecture)
|
||||
# is necessary for this stuff to work.
|
||||
#PACKAGE_DEPENDS ?= "file-native"
|
||||
#DEPENDS_prepend =+ "${PACKAGE_DEPENDS} "
|
||||
#FIXME: this should be "" when any errors are gone!
|
||||
IGNORE_STRIP_ERRORS ?= "1"
|
||||
|
||||
runstrip() {
|
||||
local ro st
|
||||
st=0
|
||||
if { file "$1" || {
|
||||
oewarn "file $1: failed (forced strip)" >&2
|
||||
echo 'not stripped'
|
||||
}
|
||||
} | grep -q 'not stripped'
|
||||
then
|
||||
oenote "${STRIP} $1"
|
||||
ro=
|
||||
test -w "$1" || {
|
||||
ro=1
|
||||
chmod +w "$1"
|
||||
}
|
||||
'${STRIP}' "$1"
|
||||
st=$?
|
||||
test -n "$ro" && chmod -w "$1"
|
||||
if test $st -ne 0
|
||||
then
|
||||
oewarn "runstrip: ${STRIP} $1: strip failed" >&2
|
||||
if [ x${IGNORE_STRIP_ERRORS} == x1 ]
|
||||
then
|
||||
#FIXME: remove this, it's for error detection
|
||||
if file "$1" 2>/dev/null >&2
|
||||
then
|
||||
(oefatal "${STRIP} $1: command failed" >/dev/tty)
|
||||
else
|
||||
(oefatal "file $1: command failed" >/dev/tty)
|
||||
fi
|
||||
st=0
|
||||
fi
|
||||
fi
|
||||
else
|
||||
oenote "runstrip: skip $1"
|
||||
fi
|
||||
return $st
|
||||
}
|
||||
|
||||
python populate_packages () {
|
||||
import glob, stat, errno, re
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
if not workdir:
|
||||
bb.error("WORKDIR not defined, unable to package")
|
||||
return
|
||||
|
||||
import os # path manipulations
|
||||
outdir = bb.data.getVar('DEPLOY_DIR', d, 1)
|
||||
if not outdir:
|
||||
bb.error("DEPLOY_DIR not defined, unable to package")
|
||||
return
|
||||
bb.mkdirhier(outdir)
|
||||
|
||||
dvar = bb.data.getVar('D', d, 1)
|
||||
if not dvar:
|
||||
bb.error("D not defined, unable to package")
|
||||
return
|
||||
bb.mkdirhier(dvar)
|
||||
|
||||
packages = bb.data.getVar('PACKAGES', d, 1)
|
||||
if not packages:
|
||||
bb.debug(1, "PACKAGES not defined, nothing to package")
|
||||
return
|
||||
|
||||
pn = bb.data.getVar('PN', d, 1)
|
||||
if not pn:
|
||||
bb.error("PN not defined")
|
||||
return
|
||||
|
||||
os.chdir(dvar)
|
||||
|
||||
def isexec(path):
|
||||
try:
|
||||
s = os.stat(path)
|
||||
except (os.error, AttributeError):
|
||||
return 0
|
||||
return (s[stat.ST_MODE] & stat.S_IEXEC)
|
||||
|
||||
# Sanity check PACKAGES for duplicates - should be moved to
|
||||
# sanity.bbclass once we have he infrastucture
|
||||
pkgs = []
|
||||
for pkg in packages.split():
|
||||
if pkg in pkgs:
|
||||
bb.error("%s is listed in PACKAGES mutliple times. Undefined behaviour will result." % pkg)
|
||||
pkgs += pkg
|
||||
|
||||
for pkg in packages.split():
|
||||
localdata = bb.data.createCopy(d)
|
||||
root = os.path.join(workdir, "install", pkg)
|
||||
|
||||
os.system('rm -rf %s' % root)
|
||||
|
||||
bb.data.setVar('ROOT', '', localdata)
|
||||
bb.data.setVar('ROOT_%s' % pkg, root, localdata)
|
||||
pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
|
||||
if not pkgname:
|
||||
pkgname = pkg
|
||||
bb.data.setVar('PKG', pkgname, localdata)
|
||||
|
||||
overrides = bb.data.getVar('OVERRIDES', localdata, 1)
|
||||
if not overrides:
|
||||
raise bb.build.FuncFailed('OVERRIDES not defined')
|
||||
bb.data.setVar('OVERRIDES', overrides+':'+pkg, localdata)
|
||||
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
root = bb.data.getVar('ROOT', localdata, 1)
|
||||
bb.mkdirhier(root)
|
||||
filesvar = bb.data.getVar('FILES', localdata, 1) or ""
|
||||
files = filesvar.split()
|
||||
stripfunc = ""
|
||||
for file in files:
|
||||
if os.path.isabs(file):
|
||||
file = '.' + file
|
||||
if not os.path.islink(file):
|
||||
if os.path.isdir(file):
|
||||
newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
|
||||
if newfiles:
|
||||
files += newfiles
|
||||
continue
|
||||
globbed = glob.glob(file)
|
||||
if globbed:
|
||||
if [ file ] != globbed:
|
||||
files += globbed
|
||||
continue
|
||||
if (not os.path.islink(file)) and (not os.path.exists(file)):
|
||||
continue
|
||||
fpath = os.path.join(root,file)
|
||||
dpath = os.path.dirname(fpath)
|
||||
bb.mkdirhier(dpath)
|
||||
if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1) != '1') and not os.path.islink(file) and isexec(file):
|
||||
stripfunc += "\trunstrip %s || st=1\n" % fpath
|
||||
ret = bb.movefile(file,fpath)
|
||||
if ret is None or ret == 0:
|
||||
raise bb.build.FuncFailed("File population failed")
|
||||
if not stripfunc == "":
|
||||
from bb import build
|
||||
# strip
|
||||
bb.data.setVar('RUNSTRIP', '\tlocal st\n\tst=0\n%s\treturn $st' % stripfunc, localdata)
|
||||
bb.data.setVarFlag('RUNSTRIP', 'func', 1, localdata)
|
||||
bb.build.exec_func('RUNSTRIP', localdata)
|
||||
del localdata
|
||||
os.chdir(workdir)
|
||||
|
||||
unshipped = []
|
||||
for root, dirs, files in os.walk(dvar):
|
||||
for f in files:
|
||||
path = os.path.join(root[len(dvar):], f)
|
||||
unshipped.append(path)
|
||||
|
||||
if unshipped != []:
|
||||
bb.note("the following files were installed but not shipped in any package:")
|
||||
for f in unshipped:
|
||||
bb.note(" " + f)
|
||||
|
||||
bb.build.exec_func("package_name_hook", d)
|
||||
|
||||
for pkg in packages.split():
|
||||
pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
|
||||
if pkgname is None:
|
||||
bb.data.setVar('PKG_%s' % pkg, pkg, d)
|
||||
else:
|
||||
add_package_mapping(pkg, pkgname, d)
|
||||
|
||||
dangling_links = {}
|
||||
pkg_files = {}
|
||||
for pkg in packages.split():
|
||||
dangling_links[pkg] = []
|
||||
pkg_files[pkg] = []
|
||||
inst_root = os.path.join(workdir, "install", pkg)
|
||||
for root, dirs, files in os.walk(inst_root):
|
||||
for f in files:
|
||||
path = os.path.join(root, f)
|
||||
rpath = path[len(inst_root):]
|
||||
pkg_files[pkg].append(rpath)
|
||||
try:
|
||||
s = os.stat(path)
|
||||
except OSError, (err, strerror):
|
||||
if err != errno.ENOENT:
|
||||
raise
|
||||
target = os.readlink(path)
|
||||
if target[0] != '/':
|
||||
target = os.path.join(root[len(inst_root):], target)
|
||||
dangling_links[pkg].append(os.path.normpath(target))
|
||||
|
||||
for pkg in packages.split():
|
||||
rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "")
|
||||
for l in dangling_links[pkg]:
|
||||
found = False
|
||||
bb.debug(1, "%s contains dangling link %s" % (pkg, l))
|
||||
for p in packages.split():
|
||||
for f in pkg_files[p]:
|
||||
if f == l:
|
||||
found = True
|
||||
bb.debug(1, "target found in %s" % p)
|
||||
if p == pkg:
|
||||
break
|
||||
dp = bb.data.getVar('PKG_' + p, d, 1) or p
|
||||
if not dp in rdepends:
|
||||
rdepends.append(dp)
|
||||
break
|
||||
if found == False:
|
||||
bb.note("%s contains dangling symlink to %s" % (pkg, l))
|
||||
bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
|
||||
|
||||
def write_if_exists(f, pkg, var):
|
||||
def encode(str):
|
||||
import codecs
|
||||
c = codecs.getencoder("string_escape")
|
||||
return c(str)[0]
|
||||
|
||||
val = bb.data.getVar('%s_%s' % (var, pkg), d, 1)
|
||||
if val:
|
||||
f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
|
||||
|
||||
data_file = os.path.join(workdir, "install", pn + ".package")
|
||||
f = open(data_file, 'w')
|
||||
f.write("PACKAGES: %s\n" % packages)
|
||||
for pkg in packages.split():
|
||||
write_if_exists(f, pkg, 'DESCRIPTION')
|
||||
write_if_exists(f, pkg, 'RDEPENDS')
|
||||
write_if_exists(f, pkg, 'RPROVIDES')
|
||||
write_if_exists(f, pkg, 'PKG')
|
||||
write_if_exists(f, pkg, 'ALLOW_EMPTY')
|
||||
write_if_exists(f, pkg, 'FILES')
|
||||
write_if_exists(f, pkg, 'pkg_postinst')
|
||||
write_if_exists(f, pkg, 'pkg_postrm')
|
||||
write_if_exists(f, pkg, 'pkg_preinst')
|
||||
write_if_exists(f, pkg, 'pkg_prerm')
|
||||
f.close()
|
||||
bb.build.exec_func("read_subpackage_metadata", d)
|
||||
}
|
||||
|
||||
# Shell fragment appended to pkg_postinst of packages that install shared
# libraries into a */lib directory (see package_do_shlibs).  Runs ldconfig
# only for on-target installs: $D is set (non-empty) when installing into
# an offline root, where running the target's ldconfig would be wrong.
ldconfig_postinst_fragment() {
	if [ x"$D" = "x" ]; then
		ldconfig
	fi
}
|
||||
|
||||
python package_do_shlibs() {
|
||||
import os, re, os.path
|
||||
|
||||
exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
|
||||
if exclude_shlibs:
|
||||
bb.note("not generating shlibs")
|
||||
return
|
||||
|
||||
lib_re = re.compile("^lib.*\.so")
|
||||
libdir_re = re.compile(".*/lib$")
|
||||
|
||||
packages = bb.data.getVar('PACKAGES', d, 1)
|
||||
if not packages:
|
||||
bb.debug(1, "no packages to build; not calculating shlibs")
|
||||
return
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
if not workdir:
|
||||
bb.error("WORKDIR not defined")
|
||||
return
|
||||
|
||||
staging = bb.data.getVar('STAGING_DIR', d, 1)
|
||||
if not staging:
|
||||
bb.error("STAGING_DIR not defined")
|
||||
return
|
||||
|
||||
ver = bb.data.getVar('PV', d, 1)
|
||||
if not ver:
|
||||
bb.error("PV not defined")
|
||||
return
|
||||
|
||||
target_sys = bb.data.getVar('TARGET_SYS', d, 1)
|
||||
if not target_sys:
|
||||
bb.error("TARGET_SYS not defined")
|
||||
return
|
||||
|
||||
shlibs_dir = os.path.join(staging, target_sys, "shlibs")
|
||||
old_shlibs_dir = os.path.join(staging, "shlibs")
|
||||
bb.mkdirhier(shlibs_dir)
|
||||
|
||||
needed = {}
|
||||
for pkg in packages.split():
|
||||
needs_ldconfig = False
|
||||
bb.debug(2, "calculating shlib provides for %s" % pkg)
|
||||
|
||||
pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
|
||||
if not pkgname:
|
||||
pkgname = pkg
|
||||
|
||||
needed[pkg] = []
|
||||
sonames = list()
|
||||
top = os.path.join(workdir, "install", pkg)
|
||||
for root, dirs, files in os.walk(top):
|
||||
for file in files:
|
||||
soname = None
|
||||
path = os.path.join(root, file)
|
||||
if os.access(path, os.X_OK) or lib_re.match(file):
|
||||
cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + path + " 2>/dev/null"
|
||||
fd = os.popen(cmd)
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
for l in lines:
|
||||
m = re.match("\s+NEEDED\s+([^\s]*)", l)
|
||||
if m:
|
||||
needed[pkg].append(m.group(1))
|
||||
m = re.match("\s+SONAME\s+([^\s]*)", l)
|
||||
if m and not m.group(1) in sonames:
|
||||
sonames.append(m.group(1))
|
||||
if m and libdir_re.match(root):
|
||||
needs_ldconfig = True
|
||||
shlibs_file = os.path.join(shlibs_dir, pkgname + ".list")
|
||||
if os.path.exists(shlibs_file):
|
||||
os.remove(shlibs_file)
|
||||
shver_file = os.path.join(shlibs_dir, pkgname + ".ver")
|
||||
if os.path.exists(shver_file):
|
||||
os.remove(shver_file)
|
||||
if len(sonames):
|
||||
fd = open(shlibs_file, 'w')
|
||||
for s in sonames:
|
||||
fd.write(s + '\n')
|
||||
fd.close()
|
||||
fd = open(shver_file, 'w')
|
||||
fd.write(ver + '\n')
|
||||
fd.close()
|
||||
if needs_ldconfig:
|
||||
bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
|
||||
postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
|
||||
if not postinst:
|
||||
postinst = '#!/bin/sh\n'
|
||||
postinst += bb.data.getVar('ldconfig_postinst_fragment', d, 1)
|
||||
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
|
||||
|
||||
shlib_provider = {}
|
||||
list_re = re.compile('^(.*)\.list$')
|
||||
for dir in [old_shlibs_dir, shlibs_dir]:
|
||||
if not os.path.exists(dir):
|
||||
continue
|
||||
for file in os.listdir(dir):
|
||||
m = list_re.match(file)
|
||||
if m:
|
||||
dep_pkg = m.group(1)
|
||||
fd = open(os.path.join(dir, file))
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
ver_file = os.path.join(dir, dep_pkg + '.ver')
|
||||
lib_ver = None
|
||||
if os.path.exists(ver_file):
|
||||
fd = open(ver_file)
|
||||
lib_ver = fd.readline().rstrip()
|
||||
fd.close()
|
||||
for l in lines:
|
||||
shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)
|
||||
|
||||
|
||||
for pkg in packages.split():
|
||||
bb.debug(2, "calculating shlib requirements for %s" % pkg)
|
||||
|
||||
p_pkg = bb.data.getVar("PKG_%s" % pkg, d, 1) or pkg
|
||||
|
||||
deps = list()
|
||||
for n in needed[pkg]:
|
||||
if n in shlib_provider.keys():
|
||||
(dep_pkg, ver_needed) = shlib_provider[n]
|
||||
|
||||
if dep_pkg == p_pkg:
|
||||
continue
|
||||
|
||||
if ver_needed:
|
||||
dep = "%s (>= %s)" % (dep_pkg, ver_needed)
|
||||
else:
|
||||
dep = dep_pkg
|
||||
if not dep in deps:
|
||||
deps.append(dep)
|
||||
else:
|
||||
bb.note("Couldn't find shared library provider for %s" % n)
|
||||
|
||||
|
||||
deps_file = os.path.join(workdir, "install", pkg + ".shlibdeps")
|
||||
if os.path.exists(deps_file):
|
||||
os.remove(deps_file)
|
||||
if len(deps):
|
||||
fd = open(deps_file, 'w')
|
||||
for dep in deps:
|
||||
fd.write(dep + '\n')
|
||||
fd.close()
|
||||
}
|
||||
|
||||
python package_do_pkgconfig () {
|
||||
import re, os
|
||||
|
||||
packages = bb.data.getVar('PACKAGES', d, 1)
|
||||
if not packages:
|
||||
bb.debug(1, "no packages to build; not calculating pkgconfig dependencies")
|
||||
return
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
if not workdir:
|
||||
bb.error("WORKDIR not defined")
|
||||
return
|
||||
|
||||
staging = bb.data.getVar('STAGING_DIR', d, 1)
|
||||
if not staging:
|
||||
bb.error("STAGING_DIR not defined")
|
||||
return
|
||||
|
||||
target_sys = bb.data.getVar('TARGET_SYS', d, 1)
|
||||
if not target_sys:
|
||||
bb.error("TARGET_SYS not defined")
|
||||
return
|
||||
|
||||
shlibs_dir = os.path.join(staging, target_sys, "shlibs")
|
||||
old_shlibs_dir = os.path.join(staging, "shlibs")
|
||||
bb.mkdirhier(shlibs_dir)
|
||||
|
||||
pc_re = re.compile('(.*)\.pc$')
|
||||
var_re = re.compile('(.*)=(.*)')
|
||||
field_re = re.compile('(.*): (.*)')
|
||||
|
||||
pkgconfig_provided = {}
|
||||
pkgconfig_needed = {}
|
||||
for pkg in packages.split():
|
||||
pkgconfig_provided[pkg] = []
|
||||
pkgconfig_needed[pkg] = []
|
||||
top = os.path.join(workdir, "install", pkg)
|
||||
for root, dirs, files in os.walk(top):
|
||||
for file in files:
|
||||
m = pc_re.match(file)
|
||||
if m:
|
||||
pd = bb.data.init()
|
||||
name = m.group(1)
|
||||
pkgconfig_provided[pkg].append(name)
|
||||
path = os.path.join(root, file)
|
||||
if not os.access(path, os.R_OK):
|
||||
continue
|
||||
f = open(path, 'r')
|
||||
lines = f.readlines()
|
||||
f.close()
|
||||
for l in lines:
|
||||
m = var_re.match(l)
|
||||
if m:
|
||||
name = m.group(1)
|
||||
val = m.group(2)
|
||||
bb.data.setVar(name, bb.data.expand(val, pd), pd)
|
||||
continue
|
||||
m = field_re.match(l)
|
||||
if m:
|
||||
hdr = m.group(1)
|
||||
exp = bb.data.expand(m.group(2), pd)
|
||||
if hdr == 'Requires':
|
||||
pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
|
||||
|
||||
for pkg in packages.split():
|
||||
ppkg = bb.data.getVar("PKG_" + pkg, d, 1) or pkg
|
||||
pkgs_file = os.path.join(shlibs_dir, ppkg + ".pclist")
|
||||
if os.path.exists(pkgs_file):
|
||||
os.remove(pkgs_file)
|
||||
if pkgconfig_provided[pkg] != []:
|
||||
f = open(pkgs_file, 'w')
|
||||
for p in pkgconfig_provided[pkg]:
|
||||
f.write('%s\n' % p)
|
||||
f.close()
|
||||
|
||||
for dir in [old_shlibs_dir, shlibs_dir]:
|
||||
if not os.path.exists(dir):
|
||||
continue
|
||||
for file in os.listdir(dir):
|
||||
m = re.match('^(.*)\.pclist$', file)
|
||||
if m:
|
||||
pkg = m.group(1)
|
||||
fd = open(os.path.join(dir, file))
|
||||
lines = fd.readlines()
|
||||
fd.close()
|
||||
pkgconfig_provided[pkg] = []
|
||||
for l in lines:
|
||||
pkgconfig_provided[pkg].append(l.rstrip())
|
||||
|
||||
for pkg in packages.split():
|
||||
deps = []
|
||||
for n in pkgconfig_needed[pkg]:
|
||||
found = False
|
||||
for k in pkgconfig_provided.keys():
|
||||
if n in pkgconfig_provided[k]:
|
||||
if k != pkg and not (k in deps):
|
||||
deps.append(k)
|
||||
found = True
|
||||
if found == False:
|
||||
bb.note("couldn't find pkgconfig module '%s' in any package" % n)
|
||||
deps_file = os.path.join(workdir, "install", pkg + ".pcdeps")
|
||||
if os.path.exists(deps_file):
|
||||
os.remove(deps_file)
|
||||
if len(deps):
|
||||
fd = open(deps_file, 'w')
|
||||
for dep in deps:
|
||||
fd.write(dep + '\n')
|
||||
fd.close()
|
||||
}
|
||||
|
||||
python package_do_split_locales() {
|
||||
import os
|
||||
|
||||
if (bb.data.getVar('PACKAGE_NO_LOCALE', d, 1) == '1'):
|
||||
bb.debug(1, "package requested not splitting locales")
|
||||
return
|
||||
|
||||
packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
|
||||
if not packages:
|
||||
bb.debug(1, "no packages to build; not splitting locales")
|
||||
return
|
||||
|
||||
datadir = bb.data.getVar('datadir', d, 1)
|
||||
if not datadir:
|
||||
bb.note("datadir not defined")
|
||||
return
|
||||
|
||||
dvar = bb.data.getVar('D', d, 1)
|
||||
if not dvar:
|
||||
bb.error("D not defined")
|
||||
return
|
||||
|
||||
pn = bb.data.getVar('PN', d, 1)
|
||||
if not pn:
|
||||
bb.error("PN not defined")
|
||||
return
|
||||
|
||||
if pn + '-locale' in packages:
|
||||
packages.remove(pn + '-locale')
|
||||
|
||||
localedir = os.path.join(dvar + datadir, 'locale')
|
||||
|
||||
if not os.path.isdir(localedir):
|
||||
bb.debug(1, "No locale files in this package")
|
||||
return
|
||||
|
||||
locales = os.listdir(localedir)
|
||||
|
||||
mainpkg = packages[0]
|
||||
|
||||
for l in locales:
|
||||
ln = legitimize_package_name(l)
|
||||
pkg = pn + '-locale-' + ln
|
||||
packages.append(pkg)
|
||||
bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d)
|
||||
bb.data.setVar('RDEPENDS_' + pkg, '${PKG_%s} virtual-locale-%s' % (mainpkg, ln), d)
|
||||
bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d)
|
||||
bb.data.setVar('DESCRIPTION_' + pkg, '%s translation for %s' % (l, pn), d)
|
||||
|
||||
bb.data.setVar('PACKAGES', ' '.join(packages), d)
|
||||
|
||||
rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split()
|
||||
rdep.append('%s-locale*' % pn)
|
||||
bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
|
||||
}
|
||||
|
||||
PACKAGEFUNCS = "do_install package_do_split_locales \
|
||||
populate_packages package_do_shlibs \
|
||||
package_do_pkgconfig read_shlibdeps"
|
||||
python package_do_package () {
|
||||
for f in (bb.data.getVar('PACKAGEFUNCS', d, 1) or '').split():
|
||||
bb.build.exec_func(f, d)
|
||||
}
|
||||
|
||||
do_package[dirs] = "${D}"
|
||||
populate_packages[dirs] = "${D}"
|
||||
EXPORT_FUNCTIONS do_package do_shlibs do_split_locales mapping_rename_hook
|
||||
addtask package before do_build after do_populate_staging
|
||||
234
meta/classes/package_ipk.bbclass
Normal file
234
meta/classes/package_ipk.bbclass
Normal file
@@ -0,0 +1,234 @@
|
||||
inherit package
|
||||
DEPENDS_prepend="${@["ipkg-utils-native ", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "ipkg-collateral ipkg ipkg-link"
|
||||
PACKAGEFUNCS += "do_package_ipk"
|
||||
|
||||
python package_ipk_fn () {
|
||||
from bb import data
|
||||
bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
|
||||
}
|
||||
|
||||
python package_ipk_install () {
|
||||
import os, sys
|
||||
pkg = bb.data.getVar('PKG', d, 1)
|
||||
pkgfn = bb.data.getVar('PKGFN', d, 1)
|
||||
rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
|
||||
ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
|
||||
stagingdir = bb.data.getVar('STAGING_DIR', d, 1)
|
||||
tmpdir = bb.data.getVar('TMPDIR', d, 1)
|
||||
|
||||
if None in (pkg,pkgfn,rootfs):
|
||||
raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
|
||||
try:
|
||||
bb.mkdirhier(rootfs)
|
||||
os.chdir(rootfs)
|
||||
except OSError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
print value
|
||||
raise bb.build.FuncFailed
|
||||
|
||||
# Generate ipk.conf if it or the stamp doesnt exist
|
||||
conffile = os.path.join(stagingdir,"ipkg.conf")
|
||||
if not os.access(conffile, os.R_OK):
|
||||
ipkg_archs = bb.data.getVar('IPKG_ARCHS',d)
|
||||
if ipkg_archs is None:
|
||||
bb.error("IPKG_ARCHS missing")
|
||||
raise FuncFailed
|
||||
ipkg_archs = ipkg_archs.split()
|
||||
arch_priority = 1
|
||||
|
||||
f = open(conffile,"w")
|
||||
for arch in ipkg_archs:
|
||||
f.write("arch %s %s\n" % ( arch, arch_priority ))
|
||||
arch_priority += 1
|
||||
f.write("src local file:%s" % ipkdir)
|
||||
f.close()
|
||||
|
||||
|
||||
if (not os.access(os.path.join(ipkdir,"Packages"), os.R_OK) or
|
||||
not os.access(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),os.R_OK)):
|
||||
ret = os.system('ipkg-make-index -p %s %s ' % (os.path.join(ipkdir, "Packages"), ipkdir))
|
||||
if (ret != 0 ):
|
||||
raise bb.build.FuncFailed
|
||||
f=open(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),"w")
|
||||
f.close()
|
||||
|
||||
ret = os.system('ipkg-cl -o %s -f %s update' % (rootfs, conffile))
|
||||
ret = os.system('ipkg-cl -o %s -f %s install %s' % (rootfs, conffile, pkgfn))
|
||||
if (ret != 0 ):
|
||||
raise bb.build.FuncFailed
|
||||
}
|
||||
|
||||
python do_package_ipk () {
|
||||
import copy # to back up env data
|
||||
import sys
|
||||
import re
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
if not workdir:
|
||||
bb.error("WORKDIR not defined, unable to package")
|
||||
return
|
||||
|
||||
import os # path manipulations
|
||||
outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
|
||||
if not outdir:
|
||||
bb.error("DEPLOY_DIR_IPK not defined, unable to package")
|
||||
return
|
||||
bb.mkdirhier(outdir)
|
||||
|
||||
dvar = bb.data.getVar('D', d, 1)
|
||||
if not dvar:
|
||||
bb.error("D not defined, unable to package")
|
||||
return
|
||||
bb.mkdirhier(dvar)
|
||||
|
||||
packages = bb.data.getVar('PACKAGES', d, 1)
|
||||
if not packages:
|
||||
bb.debug(1, "PACKAGES not defined, nothing to package")
|
||||
return
|
||||
|
||||
tmpdir = bb.data.getVar('TMPDIR', d, 1)
|
||||
# Invalidate the packages file
|
||||
if os.access(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),os.R_OK):
|
||||
os.unlink(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"))
|
||||
|
||||
if packages == []:
|
||||
bb.debug(1, "No packages; nothing to do")
|
||||
return
|
||||
|
||||
for pkg in packages.split():
|
||||
localdata = bb.data.createCopy(d)
|
||||
root = "%s/install/%s" % (workdir, pkg)
|
||||
|
||||
bb.data.setVar('ROOT', '', localdata)
|
||||
bb.data.setVar('ROOT_%s' % pkg, root, localdata)
|
||||
pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
|
||||
if not pkgname:
|
||||
pkgname = pkg
|
||||
bb.data.setVar('PKG', pkgname, localdata)
|
||||
|
||||
overrides = bb.data.getVar('OVERRIDES', localdata)
|
||||
if not overrides:
|
||||
raise bb.build.FuncFailed('OVERRIDES not defined')
|
||||
overrides = bb.data.expand(overrides, localdata)
|
||||
bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata)
|
||||
|
||||
bb.data.update_data(localdata)
|
||||
basedir = os.path.join(os.path.dirname(root))
|
||||
pkgoutdir = outdir
|
||||
bb.mkdirhier(pkgoutdir)
|
||||
os.chdir(root)
|
||||
from glob import glob
|
||||
g = glob('*')
|
||||
try:
|
||||
del g[g.index('CONTROL')]
|
||||
del g[g.index('./CONTROL')]
|
||||
except ValueError:
|
||||
pass
|
||||
if not g and not bb.data.getVar('ALLOW_EMPTY', localdata):
|
||||
from bb import note
|
||||
note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
|
||||
continue
|
||||
controldir = os.path.join(root, 'CONTROL')
|
||||
bb.mkdirhier(controldir)
|
||||
try:
|
||||
ctrlfile = file(os.path.join(controldir, 'control'), 'w')
|
||||
except OSError:
|
||||
raise bb.build.FuncFailed("unable to open control file for writing.")
|
||||
|
||||
fields = []
|
||||
fields.append(["Version: %s-%s\n", ['PV', 'PR']])
|
||||
fields.append(["Description: %s\n", ['DESCRIPTION']])
|
||||
fields.append(["Section: %s\n", ['SECTION']])
|
||||
fields.append(["Priority: %s\n", ['PRIORITY']])
|
||||
fields.append(["Maintainer: %s\n", ['MAINTAINER']])
|
||||
fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']])
|
||||
fields.append(["OE: %s\n", ['P']])
|
||||
fields.append(["Homepage: %s\n", ['HOMEPAGE']])
|
||||
|
||||
def pullData(l, d):
|
||||
l2 = []
|
||||
for i in l:
|
||||
l2.append(bb.data.getVar(i, d, 1))
|
||||
return l2
|
||||
|
||||
ctrlfile.write("Package: %s\n" % pkgname)
|
||||
# check for required fields
|
||||
try:
|
||||
for (c, fs) in fields:
|
||||
for f in fs:
|
||||
if bb.data.getVar(f, localdata) is None:
|
||||
raise KeyError(f)
|
||||
ctrlfile.write(c % tuple(pullData(fs, localdata)))
|
||||
except KeyError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
ctrlfile.close()
|
||||
raise bb.build.FuncFailed("Missing field for ipk generation: %s" % value)
|
||||
# more fields
|
||||
|
||||
bb.build.exec_func("mapping_rename_hook", localdata)
|
||||
|
||||
rdepends = explode_deps(bb.data.getVar("RDEPENDS", localdata, 1) or "")
|
||||
rrecommends = explode_deps(bb.data.getVar("RRECOMMENDS", localdata, 1) or "")
|
||||
rsuggests = (bb.data.getVar("RSUGGESTS", localdata, 1) or "").split()
|
||||
rprovides = (bb.data.getVar("RPROVIDES", localdata, 1) or "").split()
|
||||
rreplaces = (bb.data.getVar("RREPLACES", localdata, 1) or "").split()
|
||||
rconflicts = (bb.data.getVar("RCONFLICTS", localdata, 1) or "").split()
|
||||
if rdepends:
|
||||
ctrlfile.write("Depends: %s\n" % ", ".join(rdepends))
|
||||
if rsuggests:
|
||||
ctrlfile.write("Suggests: %s\n" % ", ".join(rsuggests))
|
||||
if rrecommends:
|
||||
ctrlfile.write("Recommends: %s\n" % ", ".join(rrecommends))
|
||||
if rprovides:
|
||||
ctrlfile.write("Provides: %s\n" % ", ".join(rprovides))
|
||||
if rreplaces:
|
||||
ctrlfile.write("Replaces: %s\n" % ", ".join(rreplaces))
|
||||
if rconflicts:
|
||||
ctrlfile.write("Conflicts: %s\n" % ", ".join(rconflicts))
|
||||
src_uri = bb.data.getVar("SRC_URI", localdata, 1)
|
||||
if src_uri:
|
||||
src_uri = re.sub("\s+", " ", src_uri)
|
||||
ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
|
||||
ctrlfile.close()
|
||||
|
||||
for script in ["preinst", "postinst", "prerm", "postrm"]:
|
||||
scriptvar = bb.data.getVar('pkg_%s' % script, localdata, 1)
|
||||
if not scriptvar:
|
||||
continue
|
||||
try:
|
||||
scriptfile = file(os.path.join(controldir, script), 'w')
|
||||
except OSError:
|
||||
raise bb.build.FuncFailed("unable to open %s script file for writing." % script)
|
||||
scriptfile.write(scriptvar)
|
||||
scriptfile.close()
|
||||
os.chmod(os.path.join(controldir, script), 0755)
|
||||
|
||||
conffiles_str = bb.data.getVar("CONFFILES", localdata, 1)
|
||||
if conffiles_str:
|
||||
try:
|
||||
conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
|
||||
except OSError:
|
||||
raise bb.build.FuncFailed("unable to open conffiles for writing.")
|
||||
for f in conffiles_str.split():
|
||||
conffiles.write('%s\n' % f)
|
||||
conffiles.close()
|
||||
|
||||
os.chdir(basedir)
|
||||
ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1),
|
||||
bb.data.getVar("IPKGBUILDCMD",d,1), pkg, pkgoutdir))
|
||||
if ret != 0:
|
||||
raise bb.build.FuncFailed("ipkg-build execution failed")
|
||||
|
||||
for script in ["preinst", "postinst", "prerm", "postrm", "control" ]:
|
||||
scriptfile = os.path.join(controldir, script)
|
||||
try:
|
||||
os.remove(scriptfile)
|
||||
except OSError:
|
||||
pass
|
||||
try:
|
||||
os.rmdir(controldir)
|
||||
except OSError:
|
||||
pass
|
||||
del localdata
|
||||
}
|
||||
133
meta/classes/package_rpm.bbclass
Normal file
133
meta/classes/package_rpm.bbclass
Normal file
@@ -0,0 +1,133 @@
|
||||
inherit package
|
||||
inherit rpm_core
|
||||
|
||||
RPMBUILD="rpmbuild --short-circuit ${RPMOPTS}"
|
||||
PACKAGEFUNCS += "do_package_rpm"
|
||||
|
||||
python write_specfile() {
|
||||
from bb import data, build
|
||||
import sys
|
||||
out_vartranslate = {
|
||||
"PKG": "Name",
|
||||
"PV": "Version",
|
||||
"PR": "Release",
|
||||
"DESCRIPTION": "%description",
|
||||
"ROOT": "BuildRoot",
|
||||
"LICENSE": "License",
|
||||
"SECTION": "Group",
|
||||
}
|
||||
|
||||
root = bb.data.getVar('ROOT', d)
|
||||
|
||||
# get %files
|
||||
filesvar = bb.data.expand(bb.data.getVar('FILES', d), d) or ""
|
||||
from glob import glob
|
||||
files = filesvar.split()
|
||||
todelete = []
|
||||
for file in files:
|
||||
if file[0] == '.':
|
||||
newfile = file[1:]
|
||||
files[files.index(file)] = newfile
|
||||
file = newfile
|
||||
else:
|
||||
newfile = file
|
||||
realfile = os.path.join(root, './'+file)
|
||||
if not glob(realfile):
|
||||
todelete.append(files[files.index(newfile)])
|
||||
for r in todelete:
|
||||
try:
|
||||
del files[files.index(r)]
|
||||
except ValueError:
|
||||
pass
|
||||
if not files:
|
||||
from bb import note
|
||||
note("Not creating empty archive for %s-%s-%s" % (bb.data.getVar('PKG',d, 1), bb.data.getVar('PV', d, 1), bb.data.getVar('PR', d, 1)))
|
||||
return
|
||||
|
||||
# output .spec using this metadata store
|
||||
try:
|
||||
from __builtin__ import file
|
||||
if not bb.data.getVar('OUTSPECFILE', d):
|
||||
raise OSError('eek!')
|
||||
specfile = file(bb.data.getVar('OUTSPECFILE', d), 'w')
|
||||
except OSError:
|
||||
raise bb.build.FuncFailed("unable to open spec file for writing.")
|
||||
|
||||
# fd = sys.__stdout__
|
||||
fd = specfile
|
||||
for var in out_vartranslate.keys():
|
||||
if out_vartranslate[var][0] == "%":
|
||||
continue
|
||||
fd.write("%s\t: %s\n" % (out_vartranslate[var], bb.data.getVar(var, d)))
|
||||
fd.write("Summary\t: .\n")
|
||||
|
||||
for var in out_vartranslate.keys():
|
||||
if out_vartranslate[var][0] != "%":
|
||||
continue
|
||||
fd.write(out_vartranslate[var] + "\n")
|
||||
fd.write(bb.data.getVar(var, d) + "\n\n")
|
||||
|
||||
fd.write("%files\n")
|
||||
for file in files:
|
||||
fd.write("%s\n" % file)
|
||||
|
||||
fd.close()
|
||||
|
||||
# call out rpm -bb on the .spec, thereby creating an rpm
|
||||
|
||||
bb.data.setVar('BUILDSPEC', "${RPMBUILD} -bb ${OUTSPECFILE}\n", d)
|
||||
bb.data.setVarFlag('BUILDSPEC', 'func', '1', d)
|
||||
bb.build.exec_func('BUILDSPEC', d)
|
||||
|
||||
# move the rpm into the pkgoutdir
|
||||
rpm = bb.data.expand('${RPMBUILDPATH}/RPMS/${TARGET_ARCH}/${PKG}-${PV}-${PR}.${TARGET_ARCH}.rpm', d)
|
||||
outrpm = bb.data.expand('${DEPLOY_DIR_RPM}/${PKG}-${PV}-${PR}.${TARGET_ARCH}.rpm', d)
|
||||
bb.movefile(rpm, outrpm)
|
||||
}
|
||||
|
||||
python do_package_rpm () {
|
||||
workdir = bb.data.getVar('WORKDIR', d)
|
||||
if not workdir:
|
||||
raise bb.build.FuncFailed("WORKDIR not defined")
|
||||
workdir = bb.data.expand(workdir, d)
|
||||
|
||||
import os # path manipulations
|
||||
outdir = bb.data.getVar('DEPLOY_DIR_RPM', d)
|
||||
if not outdir:
|
||||
raise bb.build.FuncFailed("DEPLOY_DIR_RPM not defined")
|
||||
outdir = bb.data.expand(outdir, d)
|
||||
bb.mkdirhier(outdir)
|
||||
|
||||
packages = bb.data.getVar('PACKAGES', d)
|
||||
if not packages:
|
||||
packages = "${PN}"
|
||||
bb.data.setVar('FILES', '', d)
|
||||
ddir = bb.data.expand(bb.data.getVar('D', d), d)
|
||||
bb.mkdirhier(ddir)
|
||||
bb.data.setVar(bb.data.expand('FILES_${PN}', d), ''.join([ "./%s" % x for x in os.listdir(ddir)]), d)
|
||||
packages = bb.data.expand(packages, d)
|
||||
|
||||
for pkg in packages.split():
|
||||
localdata = bb.data.createCopy(d)
|
||||
root = "%s/install/%s" % (workdir, pkg)
|
||||
|
||||
bb.data.setVar('ROOT', '', localdata)
|
||||
bb.data.setVar('ROOT_%s' % pkg, root, localdata)
|
||||
bb.data.setVar('PKG', pkg, localdata)
|
||||
|
||||
overrides = bb.data.getVar('OVERRIDES', localdata)
|
||||
if not overrides:
|
||||
raise bb.build.FuncFailed('OVERRIDES not defined')
|
||||
overrides = bb.data.expand(overrides, localdata)
|
||||
bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)
|
||||
|
||||
bb.data.update_data(localdata)
|
||||
# stuff
|
||||
root = bb.data.getVar('ROOT', localdata)
|
||||
basedir = os.path.dirname(root)
|
||||
pkgoutdir = outdir
|
||||
bb.mkdirhier(pkgoutdir)
|
||||
bb.data.setVar('OUTSPECFILE', os.path.join(workdir, "%s.spec" % pkg), localdata)
|
||||
bb.build.exec_func('write_specfile', localdata)
|
||||
del localdata
|
||||
}
|
||||
99
meta/classes/package_tar.bbclass
Normal file
99
meta/classes/package_tar.bbclass
Normal file
@@ -0,0 +1,99 @@
|
||||
inherit package
|
||||
|
||||
PACKAGEFUNCS += "do_package_tar"
|
||||
|
||||
python package_tar_fn () {
|
||||
import os
|
||||
from bb import data
|
||||
fn = os.path.join(bb.data.getVar('DEPLOY_DIR_TAR', d), "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d), bb.data.getVar('PV', d), bb.data.getVar('PR', d)))
|
||||
fn = bb.data.expand(fn, d)
|
||||
bb.data.setVar('PKGFN', fn, d)
|
||||
}
|
||||
|
||||
python package_tar_install () {
|
||||
import os, sys
|
||||
pkg = bb.data.getVar('PKG', d, 1)
|
||||
pkgfn = bb.data.getVar('PKGFN', d, 1)
|
||||
rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
|
||||
|
||||
if None in (pkg,pkgfn,rootfs):
|
||||
bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
|
||||
raise bb.build.FuncFailed
|
||||
try:
|
||||
bb.mkdirhier(rootfs)
|
||||
os.chdir(rootfs)
|
||||
except OSError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
print value
|
||||
raise bb.build.FuncFailed
|
||||
|
||||
if not os.access(pkgfn, os.R_OK):
|
||||
bb.debug(1, "%s does not exist, skipping" % pkgfn)
|
||||
raise bb.build.FuncFailed
|
||||
|
||||
ret = os.system('zcat %s | tar -xf -' % pkgfn)
|
||||
if ret != 0:
|
||||
raise bb.build.FuncFailed
|
||||
}
|
||||
|
||||
python do_package_tar () {
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
if not workdir:
|
||||
bb.error("WORKDIR not defined, unable to package")
|
||||
return
|
||||
|
||||
import os # path manipulations
|
||||
outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1)
|
||||
if not outdir:
|
||||
bb.error("DEPLOY_DIR_TAR not defined, unable to package")
|
||||
return
|
||||
bb.mkdirhier(outdir)
|
||||
|
||||
dvar = bb.data.getVar('D', d, 1)
|
||||
if not dvar:
|
||||
bb.error("D not defined, unable to package")
|
||||
return
|
||||
bb.mkdirhier(dvar)
|
||||
|
||||
packages = bb.data.getVar('PACKAGES', d, 1)
|
||||
if not packages:
|
||||
bb.debug(1, "PACKAGES not defined, nothing to package")
|
||||
return
|
||||
|
||||
for pkg in packages.split():
|
||||
localdata = bb.data.createCopy(d)
|
||||
root = "%s/install/%s" % (workdir, pkg)
|
||||
|
||||
bb.data.setVar('ROOT', '', localdata)
|
||||
bb.data.setVar('ROOT_%s' % pkg, root, localdata)
|
||||
bb.data.setVar('PKG', pkg, localdata)
|
||||
|
||||
overrides = bb.data.getVar('OVERRIDES', localdata)
|
||||
if not overrides:
|
||||
raise bb.build.FuncFailed('OVERRIDES not defined')
|
||||
overrides = bb.data.expand(overrides, localdata)
|
||||
bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)
|
||||
|
||||
bb.data.update_data(localdata)
|
||||
# stuff
|
||||
root = bb.data.getVar('ROOT', localdata)
|
||||
bb.mkdirhier(root)
|
||||
basedir = os.path.dirname(root)
|
||||
pkgoutdir = outdir
|
||||
bb.mkdirhier(pkgoutdir)
|
||||
bb.build.exec_func('package_tar_fn', localdata)
|
||||
tarfn = bb.data.getVar('PKGFN', localdata, 1)
|
||||
# if os.path.exists(tarfn):
|
||||
# del localdata
|
||||
# continue
|
||||
os.chdir(root)
|
||||
from glob import glob
|
||||
if not glob('*'):
|
||||
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
|
||||
continue
|
||||
ret = os.system("tar -czvf %s %s" % (tarfn, '.'))
|
||||
if ret != 0:
|
||||
bb.error("Creation of tar %s failed." % tarfn)
|
||||
# end stuff
|
||||
del localdata
|
||||
}
|
||||
20
meta/classes/palmtop.bbclass
Normal file
20
meta/classes/palmtop.bbclass
Normal file
@@ -0,0 +1,20 @@
|
||||
# this build class sets up qmake variables to
|
||||
# * build using the Qt Windowing System (QWS)
|
||||
# * use qt
|
||||
# * link against supc++ instead of stdc++
|
||||
# * use threads, if requested via PALMTOP_USE_MULTITHREADED_QT = "yes"
|
||||
# inherit this class to build programs against libqpe
|
||||
# inherit opie if you want to build programs against libopie2
|
||||
# don't override EXTRA_QMAKEVARS_POST, if you use inherit this class
|
||||
|
||||
inherit qmake
|
||||
|
||||
# special case for DISTRO = sharprom
|
||||
CPP_SUPPORT_LIB = "LIBS-=-lstdc++ LIBS+=-lsupc++"
|
||||
CPP_SUPPORT_LIB_sharprom = "LIBS-=-lstdc++"
|
||||
EXTRA_QMAKEVARS_POST += "DEFINES+=QWS CONFIG+=qt ${CPP_SUPPORT_LIB}"
|
||||
EXTRA_QMAKEVARS_POST += '${@base_conditional("PALMTOP_USE_MULTITHREADED_QT", "yes", "CONFIG+=thread", "CONFIG-=thread",d)}'
|
||||
EXTRA_QMAKEVARS_POST += "${@["LIBS+=-lqpe ", ""][(bb.data.getVar('PN', d, 1) == 'libqpe-opie')]}"
|
||||
DEPENDS_prepend = "${@["virtual/libqpe1 uicmoc-native ", ""][(bb.data.getVar('PN', d, 1) == 'libqpe-opie')]}"
|
||||
|
||||
FILES_${PN} = "${palmtopdir}"
|
||||
7
meta/classes/patcher.bbclass
Normal file
7
meta/classes/patcher.bbclass
Normal file
@@ -0,0 +1,7 @@
|
||||
# Now that BitBake/OpenEmbedded uses Quilt by default, you can simply add an
|
||||
# inherit patcher
|
||||
# to one of your config files to let BB/OE use patcher again.
|
||||
|
||||
PATCHCLEANCMD = "patcher -B"
|
||||
PATCHCMD = "patcher -R -p '%s' -n '%s' -i '%s'"
|
||||
PATCH_DEPENDS = "${@["patcher-native", ""][(bb.data.getVar('PN', d, 1) == 'patcher-native')]}"
|
||||
29
meta/classes/pkg_distribute.bbclass
Normal file
29
meta/classes/pkg_distribute.bbclass
Normal file
@@ -0,0 +1,29 @@
|
||||
PKG_DISTRIBUTECOMMAND[func] = "1"
|
||||
python do_distribute_packages () {
|
||||
cmd = bb.data.getVar('PKG_DISTRIBUTECOMMAND', d, 1)
|
||||
if not cmd:
|
||||
raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined")
|
||||
bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d)
|
||||
}
|
||||
|
||||
addtask distribute_packages before do_build after do_fetch
|
||||
|
||||
PKG_DIST_LOCAL ?= "symlink"
|
||||
PKG_DISTRIBUTEDIR ?= "${DEPLOY_DIR}/packages"
|
||||
|
||||
# Publish the directory containing ${FILE} (the recipe) into
# ${PKG_DISTRIBUTEDIR}, either by copying or by symlinking depending on
# ${PKG_DIST_LOCAL}.
PKG_DISTRIBUTECOMMAND () {
	p=`dirname ${FILE}`
	d=`basename $p`
	mkdir -p ${PKG_DISTRIBUTEDIR}
	case "${PKG_DIST_LOCAL}" in
		copy)
			# use this weird tar command to copy because we want to
			# exclude the BitKeeper directories
			test -e ${PKG_DISTRIBUTEDIR}/${d} || mkdir ${PKG_DISTRIBUTEDIR}/${d};
			(cd ${p}; tar -c --exclude SCCS -f - . ) | tar -C ${PKG_DISTRIBUTEDIR}/${d} -xpf -
			;;
		symlink)
			ln -sf $p ${PKG_DISTRIBUTEDIR}/
			;;
	esac
}
|
||||
22
meta/classes/pkg_metainfo.bbclass
Normal file
22
meta/classes/pkg_metainfo.bbclass
Normal file
@@ -0,0 +1,22 @@
|
||||
python do_pkg_write_metainfo () {
|
||||
deploydir = bb.data.getVar('DEPLOY_DIR', d, 1)
|
||||
if not deploydir:
|
||||
bb.error("DEPLOY_DIR not defined, unable to write package info")
|
||||
return
|
||||
|
||||
try:
|
||||
infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
|
||||
except OSError:
|
||||
raise bb.build.FuncFailed("unable to open package-info file for writing.")
|
||||
|
||||
name = bb.data.getVar('PN', d, 1)
|
||||
version = bb.data.getVar('PV', d, 1)
|
||||
desc = bb.data.getVar('DESCRIPTION', d, 1)
|
||||
page = bb.data.getVar('HOMEPAGE', d, 1)
|
||||
lic = bb.data.getVar('LICENSE', d, 1)
|
||||
|
||||
infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
|
||||
infofile.close()
|
||||
}
|
||||
|
||||
addtask pkg_write_metainfo after do_package before do_build
|
||||
28
meta/classes/pkgconfig.bbclass
Normal file
28
meta/classes/pkgconfig.bbclass
Normal file
@@ -0,0 +1,28 @@
|
||||
inherit base
|
||||
|
||||
DEPENDS_prepend = "pkgconfig-native "
|
||||
|
||||
# The namespaces can clash here hence the two step replace
|
||||
def get_pkgconfig_mangle(d):
|
||||
import bb.data
|
||||
s = "-e ''"
|
||||
if not bb.data.inherits_class('native', d):
|
||||
s += " -e 's:=${libdir}:=OELIBDIR:;'"
|
||||
s += " -e 's:=${includedir}:=OEINCDIR:;'"
|
||||
s += " -e 's:=${datadir}:=OEDATADIR:'"
|
||||
s += " -e 's:=${prefix}:=OEPREFIX:'"
|
||||
s += " -e 's:=${exec_prefix}:=OEEXECPREFIX:'"
|
||||
s += " -e 's:OELIBDIR:${STAGING_LIBDIR}:;'"
|
||||
s += " -e 's:OEINCDIR:${STAGING_INCDIR}:;'"
|
||||
s += " -e 's:OEDATADIR:${STAGING_DATADIR}:'"
|
||||
s += " -e 's:OEPREFIX:${STAGING_LIBDIR}/..:'"
|
||||
s += " -e 's:OEEXECPREFIX:${STAGING_LIBDIR}/..:'"
|
||||
return s
|
||||
|
||||
do_stage_append () {
|
||||
for pc in `find ${S} -name '*.pc' | grep -v -- '-uninstalled.pc$'`; do
|
||||
pcname=`basename $pc`
|
||||
install -d ${PKG_CONFIG_PATH}
|
||||
cat $pc | sed ${@get_pkgconfig_mangle(d)} > ${PKG_CONFIG_PATH}/$pcname
|
||||
done
|
||||
}
|
||||
4
meta/classes/poky.bbclass
Normal file
4
meta/classes/poky.bbclass
Normal file
@@ -0,0 +1,4 @@
|
||||
MIRRORS_append () {
|
||||
ftp://.*/.*/ http://www.o-hand.com/~richard/poky/sources/
|
||||
http://.*/.*/ http://www.o-hand.com/~richard/poky/sources/
|
||||
}
|
||||
44
meta/classes/qmake-base.bbclass
Normal file
44
meta/classes/qmake-base.bbclass
Normal file
@@ -0,0 +1,44 @@
|
||||
DEPENDS_prepend = "qmake-native "
|
||||
|
||||
OE_QMAKE_PLATFORM = "${TARGET_OS}-oe-g++"
|
||||
QMAKESPEC := "${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}"
|
||||
|
||||
# We override this completely to eliminate the -e normally passed in
|
||||
EXTRA_OEMAKE = ' MAKEFLAGS= '
|
||||
|
||||
export OE_QMAKE_CC="${CC}"
|
||||
export OE_QMAKE_CFLAGS="${CFLAGS}"
|
||||
export OE_QMAKE_CXX="${CXX}"
|
||||
export OE_QMAKE_CXXFLAGS="-fno-exceptions -fno-rtti ${CXXFLAGS}"
|
||||
export OE_QMAKE_LDFLAGS="${LDFLAGS}"
|
||||
export OE_QMAKE_LINK="${CCLD}"
|
||||
export OE_QMAKE_AR="${AR}"
|
||||
export OE_QMAKE_STRIP="${STRIP}"
|
||||
export OE_QMAKE_UIC="${STAGING_BINDIR}/uic"
|
||||
export OE_QMAKE_MOC="${STAGING_BINDIR}/moc"
|
||||
export OE_QMAKE_RCC="non-existant"
|
||||
export OE_QMAKE_QMAKE="${STAGING_BINDIR}/qmake"
|
||||
export OE_QMAKE_RPATH="-Wl,-rpath-link,"
|
||||
|
||||
# default to qte2 via bb.conf, inherit qt3x11 to configure for qt3x11
|
||||
export OE_QMAKE_INCDIR_QT="${QTDIR}/include"
|
||||
export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib"
|
||||
export OE_QMAKE_LIBS_QT="qte"
|
||||
export OE_QMAKE_LIBS_X11=""
|
||||
|
||||
oe_qmake_mkspecs () {
|
||||
mkdir -p mkspecs/${OE_QMAKE_PLATFORM}
|
||||
for f in ${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}/*; do
|
||||
if [ -L $f ]; then
|
||||
lnk=`readlink $f`
|
||||
if [ -f mkspecs/${OE_QMAKE_PLATFORM}/$lnk ]; then
|
||||
ln -s $lnk mkspecs/${OE_QMAKE_PLATFORM}/`basename $f`
|
||||
else
|
||||
cp $f mkspecs/${OE_QMAKE_PLATFORM}/
|
||||
fi
|
||||
else
|
||||
cp $f mkspecs/${OE_QMAKE_PLATFORM}/
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
57
meta/classes/qmake.bbclass
Normal file
57
meta/classes/qmake.bbclass
Normal file
@@ -0,0 +1,57 @@
|
||||
inherit qmake-base
|
||||
|
||||
qmake_do_configure() {
|
||||
case ${QMAKESPEC} in
|
||||
*linux-oe-g++|*linux-uclibc-oe-g++|*linux-gnueabi-oe-g++)
|
||||
;;
|
||||
*-oe-g++)
|
||||
die Unsupported target ${TARGET_OS} for oe-g++ qmake spec
|
||||
;;
|
||||
*)
|
||||
oenote Searching for qmake spec file
|
||||
paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-${TARGET_ARCH}-g++"
|
||||
paths="${QMAKE_MKSPEC_PATH}/${TARGET_OS}-g++ $paths"
|
||||
|
||||
if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
|
||||
paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-x86-g++ $paths"
|
||||
fi
|
||||
for i in $paths; do
|
||||
if test -e $i; then
|
||||
export QMAKESPEC=$i
|
||||
break
|
||||
fi
|
||||
done
|
||||
;;
|
||||
esac
|
||||
|
||||
oenote "using qmake spec in ${QMAKESPEC}, using profiles '${QMAKE_PROFILES}'"
|
||||
|
||||
if [ -z "${QMAKE_PROFILES}" ]; then
|
||||
PROFILES="`ls *.pro`"
|
||||
else
|
||||
PROFILES="${QMAKE_PROFILES}"
|
||||
fi
|
||||
|
||||
if [ -z "$PROFILES" ]; then
|
||||
die "QMAKE_PROFILES not set and no profiles found in $PWD"
|
||||
fi
|
||||
|
||||
if [ ! -z "${EXTRA_QMAKEVARS_POST}" ]; then
|
||||
AFTER="-after"
|
||||
QMAKE_VARSUBST_POST="${EXTRA_QMAKEVARS_POST}"
|
||||
oenote "qmake postvar substitution: ${EXTRA_QMAKEVARS_POST}"
|
||||
fi
|
||||
|
||||
if [ ! -z "${EXTRA_QMAKEVARS_PRE}" ]; then
|
||||
QMAKE_VARSUBST_PRE="${EXTRA_QMAKEVARS_PRE}"
|
||||
oenote "qmake prevar substitution: ${EXTRA_QMAKEVARS_PRE}"
|
||||
fi
|
||||
|
||||
#oenote "Calling '${OE_QMAKE_QMAKE} -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST'"
|
||||
unset QMAKESPEC || true
|
||||
${OE_QMAKE_QMAKE} -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST || die "Error calling ${OE_QMAKE_QMAKE} on $PROFILES"
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_configure
|
||||
|
||||
addtask configure after do_unpack do_patch before do_compile
|
||||
36
meta/classes/qpf.bbclass
Normal file
36
meta/classes/qpf.bbclass
Normal file
@@ -0,0 +1,36 @@
|
||||
PACKAGE_ARCH = "all"
|
||||
|
||||
do_configure() {
|
||||
:
|
||||
}
|
||||
|
||||
do_compile() {
|
||||
:
|
||||
}
|
||||
|
||||
pkg_postinst_fonts() {
|
||||
#!/bin/sh
|
||||
set -e
|
||||
. /etc/profile
|
||||
${sbindir}/update-qtfontdir
|
||||
}
|
||||
|
||||
pkg_postrm_fonts() {
|
||||
#!/bin/sh
|
||||
set -e
|
||||
. /etc/profile
|
||||
${sbindir}/update-qtfontdir -f
|
||||
}
|
||||
|
||||
python populate_packages_prepend() {
|
||||
postinst = bb.data.getVar('pkg_postinst_fonts', d, 1)
|
||||
postrm = bb.data.getVar('pkg_postrm_fonts', d, 1)
|
||||
fontdir = bb.data.getVar('palmtopdir', d, 1) + '/lib/fonts'
|
||||
pkgregex = "^([a-z-]*_[0-9]*).*.qpf$"
|
||||
pkgpattern = bb.data.getVar('QPF_PKGPATTERN', d, 1) or 'qpf-%s'
|
||||
pkgdescription = bb.data.getVar('QPF_DESCRIPTION', d, 1) or 'QPF font %s'
|
||||
|
||||
do_split_packages(d, root=fontdir, file_regex=pkgregex, output_pattern=pkgpattern,
|
||||
description=pkgdescription, postinst=postinst, postrm=postrm, recursive=True, hook=None,
|
||||
extra_depends='qpf-font-common')
|
||||
}
|
||||
11
meta/classes/qt3e.bbclass
Normal file
11
meta/classes/qt3e.bbclass
Normal file
@@ -0,0 +1,11 @@
|
||||
#
|
||||
# override variables set by qmake-base to compile Qt/X11 apps
|
||||
#
|
||||
export QTDIR="${STAGING_DIR}/${HOST_SYS}/qte3"
|
||||
export QTEDIR="${STAGING_DIR}/${HOST_SYS}/qte3"
|
||||
export OE_QMAKE_UIC="${STAGING_BINDIR}/uic3"
|
||||
export OE_QMAKE_MOC="${STAGING_BINDIR}/moc3"
|
||||
export OE_QMAKE_CXXFLAGS="${CXXFLAGS} "
|
||||
export OE_QMAKE_INCDIR_QT="${QTEDIR}/include"
|
||||
export OE_QMAKE_LIBDIR_QT="${QTEDIR}/lib"
|
||||
export OE_QMAKE_LIBS_QT="qte"
|
||||
15
meta/classes/qt3x11.bbclass
Normal file
15
meta/classes/qt3x11.bbclass
Normal file
@@ -0,0 +1,15 @@
|
||||
DEPENDS_prepend = "${@["qt3x11 ", ""][(bb.data.getVar('PN', d, 1) == 'qt-x11-free')]}"
|
||||
EXTRA_QMAKEVARS_POST += "CONFIG+=thread"
|
||||
#
|
||||
# override variables set by qmake-base to compile Qt/X11 apps
|
||||
#
|
||||
export QTDIR = "${STAGING_DIR}/${HOST_SYS}/qt3"
|
||||
export OE_QMAKE_UIC = "${STAGING_BINDIR}/uic3"
|
||||
export OE_QMAKE_MOC = "${STAGING_BINDIR}/moc3"
|
||||
export OE_QMAKE_CXXFLAGS = "${CXXFLAGS} -DQT_NO_XIM"
|
||||
export OE_QMAKE_INCDIR_QT = "${QTDIR}/include"
|
||||
export OE_QMAKE_LIBDIR_QT = "${QTDIR}/lib"
|
||||
export OE_QMAKE_LIBS_QT = "qt"
|
||||
export OE_QMAKE_LIBS_X11 = "-lXext -lX11 -lm"
|
||||
|
||||
|
||||
17
meta/classes/qt4x11.bbclass
Normal file
17
meta/classes/qt4x11.bbclass
Normal file
@@ -0,0 +1,17 @@
|
||||
DEPENDS_prepend = "qmake2-native "
|
||||
DEPENDS_prepend = "${@["qt4x11 ", ""][(bb.data.getVar('PN', d, 1) == 'qt4-x11-free')]}"
|
||||
#
|
||||
# override variables set by qmake-base to compile Qt4/X11 apps
|
||||
#
|
||||
export QTDIR = "${STAGING_DIR}/${HOST_SYS}/qt4"
|
||||
export QMAKESPEC = "${QTDIR}/mkspecs/${TARGET_OS}-oe-g++"
|
||||
export OE_QMAKE_UIC = "${STAGING_BINDIR}/uic4"
|
||||
export OE_QMAKE_MOC = "${STAGING_BINDIR}/moc4"
|
||||
export OE_QMAKE_RCC = "${STAGING_BINDIR}/rcc4"
|
||||
export OE_QMAKE_QMAKE = "${STAGING_BINDIR}/qmake2"
|
||||
export OE_QMAKE_LINK = "${CXX}"
|
||||
export OE_QMAKE_CXXFLAGS = "${CXXFLAGS}"
|
||||
export OE_QMAKE_INCDIR_QT = "${QTDIR}/include"
|
||||
export OE_QMAKE_LIBDIR_QT = "${QTDIR}/lib"
|
||||
export OE_QMAKE_LIBS_QT = "qt"
|
||||
export OE_QMAKE_LIBS_X11 = "-lXext -lX11 -lm"
|
||||
22
meta/classes/rm_work.bbclass
Normal file
22
meta/classes/rm_work.bbclass
Normal file
@@ -0,0 +1,22 @@
|
||||
#
|
||||
# Removes source after build
|
||||
#
|
||||
# To use it add that line to conf/local.conf:
|
||||
#
|
||||
# INHERIT += "rm_work"
|
||||
#
|
||||
|
||||
do_rm_work () {
|
||||
cd ${WORKDIR}
|
||||
for dir in *
|
||||
do
|
||||
if [ `basename ${S}` == $dir ]; then
|
||||
rm -rf $dir/*
|
||||
elif [ $dir != 'temp' ]; then
|
||||
rm -rf $dir
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
addtask rm_work before do_build
|
||||
addtask rm_work after do_package
|
||||
145
meta/classes/rootfs_ipk.bbclass
Normal file
145
meta/classes/rootfs_ipk.bbclass
Normal file
@@ -0,0 +1,145 @@
|
||||
#
|
||||
# Creates a root filesystem out of IPKs
|
||||
#
|
||||
# This rootfs can be mounted via root-nfs or it can be put into an cramfs/jffs etc.
|
||||
# See image_ipk.oeclass for a usage of this.
|
||||
#
|
||||
|
||||
DEPENDS_prepend="ipkg-native ipkg-utils-native fakeroot-native "
|
||||
DEPENDS_append=" ${EXTRA_IMAGEDEPENDS}"
|
||||
|
||||
PACKAGES = ""
|
||||
|
||||
do_rootfs[nostamp] = 1
|
||||
do_rootfs[dirs] = ${TOPDIR}
|
||||
do_build[nostamp] = 1
|
||||
|
||||
IPKG_ARGS = "-f ${T}/ipkg.conf -o ${IMAGE_ROOTFS}"
|
||||
|
||||
ROOTFS_POSTPROCESS_COMMAND ?= ""
|
||||
|
||||
PID = "${@os.getpid()}"
|
||||
|
||||
# some default locales
|
||||
IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
|
||||
|
||||
LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, bb.data.getVar('IMAGE_LINGUAS', d, 1).split()))}"
|
||||
|
||||
real_do_rootfs () {
|
||||
set -x
|
||||
|
||||
mkdir -p ${IMAGE_ROOTFS}/dev
|
||||
|
||||
if [ -z "${DEPLOY_KEEP_PACKAGES}" ]; then
|
||||
rm -f ${DEPLOY_DIR_IPK}/Packages
|
||||
touch ${DEPLOY_DIR_IPK}/Packages
|
||||
ipkg-make-index -r ${DEPLOY_DIR_IPK}/Packages -p ${DEPLOY_DIR_IPK}/Packages -l ${DEPLOY_DIR_IPK}/Packages.filelist -m ${DEPLOY_DIR_IPK}
|
||||
fi
|
||||
mkdir -p ${T}
|
||||
echo "src oe file:${DEPLOY_DIR_IPK}" > ${T}/ipkg.conf
|
||||
ipkgarchs="${IPKG_ARCHS}"
|
||||
priority=1
|
||||
for arch in $ipkgarchs; do
|
||||
echo "arch $arch $priority" >> ${T}/ipkg.conf
|
||||
priority=$(expr $priority + 5)
|
||||
done
|
||||
ipkg-cl ${IPKG_ARGS} update
|
||||
if [ ! -z "${LINGUAS_INSTALL}" ]; then
|
||||
ipkg-cl ${IPKG_ARGS} install glibc-localedata-i18n
|
||||
for i in ${LINGUAS_INSTALL}; do
|
||||
ipkg-cl ${IPKG_ARGS} install $i
|
||||
done
|
||||
fi
|
||||
if [ ! -z "${IPKG_INSTALL}" ]; then
|
||||
ipkg-cl ${IPKG_ARGS} install ${IPKG_INSTALL}
|
||||
fi
|
||||
|
||||
export D=${IMAGE_ROOTFS}
|
||||
export IPKG_OFFLINE_ROOT=${IMAGE_ROOTFS}
|
||||
mkdir -p ${IMAGE_ROOTFS}/etc/ipkg/
|
||||
grep "^arch" ${T}/ipkg.conf >${IMAGE_ROOTFS}/etc/ipkg/arch.conf
|
||||
|
||||
for i in ${IMAGE_ROOTFS}${libdir}/ipkg/info/*.preinst; do
|
||||
if [ -f $i ] && ! sh $i; then
|
||||
ipkg-cl ${IPKG_ARGS} flag unpacked `basename $i .preinst`
|
||||
fi
|
||||
done
|
||||
for i in ${IMAGE_ROOTFS}${libdir}/ipkg/info/*.postinst; do
|
||||
if [ -f $i ] && ! sh $i configure; then
|
||||
ipkg-cl ${IPKG_ARGS} flag unpacked `basename $i .postinst`
|
||||
fi
|
||||
done
|
||||
|
||||
install -d ${IMAGE_ROOTFS}/${sysconfdir}
|
||||
echo ${BUILDNAME} > ${IMAGE_ROOTFS}/${sysconfdir}/version
|
||||
|
||||
${ROOTFS_POSTPROCESS_COMMAND}
|
||||
|
||||
log_check rootfs
|
||||
}
|
||||
|
||||
log_check() {
|
||||
set +x
|
||||
for target in $*
|
||||
do
|
||||
lf_path="${WORKDIR}/temp/log.do_$target.${PID}"
|
||||
|
||||
echo "log_check: Using $lf_path as logfile"
|
||||
|
||||
if test -e "$lf_path"
|
||||
then
|
||||
lf_txt="`cat $lf_path`"
|
||||
|
||||
for keyword_die in "Cannot find package" "exit 1" ERR Fail
|
||||
do
|
||||
|
||||
if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") &>/dev/null
|
||||
then
|
||||
echo "log_check: There were error messages in the logfile"
|
||||
echo -e "log_check: Matched keyword: [$keyword_die]\n"
|
||||
echo "$lf_txt" | grep -v log_check | grep -i "$keyword_die"
|
||||
echo ""
|
||||
do_exit=1
|
||||
fi
|
||||
done
|
||||
test "$do_exit" = 1 && exit 1
|
||||
else
|
||||
echo "Cannot find logfile [$lf_path]"
|
||||
fi
|
||||
echo "Logfile is clean"
|
||||
done
|
||||
|
||||
set -x
|
||||
|
||||
}
|
||||
|
||||
fakeroot do_rootfs () {
|
||||
rm -rf ${IMAGE_ROOTFS}
|
||||
real_do_rootfs
|
||||
}
|
||||
|
||||
# set '*' as the rootpassword so the images
|
||||
# can decide if they want it or not
|
||||
|
||||
zap_root_password () {
|
||||
sed 's%^root:[^:]*:%root:*:%' < ${IMAGE_ROOTFS}/etc/passwd >${IMAGE_ROOTFS}/etc/passwd.new
|
||||
mv ${IMAGE_ROOTFS}/etc/passwd.new ${IMAGE_ROOTFS}/etc/passwd
|
||||
}
|
||||
|
||||
create_etc_timestamp() {
|
||||
date +%2m%2d%2H%2M%Y >${IMAGE_ROOTFS}/etc/timestamp
|
||||
}
|
||||
|
||||
# Turn any symbolic /sbin/init link into a file
|
||||
remove_init_link () {
|
||||
if [ -h ${IMAGE_ROOTFS}/sbin/init ]; then
|
||||
LINKFILE=${IMAGE_ROOTFS}`readlink ${IMAGE_ROOTFS}/sbin/init`
|
||||
rm ${IMAGE_ROOTFS}/sbin/init
|
||||
cp $LINKFILE ${IMAGE_ROOTFS}/sbin/init
|
||||
fi
|
||||
}
|
||||
|
||||
# export the zap_root_password, create_etc_timestamp and remote_init_link
|
||||
EXPORT_FUNCTIONS zap_root_password create_etc_timestamp remove_init_link
|
||||
|
||||
addtask rootfs before do_build after do_install
|
||||
16
meta/classes/rpm_core.bbclass
Normal file
16
meta/classes/rpm_core.bbclass
Normal file
@@ -0,0 +1,16 @@
|
||||
RPMBUILDPATH="${WORKDIR}/rpm"
|
||||
|
||||
RPMOPTS="--rcfile=${WORKDIR}/rpmrc"
|
||||
RPMOPTS="--rcfile=${WORKDIR}/rpmrc --target ${TARGET_SYS}"
|
||||
RPM="rpm ${RPMOPTS}"
|
||||
RPMBUILD="rpmbuild --buildroot ${D} --short-circuit ${RPMOPTS}"
|
||||
|
||||
rpm_core_do_preprpm() {
|
||||
mkdir -p ${RPMBUILDPATH}/{SPECS,RPMS/{i386,i586,i686,noarch,ppc,mips,mipsel,arm},SRPMS,SOURCES,BUILD}
|
||||
echo 'macrofiles:/usr/lib/rpm/macros:${WORKDIR}/macros' > ${WORKDIR}/rpmrc
|
||||
echo '%_topdir ${RPMBUILDPATH}' > ${WORKDIR}/macros
|
||||
echo '%_repackage_dir ${WORKDIR}' >> ${WORKDIR}/macros
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_preprpm
|
||||
addtask preprpm before do_fetch
|
||||
112
meta/classes/sanity.bbclass
Normal file
112
meta/classes/sanity.bbclass
Normal file
@@ -0,0 +1,112 @@
|
||||
#
|
||||
# Sanity check the users setup for common misconfigurations
|
||||
#
|
||||
|
||||
def raise_sanity_error(msg):
|
||||
import bb
|
||||
bb.fatal(""" Openembedded's config sanity checker detected a potential misconfiguration.
|
||||
Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
|
||||
Following is the list of potential problems / advisories:
|
||||
|
||||
%s""" % msg)
|
||||
|
||||
def check_conf_exists(fn, data):
|
||||
import bb, os
|
||||
|
||||
bbpath = []
|
||||
fn = bb.data.expand(fn, data)
|
||||
vbbpath = bb.data.getVar("BBPATH", data)
|
||||
if vbbpath:
|
||||
bbpath += vbbpath.split(":")
|
||||
for p in bbpath:
|
||||
currname = os.path.join(bb.data.expand(p, data), fn)
|
||||
if os.access(currname, os.R_OK):
|
||||
return True
|
||||
return False
|
||||
|
||||
def check_app_exists(app, d):
|
||||
from bb import which, data
|
||||
|
||||
app = data.expand(app, d)
|
||||
path = data.getVar('PATH', d)
|
||||
return len(which(path, app)) != 0
|
||||
|
||||
|
||||
def check_sanity(e):
|
||||
from bb import note, error, data, __version__
|
||||
from bb.event import Handled, NotHandled, getName
|
||||
try:
|
||||
from distutils.version import LooseVersion
|
||||
except ImportError:
|
||||
def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
|
||||
import os
|
||||
|
||||
# Check the bitbake version meets minimum requirements
|
||||
minversion = data.getVar('BB_MIN_VERSION', e.data , True)
|
||||
if not minversion:
|
||||
# Hack: BB_MIN_VERSION hasn't been parsed yet so return
|
||||
# and wait for the next call
|
||||
print "Foo %s" % minversion
|
||||
return
|
||||
|
||||
if (LooseVersion(__version__) < LooseVersion(minversion)):
|
||||
raise_sanity_error('Bitbake version %s is required and version %s was found' % (minversion, __version__))
|
||||
|
||||
# Check TARGET_ARCH is set
|
||||
if data.getVar('TARGET_ARCH', e.data, True) == 'INVALID':
|
||||
raise_sanity_error('Please set TARGET_ARCH directly, or choose a MACHINE or DISTRO that does so.')
|
||||
|
||||
# Check TARGET_OS is set
|
||||
if data.getVar('TARGET_OS', e.data, True) == 'INVALID':
|
||||
raise_sanity_error('Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.')
|
||||
|
||||
# Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
|
||||
if "diffstat-native" not in data.getVar('ASSUME_PROVIDED', e.data, True).split():
|
||||
raise_sanity_error('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf')
|
||||
|
||||
# Check the MACHINE is valid
|
||||
if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
|
||||
raise_sanity_error('Please set a valid MACHINE in your local.conf')
|
||||
|
||||
# Check the distro is valid
|
||||
if not check_conf_exists("conf/distro/${DISTRO}.conf", e.data):
|
||||
raise_sanity_error('Please set a valid DISTRO in your local.conf')
|
||||
|
||||
if not check_app_exists("${MAKE}", e.data):
|
||||
raise_sanity_error('GNU make missing. Please install GNU make')
|
||||
|
||||
if not check_app_exists('${BUILD_PREFIX}gcc', e.data):
|
||||
raise_sanity_error('C Host-Compiler is missing, please install one' )
|
||||
|
||||
if not check_app_exists('${BUILD_PREFIX}g++', e.data):
|
||||
raise_sanity_error('C++ Host-Compiler is missing, please install one' )
|
||||
|
||||
if not check_app_exists('patch', e.data):
|
||||
raise_sanity_error('Please install the patch utility, preferable GNU patch.')
|
||||
|
||||
if not check_app_exists('diffstat', e.data):
|
||||
raise_sanity_error('Please install the diffstat utility')
|
||||
|
||||
if not check_app_exists('texi2html', e.data):
|
||||
raise_sanity_error('Please install the texi2html binary')
|
||||
|
||||
if not check_app_exists('cvs', e.data):
|
||||
raise_sanity_error('Please install the cvs utility')
|
||||
|
||||
if not check_app_exists('svn', e.data):
|
||||
raise_sanity_error('Please install the svn utility')
|
||||
|
||||
oes_bb_conf = data.getVar( 'OES_BITBAKE_CONF', e.data, True )
|
||||
if not oes_bb_conf:
|
||||
raise_sanity_error('You do not include OpenEmbeddeds version of conf/bitbake.conf')
|
||||
|
||||
addhandler check_sanity_eventhandler
|
||||
python check_sanity_eventhandler() {
|
||||
from bb import note, error, data, __version__
|
||||
from bb.event import getName
|
||||
|
||||
if getName(e) == "BuildStarted":
|
||||
check_sanity(e)
|
||||
|
||||
return NotHandled
|
||||
}
|
||||
13
meta/classes/scons.bbclass
Normal file
13
meta/classes/scons.bbclass
Normal file
@@ -0,0 +1,13 @@
|
||||
DEPENDS += "python-scons-native"
|
||||
|
||||
scons_do_compile() {
|
||||
${STAGING_BINDIR}/scons || \
|
||||
oefatal "scons build execution failed."
|
||||
}
|
||||
|
||||
scons_do_install() {
|
||||
${STAGING_BINDIR}/scons install || \
|
||||
oefatal "scons install execution failed."
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_compile do_install
|
||||
26
meta/classes/sdk.bbclass
Normal file
26
meta/classes/sdk.bbclass
Normal file
@@ -0,0 +1,26 @@
|
||||
# SDK packages are built either explicitly by the user,
|
||||
# or indirectly via dependency. No need to be in 'world'.
|
||||
EXCLUDE_FROM_WORLD = "1"
|
||||
|
||||
SDK_NAME = "${TARGET_ARCH}/oe"
|
||||
PACKAGE_ARCH = "${BUILD_ARCH}"
|
||||
|
||||
HOST_ARCH = "${BUILD_ARCH}"
|
||||
HOST_VENDOR = "${BUILD_VENDOR}"
|
||||
HOST_OS = "${BUILD_OS}"
|
||||
HOST_PREFIX = "${BUILD_PREFIX}"
|
||||
HOST_CC_ARCH = "${BUILD_CC_ARCH}"
|
||||
|
||||
CPPFLAGS = "${BUILD_CPPFLAGS}"
|
||||
CFLAGS = "${BUILD_CFLAGS}"
|
||||
CXXFLAGS = "${BUILD_CFLAGS}"
|
||||
LDFLAGS = "${BUILD_LDFLAGS}"
|
||||
|
||||
prefix = "/usr/local/${SDK_NAME}"
|
||||
exec_prefix = "${prefix}"
|
||||
base_prefix = "${exec_prefix}"
|
||||
|
||||
FILES_${PN} = "${prefix}"
|
||||
|
||||
|
||||
|
||||
44
meta/classes/sdl.bbclass
Normal file
44
meta/classes/sdl.bbclass
Normal file
@@ -0,0 +1,44 @@
|
||||
#
|
||||
# (C) Michael 'Mickey' Lauer <mickey@Vanille.de>
|
||||
#
|
||||
|
||||
DEPENDS += "virtual/libsdl libsdl-mixer libsdl-image"
|
||||
|
||||
APPDESKTOP ?= "${PN}.desktop"
|
||||
APPNAME ?= "${PN}"
|
||||
APPIMAGE ?= "${PN}.png"
|
||||
|
||||
sdl_do_sdl_install() {
|
||||
install -d ${D}${palmtopdir}/bin
|
||||
install -d ${D}${palmtopdir}/pics
|
||||
install -d ${D}${palmtopdir}/apps/Games
|
||||
ln -sf ${bindir}/${APPNAME} ${D}${palmtopdir}/bin/${APPNAME}
|
||||
install -m 0644 ${APPIMAGE} ${D}${palmtopdir}/pics/${PN}.png
|
||||
|
||||
if [ -e "${APPDESKTOP}" ]
|
||||
then
|
||||
echo ${APPDESKTOP} present, installing to palmtopdir...
|
||||
install -m 0644 ${APPDESKTOP} ${D}${palmtopdir}/apps/Games/${PN}.desktop
|
||||
else
|
||||
echo ${APPDESKTOP} not present, creating one on-the-fly...
|
||||
cat >${D}${palmtopdir}/apps/Games/${PN}.desktop <<EOF
|
||||
[Desktop Entry]
|
||||
Note=Auto Generated... this may be not what you want
|
||||
Comment=${DESCRIPTION}
|
||||
Exec=${APPNAME}
|
||||
Icon=${APPIMAGE}
|
||||
Type=Application
|
||||
Name=${PN}
|
||||
EOF
|
||||
fi
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_sdl_install
|
||||
addtask sdl_install after do_compile before do_populate_staging
|
||||
|
||||
SECTION = "x11/games"
|
||||
SECTION_${PN}-opie = "opie/games"
|
||||
|
||||
PACKAGES += "${PN}-opie"
|
||||
RDEPENDS_${PN}-opie += "${PN}"
|
||||
FILES_${PN}-opie = "${palmtopdir}"
|
||||
58
meta/classes/sip.bbclass
Normal file
58
meta/classes/sip.bbclass
Normal file
@@ -0,0 +1,58 @@
|
||||
# Build Class for Sip based Python Bindings
|
||||
# (C) Michael 'Mickey' Lauer <mickey@Vanille.de>
|
||||
#
|
||||
|
||||
DEPENDS =+ "sip-native python-sip"
|
||||
|
||||
# default stuff, do not uncomment
|
||||
# EXTRA_SIPTAGS = "-tWS_QWS -tQtPE_1_6_0 -tQt_2_3_1"
|
||||
|
||||
sip_do_generate() {
|
||||
if [ -z "${SIP_MODULES}" ]; then
|
||||
MODULES="`ls sip/*mod.sip`"
|
||||
else
|
||||
MODULES="${SIP_MODULES}"
|
||||
fi
|
||||
|
||||
if [ -z "$MODULES" ]; then
|
||||
die "SIP_MODULES not set and no modules found in $PWD"
|
||||
else
|
||||
oenote "using modules '${SIP_MODULES}' and tags '${EXTRA_SIPTAGS}'"
|
||||
fi
|
||||
|
||||
if [ -z "${EXTRA_SIPTAGS}" ]; then
|
||||
die "EXTRA_SIPTAGS needs to be set!"
|
||||
else
|
||||
SIPTAGS="${EXTRA_SIPTAGS}"
|
||||
fi
|
||||
|
||||
if [ ! -z "${SIP_FEATURES}" ]; then
|
||||
FEATURES="-z ${SIP_FEATURES}"
|
||||
oenote "sip feature file: ${SIP_FEATURES}"
|
||||
fi
|
||||
|
||||
for module in $MODULES
|
||||
do
|
||||
install -d ${module}/
|
||||
oenote "calling 'sip -I sip -I ${STAGING_SIPDIR} ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.pro.in sip/${module}/${module}mod.sip'"
|
||||
sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf sip/${module}/${module}mod.sip \
|
||||
|| die "Error calling sip on ${module}"
|
||||
cat ${module}/${module}.sbf | sed s,target,TARGET, \
|
||||
| sed s,sources,SOURCES, \
|
||||
| sed s,headers,HEADERS, \
|
||||
| sed s,"moc_HEADERS =","HEADERS +=", \
|
||||
>${module}/${module}.pro
|
||||
echo "TEMPLATE=lib" >>${module}/${module}.pro
|
||||
[ "${module}" = "qt" ] && echo "" >>${module}/${module}.pro
|
||||
[ "${module}" = "qtcanvas" ] && echo "" >>${module}/${module}.pro
|
||||
[ "${module}" = "qttable" ] && echo "" >>${module}/${module}.pro
|
||||
[ "${module}" = "qwt" ] && echo "" >>${module}/${module}.pro
|
||||
[ "${module}" = "qtpe" ] && echo "" >>${module}/${module}.pro
|
||||
[ "${module}" = "qtpe" ] && echo "LIBS+=-lqpe" >>${module}/${module}.pro
|
||||
true
|
||||
done
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_generate
|
||||
|
||||
addtask generate after do_unpack do_patch before do_configure
|
||||
111
meta/classes/sourcepkg.bbclass
Normal file
111
meta/classes/sourcepkg.bbclass
Normal file
@@ -0,0 +1,111 @@
|
||||
DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/source"
|
||||
EXCLUDE_FROM ?= ".pc autom4te.cache"
|
||||
|
||||
# used as part of a path. make sure it's set
|
||||
DISTRO ?= "openembedded"
|
||||
|
||||
def get_src_tree(d):
|
||||
import bb
|
||||
import os, os.path
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
if not workdir:
|
||||
bb.error("WORKDIR not defined, unable to find source tree.")
|
||||
return
|
||||
|
||||
s = bb.data.getVar('S', d, 0)
|
||||
if not s:
|
||||
bb.error("S not defined, unable to find source tree.")
|
||||
return
|
||||
|
||||
s_tree_raw = s.split('/')[1]
|
||||
s_tree = bb.data.expand(s_tree_raw, d)
|
||||
|
||||
src_tree_path = os.path.join(workdir, s_tree)
|
||||
try:
|
||||
os.listdir(src_tree_path)
|
||||
except OSError:
|
||||
bb.fatal("Expected to find source tree in '%s' which doesn't exist." % src_tree_path)
|
||||
bb.debug("Assuming source tree is '%s'" % src_tree_path)
|
||||
|
||||
return s_tree
|
||||
|
||||
sourcepkg_do_create_orig_tgz(){
|
||||
|
||||
mkdir -p ${DEPLOY_DIR_SRC}
|
||||
cd ${WORKDIR}
|
||||
for i in ${EXCLUDE_FROM}; do
|
||||
echo $i >> temp/exclude-from-file
|
||||
done
|
||||
|
||||
src_tree=${@get_src_tree(d)}
|
||||
|
||||
echo $src_tree
|
||||
oenote "Creating .orig.tar.gz in ${DEPLOY_DIR_SRC}/${P}.orig.tar.gz"
|
||||
tar cvzf ${DEPLOY_DIR_SRC}/${P}.orig.tar.gz --exclude-from temp/exclude-from-file $src_tree
|
||||
cp -pPR $src_tree $src_tree.orig
|
||||
}
|
||||
|
||||
sourcepkg_do_archive_bb() {
|
||||
|
||||
src_tree=${@get_src_tree(d)}
|
||||
dest=${WORKDIR}/$src_tree/${DISTRO}
|
||||
mkdir -p $dest
|
||||
|
||||
cp ${FILE} $dest
|
||||
}
|
||||
|
||||
python sourcepkg_do_dumpdata() {
|
||||
import os
|
||||
import os.path
|
||||
|
||||
workdir = bb.data.getVar('WORKDIR', d, 1)
|
||||
distro = bb.data.getVar('DISTRO', d, 1)
|
||||
s_tree = get_src_tree(d)
|
||||
openembeddeddir = os.path.join(workdir, s_tree, distro)
|
||||
dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))
|
||||
|
||||
try:
|
||||
os.mkdir(openembeddeddir)
|
||||
except OSError:
|
||||
# dir exists
|
||||
pass
|
||||
|
||||
bb.note("Dumping metadata into '%s'" % dumpfile)
|
||||
f = open(dumpfile, "w")
|
||||
# emit variables and shell functions
|
||||
bb.data.emit_env(f, d, True)
|
||||
# emit the metadata which isnt valid shell
|
||||
for e in d.keys():
|
||||
if bb.data.getVarFlag(e, 'python', d):
|
||||
f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
|
||||
f.close()
|
||||
}
|
||||
|
||||
sourcepkg_do_create_diff_gz(){
|
||||
|
||||
cd ${WORKDIR}
|
||||
for i in ${EXCLUDE_FROM}; do
|
||||
echo $i >> temp/exclude-from-file
|
||||
done
|
||||
|
||||
|
||||
src_tree=${@get_src_tree(d)}
|
||||
|
||||
for i in `find . -maxdepth 1 -type f`; do
|
||||
mkdir -p $src_tree/${DISTRO}/files
|
||||
cp $i $src_tree/${DISTRO}/files
|
||||
done
|
||||
|
||||
oenote "Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz"
|
||||
LC_ALL=C TZ=UTC0 diff --exclude-from=temp/exclude-from-file -Naur $src_tree.orig $src_tree | gzip -c > ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz
|
||||
rm -rf $src_tree.orig
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_create_orig_tgz do_archive_bb do_dumpdata do_create_diff_gz
|
||||
|
||||
addtask create_orig_tgz after do_unpack before do_patch
|
||||
addtask archive_bb after do_patch before do_dumpdata
|
||||
addtask dumpdata after archive_bb before do_create_diff_gz
|
||||
addtask create_diff_gz after do_dump_data before do_configure
|
||||
|
||||
40
meta/classes/src_distribute.bbclass
Normal file
40
meta/classes/src_distribute.bbclass
Normal file
@@ -0,0 +1,40 @@
|
||||
include conf/licenses.conf
|
||||
|
||||
SRC_DISTRIBUTECOMMAND[func] = "1"
|
||||
python do_distribute_sources () {
|
||||
l = bb.data.createCopy(d)
|
||||
bb.data.update_data(l)
|
||||
licenses = (bb.data.getVar('LICENSE', d, 1) or "").split()
|
||||
if not licenses:
|
||||
bb.note("LICENSE not defined")
|
||||
src_distribute_licenses = (bb.data.getVar('SRC_DISTRIBUTE_LICENSES', d, 1) or "").split()
|
||||
# Explanation:
|
||||
# Space seperated items in LICENSE must *all* be distributable
|
||||
# Each space seperated item may be used under any number of | seperated licenses.
|
||||
# If any of those | seperated licenses are distributable, then that component is.
|
||||
# i.e. LICENSE = "GPL LGPL"
|
||||
# In this case, both components are distributable.
|
||||
# LICENSE = "GPL|QPL|Proprietary"
|
||||
# In this case, GPL is distributable, so the component is.
|
||||
valid = 1
|
||||
for l in licenses:
|
||||
lvalid = 0
|
||||
for i in l.split("|"):
|
||||
if i in src_distribute_licenses:
|
||||
lvalid = 1
|
||||
if lvalid != 1:
|
||||
valid = 0
|
||||
if valid == 0:
|
||||
bb.note("Licenses (%s) are not all listed in SRC_DISTRIBUTE_LICENSES, skipping source distribution" % licenses)
|
||||
return
|
||||
import re
|
||||
for s in (bb.data.getVar('A', d, 1) or "").split():
|
||||
s = re.sub(';.*$', '', s)
|
||||
cmd = bb.data.getVar('SRC_DISTRIBUTECOMMAND', d, 1)
|
||||
if not cmd:
|
||||
raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined")
|
||||
bb.data.setVar('SRC', s, d)
|
||||
bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)
|
||||
}
|
||||
|
||||
addtask distribute_sources before do_build after do_fetch
|
||||
31
meta/classes/src_distribute_local.bbclass
Normal file
31
meta/classes/src_distribute_local.bbclass
Normal file
@@ -0,0 +1,31 @@
|
||||
inherit src_distribute
|
||||
|
||||
# SRC_DIST_LOCAL possible values:
|
||||
# copy copies the files from ${A} to the distributedir
|
||||
# symlink symlinks the files from ${A} to the distributedir
|
||||
# move+symlink moves the files into distributedir, and symlinks them back
|
||||
SRC_DIST_LOCAL ?= "move+symlink"
|
||||
SRC_DISTRIBUTEDIR ?= "${DEPLOY_DIR}/sources"
|
||||
SRC_DISTRIBUTECOMMAND () {
|
||||
s="${SRC}"
|
||||
if [ ! -L "$s" ] && (echo "$s"|grep "^${DL_DIR}"); then
|
||||
:
|
||||
else
|
||||
exit 0;
|
||||
fi
|
||||
mkdir -p ${SRC_DISTRIBUTEDIR}
|
||||
case "${SRC_DIST_LOCAL}" in
|
||||
copy)
|
||||
test -e $s.md5 && cp -f $s.md5 ${SRC_DISTRIBUTEDIR}/
|
||||
cp -f $s ${SRC_DISTRIBUTEDIR}/
|
||||
;;
|
||||
symlink)
|
||||
test -e $s.md5 && ln -sf $s.md5 ${SRC_DISTRIBUTEDIR}/
|
||||
ln -sf $s ${SRC_DISTRIBUTEDIR}/
|
||||
;;
|
||||
move+symlink)
|
||||
mv $s ${SRC_DISTRIBUTEDIR}/
|
||||
ln -sf ${SRC_DISTRIBUTEDIR}/`basename $s` $s
|
||||
;;
|
||||
esac
|
||||
}
|
||||
28
meta/classes/srec.bbclass
Normal file
28
meta/classes/srec.bbclass
Normal file
@@ -0,0 +1,28 @@
|
||||
#
|
||||
# Creates .srec files from images.
|
||||
#
|
||||
# Useful for loading with Yamon.
|
||||
|
||||
# Define SREC_VMAADDR in your machine.conf.
|
||||
|
||||
SREC_CMD = "${TARGET_PREFIX}objcopy -O srec -I binary --adjust-vma ${SREC_VMAADDR} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type}.srec"
|
||||
|
||||
# Do not build srec files for these types of images:
|
||||
SREC_SKIP = "tar"
|
||||
|
||||
do_srec[nostamp] = 1
|
||||
|
||||
do_srec () {
|
||||
if [ ${SREC_VMAADDR} = "" ] ; then
|
||||
oefatal Cannot do_srec without SREC_VMAADDR defined.
|
||||
fi
|
||||
for type in ${IMAGE_FSTYPES}; do
|
||||
for skiptype in ${SREC_SKIP}; do
|
||||
if [ $type = $skiptype ] ; then continue 2 ; fi
|
||||
done
|
||||
${SREC_CMD}
|
||||
done
|
||||
return 0
|
||||
}
|
||||
|
||||
addtask srec after do_rootfs before do_build
|
||||
332
meta/classes/tinderclient.bbclass
Normal file
332
meta/classes/tinderclient.bbclass
Normal file
@@ -0,0 +1,332 @@
|
||||
def tinder_http_post(server, selector, content_type, body):
|
||||
import httplib
|
||||
# now post it
|
||||
for i in range(0,5):
|
||||
try:
|
||||
h = httplib.HTTP(server)
|
||||
h.putrequest('POST', selector)
|
||||
h.putheader('content-type', content_type)
|
||||
h.putheader('content-length', str(len(body)))
|
||||
h.endheaders()
|
||||
h.send(body)
|
||||
errcode, errmsg, headers = h.getreply()
|
||||
#print errcode, errmsg, headers
|
||||
return (errcode,errmsg, headers, h.file)
|
||||
except:
|
||||
# try again
|
||||
pass
|
||||
|
||||
def tinder_form_data(bound, dict, log):
|
||||
output = []
|
||||
#br
|
||||
# for each key in the dictionary
|
||||
for name in dict:
|
||||
output.append( "--" + bound )
|
||||
output.append( 'Content-Disposition: form-data; name="%s"' % name )
|
||||
output.append( "" )
|
||||
output.append( dict[name] )
|
||||
if log:
|
||||
output.append( "--" + bound )
|
||||
output.append( 'Content-Disposition: form-data; name="log"; filename="log.txt"' )
|
||||
output.append( '' )
|
||||
output.append( log )
|
||||
output.append( '--' + bound + '--' )
|
||||
output.append( '' )
|
||||
|
||||
return "\r\n".join(output)
|
||||
|
||||
def tinder_time_string():
|
||||
"""
|
||||
Return the time as GMT
|
||||
"""
|
||||
return ""
|
||||
|
||||
def tinder_format_http_post(d,status,log):
|
||||
"""
|
||||
Format the Tinderbox HTTP post with the data needed
|
||||
for the tinderbox to be happy.
|
||||
"""
|
||||
|
||||
from bb import data, build
|
||||
import os,random
|
||||
|
||||
# the variables we will need to send on this form post
|
||||
variables = {
|
||||
"tree" : data.getVar('TINDER_TREE', d, True),
|
||||
"machine_name" : data.getVar('TINDER_MACHINE', d, True),
|
||||
"os" : os.uname()[0],
|
||||
"os_version" : os.uname()[2],
|
||||
"compiler" : "gcc",
|
||||
"clobber" : data.getVar('TINDER_CLOBBER', d, True)
|
||||
}
|
||||
|
||||
# optionally add the status
|
||||
if status:
|
||||
variables["status"] = str(status)
|
||||
|
||||
# try to load the machine id
|
||||
# we only need on build_status.pl but sending it
|
||||
# always does not hurt
|
||||
try:
|
||||
f = file(data.getVar('TMPDIR',d,True)+'/tinder-machine.id', 'r')
|
||||
id = f.read()
|
||||
variables['machine_id'] = id
|
||||
except:
|
||||
pass
|
||||
|
||||
# the boundary we will need
|
||||
boundary = "----------------------------------%d" % int(random.random()*1000000000000)
|
||||
|
||||
# now format the body
|
||||
body = tinder_form_data( boundary, variables, log )
|
||||
|
||||
return ("multipart/form-data; boundary=%s" % boundary),body
|
||||
|
||||
|
||||
def tinder_build_start(d):
|
||||
"""
|
||||
Inform the tinderbox that a build is starting. We do this
|
||||
by posting our name and tree to the build_start.pl script
|
||||
on the server.
|
||||
"""
|
||||
from bb import data
|
||||
|
||||
# get the body and type
|
||||
content_type, body = tinder_format_http_post(d,None,None)
|
||||
server = data.getVar('TINDER_HOST', d, True )
|
||||
url = data.getVar('TINDER_URL', d, True )
|
||||
|
||||
selector = url + "/xml/build_start.pl"
|
||||
|
||||
#print "selector %s and url %s" % (selector, url)
|
||||
|
||||
# now post it
|
||||
errcode, errmsg, headers, h_file = tinder_http_post(server,selector,content_type, body)
|
||||
#print errcode, errmsg, headers
|
||||
report = h_file.read()
|
||||
|
||||
# now let us find the machine id that was assigned to us
|
||||
search = "<machine id='"
|
||||
report = report[report.find(search)+len(search):]
|
||||
report = report[0:report.find("'")]
|
||||
|
||||
import bb
|
||||
bb.note("Machine ID assigned by tinderbox: %s" % report )
|
||||
|
||||
# now we will need to save the machine number
|
||||
# we will override any previous numbers
|
||||
f = file(data.getVar('TMPDIR', d, True)+"/tinder-machine.id", 'w')
|
||||
f.write(report)
|
||||
|
||||
|
||||
def tinder_send_http(d, status, log):
|
||||
"""
|
||||
Send this log as build status
|
||||
"""
|
||||
from bb import data
|
||||
|
||||
|
||||
# get the body and type
|
||||
content_type, body = tinder_format_http_post(d,status,log)
|
||||
server = data.getVar('TINDER_HOST', d, True )
|
||||
url = data.getVar('TINDER_URL', d, True )
|
||||
|
||||
selector = url + "/xml/build_status.pl"
|
||||
|
||||
# now post it
|
||||
errcode, errmsg, headers, h_file = tinder_http_post(server,selector,content_type, body)
|
||||
#print errcode, errmsg, headers
|
||||
#print h.file.read()
|
||||
|
||||
|
||||
def tinder_print_info(d):
|
||||
"""
|
||||
Print the TinderBox Info
|
||||
Including informations of the BaseSystem and the Tree
|
||||
we use.
|
||||
"""
|
||||
|
||||
from bb import data
|
||||
import os
|
||||
# get the local vars
|
||||
|
||||
time = tinder_time_string()
|
||||
ops = os.uname()[0]
|
||||
version = os.uname()[2]
|
||||
url = data.getVar( 'TINDER_URL' , d, True )
|
||||
tree = data.getVar( 'TINDER_TREE', d, True )
|
||||
branch = data.getVar( 'TINDER_BRANCH', d, True )
|
||||
srcdate = data.getVar( 'SRCDATE', d, True )
|
||||
machine = data.getVar( 'MACHINE', d, True )
|
||||
distro = data.getVar( 'DISTRO', d, True )
|
||||
bbfiles = data.getVar( 'BBFILES', d, True )
|
||||
tarch = data.getVar( 'TARGET_ARCH', d, True )
|
||||
fpu = data.getVar( 'TARGET_FPU', d, True )
|
||||
oerev = data.getVar( 'OE_REVISION', d, True ) or "unknown"
|
||||
|
||||
# there is a bug with tipple quoted strings
|
||||
# i will work around but will fix the original
|
||||
# bug as well
|
||||
output = []
|
||||
output.append("== Tinderbox Info" )
|
||||
output.append("Time: %(time)s" )
|
||||
output.append("OS: %(ops)s" )
|
||||
output.append("%(version)s" )
|
||||
output.append("Compiler: gcc" )
|
||||
output.append("Tinderbox Client: 0.1" )
|
||||
output.append("Tinderbox Client Last Modified: yesterday" )
|
||||
output.append("Tinderbox Protocol: 0.1" )
|
||||
output.append("URL: %(url)s" )
|
||||
output.append("Tree: %(tree)s" )
|
||||
output.append("Config:" )
|
||||
output.append("branch = '%(branch)s'" )
|
||||
output.append("TARGET_ARCH = '%(tarch)s'" )
|
||||
output.append("TARGET_FPU = '%(fpu)s'" )
|
||||
output.append("SRCDATE = '%(srcdate)s'" )
|
||||
output.append("MACHINE = '%(machine)s'" )
|
||||
output.append("DISTRO = '%(distro)s'" )
|
||||
output.append("BBFILES = '%(bbfiles)s'" )
|
||||
output.append("OEREV = '%(oerev)s'" )
|
||||
output.append("== End Tinderbox Client Info" )
|
||||
|
||||
# now create the real output
|
||||
return "\n".join(output) % vars()
|
||||
|
||||
|
||||
def tinder_print_env():
|
||||
"""
|
||||
Print the environment variables of this build
|
||||
"""
|
||||
from bb import data
|
||||
import os
|
||||
|
||||
time_start = tinder_time_string()
|
||||
time_end = tinder_time_string()
|
||||
|
||||
# build the environment
|
||||
env = ""
|
||||
for var in os.environ:
|
||||
env += "%s=%s\n" % (var, os.environ[var])
|
||||
|
||||
output = []
|
||||
output.append( "---> TINDERBOX RUNNING env %(time_start)s" )
|
||||
output.append( env )
|
||||
output.append( "<--- TINDERBOX FINISHED (SUCCESS) %(time_end)s" )
|
||||
|
||||
return "\n".join(output) % vars()
|
||||
|
||||
def tinder_tinder_start(d, event):
|
||||
"""
|
||||
PRINT the configuration of this build
|
||||
"""
|
||||
|
||||
time_start = tinder_time_string()
|
||||
config = tinder_print_info(d)
|
||||
#env = tinder_print_env()
|
||||
time_end = tinder_time_string()
|
||||
packages = " ".join( event.getPkgs() )
|
||||
|
||||
output = []
|
||||
output.append( "---> TINDERBOX PRINTING CONFIGURATION %(time_start)s" )
|
||||
output.append( config )
|
||||
#output.append( env )
|
||||
output.append( "<--- TINDERBOX FINISHED PRINTING CONFIGURATION %(time_end)s" )
|
||||
output.append( "---> TINDERBOX BUILDING '%(packages)s'" )
|
||||
output.append( "<--- TINDERBOX STARTING BUILD NOW" )
|
||||
|
||||
output.append( "" )
|
||||
|
||||
return "\n".join(output) % vars()
|
||||
|
||||
def tinder_do_tinder_report(event):
|
||||
"""
|
||||
Report to the tinderbox:
|
||||
On the BuildStart we will inform the box directly
|
||||
On the other events we will write to the TINDER_LOG and
|
||||
when the Task is finished we will send the report.
|
||||
|
||||
The above is not yet fully implemented. Currently we send
|
||||
information immediately. The caching/queuing needs to be
|
||||
implemented. Also sending more or less information is not
|
||||
implemented yet.
|
||||
"""
|
||||
from bb.event import getName
|
||||
from bb import data, mkdirhier, build
|
||||
import os, glob
|
||||
|
||||
# variables
|
||||
name = getName(event)
|
||||
log = ""
|
||||
status = 1
|
||||
#print asd
|
||||
# Check what we need to do Build* shows we start or are done
|
||||
if name == "BuildStarted":
|
||||
tinder_build_start(event.data)
|
||||
log = tinder_tinder_start(event.data,event)
|
||||
|
||||
try:
|
||||
# truncate the tinder log file
|
||||
f = file(data.getVar('TINDER_LOG', event.data, True), 'rw+')
|
||||
f.truncate(0)
|
||||
f.close()
|
||||
except IOError:
|
||||
pass
|
||||
|
||||
# Append the Task-Log (compile,configure...) to the log file
|
||||
# we will send to the server
|
||||
if name == "TaskSucceeded" or name == "TaskFailed":
|
||||
log_file = glob.glob("%s/log.%s.*" % (data.getVar('T', event.data, True), event.task))
|
||||
|
||||
if len(log_file) != 0:
|
||||
to_file = data.getVar('TINDER_LOG', event.data, True)
|
||||
log += "".join(open(log_file[0], 'r').readlines())
|
||||
|
||||
# set the right 'HEADER'/Summary for the TinderBox
|
||||
if name == "TaskStarted":
|
||||
log += "---> TINDERBOX Task %s started\n" % event.task
|
||||
elif name == "TaskSucceeded":
|
||||
log += "<--- TINDERBOX Task %s done (SUCCESS)\n" % event.task
|
||||
elif name == "TaskFailed":
|
||||
log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task
|
||||
elif name == "PkgStarted":
|
||||
log += "---> TINDERBOX Package %s started\n" % data.getVar('P', event.data, True)
|
||||
elif name == "PkgSucceeded":
|
||||
log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('P', event.data, True)
|
||||
elif name == "PkgFailed":
|
||||
build.exec_task('do_clean', event.data)
|
||||
log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('P', event.data, True)
|
||||
status = 200
|
||||
elif name == "BuildCompleted":
|
||||
log += "Build Completed\n"
|
||||
status = 100
|
||||
elif name == "MultipleProviders":
|
||||
log += "---> TINDERBOX Multiple Providers\n"
|
||||
log += "multiple providers are available (%s);\n" % ", ".join(event.getCandidates())
|
||||
log += "consider defining PREFERRED_PROVIDER_%s\n" % event.getItem()
|
||||
log += "is runtime: %d\n" % event.isRuntime()
|
||||
log += "<--- TINDERBOX Multiple Providers\n"
|
||||
elif name == "NoProvider":
|
||||
log += "Error: No Provider for: %s\n" % event.getItem()
|
||||
log += "Error:Was Runtime: %d\n" % event.isRuntime()
|
||||
status = 200
|
||||
|
||||
# now post the log
|
||||
if len(log) == 0:
|
||||
return
|
||||
|
||||
# for now we will use the http post method as it is the only one
|
||||
log_post_method = tinder_send_http
|
||||
log_post_method(event.data, status, log)
|
||||
|
||||
|
||||
# we want to be an event handler
|
||||
addhandler tinderclient_eventhandler
|
||||
python tinderclient_eventhandler() {
|
||||
from bb import note, error, data
|
||||
from bb.event import NotHandled
|
||||
do_tinder_report = data.getVar('TINDER_REPORT', e.data, True)
|
||||
if do_tinder_report and do_tinder_report == "1":
|
||||
tinder_do_tinder_report(e)
|
||||
|
||||
return NotHandled
|
||||
}
|
||||
77
meta/classes/tmake.bbclass
Normal file
77
meta/classes/tmake.bbclass
Normal file
@@ -0,0 +1,77 @@
|
||||
DEPENDS_prepend="tmake "
|
||||
|
||||
python tmake_do_createpro() {
|
||||
import glob, sys
|
||||
from bb import note
|
||||
out_vartranslate = {
|
||||
"TMAKE_HEADERS": "HEADERS",
|
||||
"TMAKE_INTERFACES": "INTERFACES",
|
||||
"TMAKE_TEMPLATE": "TEMPLATE",
|
||||
"TMAKE_CONFIG": "CONFIG",
|
||||
"TMAKE_DESTDIR": "DESTDIR",
|
||||
"TMAKE_SOURCES": "SOURCES",
|
||||
"TMAKE_DEPENDPATH": "DEPENDPATH",
|
||||
"TMAKE_INCLUDEPATH": "INCLUDEPATH",
|
||||
"TMAKE_TARGET": "TARGET",
|
||||
"TMAKE_LIBS": "LIBS",
|
||||
}
|
||||
s = data.getVar('S', d, 1) or ""
|
||||
os.chdir(s)
|
||||
profiles = (data.getVar('TMAKE_PROFILES', d, 1) or "").split()
|
||||
if not profiles:
|
||||
profiles = ["*.pro"]
|
||||
for pro in profiles:
|
||||
ppro = glob.glob(pro)
|
||||
if ppro:
|
||||
if ppro != [pro]:
|
||||
del profiles[profiles.index(pro)]
|
||||
profiles += ppro
|
||||
continue
|
||||
if ppro[0].find('*'):
|
||||
del profiles[profiles.index(pro)]
|
||||
continue
|
||||
else:
|
||||
del profiles[profiles.index(pro)]
|
||||
if len(profiles) != 0:
|
||||
return
|
||||
|
||||
# output .pro using this metadata store
|
||||
try:
|
||||
from __builtin__ import file
|
||||
profile = file(data.expand('${PN}.pro', d), 'w')
|
||||
except OSError:
|
||||
raise FuncFailed("unable to open pro file for writing.")
|
||||
|
||||
# fd = sys.__stdout__
|
||||
fd = profile
|
||||
for var in out_vartranslate.keys():
|
||||
val = data.getVar(var, d, 1)
|
||||
if val:
|
||||
fd.write("%s\t: %s\n" % (out_vartranslate[var], val))
|
||||
|
||||
# if fd is not sys.__stdout__:
|
||||
fd.close()
|
||||
}
|
||||
|
||||
tmake_do_configure() {
|
||||
paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-${TARGET_ARCH}-g++ $STAGING_DIR/share/tmake/$OS-g++"
|
||||
if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
|
||||
paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-x86-g++ $paths"
|
||||
fi
|
||||
for i in $paths; do
|
||||
if test -e $i; then
|
||||
export TMAKEPATH=$i
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -z "${TMAKE_PROFILES}" ]; then
|
||||
TMAKE_PROFILES="`ls *.pro`"
|
||||
fi
|
||||
tmake -o Makefile $TMAKE_PROFILES || die "Error calling tmake on ${TMAKE_PROFILES}"
|
||||
}
|
||||
|
||||
EXPORT_FUNCTIONS do_configure do_createpro
|
||||
|
||||
addtask configure after do_unpack do_patch before do_compile
|
||||
addtask createpro before do_configure after do_unpack do_patch
|
||||
33
meta/classes/update-alternatives.bbclass
Normal file
33
meta/classes/update-alternatives.bbclass
Normal file
@@ -0,0 +1,33 @@
|
||||
# defaults
|
||||
ALTERNATIVE_PRIORITY = "10"
|
||||
ALTERNATIVE_LINK = "${bindir}/${ALTERNATIVE_NAME}"
|
||||
|
||||
update_alternatives_postinst() {
|
||||
update-alternatives --install ${ALTERNATIVE_LINK} ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH} ${ALTERNATIVE_PRIORITY}
|
||||
}
|
||||
|
||||
update_alternatives_postrm() {
|
||||
update-alternatives --remove ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH}
|
||||
}
|
||||
|
||||
python __anonymous() {
|
||||
if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
|
||||
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
|
||||
if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
|
||||
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % bb.data.getVar('FILE', d)
|
||||
}
|
||||
|
||||
python populate_packages_prepend () {
|
||||
pkg = bb.data.getVar('PN', d, 1)
|
||||
bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
|
||||
postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
|
||||
if not postinst:
|
||||
postinst = '#!/bin/sh\n'
|
||||
postinst += bb.data.getVar('update_alternatives_postinst', d, 1)
|
||||
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
|
||||
postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1)
|
||||
if not postrm:
|
||||
postrm = '#!/bin/sh\n'
|
||||
postrm += bb.data.getVar('update_alternatives_postrm', d, 1)
|
||||
bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
|
||||
}
|
||||
69
meta/classes/update-rc.d.bbclass
Normal file
69
meta/classes/update-rc.d.bbclass
Normal file
@@ -0,0 +1,69 @@
|
||||
DEPENDS_append = " update-rc.d"
|
||||
RDEPENDS_append = " update-rc.d"
|
||||
|
||||
INITSCRIPT_PARAMS ?= "defaults"
|
||||
|
||||
INIT_D_DIR = "${sysconfdir}/init.d"
|
||||
|
||||
updatercd_postinst() {
|
||||
if test "x$D" != "x"; then
|
||||
D="-r $D"
|
||||
else
|
||||
D="-s"
|
||||
fi
|
||||
update-rc.d $D ${INITSCRIPT_NAME} ${INITSCRIPT_PARAMS}
|
||||
}
|
||||
|
||||
updatercd_prerm() {
|
||||
if test "x$D" != "x"; then
|
||||
D="-r $D"
|
||||
else
|
||||
${INIT_D_DIR}/${INITSCRIPT_NAME} stop
|
||||
fi
|
||||
}
|
||||
|
||||
updatercd_postrm() {
|
||||
update-rc.d $D ${INITSCRIPT_NAME} remove
|
||||
}
|
||||
|
||||
python __anonymous() {
|
||||
if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None:
|
||||
if bb.data.getVar('INITSCRIPT_NAME', d) == None:
|
||||
raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d)
|
||||
if bb.data.getVar('INITSCRIPT_PARAMS', d) == None:
|
||||
raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % bb.data.getVar('FILE', d)
|
||||
}
|
||||
|
||||
python populate_packages_prepend () {
|
||||
def update_rcd_package(pkg):
|
||||
bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
|
||||
localdata = bb.data.createCopy(d)
|
||||
overrides = bb.data.getVar("OVERRIDES", localdata, 1)
|
||||
bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata)
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
postinst = bb.data.getVar('pkg_postinst', localdata, 1)
|
||||
if not postinst:
|
||||
postinst = '#!/bin/sh\n'
|
||||
postinst += bb.data.getVar('updatercd_postinst', localdata, 1)
|
||||
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
|
||||
prerm = bb.data.getVar('pkg_prerm', localdata, 1)
|
||||
if not prerm:
|
||||
prerm = '#!/bin/sh\n'
|
||||
prerm += bb.data.getVar('updatercd_prerm', localdata, 1)
|
||||
bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
|
||||
postrm = bb.data.getVar('pkg_postrm', localdata, 1)
|
||||
if not postrm:
|
||||
postrm = '#!/bin/sh\n'
|
||||
postrm += bb.data.getVar('updatercd_postrm', localdata, 1)
|
||||
bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
|
||||
|
||||
pkgs = bb.data.getVar('INITSCRIPT_PACKAGES', d, 1)
|
||||
if pkgs == None:
|
||||
pkgs = bb.data.getVar('PN', d, 1)
|
||||
packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
|
||||
if not pkgs in packages and packages != []:
|
||||
pkgs = packages[0]
|
||||
for pkg in pkgs.split():
|
||||
update_rcd_package(pkg)
|
||||
}
|
||||
33
meta/classes/wrt-image.bbclass
Normal file
33
meta/classes/wrt-image.bbclass
Normal file
@@ -0,0 +1,33 @@
|
||||
# we dont need the kernel in the image
|
||||
ROOTFS_POSTPROCESS_COMMAND += "rm -f ${IMAGE_ROOTFS}/boot/zImage*"
|
||||
|
||||
def wrt_get_kernel_version(d):
|
||||
import bb
|
||||
if bb.data.inherits_class('image_ipk', d):
|
||||
skd = bb.data.getVar('STAGING_KERNEL_DIR', d, 1)
|
||||
return base_read_file(skd+'/kernel-abiversion')
|
||||
return "-no kernel version for available-"
|
||||
|
||||
wrt_create_images() {
|
||||
I=${DEPLOY_DIR}/images
|
||||
KERNEL_VERSION="${@wrt_get_kernel_version(d)}"
|
||||
|
||||
for type in ${IMAGE_FSTYPES}; do
|
||||
# generic
|
||||
trx -o ${I}/wrt-generic-${type}.trx ${I}/loader.gz \
|
||||
${I}/wrt-kernel-${KERNEL_VERSION}.lzma -a 0x10000 ${I}/${IMAGE_NAME}.rootfs.${type}
|
||||
|
||||
# WRT54GS
|
||||
addpattern -2 -i ${I}/wrt-generic-${type}.trx -o ${I}/wrt54gs-${type}.trx -g
|
||||
|
||||
# WRT54G
|
||||
sed "1s,^W54S,W54G," ${I}/wrt54gs-${type}.trx > ${I}/wrt54g-${type}.trx
|
||||
|
||||
# motorola
|
||||
motorola-bin ${I}/wrt-generic-${type}.trx ${I}/motorola-${type}.bin
|
||||
done;
|
||||
}
|
||||
|
||||
IMAGE_POSTPROCESS_COMMAND += "wrt_create_images;"
|
||||
|
||||
DEPENDS_prepend = "${@["wrt-imagetools-native ", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
|
||||
19
meta/classes/xfce.bbclass
Normal file
19
meta/classes/xfce.bbclass
Normal file
@@ -0,0 +1,19 @@
|
||||
# xfce.oeclass
|
||||
# Copyright (C) 2004, Advanced Micro Devices, Inc. All Rights Reserved
|
||||
# Released under the MIT license (see packages/COPYING)
|
||||
|
||||
# Global class to make it easier to maintain XFCE packages
|
||||
|
||||
HOMEPAGE = "http://www.xfce.org"
|
||||
LICENSE = "LGPL-2"
|
||||
|
||||
SRC_URI = "http://www.us.xfce.org/archive/xfce-${PV}/src/${PN}-${PV}.tar.gz"
|
||||
|
||||
inherit autotools
|
||||
|
||||
EXTRA_OECONF += "--with-pluginsdir=${libdir}/xfce4/panel-plugins/"
|
||||
|
||||
# FIXME: Put icons in their own package too?
|
||||
|
||||
FILES_${PN} += "${datadir}/icons/* ${datadir}/applications/* ${libdir}/xfce4/modules/*.so*"
|
||||
FILES_${PN}-doc += "${datadir}/xfce4/doc"
|
||||
15
meta/classes/xlibs.bbclass
Normal file
15
meta/classes/xlibs.bbclass
Normal file
@@ -0,0 +1,15 @@
|
||||
LICENSE= "BSD-X"
|
||||
SECTION = "x11/libs"
|
||||
|
||||
XLIBS_CVS = "${FREEDESKTOP_CVS}/xlibs"
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
do_stage() {
|
||||
oe_runmake install prefix=${STAGING_DIR} \
|
||||
bindir=${STAGING_BINDIR} \
|
||||
includedir=${STAGING_INCDIR} \
|
||||
libdir=${STAGING_LIBDIR} \
|
||||
datadir=${STAGING_DATADIR} \
|
||||
mandir=${STAGING_DATADIR}/man
|
||||
}
|
||||
414
meta/conf/bitbake.conf
Normal file
414
meta/conf/bitbake.conf
Normal file
@@ -0,0 +1,414 @@
|
||||
##################################################################
|
||||
# Standard target filesystem paths.
|
||||
##################################################################
|
||||
|
||||
# Path prefixes
|
||||
export base_prefix = ""
|
||||
export prefix = "/usr"
|
||||
export exec_prefix = "${prefix}"
|
||||
|
||||
# Base paths
|
||||
export base_bindir = "${base_prefix}/bin"
|
||||
export base_sbindir = "${base_prefix}/sbin"
|
||||
export base_libdir = "${base_prefix}/lib"
|
||||
|
||||
# Architecture independent paths
|
||||
export datadir = "${prefix}/share"
|
||||
export sysconfdir = "/etc"
|
||||
export sharedstatedir = "${prefix}/com"
|
||||
export localstatedir = "/var"
|
||||
export infodir = "${datadir}/info"
|
||||
export mandir = "${datadir}/man"
|
||||
export docdir = "${datadir}/doc"
|
||||
export servicedir = "/srv"
|
||||
|
||||
# Architecture dependent paths
|
||||
export bindir = "${exec_prefix}/bin"
|
||||
export sbindir = "${exec_prefix}/sbin"
|
||||
export libexecdir = "${exec_prefix}/libexec"
|
||||
export libdir = "${exec_prefix}/lib"
|
||||
export includedir = "${exec_prefix}/include"
|
||||
export oldincludedir = "${exec_prefix}/include"
|
||||
|
||||
##################################################################
|
||||
# Architecture-dependent build variables.
|
||||
##################################################################
|
||||
|
||||
BUILD_ARCH = "${@os.uname()[4]}"
|
||||
BUILD_OS = "${@os.uname()[0].lower()}"
|
||||
BUILD_VENDOR = ""
|
||||
BUILD_SYS = "${BUILD_ARCH}${BUILD_VENDOR}-${BUILD_OS}"
|
||||
BUILD_PREFIX = ""
|
||||
BUILD_CC_ARCH = ""
|
||||
|
||||
HOST_ARCH = "${TARGET_ARCH}"
|
||||
HOST_OS = "${TARGET_OS}"
|
||||
HOST_VENDOR = "${TARGET_VENDOR}"
|
||||
HOST_SYS = "${HOST_ARCH}${HOST_VENDOR}-${HOST_OS}"
|
||||
HOST_PREFIX = "${TARGET_PREFIX}"
|
||||
HOST_CC_ARCH = "${TARGET_CC_ARCH}"
|
||||
|
||||
TARGET_ARCH = "INVALID"
|
||||
TARGET_OS = "INVALID"
|
||||
TARGET_VENDOR = "${BUILD_VENDOR}"
|
||||
TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}"
|
||||
TARGET_PREFIX = "${TARGET_SYS}-"
|
||||
TARGET_CC_ARCH = ""
|
||||
|
||||
PACKAGE_ARCH = "${HOST_ARCH}"
|
||||
MACHINE_ARCH = "${@[bb.data.getVar('HOST_ARCH', d, 1), bb.data.getVar('MACHINE', d, 1)][bool(bb.data.getVar('MACHINE', d, 1))]}"
|
||||
IPKG_ARCHS = "all any noarch ${TARGET_ARCH} ${IPKG_EXTRA_ARCHS} ${MACHINE}"
|
||||
|
||||
##################################################################
|
||||
# Date/time variables.
|
||||
##################################################################
|
||||
|
||||
DATE := "${@time.strftime('%Y%m%d',time.gmtime())}"
|
||||
TIME := "${@time.strftime('%H%M%S',time.gmtime())}"
|
||||
DATETIME = "${DATE}${TIME}"
|
||||
|
||||
##################################################################
|
||||
# Openembedded Software Prerequisites.
|
||||
##################################################################
|
||||
|
||||
# python-native should be here but python relies on building
|
||||
# its own in staging
|
||||
ASSUME_PROVIDED = "cvs-native svn-native bzip2-native diffstat-native patch-native python-native-runtime perl-native-runtime texinfo-native"
|
||||
|
||||
##################################################################
|
||||
# Package default variables.
|
||||
##################################################################
|
||||
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[1] or '1.0'}"
|
||||
PR = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[2] or 'r0'}"
|
||||
PF = "${PN}-${PV}-${PR}"
|
||||
P = "${PN}-${PV}"
|
||||
|
||||
# Package info.
|
||||
|
||||
SECTION = "base"
|
||||
PRIORITY = "optional"
|
||||
DESCRIPTION = "Version ${PV}-${PR} of package ${PN}"
|
||||
LICENSE = "unknown"
|
||||
MAINTAINER = "OpenEmbedded Team <oe@handhelds.org>"
|
||||
HOMEPAGE = "unknown"
|
||||
|
||||
# Package dependencies and provides.
|
||||
|
||||
DEPENDS = ""
|
||||
RDEPENDS = ""
|
||||
PROVIDES = ""
|
||||
PROVIDES_prepend = "${P} ${PF} ${PN} "
|
||||
RPROVIDES = ""
|
||||
|
||||
PACKAGES = "${PN} ${PN}-doc ${PN}-dev ${PN}-locale"
|
||||
FILES = ""
|
||||
FILES_${PN} = "${bindir} ${sbindir} ${libexecdir} ${libdir}/lib*.so.* \
|
||||
${sysconfdir} ${sharedstatedir} ${localstatedir} \
|
||||
/bin /sbin /lib/*.so* ${datadir}/${PN} ${libdir}/${PN} \
|
||||
${datadir}/pixmaps ${datadir}/applications \
|
||||
${datadir}/idl ${datadir}/omf ${datadir}/sounds \
|
||||
${libdir}/bonobo/servers"
|
||||
SECTION_${PN}-doc = "doc"
|
||||
FILES_${PN}-doc = "${docdir} ${mandir} ${infodir} ${datadir}/gtk-doc \
|
||||
${datadir}/gnome/help"
|
||||
SECTION_${PN}-dev = "devel"
|
||||
FILES_${PN}-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \
|
||||
${libdir}/*.a ${libdir}/*.o ${libdir}/pkgconfig \
|
||||
/lib/*.a /lib/*.o ${datadir}/aclocal"
|
||||
FILES_${PN}-locale = "${datadir}/locale"
|
||||
|
||||
# File manifest
|
||||
|
||||
export MANIFEST = "${FILESDIR}/manifest"
|
||||
|
||||
FILE_DIRNAME = "${@os.path.dirname(bb.data.getVar('FILE', d))}"
|
||||
FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
|
||||
FILESDIR = "${@bb.which(bb.data.getVar('FILESPATH', d, 1), '.')}"
|
||||
|
||||
##################################################################
|
||||
# General work and output directories for the build system.
|
||||
##################################################################
|
||||
|
||||
TMPDIR = "${TOPDIR}/tmp"
|
||||
CACHE = "${TMPDIR}/cache"
|
||||
DL_DIR = "${TMPDIR}/downloads"
|
||||
CVSDIR = "${DL_DIR}/cvs"
|
||||
GITDIR = "${DL_DIR}/git"
|
||||
|
||||
STAMP = "${TMPDIR}/stamps/${PF}"
|
||||
WORKDIR = "${TMPDIR}/work/${PF}"
|
||||
T = "${WORKDIR}/temp"
|
||||
D = "${WORKDIR}/image"
|
||||
S = "${WORKDIR}/${P}"
|
||||
B = "${S}"
|
||||
|
||||
STAGING_DIR = "${TMPDIR}/staging"
|
||||
STAGING_BINDIR = "${STAGING_DIR}/${BUILD_SYS}/bin"
|
||||
STAGING_LIBDIR = "${STAGING_DIR}/${HOST_SYS}/lib"
|
||||
STAGING_INCDIR = "${STAGING_DIR}/${HOST_SYS}/include"
|
||||
STAGING_DATADIR = "${STAGING_DIR}/${HOST_SYS}/share"
|
||||
STAGING_LOADER_DIR = "${STAGING_DIR}/${HOST_SYS}/loader"
|
||||
|
||||
DEPLOY_DIR = "${TMPDIR}/deploy"
|
||||
DEPLOY_DIR_TAR = "${DEPLOY_DIR}/tar"
|
||||
DEPLOY_DIR_IPK = "${DEPLOY_DIR}/ipk"
|
||||
DEPLOY_DIR_RPM = "${DEPLOY_DIR}/rpm"
|
||||
|
||||
##################################################################
|
||||
# Kernel info.
|
||||
##################################################################
|
||||
|
||||
OLDEST_KERNEL = "2.4.0"
|
||||
STAGING_KERNEL_DIR = "${STAGING_DIR}/${HOST_SYS}/kernel"
|
||||
|
||||
##################################################################
|
||||
# Specific image creation and rootfs population info.
|
||||
##################################################################
|
||||
|
||||
DEPLOY_DIR_IMAGE = "${DEPLOY_DIR}/images"
|
||||
IMAGE_ROOTFS = "${TMPDIR}/rootfs"
|
||||
IMAGE_BASENAME = "rootfs"
|
||||
IMAGE_NAME = "${IMAGE_BASENAME}-${MACHINE}-${DATETIME}"
|
||||
IMAGE_CMD = ""
|
||||
IMAGE_CMD_jffs2 = "mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime \
|
||||
--output=${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.jffs2 \
|
||||
${EXTRA_IMAGECMD}"
|
||||
IMAGE_CMD_cramfs = "mkcramfs ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.cramfs ${EXTRA_IMAGECMD}"
|
||||
IMAGE_CMD_ext2 = "genext2fs -b ${IMAGE_ROOTFS_SIZE} -d ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ext2 ${EXTRA_IMAGECMD}"
|
||||
IMAGE_CMD_ext2.gz = "mkdir ${DEPLOY_DIR_IMAGE}/tmp.gz; genext2fs -b ${IMAGE_ROOTFS_SIZE} -d ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/tmp.gz/${IMAGE_NAME}.rootfs.ext2 ${EXTRA_IMAGECMD}; gzip -f -9 ${DEPLOY_DIR_IMAGE}/tmp.gz/${IMAGE_NAME}.rootfs.ext2; mv ${DEPLOY_DIR_IMAGE}/tmp.gz/${IMAGE_NAME}.rootfs.ext2.gz ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ext2.gz; rmdir ${DEPLOY_DIR_IMAGE}/tmp.gz"
|
||||
IMAGE_CMD_squashfs = "mksquashfs ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.squashfs ${EXTRA_IMAGECMD} -noappend"
|
||||
IMAGE_CMD_tar = "cd ${IMAGE_ROOTFS} && tar -jcvf ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.tar.bz2 ."
|
||||
IMAGE_CMD_tar.gz = "cd ${IMAGE_ROOTFS} && tar -zcvf ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.tar.gz ."
|
||||
IMAGE_CMD_tar.bz2 = "cd ${IMAGE_ROOTFS} && tar -jcvf ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.tar.bz2 ."
|
||||
EXTRA_IMAGECMD = ""
|
||||
EXTRA_IMAGECMD_jffs2 = "--pad --little-endian --eraseblock=0x40000"
|
||||
EXTRA_IMAGECMD_squashfs = "-le -b 16384"
|
||||
IMAGE_FSTYPE = "jffs2"
|
||||
IMAGE_FSTYPES = "${IMAGE_FSTYPE}"
|
||||
IMAGE_ROOTFS_SIZE_ext2 = "65536"
|
||||
IMAGE_ROOTFS_SIZE_ext2.gz = "65536"
|
||||
|
||||
IMAGE_DEPENDS = ""
|
||||
IMAGE_DEPENDS_jffs2 = "mtd-utils-native"
|
||||
IMAGE_DEPENDS_cramfs = "cramfs-native"
|
||||
IMAGE_DEPENDS_ext2 = "genext2fs-native"
|
||||
IMAGE_DEPENDS_ext2.gz = "genext2fs-native"
|
||||
IMAGE_DEPENDS_squashfs = "squashfs-tools-native"
|
||||
EXTRA_IMAGEDEPENDS = ""
|
||||
|
||||
##################################################################
|
||||
# Toolchain info.
|
||||
##################################################################
|
||||
|
||||
CROSS_DIR = "${TMPDIR}/cross"
|
||||
CROSS_DATADIR = "${CROSS_DIR}/share"
|
||||
export PATH_prepend = "${STAGING_BINDIR}/${HOST_SYS}:${STAGING_BINDIR}:${CROSS_DIR}/bin:"
|
||||
|
||||
##################################################################
|
||||
# Build utility info.
|
||||
##################################################################
|
||||
|
||||
CCACHE = "${@bb.which(bb.data.getVar('PATH', d), 'ccache') and 'ccache '}"
|
||||
|
||||
export CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_ARCH}"
|
||||
export CXX = "${CCACHE}${HOST_PREFIX}g++ ${HOST_CC_ARCH}"
|
||||
export F77 = "${CCACHE}${HOST_PREFIX}g77 ${HOST_CC_ARCH}"
|
||||
export CPP = "${HOST_PREFIX}gcc -E"
|
||||
export LD = "${HOST_PREFIX}ld"
|
||||
export CCLD = "${CC}"
|
||||
export AR = "${HOST_PREFIX}ar"
|
||||
export AS = "${HOST_PREFIX}as"
|
||||
export RANLIB = "${HOST_PREFIX}ranlib"
|
||||
export STRIP = "${HOST_PREFIX}strip"
|
||||
|
||||
export BUILD_CC = "${CCACHE}${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
|
||||
export BUILD_CXX = "${CCACHE}${BUILD_PREFIX}g++ ${BUILD_CC_ARCH}"
|
||||
export BUILD_F77 = "${CCACHE}${BUILD_PREFIX}g77 ${BUILD_CC_ARCH}"
|
||||
export BUILD_CPP = "${BUILD_PREFIX}cpp"
|
||||
export BUILD_LD = "${BUILD_PREFIX}ld"
|
||||
export BUILD_CCLD = "${BUILD_PREFIX}gcc"
|
||||
export BUILD_AR = "${BUILD_PREFIX}ar"
|
||||
export BUILD_RANLIB = "${BUILD_PREFIX}ranlib"
|
||||
export BUILD_STRIP = "${BUILD_PREFIX}strip"
|
||||
|
||||
export MAKE = "make"
|
||||
EXTRA_OEMAKE = "-e MAKEFLAGS="
|
||||
|
||||
##################################################################
|
||||
# Build flags and options.
|
||||
##################################################################
|
||||
|
||||
export BUILD_CPPFLAGS = "-isystem${STAGING_DIR}/${BUILD_SYS}/include"
|
||||
export CPPFLAGS = "${TARGET_CPPFLAGS}"
|
||||
export TARGET_CPPFLAGS = "-isystem${STAGING_DIR}/${TARGET_SYS}/include"
|
||||
|
||||
export BUILD_CFLAGS = "${BUILD_CPPFLAGS} ${BUILD_OPTIMIZATION}"
|
||||
export CFLAGS = "${TARGET_CFLAGS}"
|
||||
export TARGET_CFLAGS = "${TARGET_CPPFLAGS} ${SELECTED_OPTIMIZATION}"
|
||||
|
||||
export BUILD_CXXFLAGS = "${BUILD_CFLAGS} -fpermissive"
|
||||
export CXXFLAGS = "${TARGET_CXXFLAGS}"
|
||||
export TARGET_CXXFLAGS = "${TARGET_CFLAGS} -fpermissive"
|
||||
|
||||
export BUILD_LDFLAGS = "-L${STAGING_DIR}/${BUILD_SYS}/lib \
|
||||
-Wl,-rpath-link,${STAGING_DIR}/${BUILD_SYS}/lib \
|
||||
-Wl,-rpath,${STAGING_DIR}/${BUILD_SYS}/lib -Wl,-O1"
|
||||
export LDFLAGS = "${TARGET_LDFLAGS}"
|
||||
export TARGET_LDFLAGS = "-L${STAGING_DIR}/${TARGET_SYS}/lib \
|
||||
-Wl,-rpath-link,${STAGING_DIR}/${TARGET_SYS}/lib \
|
||||
-Wl,-O1"
|
||||
|
||||
# Which flags to leave by strip-flags() in bin/build/oebuild.sh ?
|
||||
ALLOWED_FLAGS = "-O -mcpu -march -pipe"
|
||||
|
||||
# Pass parallel make options to the compile task only
|
||||
EXTRA_OEMAKE_prepend_task_do_compile = "${PARALLEL_MAKE} "
|
||||
|
||||
##################################################################
|
||||
# Optimization flags.
|
||||
##################################################################
|
||||
|
||||
FULL_OPTIMIZATION = "-fexpensive-optimizations -fomit-frame-pointer -frename-registers -O2"
|
||||
DEBUG_OPTIMIZATION = "-O -g"
|
||||
SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][bb.data.getVar('DEBUG_BUILD', d, 1) == '1'], d, 1)}"
|
||||
BUILD_OPTIMIZATION = "-O2"
|
||||
|
||||
##################################################################
|
||||
# Bootstrap stuff.
|
||||
##################################################################
|
||||
|
||||
BOOTSTRAP_EXTRA_RDEPENDS = ""
|
||||
BOOTSTRAP_EXTRA_RRECOMMENDS = ""
|
||||
|
||||
##################################################################
|
||||
# Palmtop stuff.
|
||||
##################################################################
|
||||
|
||||
export QTDIR = "${STAGING_DIR}/${HOST_SYS}/qt2"
|
||||
export QPEDIR = "${STAGING_DIR}/${HOST_SYS}"
|
||||
export OPIEDIR = "${STAGING_DIR}/${HOST_SYS}"
|
||||
export palmtopdir = "/opt/QtPalmtop"
|
||||
export palmqtdir = "/opt/QtPalmtop"
|
||||
|
||||
##################################################################
|
||||
# Download locations and utilities.
|
||||
##################################################################
|
||||
|
||||
GNU_MIRROR = "ftp://ftp.gnu.org/gnu"
|
||||
DEBIAN_MIRROR = "ftp://ftp.debian.org/debian/pool"
|
||||
SOURCEFORGE_MIRROR = "http://heanet.dl.sourceforge.net/sourceforge"
|
||||
GPE_MIRROR = "http://handhelds.org/pub/projects/gpe/source"
|
||||
XLIBS_MIRROR = "http://xlibs.freedesktop.org/release"
|
||||
XORG_MIRROR = "http://xorg.freedesktop.org/releases"
|
||||
GNOME_MIRROR = "http://ftp.gnome.org/pub/GNOME/sources"
|
||||
FREEBSD_MIRROR = "ftp://ftp.freebsd.org/pub/FreeBSD/"
|
||||
HANDHELDS_CVS = "cvs://anoncvs:anoncvs@anoncvs.handhelds.org/cvs"
|
||||
E_CVS = "cvs://anonymous@anoncvs.enlightenment.org/var/cvs/e"
|
||||
E_URI = "http://enlightenment.freedesktop.org/files"
|
||||
FREEDESKTOP_CVS = "cvs://anoncvs:anoncvs@anoncvs.freedesktop.org/cvs"
|
||||
GENTOO_MIRROR = "http://distro.ibiblio.org/pub/linux/distributions/gentoo/distfiles"
|
||||
APACHE_MIRROR = "http://www.apache.org/dist"
|
||||
|
||||
# You can use the mirror of your country to get faster downloads by putting
|
||||
# export DEBIAN_MIRROR = "ftp://ftp.de.debian.org/debian/pool"
|
||||
# export SOURCEFORGE_MIRROR = "http://belnet.dl.sourceforge.net/sourceforge"
|
||||
# into your local.conf
|
||||
|
||||
FETCHCOMMAND = "ERROR, this must be a BitBake bug"
|
||||
FETCHCOMMAND_wget = "/usr/bin/env wget -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
|
||||
FETCHCOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} co ${CVSCOOPTS} ${CVSMODULE}"
|
||||
RESUMECOMMAND = "ERROR, this must be a BitBake bug"
|
||||
RESUMECOMMAND_wget = "/usr/bin/env wget -c -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
|
||||
UPDATECOMMAND = "ERROR, this must be a BitBake bug"
|
||||
UPDATECOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} update -d -P ${CVSCOOPTS}"
|
||||
SRCDATE = "${DATE}"
|
||||
|
||||
SRC_URI = "file://${FILE}"
|
||||
|
||||
##################################################################
|
||||
# Miscellaneous utilities.
|
||||
##################################################################
|
||||
|
||||
MKTEMPDIRCMD = "mktemp -d -q ${TMPBASE}"
|
||||
MKTEMPCMD = "mktemp -q ${TMPBASE}"
|
||||
|
||||
# Program to be used to patch sources, use 'inherit patcher' to overwrite this:
|
||||
|
||||
PATCHCLEANCMD = 'if [ -n "`quilt applied`" ]; then quilt pop -a -R -f || exit 1; fi'
|
||||
PATCHCMD = "pnum='%s'; name='%s'; patch='%s'; mkdir -p patches ; quilt upgrade >/dev/null 2>&1; quilt import -f -p $pnum -n $name $patch; chmod u+w patches/$name; quilt push"
|
||||
PATCH_DEPENDS = "quilt-native"
|
||||
|
||||
# GNU patch tries to be intellgent about checking out read-only files from
|
||||
# a RCS, which freaks out those special folks with active Perforce clients
|
||||
# the following makes patch ignore RCS:
|
||||
|
||||
export PATCH_GET=0
|
||||
|
||||
# Program to be used to build ipkg packages
|
||||
|
||||
IPKGBUILDCMD = "ipkg-build -o 0 -g 0"
|
||||
|
||||
##################################################################
|
||||
# Not sure about the rest of this yet.
|
||||
##################################################################
|
||||
|
||||
# slot - currently unused by OE. portage remnants
|
||||
SLOT = "0"
|
||||
|
||||
# Other
|
||||
|
||||
export PKG_CONFIG_PATH = "${STAGING_DATADIR}/pkgconfig"
|
||||
export PKG_CONFIG_DISABLE_UNINSTALLED = "yes"
|
||||
|
||||
export QMAKE_MKSPEC_PATH = "${STAGING_DIR}/${BUILD_SYS}/share/qmake"
|
||||
export STAGING_SIPDIR = "${STAGING_DIR}/${BUILD_SYS}/share/sip"
|
||||
export STAGING_IDLDIR = "${STAGING_DATADIR}/idl"
|
||||
|
||||
# default test results for autoconf
|
||||
# possible candidate for moving into autotools.oeclass -CL
|
||||
export CONFIG_SITE = "${@bb.which(bb.data.getVar('BBPATH', d, 1), 'site/%s-%s' % (bb.data.getVar('HOST_ARCH', d, 1), bb.data.getVar('HOST_OS', d, 1)))}"
|
||||
|
||||
# library package naming
|
||||
AUTO_LIBNAME_PKGS = "${PACKAGES}"
|
||||
|
||||
###
|
||||
### Config file processing
|
||||
###
|
||||
|
||||
# This means that an envionment variable named '<foo>_arm' overrides an
|
||||
# environment variable '<foo>' (when ${TARGET_ARCH} is arm). And the same: an
|
||||
# environment variable '<foo>_ramses' overrides both '<foo>' and '<foo>_arm
|
||||
# when ${MACHINE} is 'ramses'. And finally '<foo>_local' overrides anything.
|
||||
#
|
||||
# This works for functions as well, they are really just environment variables.
|
||||
#OVERRIDES = "local:${MACHINE}:${DISTRO}:${TARGET_OS}:${TARGET_ARCH}:build-${BUILD_OS}"
|
||||
# Alternative OVERRIDES to make compilation fail fast, we will enable it by default soon
|
||||
OVERRIDES = "local:${MACHINE}:${DISTRO}:${TARGET_OS}:${TARGET_ARCH}:build-${BUILD_OS}:fail-fast:pn-${PN}"
|
||||
|
||||
##################################################################
|
||||
# Include the rest of the config files.
|
||||
##################################################################
|
||||
|
||||
include conf/site.conf
|
||||
include conf/auto.conf
|
||||
include conf/local.conf
|
||||
include conf/build/${BUILD_SYS}.conf
|
||||
include conf/target/${TARGET_SYS}.conf
|
||||
include conf/machine/${MACHINE}.conf
|
||||
include conf/distro/${DISTRO}.conf
|
||||
include conf/documentation.conf
|
||||
require conf/sanity.conf
|
||||
|
||||
##################################################################
|
||||
# Weak variables (usually to retain backwards compatibility)
|
||||
##################################################################
|
||||
|
||||
PCMCIA_MANAGER ?= "pcmcia-cs"
|
||||
|
||||
|
||||
##################################################################
|
||||
# Magic Cookie for SANITY CHECK
|
||||
##################################################################
|
||||
OES_BITBAKE_CONF = "1"
|
||||
126
meta/conf/distro/angstrom-2006.9.conf
Normal file
126
meta/conf/distro/angstrom-2006.9.conf
Normal file
@@ -0,0 +1,126 @@
|
||||
#@--------------------------------------------------------------------
|
||||
#@TYPE: Distribution
|
||||
#@NAME: Angstrom <http://www.angstrom-distribution.org>
|
||||
#@DESCRIPTION: The Linux Distribution for Kernel 2.6 based devices
|
||||
#@MAINTAINER: Koen Kooi <koen@dominion.kabel.utwente.nl>
|
||||
#@MAINTAINER: Michael 'Mickey' Lauer <mickey@Vanille.de>
|
||||
#@--------------------------------------------------------------------
|
||||
|
||||
#DISTRO_VERSION = "2006.9"
|
||||
DISTRO_VERSION = "test-${DATE}"
|
||||
|
||||
include conf/distro/angstrom.conf
|
||||
|
||||
DISTRO_TYPE = "debug"
|
||||
#DISTRO_TYPE = "release"
|
||||
#!!!!! DON'T FORGET TO ENABLE ZAPROOTPASSWD !!!!!
|
||||
|
||||
|
||||
FEED_URIS += " \
|
||||
base##${ANGSTROM_URI}/unstable/feed/base/"
|
||||
# base##${ANGSTROM_URI}/releases/${DISTRO_VERSION}/feed/base \
|
||||
# ${MACHINE}##${ANGSTROM_URI}/releases/${DISTRO_VERSION}/feed/${MACHINE} \
|
||||
# updates##${ANGSTROM_URI}/releases/${DISTRO_VERSION}/feed/updates"
|
||||
|
||||
SRCDATE = "20060518"
|
||||
#SRCDATE_handhelds-pxa-2.6 = "20060516"
|
||||
|
||||
CVS_TARBALL_STASH = "\
|
||||
http://ewi546.ewi.utwente.nl/mirror/www.openzaurus.org/official/unstable/3.5.4/sources/ \
|
||||
http://www.oesources.org/source/current/"
|
||||
|
||||
# Opie
|
||||
#use 1337 mt version of opie to have a webbrowser
|
||||
PALMTOP_USE_MULTITHREADED_QT = "yes"
|
||||
QTE_VERSION = "2.3.10"
|
||||
OPIE_VERSION = "1.2.2"
|
||||
include conf/distro/preferred-opie-versions.inc
|
||||
|
||||
# GPE
|
||||
include conf/distro/preferred-gpe-versions-2.8.inc
|
||||
|
||||
#zap extra stuff taking place in $MACHINE.conf
|
||||
GPE_EXTRA_INSTALL = ""
|
||||
|
||||
# E
|
||||
include conf/distro/preferred-e-versions.inc
|
||||
|
||||
PREFERRED_PROVIDERS += "virtual/xserver:xserver-kdrive"
|
||||
PREFERRED_PROVIDERS += "virtual/gconf:gconf-dbus"
|
||||
PREFERRED_PROVIDER_libx11 = "diet-x11"
|
||||
|
||||
|
||||
#Make sure we use 2.6 on machines with a 2.4/2.6 selector
|
||||
KERNEL = "kernel26"
|
||||
ZKERNEL_VERSION = "2.6"
|
||||
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc:gcc-cross"
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}g++:gcc-cross"
|
||||
|
||||
PREFERRED_PROVIDER_virtual/libiconv = "glibc"
|
||||
PREFERRED_PROVIDER_virtual/libintl = "glibc"
|
||||
|
||||
#EABI stuff
|
||||
PREFERRED_PROVIDER_virtual/arm-angstrom-linux-gnueabi-libc-for-gcc = "glibc-intermediate"
|
||||
PREFERRED_PROVIDER_virtual/arm-linux-libc-for-gcc = "glibc-intermediate"
|
||||
|
||||
|
||||
#use EABI toolchain
|
||||
PREFERRED_VERSION_gcc ?= "4.1.1"
|
||||
PREFERRED_VERSION_gcc-cross ?= "4.1.1"
|
||||
PREFERRED_VERSION_gcc-cross-initial ?= "4.1.1"
|
||||
PREFERRED_VERSION_binutils ?= "2.16.91.0.7"
|
||||
PREFERRED_VERSION_binutils-cross ?= "2.16.91.0.7"
|
||||
PREFERRED_VERSION_linux-libc-headers ?= "2.6.15.99"
|
||||
PREFERRED_VERSION_glibc ?= "2.4"
|
||||
PREFERRED_VERSION_glibc-intermediate ?= "2.4"
|
||||
|
||||
# To use an EABI compatible version 3 series gcc, either uncomment
|
||||
# the lines below or set them in local.conf:
|
||||
#
|
||||
# PREFERRED_VERSION_gcc-cross = "3.4.4+csl-arm-2005q3"
|
||||
# PREFERRED_VERSION_gcc-cross-initial = "3.4.4+csl-arm-2005q3"
|
||||
|
||||
|
||||
PREFERRED_VERSION_orinoco-modules_h3600 = "0.13e"
|
||||
PREFERRED_VERSION_orinoco-modules_h3900 = "0.13e"
|
||||
PREFERRED_VERSION_dbus ?= "0.61"
|
||||
PREFERRED_VERSION_gstreamer ?= "0.10.6"
|
||||
|
||||
PREFERRED_PROVIDER_hostap-conf ?= "hostap-conf"
|
||||
PREFERRED_PROVIDER_hostap-modules_h2200 ?= "hostap-modules"
|
||||
PREFERRED_PROVIDER_hostap-modules_hx4700 ?= "hostap-modules"
|
||||
PREFERRED_VERSION_hostap-modules ?= "0.4.7"
|
||||
|
||||
#Down here we put stuff we want to install into machines without polluting conf/machine/ with distro stuff
|
||||
# c7x0, akita, spitz, nokia770, h2200, h6300, ipaq-pxa270, simpad
|
||||
|
||||
#### Bootstrap options ####
|
||||
|
||||
PCMCIA_MANAGER_c7x0 = "pcmciautils"
|
||||
PCMCIA_MANAGER_akita = "pcmciautils"
|
||||
PCMCIA_MANAGER_spitz = "pcmciautils"
|
||||
PCMCIA_MANAGER_nokia770 = "pcmciautils"
|
||||
PCMCIA_MANAGER_h2200 = "pcmciautils"
|
||||
PCMCIA_MANAGER_h6300 = "pcmciautils"
|
||||
PCMCIA_MANAGER_ipaq-pxa270 = "pcmciautils"
|
||||
|
||||
|
||||
|
||||
#### GPE section ###
|
||||
|
||||
#Install libgtkinput in devices without a keyboard
|
||||
GPE_EXTRA_INSTALL_append_ipaq-pxa270 = " libgtkinput"
|
||||
GPE_EXTRA_INSTALL_append_h2200 = " libgtkinput"
|
||||
GPE_EXTRA_INSTALL_append_h6300 = " libgtkinput"
|
||||
GPE_EXTRA_INSTALL_append_simpad = " libgtkinput"
|
||||
GPE_EXTRA_INSTALL_append_nokia770 = " libgtkinput"
|
||||
|
||||
#As soon as a kill switch is in place we can add it to devices with a keyboard
|
||||
#GPE_EXTRA_INSTALL_append_c7x0 = " libgtkinput"
|
||||
#GPE_EXTRA_INSTALL_append_tosa = " libgtkinput"
|
||||
#GPE_EXTRA_INSTALL_append_akita = " libgtkinput"
|
||||
#GPE_EXTRA_INSTALL_append_spitz = " libgtkinput"
|
||||
|
||||
|
||||
45
meta/conf/distro/angstrom.conf
Normal file
45
meta/conf/distro/angstrom.conf
Normal file
@@ -0,0 +1,45 @@
|
||||
#@TYPE: Distribution
|
||||
#@NAME: Angstrom
|
||||
#@DESCRIPTION: Distribution configuration for Angstrom
|
||||
|
||||
DISTRO = "angstrom"
|
||||
DISTRO_NAME = "Angstrom"
|
||||
|
||||
#Use this variable in feeds and other parts that need a URI
|
||||
ANGSTROM_URI = "http://www.angstrom-distribution.org/"
|
||||
|
||||
#Set the default maintainer to angstrom-dev
|
||||
MAINTAINER = "Angstrom Developers <angstrom-dev@handhelds.org>"
|
||||
|
||||
#use ipkg package format with debian style naming
|
||||
#use multimachine buildrules
|
||||
INHERIT += "package_ipk debian multimachine"
|
||||
|
||||
#Generate locales on the buildsystem instead of on the target. Speeds up first boot, set to "1" to enable
|
||||
PREFERRED_PROVIDER_qemu-native = "qemu-qop-nogfx-native"
|
||||
ENABLE_BINARY_LOCALE_GENERATION ?= ""
|
||||
|
||||
|
||||
#Use the ARM EABI when building for an ARM cpu. We can't use overrides
|
||||
#here because this breaks all places where ":=" is used.
|
||||
TARGET_VENDOR = "${@['','-angstrom'][bb.data.getVar('TARGET_ARCH',d,1)=='arm']}"
|
||||
TARGET_OS = "linux${@['','-gnueabi'][bb.data.getVar('TARGET_ARCH',d,1)=='arm']}"
|
||||
|
||||
#mess with compiler flags to use -Os instead of -O2
|
||||
#Please see http://free-electrons.com/doc/embedded_linux_optimizations/img47.html for some more info
|
||||
FULL_OPTIMIZATION = "-fexpensive-optimizations -fomit-frame-pointer -frename-registers -Os"
|
||||
BUILD_OPTIMIZATION = "-Os"
|
||||
|
||||
#ARM EABI is softfloat by default, but let's make sure :)
|
||||
TARGET_FPU_arm = "soft"
|
||||
|
||||
#Always ship these packages
|
||||
BOOTSTRAP_EXTRA_DEPENDS += "angstrom-version coreutils dropbear sysvinit"
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "angstrom-version coreutils dropbear sysvinit"
|
||||
|
||||
#Name the generated images in a sane way
|
||||
IMAGE_NAME = "${DISTRO_NAME}-${IMAGE_BASENAME}-${DISTRO_VERSION}-${MACHINE}"
|
||||
DEPLOY_DIR_IMAGE = ${DEPLOY_DIR}/images/${MACHINE}
|
||||
|
||||
# Angstrom *always* has some form of release config, so error out if someone thinks he knows better
|
||||
DISTRO_CHECK := "${@bb.data.getVar("DISTRO_VERSION",d,1) or bb.fatal('Remove this line or set a dummy DISTRO_VERSION if you really want to build an unversioned distro')}"
|
||||
77
meta/conf/distro/familiar-unstable.conf
Normal file
77
meta/conf/distro/familiar-unstable.conf
Normal file
@@ -0,0 +1,77 @@
|
||||
DISTRO = "familiar"
|
||||
DISTRO_NAME = "Familiar Linux"
|
||||
DISTRO_VERSION = "unstable-${DATE}"
|
||||
# Do not touch this file before notifying familiar-dev@handhelds.org
|
||||
|
||||
include conf/distro/familiar.conf
|
||||
|
||||
DISTRO_TYPE = "debug"
|
||||
#DISTRO_TYPE = "release"
|
||||
#!!!!! DON'T FORGET TO ENABLE ZAPROOTPASSWD !!!!!
|
||||
|
||||
|
||||
FEED_URIS += " \
|
||||
base##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/base \
|
||||
${MACHINE}##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/machine/${MACHINE} \
|
||||
updates##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/updates \
|
||||
locale-en##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/locale/en \
|
||||
locale-fr##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/locale/fr \
|
||||
locale-de##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/locale/de"
|
||||
|
||||
#SRCDATE = 20050331
|
||||
#SRCDATE = "now"
|
||||
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc:gcc-cross"
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}g++:gcc-cross"
|
||||
|
||||
PREFERRED_PROVIDER_virtual/libiconv=glibc
|
||||
PREFERRED_PROVIDER_virtual/libintl=glibc
|
||||
|
||||
PREFERRED_VERSION_hostap-modules ?= "0.3.9"
|
||||
|
||||
#2.4 machines prefer 0.13e ones
|
||||
PREFERRED_VERSION_orinoco-modules ?= "0.13e"
|
||||
|
||||
#but 0.13e doesn't build against 2.6
|
||||
PREFERRED_VERSION_orinoco-modules_h2200 ?= "0.15"
|
||||
PREFERRED_VERSION_orinoco-modules_ipaq-pxa270 ?= "0.15"
|
||||
|
||||
|
||||
# The CSL compiler is unusable because
|
||||
# 1) certain programs stop to compile
|
||||
# 2) more programs segfault
|
||||
PREFERRED_VERSION_gcc ?= "3.4.4"
|
||||
PREFERRED_VERSION_gcc-cross ?= "3.4.4"
|
||||
PREFERRED_VERSION_gcc-cross-initial ?= "3.4.4"
|
||||
|
||||
#
|
||||
# PIN the familiar build to a version
|
||||
#
|
||||
PREFERRED_VERSION_binutils-cross ?= "2.15.94.0.1"
|
||||
PREFERRED_VERSION_binutils ?= "2.15.94.0.1"
|
||||
|
||||
#
|
||||
# Opie
|
||||
#
|
||||
|
||||
OPIE_VERSION = "1.2.2"
|
||||
QTE_VERSION = "2.3.10"
|
||||
PALMTOP_USE_MULTITHREADED_QT = "yes"
|
||||
include conf/distro/preferred-opie-versions.inc
|
||||
|
||||
#
|
||||
# GPE
|
||||
#
|
||||
|
||||
PREFERRED_PROVIDERS += "virtual/xserver:xserver-kdrive"
|
||||
PREFERRED_PROVIDERS += "virtual/gconf:gconf-dbus"
|
||||
PREFERRED_PROVIDER_libx11 = "diet-x11"
|
||||
|
||||
include conf/distro/preferred-gpe-versions-2.8.inc
|
||||
|
||||
#
|
||||
# E
|
||||
#
|
||||
include conf/distro/preferred-e-versions.inc
|
||||
|
||||
17
meta/conf/distro/familiar.conf
Normal file
17
meta/conf/distro/familiar.conf
Normal file
@@ -0,0 +1,17 @@
|
||||
#@TYPE: Distribution
|
||||
#@NAME: Familiar Linux
|
||||
#@DESCRIPTION: Distribution configuration for Familiar Linux (handhelds.org)
|
||||
|
||||
MAINTAINER ?= "Familiar Developers <familiar-dev@handhelds.org>"
|
||||
|
||||
INHERIT += "package_ipk debian multimachine"
|
||||
TARGET_OS = "linux"
|
||||
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "familiar-version"
|
||||
IMAGE_NAME = "${IMAGE_BASENAME}-${DISTRO_VERSION}-${MACHINE}"
|
||||
|
||||
ENABLE_BINARY_LOCALE_GENERATION ?= "1"
|
||||
PARALLEL_INSTALL_MODULES = "1"
|
||||
UDEV_DEVFS_RULES = "1"
|
||||
|
||||
DISTRO_CHECK := "${@bb.data.getVar("DISTRO_VERSION",d,1) or bb.fatal('Remove this line or set a dummy DISTRO_VERSION if you really want to build an unversioned distro')}"
|
||||
40
meta/conf/distro/maemo-1.0.conf
Normal file
40
meta/conf/distro/maemo-1.0.conf
Normal file
@@ -0,0 +1,40 @@
|
||||
include conf/distro/familiar.conf
|
||||
|
||||
DISTRO = "maemo"
|
||||
DISTRO_NAME = "Maemo Linux"
|
||||
DISTRO_VERSION = "v1.0b-${DATE}"
|
||||
|
||||
DISTRO_TYPE = "debug"
|
||||
#DISTRO_TYPE = "release"
|
||||
|
||||
FEED_URIS += " \
|
||||
base##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/base \
|
||||
updates##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/updates"
|
||||
|
||||
#SRCDATE = 20050331
|
||||
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc:gcc-cross"
|
||||
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}g++:gcc-cross"
|
||||
|
||||
#PREFERRED_VERSION_binutils-cross = "2.15.91.0.2"
|
||||
#PREFERRED_VERSION_gcc-cross = "3.4.4"
|
||||
#PREFERRED_VERSION_gcc-cross-initial = "3.4.4
|
||||
#PREFERRED_VERSION_libtool-native = "1.5.6"
|
||||
#PREFERRED_VERSION_libtool-cross= "1.5.6"
|
||||
|
||||
#
|
||||
# GPE
|
||||
#
|
||||
|
||||
PREFERRED_PROVIDERS += "virtual/xserver:xserver-kdrive"
|
||||
PREFERRED_PROVIDERS += "virtual/gconf:gconf-dbus"
|
||||
PREFERRED_PROVIDER_libx11 = "diet-x11"
|
||||
|
||||
include conf/distro/preferred-gpe-versions.inc
|
||||
|
||||
#
|
||||
# Maemo
|
||||
#
|
||||
|
||||
include conf/distro/maemo-preferred.inc
|
||||
10
meta/conf/distro/maemo-preferred.inc
Normal file
10
meta/conf/distro/maemo-preferred.inc
Normal file
@@ -0,0 +1,10 @@
|
||||
PREFERRED_PROVIDER_gconf = "gconf-osso"
|
||||
PREFERRED_PROVIDER_tslib = "tslib-maemo"
|
||||
PREFERRED_VERSION_dbus = "0.23.1-osso5"
|
||||
PREFERRED_VERSION_audiofile = "0.2.6-3osso4"
|
||||
PREFERRED_PROVIDER_esd = "osso-esd"
|
||||
PREFERRED_VERSION_gtk+ = "2.6.4-1.osso7"
|
||||
PREFERRED_VERSION_glib-2.0 = "2.6.4"
|
||||
PREFERRED_VERSION_pango = "1.8.1"
|
||||
PREFERRED_VERSION_atk = "1.9.0"
|
||||
PREFERRED_VERSION_diet-x11 ?= "6.2.1"
|
||||
71
meta/conf/distro/openzaurus-unstable.conf
Normal file
71
meta/conf/distro/openzaurus-unstable.conf
Normal file
@@ -0,0 +1,71 @@
|
||||
#@--------------------------------------------------------------------
|
||||
#@TYPE: Distribution
|
||||
#@NAME: OpenZaurus <http://www.openzaurus.org>
|
||||
#@DESCRIPTION: A Linux Distribution for the Sharp Zaurus family
|
||||
#@--------------------------------------------------------------------
|
||||
|
||||
DISTRO_VERSION = ".dev-snapshot-${DATE}"
|
||||
|
||||
CVS_TARBALL_STASH = "\
|
||||
http://ewi546.ewi.utwente.nl/mirror/www.openzaurus.org/official/unstable/3.5.4/sources/ \
|
||||
http://www.oesources.org/source/current/"
|
||||
|
||||
|
||||
include conf/distro/openzaurus.conf
|
||||
|
||||
DISTRO_TYPE = "debug"
|
||||
# DISTRO_TYPE = "release"
|
||||
|
||||
#FEED_URIS += " \
|
||||
# upgrades##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/upgrades/ \
|
||||
#upgrades-${MACHINE}##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/upgrades/${MACHINE} \
|
||||
# machine##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/machine/${MACHINE} \
|
||||
# base##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/base/ \
|
||||
# libs##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/libs/ \
|
||||
# console##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/console \
|
||||
# devel##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/devel"
|
||||
|
||||
SRCDATE = "20060514"
|
||||
#
|
||||
# Zaurus
|
||||
#
|
||||
|
||||
ASSUME_PROVIDED += "virtual/arm-linux-gcc-2.95"
|
||||
PREFERRED_PROVIDER_hostap-conf = "hostap-conf"
|
||||
|
||||
KERNEL_CONSOLE = "ttyS0"
|
||||
#DEBUG_OPTIMIZATION = "-O -g3"
|
||||
#DEBUG_BUILD = "1"
|
||||
#INHIBIT_PACKAGE_STRIP = "1"
|
||||
|
||||
# fix some iconv issues, needs to be adjusted when doing uclibc builds
|
||||
PREFERRED_PROVIDER_virtual/libiconv = "glibc"
|
||||
PREFERRED_PROVIDER_virtual/libintl = "glibc"
|
||||
|
||||
#
|
||||
# Base
|
||||
#
|
||||
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
|
||||
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc:gcc-cross"
|
||||
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}g++:gcc-cross"
|
||||
|
||||
#
|
||||
# Opie
|
||||
#
|
||||
#QTE_VERSION = "2.3.10"
|
||||
#OPIE_VERSION = "1.2.2"
|
||||
#PALMTOP_USE_MULTITHREADED_QT = "yes"
|
||||
include conf/distro/preferred-opie-versions.inc
|
||||
|
||||
#
|
||||
# GPE
|
||||
#
|
||||
PREFERRED_PROVIDERS += "virtual/xserver:xserver-kdrive"
|
||||
PREFERRED_PROVIDER_libx11 = "diet-x11"
|
||||
include conf/distro/preferred-gpe-versions-2.8.inc
|
||||
|
||||
#
|
||||
# E
|
||||
#
|
||||
include conf/distro/preferred-e-versions.inc
|
||||
|
||||
18
meta/conf/distro/openzaurus.conf
Normal file
18
meta/conf/distro/openzaurus.conf
Normal file
@@ -0,0 +1,18 @@
|
||||
#@TYPE: Distribution
|
||||
#@NAME: OpenZaurus
|
||||
#@DESCRIPTION: Distribution configuration for OpenZaurus (http://www.openzaurus.org)
|
||||
|
||||
DISTRO = "openzaurus"
|
||||
DISTRO_NAME = "OpenZaurus"
|
||||
|
||||
INHERIT += " package_ipk debian multimachine"
|
||||
# For some reason, this doesn't work
|
||||
# TARGET_OS ?= "linux"
|
||||
TARGET_OS = "linux"
|
||||
TARGET_FPU = "soft"
|
||||
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "openzaurus-version"
|
||||
|
||||
PARALLEL_INSTALL_MODULES = "1"
|
||||
|
||||
DISTRO_CHECK := "${@bb.data.getVar("DISTRO_VERSION",d,1) or bb.fatal('Remove this line or set a dummy DISTRO_VERSION if you really want to build an unversioned distro')}"
|
||||
23
meta/conf/distro/poky-eabi.conf
Normal file
23
meta/conf/distro/poky-eabi.conf
Normal file
@@ -0,0 +1,23 @@
|
||||
#
|
||||
# Poky configuration to use EABI
|
||||
#
|
||||
|
||||
PREFERRED_PROVIDER_virtual/arm-poky-linux-gnueabi-libc-for-gcc = "glibc-intermediate"
|
||||
PREFERRED_PROVIDER_virtual/arm-linux-libc-for-gcc = "glibc-intermediate"
|
||||
|
||||
PREFERRED_VERSION_gcc ?= "3.4.4+csl-arm-2005q3"
|
||||
PREFERRED_VERSION_gcc-cross ?= "3.4.4+csl-arm-2005q3"
|
||||
PREFERRED_VERSION_gcc-cross-initial ?= "3.4.4+csl-arm-2005q3"
|
||||
#PREFERRED_VERSION_gcc ?= "4.1.0"
|
||||
#PREFERRED_VERSION_gcc-cross ?= "4.1.0"
|
||||
#PREFERRED_VERSION_gcc-cross-initial ?= "4.1.0"
|
||||
PREFERRED_VERSION_binutils ?= "2.16.91.0.7"
|
||||
PREFERRED_VERSION_binutils-cross ?= "2.16.91.0.7"
|
||||
PREFERRED_VERSION_linux-libc-headers ?= "2.6.15.99"
|
||||
PREFERRED_VERSION_glibc ?= "2.4"
|
||||
PREFERRED_VERSION_glibc-intermediate ?= "2.4"
|
||||
|
||||
#Use the ARM EABI when building for an ARM cpu. We can't use overrides
|
||||
#here because this breaks all places where ":=" is used.
|
||||
TARGET_VENDOR = "${@['','-poky'][bb.data.getVar('TARGET_ARCH',d,1)=='arm']}"
|
||||
TARGET_OS = "linux${@['','-gnueabi'][bb.data.getVar('TARGET_ARCH',d,1)=='arm']}"
|
||||
11
meta/conf/distro/poky-oabi.conf
Normal file
11
meta/conf/distro/poky-oabi.conf
Normal file
@@ -0,0 +1,11 @@
|
||||
#
|
||||
# Poky configuration to use its Original ABI
|
||||
#
|
||||
PREFERRED_VERSION_binutils ?= "2.16"
|
||||
PREFERRED_VERSION_binutils-cross ?= "2.15.94.0.1"
|
||||
PREFERRED_VERSION_gcc ?= "3.4.4"
|
||||
PREFERRED_VERSION_gcc-cross ?= "3.4.4"
|
||||
PREFERRED_VERSION_gcc-cross-initial ?= "3.4.4"
|
||||
PREFERRED_VERSION_linux-libc-headers ?= "2.6.11.1"
|
||||
PREFERRED_VERSION_glibc ?= "2.3.5+cvs20050627"
|
||||
PREFERRED_PROVIDER_virtual/arm-linux-libc-for-gcc = "glibc"
|
||||
176
meta/conf/distro/poky.conf
Normal file
176
meta/conf/distro/poky.conf
Normal file
@@ -0,0 +1,176 @@
|
||||
#@TYPE: Distribution#@NAME: Poky
|
||||
#@DESCRIPTION: Distribution configuration for OpenedHand (Poky)
|
||||
|
||||
DISTRO = "poky"
|
||||
DISTRO_NAME = "OpenedHand Linux (Poky)"
|
||||
DISTRO_VERSION = "0.0-snapshot-${DATE}"
|
||||
|
||||
# DISTRO_VERSION = "3.5.4"
|
||||
DISTRO_TYPE = "debug"
|
||||
# DISTRO_TYPE = "release"
|
||||
|
||||
|
||||
INHERIT += " package_ipk debian multimachine poky "
|
||||
# For some reason, this doesn't work
|
||||
# TARGET_OS ?= "linux"
|
||||
TARGET_OS = "linux"
|
||||
TARGET_FPU ?= "soft"
|
||||
IMAGE_ROOTFS_SIZE_ext2 = 131072
|
||||
|
||||
PARALLEL_INSTALL_MODULES = "1"
|
||||
|
||||
#FEED_URIS += " \
|
||||
# upgrades##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/upgrades/ \
|
||||
#upgrades-${MACHINE}##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/upgrades/${MACHINE} \
|
||||
# machine##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/machine/${MACHINE} \
|
||||
# base##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/base/ \
|
||||
# libs##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/libs/ \
|
||||
# console##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/console \
|
||||
# devel##http://openzaurus.org/official/unstable/${DISTRO_VERSION}/feed/devel"
|
||||
|
||||
ASSUME_PROVIDED += "virtual/arm-linux-gcc-2.95"
|
||||
OEINCLUDELOGS = "yes"
|
||||
KERNEL_CONSOLE = "ttyS0"
|
||||
#DEBUG_OPTIMIZATION = "-O -g3"
|
||||
#DEBUG_BUILD = "1"
|
||||
#INHIBIT_PACKAGE_STRIP = "1"
|
||||
|
||||
POKYMODE ?= "oabi"
|
||||
|
||||
include conf/distro/poky-${POKYMODE}.conf
|
||||
|
||||
#
|
||||
# Preferred providers:
|
||||
#
|
||||
# Toolchain:
|
||||
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
|
||||
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc:gcc-cross"
|
||||
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}g++:gcc-cross"
|
||||
|
||||
# Libc/uclibc:
|
||||
#fix some iconv issues, needs to be adjusted when doing uclibc builds
|
||||
PREFERRED_PROVIDER_virtual/libiconv ?= glibc
|
||||
PREFERRED_PROVIDER_virtual/libintl ?= glibc
|
||||
|
||||
# Virtuals:
|
||||
PREFERRED_PROVIDER_virtual/db ?= "db"
|
||||
PREFERRED_PROVIDER_virtual/db-native ?= "db-native"
|
||||
PREFERRED_PROVIDER_virtual/xserver ?= xserver-kdrive
|
||||
|
||||
# Others:
|
||||
PREFERRED_PROVIDER_libx11 ?= "diet-x11"
|
||||
PREFERRED_PROVIDER_gconf ?= gconf-dbus
|
||||
PREFERRED_PROVIDER_gnome-vfs ?= gnome-vfs-dbus
|
||||
PREFERRED_PROVIDER_tslib ?= tslib
|
||||
|
||||
|
||||
#
|
||||
# CVS Dates to use:
|
||||
#
|
||||
#SRCDATE_gnome-vfs-dbus ?= "20051215"
|
||||
SRCDATE_gconf-dbus ?= "20060119"
|
||||
SRCDATE_gnome-vfs-dbus ?= "20060119"
|
||||
SRCDATE_contacts ?= "20060707"
|
||||
SRCDATE_dates ?= "20060721"
|
||||
SRCDATE_gtkhtml2 ?= "20060323"
|
||||
SRCDATE_web ?= "20060613"
|
||||
SRCDATE_eds-dbus ?= "20060707"
|
||||
SRCDATE_minimo ?= "20050401"
|
||||
SRCDATE_ipkg-utils-native ?= "20050930"
|
||||
SRCDATE_ipkg-link-1 ?= "20050930"
|
||||
SRCDATE_ipkg-utils ?= "20050930"
|
||||
SRCDATE_ipkg-link ?= "20050930"
|
||||
SRCDATE_matchbox-common ?= "20060612"
|
||||
SRCDATE_matchbox-config-gtk ?= "20060612"
|
||||
SRCDATE_matchbox-desktop ?= "20060612"
|
||||
SRCDATE_matchbox-keyboard ?= "20060612"
|
||||
SRCDATE_matchbox-panel ?= "20060612"
|
||||
SRCDATE_matchbox-panel-manager ?= "20060612"
|
||||
SRCDATE_matchbox-stroke ?= "20060612"
|
||||
SRCDATE_matchbox-themes-extra ?= "20060612"
|
||||
SRCDATE_matchbox-wm ?= "20060612"
|
||||
SRCDATE_libmatchbox ?= "20060612"
|
||||
SRCDATE_tslib ?= "20051101"
|
||||
SRCDATE_libxext ?= "20051101"
|
||||
SRCDATE_renderext ?= "20051101"
|
||||
SRCDATE_libxrender ?= "20051101"
|
||||
SRCDATE_fixesext ?= "20051101"
|
||||
SRCDATE_damageext ?= "20051101"
|
||||
SRCDATE_compositeext ?= "20051101"
|
||||
SRCDATE_xcalibrateext ?= "20051101"
|
||||
SRCDATE_recordext ?= "20051101"
|
||||
SRCDATE_libfakekey ?= "20051101"
|
||||
SRCDATE_xmodmap ?= "20051101"
|
||||
SRCDATE_libxrandr ?= "20051101"
|
||||
SRCDATE_libxtst ?= "20051101"
|
||||
SRCDATE_xcalibrate ?= "20051101"
|
||||
SRCDATE_libxmu ?= "20051115"
|
||||
SRCDATE_xdpyinfo ?= "20051115"
|
||||
SRCDATE_xauth ?= "20060120"
|
||||
SRCDATE_xserver-kdrive = "20050207"
|
||||
SRCDATE_qemu-native ?= "20060526"
|
||||
SRCDATE_oprofile ?= "20060214"
|
||||
SRCDATE_zaurusd ?= "20060628"
|
||||
|
||||
#
|
||||
# Preferred Versions:
|
||||
#
|
||||
PREFERRED_VERSION_matchbox ?= "0.9.1"
|
||||
PREFERRED_VERSION_matchbox-applet-inputmanager ?= "0.6"
|
||||
PREFERRED_VERSION_matchbox-applet-startup-monitor ?= "0.1"
|
||||
PREFERRED_VERSION_matchbox-common ?= "0.9.1+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-config-gtk ?= "0.0+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-desktop ?= "0.9.1+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-keyboard ?= "0.0+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-panel ?= "0.9.2+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-stroke ?= "0.0+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-themes-extra ?= "0.3+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-wm ?= "0.9.5+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_libmatchbox ?= "1.7+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_xserver-kdrive = "0.0+cvs${SRCDATE}"
|
||||
|
||||
PREFERRED_VERSION_glib-2.0 ?= "2.10.3"
|
||||
PREFERRED_VERSION_glib-2.0-native ?= "2.10.3"
|
||||
PREFERRED_VERSION_atk ?= "1.10.1"
|
||||
PREFERRED_VERSION_cairo = "1.0.4"
|
||||
PREFERRED_VERSION_gtk+ ?= "2.6.8"
|
||||
#PREFERRED_VERSION_gtk+ ?= "2.8.16"
|
||||
PREFERRED_VERSION_pango ?= "1.10.0"
|
||||
|
||||
PREFERRED_VERSION_dbus ?= "0.60"
|
||||
PREFERRED_VERSION_gnome-vfs-dbus ?= "2.12.0+cvs${SRCDATE}"
|
||||
|
||||
PREFERRED_VERSION_diet-x11 ?= "6.2.1"
|
||||
PREFERRED_VERSION_dillo2 ?= "0.6.6"
|
||||
PREFERRED_VERSION_evince ?= "0.3.2"
|
||||
PREFERRED_VERSION_firefox ?= "1.0"
|
||||
PREFERRED_VERSION_gaim ?= "2.0.0-cvs-mco1-20050904"
|
||||
PREFERRED_VERSION_gtk2-theme-angelistic ?= "0.3"
|
||||
PREFERRED_VERSION_gsoko ?= "0.4.2-gpe6"
|
||||
PREFERRED_VERSION_keylaunch ?= "2.0.7"
|
||||
PREFERRED_VERSION_librsvg ?= "2.6.5"
|
||||
PREFERRED_VERSION_libgsm ?= "1.0.10"
|
||||
PREFERRED_VERSION_libschedule ?= "0.15"
|
||||
PREFERRED_VERSION_linphone-hh ?= "0.12.2.hh1"
|
||||
PREFERRED_VERSION_linphone ?= "0.12.2"
|
||||
PREFERRED_VERSION_minilite ?= "0.49"
|
||||
PREFERRED_VERSION_minimo ?= "0.0+cvs${SRCDATE_minimo}"
|
||||
PREFERRED_VERSION_poppler ?= "0.3.3"
|
||||
PREFERRED_VERSION_rosetta ?= "0.0+cvs${SRCDATE}"
|
||||
PREFERRED_VERSION_rxvt-unicode ?= "5.6"
|
||||
PREFERRED_VERSION_xcursor-transparent-theme ?= "0.1.1"
|
||||
PREFERRED_VERSION_xdemineur ?= "2.1.1"
|
||||
PREFERRED_VERSION_xextensions ?= "1.0.1"
|
||||
PREFERRED_VERSION_xhost ?= "0.0+cvs20040413"
|
||||
PREFERRED_VERSION_xmonobut ?= "0.4"
|
||||
PREFERRED_VERSION_xprop ?= "0.0+cvs${SRCDATE}"
|
||||
PREFERRED_VERSION_xproto ?= "6.6.2"
|
||||
PREFERRED_VERSION_xrdb ?= "0.0+cvs${SRCDATE}"
|
||||
PREFERRED_VERSION_xst ?= "0.15"
|
||||
PREFERRED_VERSION_oprofile ?= "0.9.1+cvs${SRCDATE}"
|
||||
|
||||
PCMCIA_MANAGER ?= "pcmciautils"
|
||||
|
||||
CVS_TARBALL_STASH += "http://www.o-hand.com/~richard/poky/sources/"
|
||||
|
||||
IMAGE_LINGUAS = "en-gb"
|
||||
33
meta/conf/distro/preferred-e-versions.inc
Normal file
33
meta/conf/distro/preferred-e-versions.inc
Normal file
@@ -0,0 +1,33 @@
|
||||
#
|
||||
# Specify which versions of E-related libraries and applications to build
|
||||
#
|
||||
|
||||
#PREFERRED_VERSION_e = "0.16.999.025"
|
||||
#PREFERRED_VERSION_ecore-x11 = "0.9.9.025"
|
||||
#PREFERRED_VERSION_ecore-native = "0.9.9.025"
|
||||
#PREFERRED_VERSION_edb = "1.0.5.006"
|
||||
#PREFERRED_VERSION_edb-native = "1.0.5.006"
|
||||
#PREFERRED_VERSION_edje = "0.5.0.025"
|
||||
#PREFERRED_VERSION_edje-native = "0.5.0.025"
|
||||
#PREFERRED_VERSION_eet = "0.9.10.025"
|
||||
#PREFERRED_VERSION_eet-native = "0.9.10.025"
|
||||
#PREFERRED_VERSION_embryo = "0.9.1.025"
|
||||
#PREFERRED_VERSION_embryo-native = "0.9.1.025"
|
||||
#PREFERRED_VERSION_emotion = "0.0.1.004"
|
||||
#PREFERRED_VERSION_emotion-native = "0.0.1.004"
|
||||
#PREFERRED_VERSION_epeg = "0.9.0.006"
|
||||
#PREFERRED_VERSION_epsilon = "0.3.0.006"
|
||||
#PREFERRED_VERSION_esmart-x11 = "0.9.0.006"
|
||||
#PREFERRED_VERSION_evas-x11 = "0.9.9.025"
|
||||
#PREFERRED_VERSION_evas-native = "0.9.9.025"
|
||||
#PREFERRED_VERSION_ewl = "0.0.4.006"
|
||||
#PREFERRED_VERSION_imlib2-x11 = "1.2.1.011"
|
||||
#PREFERRED_VERSION_imlib2-native = "1.2.1.011"
|
||||
#PREFERRED_VERSION_e-wm = "0.16.999.025"
|
||||
#PREFERRED_VERSION_entrance = "0.9.0.006"
|
||||
|
||||
PREFERRED_PROVIDER_virtual/evas ?= "evas-x11"
|
||||
PREFERRED_PROVIDER_virtual/ecore ?= "ecore-x11"
|
||||
PREFERRED_PROVIDER_virtual/imlib2 ?= "imlib2-x11"
|
||||
PREFERRED_PROVIDER_virtual/esmart ?= "esmart-x11"
|
||||
|
||||
96
meta/conf/distro/preferred-gpe-versions-2.7.inc
Normal file
96
meta/conf/distro/preferred-gpe-versions-2.7.inc
Normal file
@@ -0,0 +1,96 @@
|
||||
#
|
||||
# Specify which versions of GPE (and related) applications to build
|
||||
#
|
||||
|
||||
#work around some breakages
|
||||
#SRCDATE_minimo=20050401
|
||||
|
||||
#set some preferred providers:
|
||||
PREFERRED_PROVIDER_gconf=gconf-dbus
|
||||
PREFERRED_PROVIDER_gnome-vfs=gnome-vfs-dbus
|
||||
|
||||
#specify versions, as the header says :)
|
||||
PREFERRED_VERSION_libmatchbox ?= "1.7"
|
||||
PREFERRED_VERSION_matchbox ?= "0.9.1"
|
||||
PREFERRED_VERSION_matchbox-common ?= "0.9.1"
|
||||
PREFERRED_VERSION_matchbox-desktop ?= "0.9.1"
|
||||
PREFERRED_VERSION_matchbox-wm ?= "0.9.5+svn${SRCDATE}"
|
||||
PREFERRED_VERSION_matchbox-panel ?= "0.9.2"
|
||||
PREFERRED_VERSION_matchbox-applet-inputmanager ?= "0.6"
|
||||
PREFERRED_VERSION_atk ?= "1.9.0"
|
||||
PREFERRED_VERSION_cairo ?= "0.5.2"
|
||||
PREFERRED_VERSION_glib-2.0 ?= "2.6.4"
|
||||
PREFERRED_VERSION_gtk+ ?= "2.6.10"
|
||||
PREFERRED_VERSION_pango ?= "1.8.1"
|
||||
PREFERRED_VERSION_librsvg ?= "2.6.5"
|
||||
PREFERRED_VERSION_libgpewidget ?= "0.109"
|
||||
PREFERRED_VERSION_libgpepimc ?= "0.5"
|
||||
PREFERRED_VERSION_libgpevtype ?= "0.14"
|
||||
PREFERRED_VERSION_libschedule ?= "0.15"
|
||||
PREFERRED_VERSION_libcontactsdb ?= "0.3"
|
||||
PREFERRED_VERSION_libtododb ?= "0.09"
|
||||
PREFERRED_VERSION_gpe-icons ?= "0.25"
|
||||
PREFERRED_VERSION_libgsm ?= "1.0.10"
|
||||
PREFERRED_VERSION_diet-x11 ?= "6.2.1"
|
||||
PREFERRED_VERSION_xproto ?= "6.6.2"
|
||||
PREFERRED_VERSION_xcursor-transparent-theme ?= "0.1.1"
|
||||
PREFERRED_VERSION_rxvt-unicode ?= "5.6"
|
||||
PREFERRED_VERSION_gtk2-theme-angelistic ?= "0.3"
|
||||
PREFERRED_VERSION_xst ?= "0.15"
|
||||
PREFERRED_VERSION_xextensions ?= "1.0.1"
|
||||
PREFERRED_VERSION_xserver-kdrive = "0.0+cvs20050207"
|
||||
PREFERRED_VERSION_xprop ?= "0.0+cvs${SRCDATE}"
|
||||
PREFERRED_VERSION_xhost ?= "0.0+cvs20040413"
|
||||
PREFERRED_VERSION_xrdb ?= "0.0+cvs${SRCDATE}"
|
||||
PREFERRED_VERSION_gpe-login ?= "0.83"
|
||||
PREFERRED_VERSION_gpe-session-scripts ?= "0.66"
|
||||
PREFERRED_VERSION_gpe-soundserver ?= "0.4-1"
|
||||
PREFERRED_VERSION_gpe-todo ?= "0.55"
|
||||
PREFERRED_VERSION_gpe-calendar ?= "0.70"
|
||||
PREFERRED_VERSION_gpe-sketchbook ?= "0.2.9"
|
||||
PREFERRED_VERSION_gpe-contacts ?= "0.43"
|
||||
PREFERRED_VERSION_gpe-today ?= "0.10"
|
||||
PREFERRED_VERSION_matchbox-panel-manager ?= "0.1"
|
||||
PREFERRED_VERSION_dbus ?= "0.23.4"
|
||||
PREFERRED_VERSION_gpe-beam ?= "0.2.8"
|
||||
PREFERRED_VERSION_gpe-bluetooth ?= "0.51"
|
||||
PREFERRED_VERSION_gpe-su ?= "0.19"
|
||||
PREFERRED_VERSION_gpe-conf ?= "0.1.29"
|
||||
PREFERRED_VERSION_gpe-clock ?= "0.23"
|
||||
PREFERRED_VERSION_gpe-mininet ?= "0.7"
|
||||
PREFERRED_VERSION_gpe-mixer ?= "0.42"
|
||||
PREFERRED_VERSION_gpe-shield ?= "0.8"
|
||||
PREFERRED_VERSION_gpe-wlancfg ?= "0.2.6"
|
||||
PREFERRED_VERSION_gpe-taskmanager ?= "0.19"
|
||||
PREFERRED_VERSION_keylaunch ?= "2.0.7"
|
||||
PREFERRED_VERSION_minilite ?= "0.49"
|
||||
PREFERRED_VERSION_xmonobut ?= "0.4"
|
||||
PREFERRED_VERSION_gpe-edit ?= "0.29"
|
||||
PREFERRED_VERSION_gpe-gallery ?= "0.96"
|
||||
PREFERRED_VERSION_gpe-calculator ?= "0.2"
|
||||
PREFERRED_VERSION_gpe-package ?= "0.3"
|
||||
PREFERRED_VERSION_gpe-soundbite ?= "1.0.6"
|
||||
PREFERRED_VERSION_gpe-terminal ?= "1.1"
|
||||
PREFERRED_VERSION_gpe-watch ?= "0.10"
|
||||
PREFERRED_VERSION_gpe-what ?= "0.42"
|
||||
PREFERRED_VERSION_gpe-filemanager ?= "0.23"
|
||||
PREFERRED_VERSION_gpe-go ?= "0.05"
|
||||
PREFERRED_VERSION_gpe-irc ?= "0.07"
|
||||
PREFERRED_VERSION_gpe-lights ?= "0.13"
|
||||
#PREFERRED_VERSION_gpe-nmf ?= "0.21"
|
||||
PREFERRED_VERSION_gpe-othello ?= "0.2-1"
|
||||
PREFERRED_VERSION_gpe-plucker ?= "0.2"
|
||||
PREFERRED_VERSION_gpe-tetris ?= "0.6-4"
|
||||
PREFERRED_VERSION_gsoko ?= "0.4.2-gpe6"
|
||||
PREFERRED_VERSION_xdemineur ?= "2.1.1"
|
||||
PREFERRED_VERSION_matchbox-panel-hacks ?= "0.3-1"
|
||||
PREFERRED_VERSION_rosetta ?= "0.0+cvs${SRCDATE}"
|
||||
PREFERRED_VERSION_dillo2 ?= "0.6.6"
|
||||
PREFERRED_VERSION_minimo ?= "0.0+cvs${SRCDATE}"
|
||||
PREFERRED_VERSION_linphone-hh ?= "0.12.2.hh1"
|
||||
PREFERRED_VERSION_linphone ?= "0.12.2"
|
||||
PREFERRED_VERSION_firefox ?= "1.0"
|
||||
PREFERRED_VERSION_gaim ?= "2.0.0+cvs20050904-mco1"
|
||||
PREFERRED_VERSION_poppler ?= "0.3.3"
|
||||
PREFERRED_VERSION_evince ?= "0.3.2"
|
||||
PREFERRED_VERSION_gpe_mini_browser ?= "0.19"
|
||||
206
meta/conf/distro/preferred-opie-versions.inc
Normal file
206
meta/conf/distro/preferred-opie-versions.inc
Normal file
@@ -0,0 +1,206 @@
|
||||
#
|
||||
# Default versions
|
||||
QTE_VERSION ?= "2.3.10"
|
||||
OPIE_VERSION ?= "1.2.2"
|
||||
PALMTOP_USE_MULTITHREADED_QT ?= "yes"
|
||||
|
||||
#
|
||||
#
|
||||
# Opie libraries
|
||||
#
|
||||
PREFERRED_PROVIDER_virtual/libqte2 = '${@base_conditional("PALMTOP_USE_MULTITHREADED_QT", "yes", "qte-mt", "qte", d)}'
|
||||
PREFERRED_PROVIDER_virtual/libqpe1 = "libqpe-opie"
|
||||
PREFERRED_VERSION_qte-mt = "${QTE_VERSION}"
|
||||
PREFERRED_VERSION_qte = "${QTE_VERSION}"
|
||||
PREFERRED_VERSION_libopie2 = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_libopieobex0 = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_libopietooth1 = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_libqpe-opie = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_libqtaux2 = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_libmailwrapper = "${OPIE_VERSION}"
|
||||
#
|
||||
# Opie applications
|
||||
#
|
||||
PREFERRED_VERSION_opie-aboutapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-addressbook = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-advancedfm = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-alarm = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-appearance = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-aqpkg = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-autorotateapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-backgammon = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-backup = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-bartender = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-batteryapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-bluepin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-bluetoothapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-bluetoothmanager = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-bounce = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-brightnessapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-button-settings = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-buzzword = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-calculator = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-calibrate = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-camera = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-cardmon = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-checkbook = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-citytime = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-clipboardapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-clockapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-clock = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-confeditor = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-console = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-dagger = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-dasher = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-datebook = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-datebook-chrisholidayplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-datebook-nationalholidayplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-datebook-birthdayplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-deco-flat = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-deco-liquid = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-deco-polished = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-doctab = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-drawpad = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-dvorak = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-embeddedkonsole = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-euroconv = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-examples-python = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-eye = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-fifteen = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-formatter = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-freetype = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-ftplib = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-ftp = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-go = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-gutenbrowser = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-handwriting = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-helpbrowser = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-help-en = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-homeapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-i18n = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-lrelease-native = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-lupdate-native = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-icon-reload = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-irc = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-irdaapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-jumpx = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-kbill = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-kcheckers = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-keyboard = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-keypebble = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-keytabs = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-keyview = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-kjumpx = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-kpacman = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-language = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-launcher-settings = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-libqrsync = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-light-and-power = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-lockapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-login = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-logoutapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mailapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mail = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer1-libmadplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer1-libmodplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer1-libtremorplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer1-libwavplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer1 = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer2 = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer2-skin-default = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer2-skin-default-landscape = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer2-skin-pod = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediaplayer2-skin-techno = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mediummount = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-memoryapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mindbreaker = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-minesweep = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-mobilemsg = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-multikeyapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-multikey = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-networkapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-networksettings = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-notesapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-odict = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-osearch = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-oxygen = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-oyatzee = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-packagemanager = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-parashoot = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-pcmciaapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-performance = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-pickboard = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-pics = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-pimconverter = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-powerchord = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-pyquicklaunchapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-qasteroids = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-qcop = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-quicklauncher = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-qss = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-rdesktop = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-reader = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-recorder = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-remote = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-restartapplet2 = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-restartapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-restartapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-rotateapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-screenshotapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-search = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-security = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-securityplugin-blueping = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-securityplugin-dummy = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-securityplugin-notice = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-securityplugin-pin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-sfcave = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-sheet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-sh = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-showimg = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-snake = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-solitaire = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-sshkeys = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-style-flat = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-style-fresh = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-style-liquid = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-style-metal = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-style-phase = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-style-web = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-sounds = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-suspendapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-symlinker = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-sysinfo = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-systemtime = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-tableviewer = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-tabmanager = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-taskbar = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-tetrix = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-textedit = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-theme = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-tictac = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-tinykate = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today-addressbookplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today-datebookplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today-fortuneplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today-mailplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today-stocktickerplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today-todolistplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-today-weatherplugin = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-todo = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-tonleiter = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-unikeyboard = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-usermanager = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-vmemo = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-vmemo-settings = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-volumeapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-vtapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-wellenreiter = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-wirelessapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-wordgame = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-write = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-zkbapplet = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-zlines = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-zsafe = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_opie-zsame = "${OPIE_VERSION}"
|
||||
PREFERRED_VERSION_konqueror-embedded = "20060404"
|
||||
104
meta/conf/documentation.conf
Normal file
104
meta/conf/documentation.conf
Normal file
@@ -0,0 +1,104 @@
|
||||
# this file holds documentation for known keys, possible values and
|
||||
# their meaning. Please update, correct and extend this documentation.
|
||||
# Mail your changes to oe@handhelds.org
|
||||
|
||||
# conf/bitbake.conf
|
||||
PREFERRED_VERSION[doc] = 'Normally use it as PREFERRED_VERSION_package-name = "" to set the preferred \
|
||||
version of more than one version for the package-name is available.'
|
||||
|
||||
BUILD_ARCH[doc] = "The name of the building architecture. E.g. i686."
|
||||
BUILD_OS[doc] = "The operating system (in lower case) of the building architecture. E.g \
|
||||
linux."
|
||||
BUILD_VENDOR[doc] = "FIXME"
|
||||
BUILD_SYS[doc] = "FIXME"
|
||||
BUILD_PREFIX[doc] = "FIXME"
|
||||
BUILD_CC_ARCH[doc] = "FIXME"
|
||||
|
||||
HOST_ARCH[doc] = "The name of the target architecture. Normally same as the TARGET_ARCH. @see TARGET_ARCH @group base"
|
||||
HOST_OS[doc] = "The name of the target operating system. Normally the same as the TARGET_OS. \
|
||||
@see TARGET_OS @group base"
|
||||
HOST_VENDOR[doc] = "The name of the vendor. Normally same as the TARGET_VENDOR. @see TARGET_VENDOR"
|
||||
HOST_SYS[doc] = "FIXME"
|
||||
HOST_PREFIX[doc] = "Normally same as the TARGET_PREFIX. @see TARGET_PREFIX @group base"
|
||||
HOST_CC_ARCH[doc] = "Normally same as the TARGET_CC_ARCH. @see TARGET_CC_ARCH @group base"
|
||||
HOST_NONSYSV[doc] = 'This flag can be set to "1" if the host system is not SysV compatible. E.g.\
|
||||
fakeroot-native will be build with tcp as IPC implementation.'
|
||||
|
||||
TARGET_ARCH[doc] = "Build for which architecture. Examples are arm, i686, sh3, mips, powerpc."
|
||||
TARGET_OS[doc] = 'Build for which Operating System. Currently possible values are \
|
||||
linux and linux-uclibc. When using linux-uclibc you might want to set USE_NLS_gcc-cross to "no".'
|
||||
TARGET_VENDOR[doc] = "FIXME"
|
||||
TARGET_SYS[doc] = "The target system is composed out of TARGET_ARCH,TARGET_VENDOR and TARGET_OS."
|
||||
TARGET_PREFIX[doc] = "The prefix for the cross compile toolchain. E.g arm-linux- ."
|
||||
TARGET_CC_ARCH[doc] = "FIXME"
|
||||
TARGET_FPU[doc] = "Floating point option (mostly for FPU-less systems), can be 'soft' or empty \
|
||||
for hardware floating point instructions."
|
||||
|
||||
PACKAGE_ARCH[doc] = 'The architecture needed for using a resulting package. If you have \
|
||||
machine dependant configuration options in your bitbake file add a \
|
||||
PACKAGE_ARCH = "${MACHINE_ARCH}" to the file.'
|
||||
|
||||
IPKG_ARCHS[doc] = 'A list of architectures compatible with the given target in order of priority'
|
||||
IPKG_EXTRA_ARCHS[doc] = 'Set this variable to add extra architectures to the list of supported architectures'
|
||||
|
||||
DATE[doc] = "The date the build was started Ymd"
|
||||
TIME[doc] = "The time the build was started HMS"
|
||||
DATETIME[doc] = "The date and time the build was started at"
|
||||
|
||||
PN[doc] = "PN holds the name of the package (Package Name). It is gathered from the bitbake-file filename"
|
||||
PV[doc] = "PV holds the version of the package (Package Version). The default value is 1.0, it is \
|
||||
retrieved from the bitbake-file filename but can be changed."
|
||||
PR[doc] = "PR is the revision of the package (Package Revision). By default it is set to r0."
|
||||
PF[doc] = "Package name - Package version - Package revision (PN-PV-PR)"
|
||||
P[doc] = "Package name - Package version (PN-PF)"
|
||||
S[doc] = "Directory that holds the sources to build, WORKDIR/PN-PV by default. The 'make' or equivalent command is run from this directory."
|
||||
SECTION[doc] = "Section of the packages e.g. console/networking"
|
||||
PRIORITY[doc] = "Importance of package, default values are 'optional' or 'needed'."
|
||||
DESCRIPTION[doc] = "A small description of the package."
|
||||
LICENSE[doc] = "The license of the resulting package e.g. GPL"
|
||||
AUTHOR[doc] = "The author of the files installed."
|
||||
MAINTAINER[doc] = "Maintainer of the .bb file and the resulting package."
|
||||
HOMEPAGE[doc] = "Homepage of the package e.g. http://www.project.net."
|
||||
|
||||
|
||||
# dependencies
|
||||
GROUP_dependencies[doc] = "The keys in this group influence the dependency handling of BitBake \
|
||||
and the resulting packages."
|
||||
DEPENDS[doc] = "Build time dependencies, things needed to build the package. @group dependencies"
|
||||
RDEPENDS[doc] = "Run time dependencies, things needed for a given package to run. This is used to populate the ipkg:Depends field. @group dependencies"
|
||||
PROVIDES[doc] = "Names for additional build time dependencies that this package will provide. @group dependencies"
|
||||
RPROVIDES[doc] = "Names for additional run time dependencies that this package will provide. This is used to populate the ipkg:Provides field. @group dependencies"
|
||||
|
||||
|
||||
# packaging
|
||||
GROUP_packaging[doc] = "The keys in this group influence the package building process. They influence \
|
||||
which packages will be generated and which files will be present in the generated packages."
|
||||
PACKAGES[doc] = "Names of packages to be generated. @group packaging"
|
||||
FILES[doc] = "Files/Directories belonging to the package. @group packaging"
|
||||
|
||||
|
||||
|
||||
TMPDIR[doc] = "The temporary directory of openembedded holding work-, deploy, staging- and other directories."
|
||||
CACHE[doc] = "The directory holding the cache of the metadata."
|
||||
|
||||
GROUP_fetcher[doc] = "The key highly influence the fetcher implementations."
|
||||
DL_DIR[doc] = "The directory where tarballs will be stored. @group fetcher"
|
||||
CVSDIR[doc] = "The directory where cvs checkouts will be stored in. @group fetcher"
|
||||
GITDIR[doc] = "The directory where git clones will be stored. @group fetcher"
|
||||
|
||||
STAMP[doc] = "The directory that holds files to keep track of what was built."
|
||||
WORKDIR[doc] = "The directory where a concrete package will be unpacked and built."
|
||||
T[doc] = "Temporary directory within the WORKDIR."
|
||||
|
||||
GROUP_locale[doc] = "Locale generation of the GNU libc implementation"
|
||||
ENABLE_BINARY_LOCALE_GENERATION[doc] = "Enable the use of qemu to generate locale information during build time on the host instead of runtime on the target. If you have trouble with qemu-native you should make this an empty var. @group locale"
|
||||
|
||||
PCMCIA_MANAGER[doc] = "Specify which package(s) to install to handle PCMCIA slots (usually pcmcia-cs or pcmciautils)."
|
||||
|
||||
SYSVINIT_ENABLED_GETTYS[doc] = "Specify which VTs should be running a getty, the default is 1"
|
||||
|
||||
# palmtop build class
|
||||
PALMTOP_USE_MULTITHREADED_QT[doc] = "Set to yes, if you want to build qt apps with CONFIG+=thread"
|
||||
|
||||
COMPATIBLE_HOST[doc] = "A regular expression which matches the HOST_SYS names supported by the package/file. Failure to match will cause the file to be skipped by the parser."
|
||||
COMPATIBLE_MACHINE[doc] = "A regular expression which matches the MACHINES support by the package/file. Failure to match will cause the file to be skipped by the parser."
|
||||
5
meta/conf/licenses.conf
Normal file
5
meta/conf/licenses.conf
Normal file
@@ -0,0 +1,5 @@
|
||||
SRC_DISTRIBUTE_LICENSES += "GPL GPLv2 BSD LGPL Apache-2.0 QPL AFL"
|
||||
SRC_DISTRIBUTE_LICENSES += "MIT Sleepycat Classpath Perl PSF PD Artistic"
|
||||
SRC_DISTRIBUTE_LICENSES += "bzip2 zlib ntp cron libpng netperf openssl"
|
||||
SRC_DISTRIBUTE_LICENSES += "Info-ZIP tcp-wrappers"
|
||||
|
||||
8
meta/conf/machine/akita.conf
Normal file
8
meta/conf/machine/akita.conf
Normal file
@@ -0,0 +1,8 @@
|
||||
#@TYPE: Machine
|
||||
#@NAME: Sharp Zaurus SL-C1000
|
||||
#@DESCRIPTION: Machine configuration for the Sharp Zaurus SL-C1000 device
|
||||
|
||||
include conf/machine/include/zaurus-clamshell.conf
|
||||
include conf/machine/include/zaurus-clamshell-2.6.conf
|
||||
|
||||
ROOT_FLASH_SIZE = "58"
|
||||
9
meta/conf/machine/c7x0.conf
Normal file
9
meta/conf/machine/c7x0.conf
Normal file
@@ -0,0 +1,9 @@
|
||||
#@TYPE: Machine
|
||||
#@NAME: Sharp Zaurus SL-C7x0
|
||||
#@DESCRIPTION: Machine configuration for the Sharp Zaurus SL-C700, Sharp Zaurus SL-C750, Sharp Zaurus SL-C760, Sharp Zaurus SL-C860 devices
|
||||
|
||||
include conf/machine/include/zaurus-clamshell.conf
|
||||
include conf/machine/include/zaurus-clamshell-2.6.conf
|
||||
|
||||
ROOT_FLASH_SIZE = "25"
|
||||
# yes, we are aware that the husky (c760,c860) has 54MB rootfs, but we don't make a special image for it.
|
||||
31
meta/conf/machine/cmx270.conf
Normal file
31
meta/conf/machine/cmx270.conf
Normal file
@@ -0,0 +1,31 @@
|
||||
#@TYPE: Machine
|
||||
#@NAME: Nokia 770 internet tablet
|
||||
#@DESCRIPTION: Machine configuration for the Nokia 770, an omap 1710 based tablet
|
||||
TARGET_ARCH = "arm"
|
||||
IPKG_EXTRA_ARCHS = "armv4 armv5te"
|
||||
|
||||
EXTRA_IMAGECMD_jffs2 = "--pad --little-endian --eraseblock=0x4000 -n"
|
||||
|
||||
IMAGE_CMD_jffs2 = "mkdir -p ${DEPLOY_DIR_IMAGE}; \
|
||||
mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime \
|
||||
--output=${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.img ${EXTRA_IMAGECMD}; "
|
||||
|
||||
PREFERRED_PROVIDER_virtual/bootloader = ""
|
||||
|
||||
# 800x480 is big enough for me
|
||||
GUI_MACHINE_CLASS = "bigscreen"
|
||||
|
||||
# Use tune-arm926 per default. Machine independent feeds should be built with tune-strongarm.
|
||||
include conf/machine/include/tune-xscale.conf
|
||||
|
||||
#size of the root partition (yes, it is 123 MB)
|
||||
ROOT_FLASH_SIZE = "128"
|
||||
|
||||
# serial console port on devboard rev. B3
|
||||
SERIAL_CONSOLE = "115200 ttyS0"
|
||||
|
||||
PREFERRED_PROVIDER_virtual/kernel = "linux-cmx270"
|
||||
|
||||
|
||||
include conf/machine/include/handheld-common.conf
|
||||
|
||||
23
meta/conf/machine/include/handheld-common.conf
Normal file
23
meta/conf/machine/include/handheld-common.conf
Normal file
@@ -0,0 +1,23 @@
|
||||
HANDHELD_MODULES = "ipv6 \
|
||||
ipsec \
|
||||
nvrd \
|
||||
mip6-mn \
|
||||
tun \
|
||||
ide-cs ide-disk ide-probe-mod \
|
||||
loop \
|
||||
vfat ext2 \
|
||||
sunrpc nfs \
|
||||
btuart-cs dtl1-cs bt3c-cs rfcomm bnep l2cap sco hci_uart hidp \
|
||||
pcnet-cs serial-cs airo-cs \
|
||||
af_packet \
|
||||
ppp-async ppp-deflate ppp-mppe \
|
||||
ip-gre ip-tables ipip \
|
||||
irda irlan irnet irport irtty ircomm-tty \
|
||||
input uinput \
|
||||
"
|
||||
|
||||
# Configuration bits for "generic handheld"
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "${PCMCIA_MANAGER} apm apmd network-suspend-scripts"
|
||||
BOOTSTRAP_EXTRA_RRECOMMENDS += "ppp wireless-tools irda-utils openswan wpa-supplicant-nossl lrzsz ppp-dialin ${@linux_module_packages('${HANDHELD_MODULES}', d)}"
|
||||
|
||||
INHERIT += "linux_modules"
|
||||
7
meta/conf/machine/include/ipaq-common.conf
Normal file
7
meta/conf/machine/include/ipaq-common.conf
Normal file
@@ -0,0 +1,7 @@
|
||||
IPAQ_MODULES = "apm h3600_generic_sleeve ipaq-sleeve ipaq-mtd-asset nvrd atmelwlandriver sa1100-rtc ipaq-hal h3600_ts usb-eth wavelan_cs keybdev"
|
||||
|
||||
BOOTSTRAP_EXTRA_RRECOMMENDS += "kbdd bl hostap-modules-cs orinoco-modules-cs atmelwlandriver ${@linux_module_packages('${IPAQ_MODULES}', d)}"
|
||||
BOOTSTRAP_EXTRA_RRECOMMENDS_append_kernel24 += "mipv6"
|
||||
|
||||
PREFERRED_PROVIDER_virtual/xserver = "xserver-kdrive"
|
||||
|
||||
174
meta/conf/machine/include/ixp4xx.conf
Normal file
174
meta/conf/machine/include/ixp4xx.conf
Normal file
@@ -0,0 +1,174 @@
|
||||
#@TYPE: Machine configuration
|
||||
#@NAME: ixp4xx
|
||||
#@DESCRIPTION: genric machine configuration for ixp4xx platforms
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# INPUTS
|
||||
#-------------------------------------------------------------------------------
|
||||
# conf/${DISTRO}.conf is included after this file and should be used to modify
|
||||
# variables identified as 'INPUTS' to the required values for the DISTRO, this
|
||||
# will cause the board specific settings (which depend for the most part on the
|
||||
# exact CPU used) to be set correctly within this file. The results are the
|
||||
# variables identifies here as 'OUTPUTS'
|
||||
#
|
||||
#variable = "default"
|
||||
# <possible values>
|
||||
# description
|
||||
|
||||
ARCH_BYTE_SEX = "be"
|
||||
# "be" "le"
|
||||
# The memory byte sex and (on correctly implemented hardware - the IXP4XX is
|
||||
# correct) the byte sex of the buses within the system. 'be' (big-endian)
|
||||
# means that the first byte is the most significant byte on a bus, 'le'
|
||||
# (little-endian) means that the first byte is the least significant byte.
|
||||
|
||||
ARM_INSTRUCTION_SET = "arm"
|
||||
# "arm" "thumb"
|
||||
# The instruction set the compiler should use when generating application
|
||||
# code. The kernel is always compiled with arm code at present. arm code
|
||||
# is the original 32 bit ARM instruction set, thumb code is the 16 bit
|
||||
# encoded RISC sub-set. Thumb code is smaller (maybe 70% of the ARM size)
|
||||
# but requires more instructions (140% for 70% smaller code) so may be
|
||||
# slower.
|
||||
|
||||
THUMB_INTERWORK = "yes"
|
||||
# "yes" "no"
|
||||
# Whether to compile with code to allow interworking between the two
|
||||
# instruction sets. This allows thumb code to be executed on a primarily
|
||||
# arm system and vice versa. It is strongly recommended that DISTROs not
|
||||
# turn this off - the actual cost is very small.
|
||||
|
||||
DISTRO_BASE = ""
|
||||
# "" ":<base>"
|
||||
# If given this must be the name of a 'distro' to add to the bitbake OVERRIDES
|
||||
# after ${DISTRO}, this allows different distros to share a common base of
|
||||
# overrides. The value given must include a leading ':' or chaos will result.
|
||||
|
||||
IXP4XX_SUFFIX = "${MACHINE_ARCH}"
|
||||
# <string>
|
||||
# Kernel suffix - 'ixp4xxbe' or 'ixp4xxle' for a truely generic image, controls
|
||||
# the suffix on the name of the generated zImage, override in the DISTRO
|
||||
# configuration if patches or defconfig are changed for the DISTRO.
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# OUTPUTS
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
#OVERRIDES
|
||||
# The standard ':' separated list of overrides used by bitbake - see the
|
||||
# basic setting in bitbake.conf. This list is based on that used for the
|
||||
# standard setting however :<base>, :thumb and :thumb-interwork will be
|
||||
# inserted at appropriate points if a base distro, default use of thumb or
|
||||
# arm/thumb interworking are enabled in the inputs.
|
||||
OVERRIDE_THUMB = "${@['', ':thumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}"
|
||||
OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}"
|
||||
OVERRIDES = "local:${MACHINE}:ixp4xx:${DISTRO}${DISTRO_BASE}:${TARGET_OS}:${TARGET_ARCH}${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}:build-${BUILD_OS}:fail-fast"
|
||||
|
||||
# TARGET_CC_ARCH
|
||||
# TARGET_CC_KERNEL_ARCH
|
||||
# TARGET_LD_ARCH
|
||||
# TARGET_LD_KERNEL_ARCH
|
||||
# Compiler and linker options for application code and kernel code. These
|
||||
# options ensure that the compiler has the correct settings for the selected
|
||||
# instruction set and interworking.
|
||||
ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}"
|
||||
ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}"
|
||||
#
|
||||
TARGET_CC_ARCH = "-march=armv5te -mtune=xscale ${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}"
|
||||
TARGET_CC_KERNEL_ARCH = "-march=armv5te -mtune=xscale -mno-thumb-interwork -mno-thumb"
|
||||
TARGET_LD_ARCH = ""
|
||||
TARGET_LD_KERNEL_ARCH = ""
|
||||
|
||||
# FULL_OPTIMIZATION
|
||||
# Optimization settings. Os works fine and is significantly better than O2.
|
||||
# The other settings are somewhat arbitrary. The optimisations chosen here
|
||||
# include the ones which 'break' symbolic debugging (because the compiler does
|
||||
# not include enough information to allow the debugger to know where given
|
||||
# values are.) The -fno options are an attempt to cancel explicit use of space
|
||||
# consuming options found in some packages (this isn't a complete list, this is
|
||||
# just the ones which package writers seem to know about and abuse).
|
||||
FULL_OPTIMIZATION = "-Os -fomit-frame-pointer -frename-registers \
|
||||
-fno-unroll-loops -fno-inline-functions \
|
||||
-fweb -funit-at-a-time"
|
||||
|
||||
# TARGET_ARCH
|
||||
# The correct setting for the system endianness (ARCH_BYTE_SEX). This will
|
||||
# be arm (le) or armeb (be) - it is difficult to be more precise in the
|
||||
# setting because of the large number of ways it is used within OpenEmbedded.
|
||||
TARGET_ARCH = "${@['armeb', 'arm'][bb.data.getVar('ARCH_BYTE_SEX', d, 1) == 'le']}"
|
||||
|
||||
# TARGET_PACKAGE_ARCH [not used]
|
||||
# The correct setting for the generated packages. This corresponds to the
|
||||
# -march flag passed to the compiler because that limits the architectures
|
||||
# on which the generated code will run.
|
||||
BYTE_SEX_CHAR = "${@['b', 'l'][bb.data.getVar('ARCH_BYTE_SEX', d, 1) == 'le']}"
|
||||
TARGET_PACKAGE_ARCH_BASE = "${@['armv5te', 'thumbv5t'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}"
|
||||
TARGET_PACKAGE_ARCH = "${TARGET_PACKAGE_ARCH_BASE}${BYTE_SEX_CHAR}"
|
||||
|
||||
# MACHINE_ARCH
|
||||
# The correct setting for packages which are specific to the machine, this
|
||||
# defaults to ${MACHINE} in bitbake.conf, however it is set to ixp4xx here
|
||||
# because everything built here is no more specific than that.
|
||||
MACHINE_ARCH = "ixp4xx${ARCH_BYTE_SEX}"
|
||||
|
||||
# IPKG_EXTRA_ARCHS
|
||||
# The full list of package architectures which should run on the system.
|
||||
# This takes into account both the board level issues and the INPUTS set
|
||||
# by the distro. The arm list is derived from the architecture settings
|
||||
# known to gcc, the thumb list is then derived from that (only the 't'
|
||||
# architectures of course).
|
||||
#[not used]: TARGET_ARCH is handled below because it is also currently
|
||||
# used for thumb packages.
|
||||
#ARM_ARCHITECTURES = "${TARGET_ARCH} armv2${BYTE_SEX_CHAR} armv2a${BYTE_SEX_CHAR} armv3${BYTE_SEX_CHAR} armv3m${BYTE_SEX_CHAR} armv4${BYTE_SEX_CHAR} armv4t${BYTE_SEX_CHAR} armv5${BYTE_SEX_CHAR} armv5t${BYTE_SEX_CHAR} armv5e${BYTE_SEX_CHAR} armv5te${BYTE_SEX_CHAR} xscale${BYTE_SEX_CHAR}"
|
||||
ARM_ARCHITECTURES = "armv2${BYTE_SEX_CHAR} armv2a${BYTE_SEX_CHAR} armv3${BYTE_SEX_CHAR} armv3m${BYTE_SEX_CHAR} armv4${BYTE_SEX_CHAR} armv4t${BYTE_SEX_CHAR} armv5${BYTE_SEX_CHAR} armv5t${BYTE_SEX_CHAR} armv5e${BYTE_SEX_CHAR} armv5te${BYTE_SEX_CHAR} xscale${BYTE_SEX_CHAR}"
|
||||
THUMB_ARCHITECTURES = "thumbe${BYTE_SEX_CHAR} thumbv4t${BYTE_SEX_CHAR} thumbv5t${BYTE_SEX_CHAR}"
|
||||
|
||||
# NOTE: this list contains just the things which rootfs_ipk.bbclass does
|
||||
# not add, rootfs_ipk.bbclass evaluates:
|
||||
#
|
||||
# ipkgarchs="all any noarch ${TARGET_ARCH} ${IPKG_EXTRA_ARCHS} ${MACHINE}"
|
||||
#
|
||||
# This is a priority ordered list - most desireable architecture at the end,
|
||||
# so put <ARM_INSTRUCTION_SET>_ARCHITECTURES at the end and, if
|
||||
# THUMB_INTERWORK precede this with the other architectures.
|
||||
IPKG_EXTRA_ARCHS = "ixp4xx ${MACHINE} \
|
||||
${@(lambda arch_thumb, arch_arm, is_arm, interwork: \
|
||||
(interwork and (is_arm and arch_thumb or arch_arm) + ' ' or '') \
|
||||
+ '${TARGET_ARCH} ' + (is_arm and arch_arm or arch_thumb)) \
|
||||
(bb.data.getVar('THUMB_ARCHITECTURES', d, 1), \
|
||||
bb.data.getVar('ARM_ARCHITECTURES', d, 1), \
|
||||
bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) != 'thumb', \
|
||||
bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes')} \
|
||||
${MACHINE_ARCH} ${MACHINE}${ARCH_BYTE_SEX}"
|
||||
|
||||
# IPKG_ARCH_LIST [not used]
|
||||
# This is used to override the ipkgarchs settings in rootfs_ipk.bbclass, allowing
|
||||
# the removal of the raw "${MACHINE}" from the end of the list. ${MACHINE} and
|
||||
# ixp4xx are included at the start (lower priority) as the non-byte-sex specific
|
||||
# versions.
|
||||
IPKG_ARCH_LIST = "all any noarch ixp4xx ${MACHINE} ${IPKG_EXTRA_ARCHS}"
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Package versions
|
||||
#-------------------------------------------------------------------------------
|
||||
# Warning: these are shared across the different machine and distro
|
||||
# configurations but can be overridden therein if required.
|
||||
SRCDATE_ipkg-utils ?= "20060106"
|
||||
SRCDATE_ipkg-utils-native ?= "20060106"
|
||||
SRCDATE_ipkg-link ?= "20060106"
|
||||
SRCDATE_irssi ?= "20050930"
|
||||
|
||||
CVS_TARBALL_STASH = "http://sources.nslu2-linux.org/sources/"
|
||||
INHERIT += "nslu2-mirrors"
|
||||
|
||||
PREFERRED_VERSION_ipkg ?= "0.99.154"
|
||||
PREFERRED_VERSION_ipkg-native ?= "0.99.154"
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Board defaults
|
||||
#-------------------------------------------------------------------------------
|
||||
# These values are board specific but they seem to be common to a large number
|
||||
# of boards so are reasonable defaults.
|
||||
SERIAL_CONSOLE = "115200 ttyS0"
|
||||
KERNEL_CONSOLE = "ttyS0,115200n8"
|
||||
USE_VT = "0"
|
||||
43
meta/conf/machine/include/poodle-2.6.conf
Normal file
43
meta/conf/machine/include/poodle-2.6.conf
Normal file
@@ -0,0 +1,43 @@
|
||||
PREFERRED_PROVIDER_virtual/kernel = "linux-openzaurus"
|
||||
|
||||
PCMCIA_MANAGER ?= "pcmciautils"
|
||||
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel udev sysfsutils spectrum-fw \
|
||||
${PCMCIA_MANAGER} apm wireless-tools irda-utils udev-utils keymaps hostap-utils prism3-firmware prism3-support \
|
||||
ppp ppp-dialin alsa-utils-alsactl alsa-utils-alsamixer module-init-tools alsa-conf zaurusd"
|
||||
|
||||
# Ethernet modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-pcnet-cs"
|
||||
# NFS Modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-nfs kernel-module-lockd kernel-module-sunrpc"
|
||||
# Crypto Modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-des kernel-module-md5"
|
||||
# SMB and CRAMFS
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-smbfs kernel-module-cramfs"
|
||||
# Serial Modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-8250 kernel-module-serial-cs"
|
||||
# Bluetooth Modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-bluetooth kernel-module-l2cap kernel-module-rfcomm kernel-module-hci-vhci \
|
||||
kernel-module-bnep kernel-module-hidp kernel-module-hci-uart kernel-module-sco \
|
||||
kernel-module-bt3c-cs kernel-module-bluecard-cs kernel-module-btuart-cs kernel-module-dtl1-cs"
|
||||
# Infrared Modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-pxaficp-ir kernel-module-irda kernel-module-ircomm \
|
||||
kernel-module-ircomm-tty kernel-module-irlan kernel-module-irnet kernel-module-ir-usb"
|
||||
|
||||
# USB Gadget Modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-gadgetfs kernel-module-g-file-storage \
|
||||
kernel-module-g-serial kernel-module-g-ether"
|
||||
|
||||
# Wireless Modules
|
||||
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-hostap kernel-module-hostap-cs \
|
||||
kernel-module-hermes kernel-module-orinoco \
|
||||
kernel-module-orinoco-cs kernel-module-spectrum-cs \
|
||||
hostap-conf orinoco-conf"
|
||||
|
||||
# Sound Modules
|
||||
###BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-snd-mixer-oss kernel-module-snd-pcm-oss"
|
||||
BOOTSTRAP_EXTRA_SOUND = "kernel-module-soundcore kernel-module-snd kernel-module-snd-page-alloc kernel-module-snd-timer \
|
||||
kernel-module-snd-pcm \
|
||||
kernel-module-snd-soc-core kernel-module-snd-soc-pxa2xx kernel-module-snd-soc-pxa2xx-i2s"
|
||||
|
||||
###BOOTSTRAP_EXTRA_RDEPENDS_append_poodle += "${BOOTSTRAP_EXTRA_SOUND} kernel-module-snd-soc-poodle kernel-module-snd-soc-wm8731 "
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user