mirror of
https://git.yoctoproject.org/poky
synced 2026-02-23 01:49:40 +01:00
Compare commits
79 Commits
yocto-1.9_
...
fido-13.0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
90d943ae62 | ||
|
|
46669d3074 | ||
|
|
8f39398dbf | ||
|
|
be46715a9d | ||
|
|
f73d6752db | ||
|
|
33558eacc8 | ||
|
|
0db74ea8f4 | ||
|
|
08246fc7d4 | ||
|
|
d691916ef8 | ||
|
|
2f6a0659fe | ||
|
|
888fb56e97 | ||
|
|
1444bef551 | ||
|
|
d1df6afab2 | ||
|
|
faf19c5274 | ||
|
|
dee274305d | ||
|
|
20d2d2096a | ||
|
|
a274e39f0c | ||
|
|
dfaa74d34f | ||
|
|
d66075e595 | ||
|
|
5f46b438a8 | ||
|
|
674ffa8c68 | ||
|
|
9b6eee9f30 | ||
|
|
c636c22484 | ||
|
|
98ae8387e3 | ||
|
|
f96ffe74a9 | ||
|
|
66f1700481 | ||
|
|
d2d94d071c | ||
|
|
0b82198c8d | ||
|
|
cf475ab8ea | ||
|
|
4ef59fe419 | ||
|
|
64ccb07092 | ||
|
|
acf41de1fa | ||
|
|
3eabeef5d6 | ||
|
|
2f050af982 | ||
|
|
dbab9c0ba1 | ||
|
|
07fc680a4a | ||
|
|
8ab5bc3e1e | ||
|
|
59813630e5 | ||
|
|
ad7edfbde3 | ||
|
|
6fdf368f76 | ||
|
|
40db5ea7df | ||
|
|
56a4b1e1d4 | ||
|
|
80f3b20060 | ||
|
|
693ba0b14f | ||
|
|
e7b07d33c2 | ||
|
|
aa47ba52f0 | ||
|
|
00b2c3a5e4 | ||
|
|
23a0e97d2e | ||
|
|
0fb825c5e5 | ||
|
|
b0cb740fe0 | ||
|
|
6dd5e472eb | ||
|
|
3ff649953d | ||
|
|
8db54a9cfa | ||
|
|
77b1711afb | ||
|
|
1e551ec0c3 | ||
|
|
e643f3defe | ||
|
|
31d301a3f6 | ||
|
|
b803944ce6 | ||
|
|
64b75d4276 | ||
|
|
2ac2706bb7 | ||
|
|
a3360f2cc6 | ||
|
|
ef73b474fb | ||
|
|
b936350fc2 | ||
|
|
d81ee4d277 | ||
|
|
232ccf23cf | ||
|
|
a7e20761bd | ||
|
|
bc090aa673 | ||
|
|
6891ae6425 | ||
|
|
a842038bca | ||
|
|
7c0846cc5b | ||
|
|
47686dc42f | ||
|
|
938fa5cebf | ||
|
|
18661937e4 | ||
|
|
c1d31cf2c7 | ||
|
|
8b3d3e7c95 | ||
|
|
fe5b98019d | ||
|
|
184e00a36b | ||
|
|
8f1decb32b | ||
|
|
36ac2c6dfd |
@@ -33,21 +33,17 @@ except RuntimeError as exc:
|
||||
sys.exit(str(exc))
|
||||
|
||||
from bb import cookerdata
|
||||
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
|
||||
|
||||
__version__ = "1.27.1"
|
||||
from bb.main import bitbake_main, BitBakeConfigParameters
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __version__ != bb.__version__:
|
||||
sys.exit("Bitbake core version and program version mismatch!")
|
||||
try:
|
||||
sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
|
||||
cookerdata.CookerConfiguration()))
|
||||
except BBMainException as err:
|
||||
sys.exit(err)
|
||||
ret = bitbake_main(BitBakeConfigParameters(sys.argv),
|
||||
cookerdata.CookerConfiguration())
|
||||
except bb.BBHandledException:
|
||||
sys.exit(1)
|
||||
ret = 1
|
||||
except Exception:
|
||||
ret = 1
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
sys.exit(ret)
|
||||
|
||||
|
||||
@@ -46,12 +46,6 @@ logger = logger_create('bitbake-diffsigs')
|
||||
def find_compare_task(bbhandler, pn, taskname):
|
||||
""" Find the most recent signature files for the specified PN/task and compare them """
|
||||
|
||||
def get_hashval(siginfo):
|
||||
if siginfo.endswith('.siginfo'):
|
||||
return siginfo.rpartition(':')[2].partition('_')[0]
|
||||
else:
|
||||
return siginfo.rpartition('.')[2]
|
||||
|
||||
if not hasattr(bb.siggen, 'find_siginfo'):
|
||||
logger.error('Metadata does not support finding signature data files')
|
||||
sys.exit(1)
|
||||
@@ -60,7 +54,7 @@ def find_compare_task(bbhandler, pn, taskname):
|
||||
taskname = 'do_%s' % taskname
|
||||
|
||||
filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
|
||||
latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
|
||||
latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
|
||||
if not latestfiles:
|
||||
logger.error('No sigdata files found matching %s %s' % (pn, taskname))
|
||||
sys.exit(1)
|
||||
@@ -68,16 +62,6 @@ def find_compare_task(bbhandler, pn, taskname):
|
||||
logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
|
||||
sys.exit(1)
|
||||
else:
|
||||
# It's possible that latestfiles contain 3 elements and the first two have the same hash value.
|
||||
# In this case, we delete the second element.
|
||||
# The above case is actually the most common one. Because we may have sigdata file and siginfo
|
||||
# file having the same hash value. Comparing such two files makes no sense.
|
||||
if len(latestfiles) == 3:
|
||||
hash0 = get_hashval(latestfiles[0])
|
||||
hash1 = get_hashval(latestfiles[1])
|
||||
if hash0 == hash1:
|
||||
latestfiles.pop(1)
|
||||
|
||||
# Define recursion callback
|
||||
def recursecb(key, hash1, hash2):
|
||||
hashes = [hash1, hash2]
|
||||
|
||||
@@ -501,13 +501,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
|
||||
|
||||
if len(allproviders[p]) > 1 or not show_multi_provider_only:
|
||||
pref = preferred_versions[p]
|
||||
realfn = bb.cache.Cache.virtualfn2realfn(pref[1])
|
||||
preffile = realfn[0]
|
||||
# We only display once per recipe, we should prefer non extended versions of the
|
||||
# recipe if present (so e.g. in OpenEmbedded, openssl rather than nativesdk-openssl
|
||||
# which would otherwise sort first).
|
||||
if realfn[1] and realfn[0] in self.bbhandler.cooker.recipecache.pkg_fn:
|
||||
continue
|
||||
preffile = bb.cache.Cache.virtualfn2realfn(pref[1])[0]
|
||||
if preffile not in preffiles:
|
||||
preflayer = self.get_file_layer(preffile)
|
||||
multilayer = False
|
||||
|
||||
@@ -39,7 +39,6 @@ else:
|
||||
"bb.tests.cow",
|
||||
"bb.tests.data",
|
||||
"bb.tests.fetch",
|
||||
"bb.tests.parse",
|
||||
"bb.tests.utils"]
|
||||
|
||||
for t in tests:
|
||||
|
||||
@@ -24,15 +24,6 @@ if sys.argv[1] == "decafbadbad":
|
||||
except:
|
||||
import profile
|
||||
|
||||
# Unbuffer stdout to avoid log truncation in the event
|
||||
# of an unorderly exit as well as to provide timely
|
||||
# updates to log files for use with tail
|
||||
try:
|
||||
if sys.stdout.name == '<stdout>':
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
except:
|
||||
pass
|
||||
|
||||
logger = logging.getLogger("BitBake")
|
||||
|
||||
try:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/bin/sh
|
||||
#!/bin/bash
|
||||
# (c) 2013 Intel Corp.
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
@@ -28,84 +28,79 @@
|
||||
|
||||
# Helper function to kill a background toaster development server
|
||||
|
||||
webserverKillAll()
|
||||
function webserverKillAll()
|
||||
{
|
||||
local pidfile
|
||||
for pidfile in ${BUILDDIR}/.toastermain.pid; do
|
||||
if [ -f ${pidfile} ]; then
|
||||
pid=`cat ${pidfile}`
|
||||
while kill -0 $pid 2>/dev/null; do
|
||||
kill -SIGTERM -$pid 2>/dev/null
|
||||
sleep 1
|
||||
# Kill processes if they are still running - may happen in interactive shells
|
||||
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
|
||||
done
|
||||
rm ${pidfile}
|
||||
fi
|
||||
done
|
||||
local pidfile
|
||||
for pidfile in ${BUILDDIR}/.toastermain.pid; do
|
||||
if [ -f ${pidfile} ]; then
|
||||
while kill -0 $(< ${pidfile}) 2>/dev/null; do
|
||||
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
|
||||
sleep 1;
|
||||
# Kill processes if they are still running - may happen in interactive shells
|
||||
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
|
||||
done;
|
||||
rm ${pidfile}
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
webserverStartAll()
|
||||
function webserverStartAll()
|
||||
{
|
||||
# do not start if toastermain points to a valid process
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
fi
|
||||
|
||||
retval=0
|
||||
if [ "$TOASTER_MANAGED" '=' '1' ]; then
|
||||
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
|
||||
else
|
||||
python $BBBASEDIR/lib/toaster/manage.py syncdb --noinput || retval=1
|
||||
fi
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
|
||||
if [ $retval -eq 1 ]; then
|
||||
echo "Failed db sync, stopping system start" 1>&2
|
||||
elif [ $retval -eq 2 ]; then
|
||||
printf "\nError on migration, trying to recover... \n"
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
|
||||
retval=0
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
|
||||
fi
|
||||
if [ "$TOASTER_MANAGED" = '1' ]; then
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
|
||||
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
|
||||
fi
|
||||
if [ $retval -eq 0 ]; then
|
||||
echo "Starting webserver..."
|
||||
python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
|
||||
sleep 1
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
||||
# do not start if toastermain points to a valid process
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
else
|
||||
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
|
||||
fi
|
||||
fi
|
||||
return $retval
|
||||
|
||||
retval=0
|
||||
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
|
||||
if [ $retval -eq 1 ]; then
|
||||
echo "Failed db sync, stopping system start" 1>&2
|
||||
elif [ $retval -eq 2 ]; then
|
||||
echo -e "\nError on migration, trying to recover... \n"
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
|
||||
retval=0
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
|
||||
fi
|
||||
if [ "x$TOASTER_MANAGED" == "x1" ]; then
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
|
||||
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
|
||||
fi
|
||||
if [ $retval -eq 0 ]; then
|
||||
echo "Starting webserver..."
|
||||
python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
|
||||
sleep 1
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
else
|
||||
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
|
||||
fi
|
||||
fi
|
||||
return $retval
|
||||
}
|
||||
|
||||
# Helper functions to add a special configuration file
|
||||
|
||||
addtoConfiguration()
|
||||
function addtoConfiguration()
|
||||
{
|
||||
file=$1
|
||||
shift
|
||||
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
|
||||
for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
|
||||
file=$1
|
||||
shift
|
||||
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
|
||||
for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
|
||||
}
|
||||
|
||||
INSTOPSYSTEM=0
|
||||
|
||||
# define the stop command
|
||||
stop_system()
|
||||
function stop_system()
|
||||
{
|
||||
# prevent reentry
|
||||
if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
|
||||
if [ $INSTOPSYSTEM == 1 ]; then return; fi
|
||||
INSTOPSYSTEM=1
|
||||
if [ -f ${BUILDDIR}/.toasterui.pid ]; then
|
||||
kill `cat ${BUILDDIR}/.toasterui.pid` 2>/dev/null
|
||||
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
|
||||
rm ${BUILDDIR}/.toasterui.pid
|
||||
fi
|
||||
BBSERVER=0.0.0.0:-1 bitbake -m
|
||||
@@ -118,29 +113,29 @@ stop_system()
|
||||
INSTOPSYSTEM=0
|
||||
}
|
||||
|
||||
check_pidbyfile() {
|
||||
[ -e $1 ] && kill -0 `cat $1` 2>/dev/null
|
||||
function check_pidbyfile() {
|
||||
[ -e $1 ] && kill -0 $(< $1) 2>/dev/null
|
||||
}
|
||||
|
||||
|
||||
notify_chldexit() {
|
||||
if [ $NOTOASTERUI -eq 0 ]; then
|
||||
function notify_chldexit() {
|
||||
if [ $NOTOASTERUI == 0 ]; then
|
||||
check_pidbyfile ${BUILDDIR}/.toasterui.pid && return
|
||||
stop_system
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
verify_prereq() {
|
||||
# Verify prerequisites
|
||||
function verify_prereq() {
|
||||
# Verify prerequisites
|
||||
|
||||
if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (6,)" | python 2>/dev/null | grep True >/dev/null; then
|
||||
printf "This program needs Django 1.6. Please install with\n\npip install django==1.6\n"
|
||||
echo -e "This program needs Django 1.6. Please install with\n\npip install django==1.6\n"
|
||||
return 2
|
||||
fi
|
||||
|
||||
if ! echo "import south; print reduce(lambda x, y: 2 if x==2 else 0 if x == 0 else y, map(lambda x: 1+cmp(x[1]-x[0],0), zip([0,8,4], map(int,south.__version__.split(\".\"))))) > 0" | python 2>/dev/null | grep True >/dev/null; then
|
||||
printf "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4\n"
|
||||
echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4\n"
|
||||
return 2
|
||||
fi
|
||||
return 0
|
||||
@@ -148,23 +143,14 @@ verify_prereq() {
|
||||
|
||||
|
||||
# read command line parameters
|
||||
if [ -n "$BASH_SOURCE" ] ; then
|
||||
TOASTER=${BASH_SOURCE}
|
||||
elif [ -n "$ZSH_NAME" ] ; then
|
||||
TOASTER=${(%):-%x}
|
||||
else
|
||||
TOASTER=$0
|
||||
fi
|
||||
|
||||
BBBASEDIR=`dirname $TOASTER`/..
|
||||
|
||||
BBBASEDIR=`dirname ${BASH_SOURCE}`/..
|
||||
RUNNING=0
|
||||
|
||||
NOTOASTERUI=0
|
||||
WEBSERVER=1
|
||||
TOASTER_BRBE=""
|
||||
WEB_PORT="8000"
|
||||
NOBROWSER=0
|
||||
|
||||
for param in $*; do
|
||||
case $param in
|
||||
@@ -174,9 +160,6 @@ for param in $*; do
|
||||
noweb )
|
||||
WEBSERVER=0
|
||||
;;
|
||||
nobrowser )
|
||||
NOBROWSER=1
|
||||
;;
|
||||
brbe=* )
|
||||
TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
|
||||
;;
|
||||
@@ -185,67 +168,71 @@ for param in $*; do
|
||||
esac
|
||||
done
|
||||
|
||||
[ -n "${BASH_SOURCE}" ] && SRCFILE=${BASH_SOURCE} || SRCFILE=$_
|
||||
|
||||
if [ `basename \"$0\"` = `basename \"${SRCFILE}\"` ]; then
|
||||
if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
|
||||
# We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
|
||||
# Start just the web server, point the web browser to the interface, and start any Django services.
|
||||
|
||||
if ! verify_prereq; then
|
||||
echo "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2
|
||||
exit 1
|
||||
echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
if [ -n "$BUILDDIR" ]; then
|
||||
printf "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window." 1>&2
|
||||
exit 1
|
||||
echo -e "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window." 1>&2;
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
if [ "x`which daemon`" == "x" ]; then
|
||||
echo -e "Failed dependency; toaster needs the 'daemon' program in order to be able to start builds'. Please install the 'daemon' program from your distribution repositories or http://www.libslack.org/daemon/" 1>&2;
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
# Define a fake builddir where only the pid files are actually created. No real builds will take place here.
|
||||
BUILDDIR=/tmp/toaster_$$
|
||||
if [ -d "$BUILDDIR" ]; then
|
||||
echo "Previous toaster run directory $BUILDDIR found, cowardly refusing to start. Please remove the directory when that toaster instance is over" 2>&1
|
||||
exit 1
|
||||
echo -e "Previous toaster run directory $BUILDDIR found, cowardly refusing to start. Please remove the directory when that toaster instance is over" 2>&1
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
mkdir -p "$BUILDDIR"
|
||||
|
||||
RUNNING=1
|
||||
trap_ctrlc() {
|
||||
function trap_ctrlc() {
|
||||
echo "** Stopping system"
|
||||
webserverKillAll
|
||||
RUNNING=0
|
||||
}
|
||||
|
||||
do_cleanup() {
|
||||
function do_cleanup() {
|
||||
find "$BUILDDIR" -type f | xargs rm
|
||||
rmdir "$BUILDDIR"
|
||||
}
|
||||
cleanup() {
|
||||
function cleanup() {
|
||||
if grep -ir error "$BUILDDIR" >/dev/null; then
|
||||
if grep -irn "That port is already in use" "$BUILDDIR"; then
|
||||
echo "You can use the \"webport=PORTNUMBER\" parameter to start Toaster on a different port (port $WEB_PORT is already in use)"
|
||||
do_cleanup
|
||||
else
|
||||
printf "\nErrors found in the Toaster log files present in '$BUILDDIR'. Directory will not be cleaned.\n Please review the errors and notify toaster@yoctoproject.org or submit a bug https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Toaster"
|
||||
echo -e "\nErrors found in the Toaster log files present in '$BUILDDIR'. Directory will not be cleaned.\n Please review the errors and notify toaster@yoctoproject.org or submit a bug https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Toaster"
|
||||
fi
|
||||
else
|
||||
echo "No errors found, removing the run directory '$BUILDDIR'"
|
||||
do_cleanup
|
||||
fi
|
||||
fi;
|
||||
}
|
||||
TOASTER_MANAGED=1
|
||||
export TOASTER_MANAGED=1
|
||||
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
|
||||
echo "Failed to start the web server, stopping" 1>&2
|
||||
echo "Failed to start the web server, stopping" 1>&2;
|
||||
cleanup
|
||||
exit 1
|
||||
exit 1;
|
||||
fi
|
||||
if [ $WEBSERVER -gt 0 ] && [ $NOBROWSER -eq 0 ] ; then
|
||||
if [ $WEBSERVER -gt 0 ]; then
|
||||
echo "Starting browser..."
|
||||
xdg-open http://127.0.0.1:$WEB_PORT/ >/dev/null 2>&1 &
|
||||
fi
|
||||
trap trap_ctrlc 2
|
||||
trap trap_ctrlc SIGINT
|
||||
echo "Toaster is now running. You can stop it with Ctrl-C"
|
||||
while [ $RUNNING -gt 0 ]; do
|
||||
python $BBBASEDIR/lib/toaster/manage.py runbuilds 2>&1 | tee -a "$BUILDDIR/toaster.log"
|
||||
@@ -258,27 +245,27 @@ fi
|
||||
|
||||
|
||||
if ! verify_prereq; then
|
||||
echo "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2
|
||||
return 1
|
||||
echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
|
||||
return 1;
|
||||
fi
|
||||
|
||||
|
||||
# We make sure we're running in the current shell and in a good environment
|
||||
if [ -z "$BUILDDIR" ] || ! which bitbake >/dev/null 2>&1 ; then
|
||||
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
|
||||
if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
|
||||
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
|
||||
return 2
|
||||
fi
|
||||
|
||||
|
||||
# Determine the action. If specified by arguments, fine, if not, toggle it
|
||||
if [ "$1" = 'start' ] || [ "$1" = 'stop' ]; then
|
||||
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
|
||||
CMD="$1"
|
||||
else
|
||||
if [ -z "$BBSERVER" ]; then
|
||||
CMD="start"
|
||||
else
|
||||
CMD="stop"
|
||||
fi
|
||||
fi;
|
||||
fi
|
||||
|
||||
echo "The system will $CMD."
|
||||
@@ -287,16 +274,16 @@ echo "The system will $CMD."
|
||||
|
||||
lock=1
|
||||
if [ -e $BUILDDIR/bitbake.lock ]; then
|
||||
python -c "import fcntl; fcntl.flock(open(\"$BUILDDIR/bitbake.lock\"), fcntl.LOCK_EX|fcntl.LOCK_NB)" 2>/dev/null || lock=0
|
||||
(flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
|
||||
fi
|
||||
|
||||
if [ ${CMD} = 'start' ] && [ $lock -eq 0 ]; then
|
||||
if [ ${CMD} == "start" ] && [ $lock -eq 0 ]; then
|
||||
echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
|
||||
echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
|
||||
return 3
|
||||
fi
|
||||
|
||||
if [ ${CMD} = 'start' ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
|
||||
if [ ${CMD} == "start" ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
|
||||
echo "Warning: bitbake appears to be dead, but the Toaster web server is running. Something fishy is going on." 1>&2
|
||||
echo "Cleaning up the web server to start from a clean slate."
|
||||
webserverKillAll
|
||||
@@ -316,7 +303,7 @@ case $CMD in
|
||||
unset BBSERVER
|
||||
PREREAD=""
|
||||
if [ -e ${BUILDDIR}/conf/toaster-pre.conf ]; then
|
||||
rm ${BUILDDIR}/conf/toaster-pre.conf
|
||||
rm ${BUILDDIR}/conf/toaster-pre.conf
|
||||
fi
|
||||
bitbake $PREREAD --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
|
||||
if [ $? -ne 0 ]; then
|
||||
@@ -324,7 +311,7 @@ case $CMD in
|
||||
echo "Bitbake server start failed"
|
||||
else
|
||||
export BBSERVER=0.0.0.0:-1
|
||||
if [ $NOTOASTERUI -eq 0 ]; then # we start the TOASTERUI only if not inhibited
|
||||
if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
|
||||
bitbake --observe-only -u toasterui >>${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
|
||||
fi
|
||||
fi
|
||||
@@ -350,3 +337,4 @@ case $CMD in
|
||||
;;
|
||||
esac
|
||||
|
||||
|
||||
|
||||
@@ -26,7 +26,6 @@
|
||||
# as a build eventlog, and the ToasterUI is used to process events in the file
|
||||
# and log data in the database
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys, logging
|
||||
|
||||
@@ -40,6 +39,12 @@ from bb.ui import toasterui
|
||||
import sys
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
console = logging.StreamHandler(sys.stdout)
|
||||
format_str = "%(levelname)s: %(message)s"
|
||||
logging.basicConfig(format=format_str)
|
||||
|
||||
|
||||
import json, pickle
|
||||
|
||||
|
||||
@@ -163,12 +168,12 @@ class MockConfigParameters():
|
||||
# run toaster ui on our mock bitbake class
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) < 2:
|
||||
print("Usage: %s event.log " % sys.argv[0])
|
||||
logger.error("Usage: %s event.log " % sys.argv[0])
|
||||
sys.exit(1)
|
||||
|
||||
file_name = sys.argv[-1]
|
||||
mock_connection = FileReadEventsServerConnection(file_name)
|
||||
configParams = MockConfigParameters()
|
||||
|
||||
# run the main program and set exit code to the returned value
|
||||
sys.exit(toasterui.main(mock_connection.connection, mock_connection.events, configParams))
|
||||
# run the main program
|
||||
toasterui.main(mock_connection.connection, mock_connection.events, configParams)
|
||||
|
||||
@@ -1,15 +1,7 @@
|
||||
<?xml version='1.0'?>
|
||||
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">
|
||||
|
||||
<xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
|
||||
|
||||
<!--
|
||||
|
||||
<xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
|
||||
|
||||
<xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />
|
||||
|
||||
-->
|
||||
<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl" />
|
||||
|
||||
<xsl:include href="../template/permalinks.xsl"/>
|
||||
<xsl:include href="../template/section.title.xsl"/>
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
The execution process is launched using the following command
|
||||
form:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake <replaceable>target</replaceable>
|
||||
$ bitbake <target>
|
||||
</literallayout>
|
||||
For information on the BitBake command and its options,
|
||||
see
|
||||
@@ -37,16 +37,14 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A common method to determine this value for your build host is to run
|
||||
the following:
|
||||
A common way to determine this value for your build host is to run:
|
||||
<literallayout class='monospaced'>
|
||||
$ grep processor /proc/cpuinfo
|
||||
</literallayout>
|
||||
This command returns the number of processors, which takes into
|
||||
account hyper-threading.
|
||||
Thus, a quad-core build host with hyper-threading most likely
|
||||
shows eight processors, which is the value you would then assign to
|
||||
<filename>BB_NUMBER_THREADS</filename>.
|
||||
and count the number of processors displayed. Note that the number of
|
||||
processors will take into account hyper-threading, so that a quad-core
|
||||
build host with hyper-threading will most likely show eight processors,
|
||||
which is the value you would then assign to that variable.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -287,8 +285,8 @@
|
||||
<link linkend='var-PN'><filename>PN</filename></link> and
|
||||
<link linkend='var-PV'><filename>PV</filename></link>:
|
||||
<literallayout class='monospaced'>
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
|
||||
</literallayout>
|
||||
In this example, a recipe called "something_1.2.3.bb" would set
|
||||
<filename>PN</filename> to "something" and
|
||||
@@ -784,13 +782,13 @@
|
||||
make some dependency and hash information available to the build.
|
||||
This information includes:
|
||||
<itemizedlist>
|
||||
<listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
|
||||
<listitem><para><filename>BB_BASEHASH_task-<taskname></filename>:
|
||||
The base hashes for each task in the recipe.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
||||
<listitem><para><filename>BB_BASEHASH_<filename:taskname></filename>:
|
||||
The base hashes for each dependent task.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
||||
<listitem><para><filename>BBHASHDEPS_<filename:taskname></filename>:
|
||||
The task dependencies for each task.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BB_TASKHASH</filename>:
|
||||
|
||||
@@ -157,8 +157,8 @@
|
||||
<filename>SRC_URI</filename> variable with the appropriate
|
||||
varflags as follows:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI[md5sum] = "<replaceable>value</replaceable>"
|
||||
SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
|
||||
SRC_URI[md5sum] = "value"
|
||||
SRC_URI[sha256sum] = "value"
|
||||
</literallayout>
|
||||
You can also specify the checksums as parameters on the
|
||||
<filename>SRC_URI</filename> as shown below:
|
||||
@@ -628,7 +628,7 @@
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
|
||||
SRCREV = "EXAMPLE_CLEARCASE_TAG"
|
||||
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
|
||||
PV = "${@d.getVar("SRCREV").replace("/", "+")}"
|
||||
</literallayout>
|
||||
The fetcher uses the <filename>rcleartool</filename> or
|
||||
<filename>cleartool</filename> remote client, depending on
|
||||
@@ -646,19 +646,13 @@
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>module</filename></emphasis>:
|
||||
The module, which must include the
|
||||
prepending "/" character, in the selected VOB.
|
||||
<note>
|
||||
The <filename>module</filename> and <filename>vob</filename>
|
||||
options are combined to create the <filename>load</filename> rule in
|
||||
the view config spec.
|
||||
As an example, consider the <filename>vob</filename> and
|
||||
<filename>module</filename> values from the
|
||||
<filename>SRC_URI</filename> statement at the start of this section.
|
||||
Combining those values results in the following:
|
||||
<literallayout class='monospaced'>
|
||||
load /example_vob/example_module
|
||||
</literallayout>
|
||||
</note>
|
||||
prepending "/" character, in the selected VOB
|
||||
The <filename>module</filename> and <filename>vob</filename>
|
||||
options are combined to create the following load rule in
|
||||
the view config spec:
|
||||
<literallayout class='monospaced'>
|
||||
load <vob><module>
|
||||
</literallayout>
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>proto</filename></emphasis>:
|
||||
The protocol, which can be either <filename>http</filename> or
|
||||
|
||||
@@ -221,7 +221,7 @@
|
||||
<para>From your shell, enter the following commands to set and
|
||||
export the <filename>BBPATH</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
$ BBPATH="<replaceable>projectdirectory</replaceable>"
|
||||
$ BBPATH="<projectdirectory>"
|
||||
$ export BBPATH
|
||||
</literallayout>
|
||||
Use your actual project directory in the command.
|
||||
|
||||
@@ -327,8 +327,8 @@
|
||||
The following lines select the values of a package name
|
||||
and its version number, respectively:
|
||||
<literallayout class='monospaced'>
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
@@ -952,7 +952,7 @@
|
||||
<listitem><para>
|
||||
The class needs to define the function as follows:
|
||||
<literallayout class='monospaced'>
|
||||
<replaceable>classname</replaceable><filename>_</filename><replaceable>functionname</replaceable>
|
||||
<classname>_<functionname>
|
||||
</literallayout>
|
||||
For example, if you have a class file
|
||||
<filename>bar.bbclass</filename> and a function named
|
||||
@@ -966,7 +966,7 @@
|
||||
The class needs to contain the <filename>EXPORT_FUNCTIONS</filename>
|
||||
statement as follows:
|
||||
<literallayout class='monospaced'>
|
||||
EXPORT_FUNCTIONS <replaceable>functionname</replaceable>
|
||||
EXPORT_FUNCTIONS <functionname>
|
||||
</literallayout>
|
||||
For example, continuing with the same example, the
|
||||
statement in the <filename>bar.bbclass</filename> would be
|
||||
@@ -1065,41 +1065,13 @@
|
||||
<title>Deleting a Task</title>
|
||||
|
||||
<para>
|
||||
As well as being able to add tasks, you can delete them.
|
||||
Simply use the <filename>deltask</filename> command to
|
||||
delete a task.
|
||||
As well as being able to add tasks, tasks can also be deleted.
|
||||
This is done simply with <filename>deltask</filename> command.
|
||||
For example, to delete the example task used in the previous
|
||||
sections, you would use:
|
||||
<literallayout class='monospaced'>
|
||||
deltask printdate
|
||||
</literallayout>
|
||||
If you delete a task using the <filename>deltask</filename>
|
||||
command and the task has dependencies, the dependencies are
|
||||
not reconnected.
|
||||
For example, suppose you have three tasks named
|
||||
<filename>do_a</filename>, <filename>do_b</filename>, and
|
||||
<filename>do_c</filename>.
|
||||
Furthermore, <filename>do_c</filename> is dependent on
|
||||
<filename>do_b</filename>, which in turn is dependent on
|
||||
<filename>do_a</filename>.
|
||||
Given this scenario, if you use <filename>deltask</filename>
|
||||
to delete <filename>do_b</filename>, the implicit dependency
|
||||
relationship between <filename>do_c</filename> and
|
||||
<filename>do_a</filename> through <filename>do_b</filename>
|
||||
no longer exists, and <filename>do_c</filename> dependencies
|
||||
are not updated to include <filename>do_a</filename>.
|
||||
Thus, <filename>do_c</filename> is free to run before
|
||||
<filename>do_a</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If you want dependencies such as these to remain intact, use
|
||||
the <filename>noexec</filename> varflag to disable the task
|
||||
instead of using the <filename>deltask</filename> command to
|
||||
delete it:
|
||||
<literallayout class='monospaced'>
|
||||
do_b[noexec] = "1"
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
@@ -1163,7 +1135,7 @@
|
||||
<para>
|
||||
The <filename>BB_ORIGENV</filename> variable returns a datastore
|
||||
object that can be queried using the standard datastore operators
|
||||
such as <filename>getVar(, False)</filename>.
|
||||
such as <filename>getVar()</filename>.
|
||||
The datastore object is useful, for example, to find the original
|
||||
<filename>DISPLAY</filename> variable.
|
||||
Here is an example:
|
||||
@@ -1192,7 +1164,7 @@
|
||||
BitBake reads and writes varflags to the datastore using the following
|
||||
command forms:
|
||||
<literallayout class='monospaced'>
|
||||
<replaceable>variable</replaceable> = d.getVarFlags("<replaceable>variable</replaceable>")
|
||||
<variable> = d.getVarFlags("<variable>")
|
||||
self.d.setVarFlags("FOO", {"func": True})
|
||||
</literallayout>
|
||||
</para>
|
||||
@@ -1213,36 +1185,11 @@
|
||||
Tasks support a number of these flags which control various
|
||||
functionality of the task:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>cleandirs:</emphasis>
|
||||
Empty directories that should created before the task runs.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>depends:</emphasis>
|
||||
Controls inter-task dependencies.
|
||||
See the
|
||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
|
||||
variable and the
|
||||
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
|
||||
section for more information.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>deptask:</emphasis>
|
||||
Controls task build-time dependencies.
|
||||
See the
|
||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
|
||||
variable and the
|
||||
"<link linkend='build-dependencies'>Build Dependencies</link>"
|
||||
section for more information.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>dirs:</emphasis>
|
||||
Directories that should be created before the task runs.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>lockfiles:</emphasis>
|
||||
Specifies one or more lockfiles to lock while the task
|
||||
executes.
|
||||
Only one task may hold a lockfile, and any task that
|
||||
attempts to lock an already locked file will block until
|
||||
the lock is released.
|
||||
You can use this variable flag to accomplish mutual
|
||||
exclusion.
|
||||
<listitem><para><emphasis>cleandirs:</emphasis>
|
||||
Empty directories that should created before the task runs.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>noexec:</emphasis>
|
||||
Marks the tasks as being empty and no execution required.
|
||||
@@ -1254,20 +1201,15 @@
|
||||
Tells BitBake to not generate a stamp file for a task,
|
||||
which implies the task should always be executed.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>postfuncs:</emphasis>
|
||||
List of functions to call after the completion of the task.
|
||||
<listitem><para><emphasis>umask:</emphasis>
|
||||
The umask to run the task under.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>prefuncs:</emphasis>
|
||||
List of functions to call before the task executes.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>rdepends:</emphasis>
|
||||
Controls inter-task runtime dependencies.
|
||||
<listitem><para><emphasis>deptask:</emphasis>
|
||||
Controls task build-time dependencies.
|
||||
See the
|
||||
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
|
||||
variable, the
|
||||
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
|
||||
variable, and the
|
||||
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
|
||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
|
||||
variable and the
|
||||
"<link linkend='build-dependencies'>Build Dependencies</link>"
|
||||
section for more information.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>rdeptask:</emphasis>
|
||||
@@ -1280,11 +1222,6 @@
|
||||
"<link linkend='runtime-dependencies'>Runtime Dependencies</link>"
|
||||
section for more information.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>recideptask:</emphasis>
|
||||
When set in conjunction with
|
||||
<filename>recrdeptask</filename>, specifies a task that
|
||||
should be inspected for additional dependencies.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>recrdeptask:</emphasis>
|
||||
Controls task recursive runtime dependencies.
|
||||
See the
|
||||
@@ -1295,14 +1232,35 @@
|
||||
"<link linkend='recursive-dependencies'>Recursive Dependencies</link>"
|
||||
section for more information.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>depends:</emphasis>
|
||||
Controls inter-task dependencies.
|
||||
See the
|
||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
|
||||
variable and the
|
||||
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
|
||||
section for more information.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>rdepends:</emphasis>
|
||||
Controls inter-task runtime dependencies.
|
||||
See the
|
||||
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
|
||||
variable, the
|
||||
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
|
||||
variable, and the
|
||||
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
|
||||
section for more information.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>postfuncs:</emphasis>
|
||||
List of functions to call after the completion of the task.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>prefuncs:</emphasis>
|
||||
List of functions to call before the task executes.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>stamp-extra-info:</emphasis>
|
||||
Extra stamp information to append to the task's stamp.
|
||||
As an example, OpenEmbedded uses this flag to allow
|
||||
machine-specific tasks.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>umask:</emphasis>
|
||||
The umask to run the task under.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
|
||||
@@ -1322,16 +1280,16 @@
|
||||
does not allow BitBake to automatically determine
|
||||
that the variable is referred to.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>vardepsexclude:</emphasis>
|
||||
Specifies a space-separated list of variables
|
||||
that should be excluded from a variable's dependencies
|
||||
for the purposes of calculating its signature.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>vardepvalue:</emphasis>
|
||||
If set, instructs BitBake to ignore the actual
|
||||
value of the variable and instead use the specified
|
||||
value when calculating the variable's signature.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>vardepsexclude:</emphasis>
|
||||
Specifies a space-separated list of variables
|
||||
that should be excluded from a variable's dependencies
|
||||
for the purposes of calculating its signature.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>vardepvalueexclude:</emphasis>
|
||||
Specifies a pipe-separated list of strings to exclude
|
||||
from the variable's value when calculating the
|
||||
|
||||
@@ -102,56 +102,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
Specifies a space-delimited list of hosts that the fetcher
|
||||
is allowed to use to obtain the required source code.
|
||||
Following are considerations surrounding this variable:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
This host list is only used if
|
||||
<link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
|
||||
is either not set or set to "0".
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Limited support for wildcard matching against the
|
||||
beginning of host names exists.
|
||||
For example, the following setting matches
|
||||
<filename>git.gnu.org</filename>,
|
||||
<filename>ftp.gnu.org</filename>, and
|
||||
<filename>foo.git.gnu.org</filename>.
|
||||
<literallayout class='monospaced'>
|
||||
BB_ALLOWED_NETWORKS = "*.gnu.org"
|
||||
</literallayout>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Mirrors not in the host list are skipped and
|
||||
logged in debug.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Attempts to access networks not in the host list
|
||||
cause a failure.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
Using <filename>BB_ALLOWED_NETWORKS</filename> in
|
||||
conjunction with
|
||||
<link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
|
||||
is very useful.
|
||||
Adding the host you want to use to
|
||||
<filename>PREMIRRORS</filename> results in the source code
|
||||
being fetched from an allowed location and avoids raising
|
||||
an error when a host that is not allowed is in a
|
||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
|
||||
statement.
|
||||
This is because the fetcher does not attempt to use the
|
||||
host listed in <filename>SRC_URI</filename> after a
|
||||
successful fetch from the
|
||||
<filename>PREMIRRORS</filename> occurs.
|
||||
</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
@@ -922,7 +872,7 @@
|
||||
that run on the target <filename>MACHINE</filename>;
|
||||
"nativesdk", which targets the SDK machine instead of
|
||||
<filename>MACHINE</filename>; and "mulitlibs" in the form
|
||||
"<filename>multilib:</filename><replaceable>multilib_name</replaceable>".
|
||||
"<filename>multilib:<multilib_name></filename>".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -934,7 +884,7 @@
|
||||
metadata:
|
||||
<literallayout class='monospaced'>
|
||||
BBCLASSEXTEND =+ "native nativesdk"
|
||||
BBCLASSEXTEND =+ "multilib:<replaceable>multilib_name</replaceable>"
|
||||
BBCLASSEXTEND =+ "multilib:<multilib_name>"
|
||||
</literallayout>
|
||||
</para>
|
||||
</glossdef>
|
||||
@@ -1066,20 +1016,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
Sets the base location where layers are stored.
|
||||
By default, this location is set to
|
||||
<filename>${COREBASE}</filename>.
|
||||
This setting is used in conjunction with
|
||||
<filename>bitbake-layers layerindex-fetch</filename> and
|
||||
tells <filename>bitbake-layers</filename> where to place
|
||||
the fetched layers.
|
||||
</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
@@ -1155,9 +1091,9 @@
|
||||
Set the variable as you would any environment variable
|
||||
and then run BitBake:
|
||||
<literallayout class='monospaced'>
|
||||
$ BBPATH="<replaceable>build_directory</replaceable>"
|
||||
$ BBPATH="<build_directory>"
|
||||
$ export BBPATH
|
||||
$ bitbake <replaceable>target</replaceable>
|
||||
$ bitbake <target>
|
||||
</literallayout>
|
||||
</para>
|
||||
</glossdef>
|
||||
@@ -1952,7 +1888,7 @@
|
||||
Here is the general syntax to specify versions with
|
||||
the <filename>RDEPENDS</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
RDEPENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
|
||||
RDEPENDS_${PN} = "<package> (<operator> <version>)"
|
||||
</literallayout>
|
||||
For <filename>operator</filename>, you can specify the
|
||||
following:
|
||||
@@ -2018,7 +1954,7 @@
|
||||
Here is the general syntax to specify versions with
|
||||
the <filename>RRECOMMENDS</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
RRECOMMENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
|
||||
RRECOMMENDS_${PN} = "<package> (<operator> <version>)"
|
||||
</literallayout>
|
||||
For <filename>operator</filename>, you can specify the
|
||||
following:
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
__version__ = "1.27.1"
|
||||
__version__ = "1.26.0"
|
||||
|
||||
import sys
|
||||
if sys.version_info < (2, 7, 3):
|
||||
|
||||
@@ -31,7 +31,6 @@ import logging
|
||||
import shlex
|
||||
import glob
|
||||
import time
|
||||
import stat
|
||||
import bb
|
||||
import bb.msg
|
||||
import bb.process
|
||||
@@ -43,20 +42,6 @@ logger = logging.getLogger('BitBake.Build')
|
||||
|
||||
NULL = open(os.devnull, 'r+')
|
||||
|
||||
__mtime_cache = {}
|
||||
|
||||
def cached_mtime_noerror(f):
|
||||
if f not in __mtime_cache:
|
||||
try:
|
||||
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
|
||||
except OSError:
|
||||
return 0
|
||||
return __mtime_cache[f]
|
||||
|
||||
def reset_cache():
|
||||
global __mtime_cache
|
||||
__mtime_cache = {}
|
||||
|
||||
# When we execute a Python function, we'd like certain things
|
||||
# in all namespaces, hence we add them to __builtins__.
|
||||
# If we do not do this and use the exec globals, they will
|
||||
@@ -159,7 +144,7 @@ class LogTee(object):
|
||||
def exec_func(func, d, dirs = None):
|
||||
"""Execute a BB 'function'"""
|
||||
|
||||
body = d.getVar(func, False)
|
||||
body = d.getVar(func)
|
||||
if not body:
|
||||
if body is None:
|
||||
logger.warn("Function %s doesn't exist", func)
|
||||
@@ -550,7 +535,7 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
|
||||
stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
|
||||
|
||||
stampdir = os.path.dirname(stamp)
|
||||
if cached_mtime_noerror(stampdir) == 0:
|
||||
if bb.parse.cached_mtime_noerror(stampdir) == 0:
|
||||
bb.utils.mkdirhier(stampdir)
|
||||
|
||||
return stamp
|
||||
@@ -646,7 +631,7 @@ def stampfile(taskname, d, file_name = None):
|
||||
return stamp_internal(taskname, d, file_name)
|
||||
|
||||
def add_tasks(tasklist, deltasklist, d):
|
||||
task_deps = d.getVar('_task_deps', False)
|
||||
task_deps = d.getVar('_task_deps')
|
||||
if not task_deps:
|
||||
task_deps = {}
|
||||
if not 'tasks' in task_deps:
|
||||
@@ -696,7 +681,7 @@ def addtask(task, before, after, d):
|
||||
task = "do_" + task
|
||||
|
||||
d.setVarFlag(task, "task", 1)
|
||||
bbtasks = d.getVar('__BBTASKS', False) or []
|
||||
bbtasks = d.getVar('__BBTASKS') or []
|
||||
if not task in bbtasks:
|
||||
bbtasks.append(task)
|
||||
d.setVar('__BBTASKS', bbtasks)
|
||||
@@ -719,7 +704,7 @@ def deltask(task, d):
|
||||
if task[:3] != "do_":
|
||||
task = "do_" + task
|
||||
|
||||
bbtasks = d.getVar('__BBDELTASKS', False) or []
|
||||
bbtasks = d.getVar('__BBDELTASKS') or []
|
||||
if not task in bbtasks:
|
||||
bbtasks.append(task)
|
||||
d.setVar('__BBDELTASKS', bbtasks)
|
||||
|
||||
@@ -659,25 +659,25 @@ class Cache(object):
|
||||
"""
|
||||
chdir_back = False
|
||||
|
||||
from bb import parse
|
||||
from bb import data, parse
|
||||
|
||||
# expand tmpdir to include this topdir
|
||||
config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
|
||||
data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
|
||||
bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
|
||||
oldpath = os.path.abspath(os.getcwd())
|
||||
parse.cached_mtime_noerror(bbfile_loc)
|
||||
bb_data = config.createCopy()
|
||||
bb_data = data.init_db(config)
|
||||
# The ConfHandler first looks if there is a TOPDIR and if not
|
||||
# then it would call getcwd().
|
||||
# Previously, we chdir()ed to bbfile_loc, called the handler
|
||||
# and finally chdir()ed back, a couple of thousand times. We now
|
||||
# just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
|
||||
if not bb_data.getVar('TOPDIR', False):
|
||||
if not data.getVar('TOPDIR', bb_data):
|
||||
chdir_back = True
|
||||
bb_data.setVar('TOPDIR', bbfile_loc)
|
||||
data.setVar('TOPDIR', bbfile_loc, bb_data)
|
||||
try:
|
||||
if appends:
|
||||
bb_data.setVar('__BBAPPEND', " ".join(appends))
|
||||
data.setVar('__BBAPPEND', " ".join(appends), bb_data)
|
||||
bb_data = parse.handle(bbfile, bb_data)
|
||||
if chdir_back:
|
||||
os.chdir(oldpath)
|
||||
|
||||
@@ -92,9 +92,6 @@ class pythonCacheLine(object):
|
||||
for c in sorted(self.contains.keys()):
|
||||
l = l + (c, hash(self.contains[c]))
|
||||
return hash(l)
|
||||
def __repr__(self):
|
||||
return " ".join([str(self.refs), str(self.execs), str(self.contains)])
|
||||
|
||||
|
||||
class shellCacheLine(object):
|
||||
def __init__(self, execs):
|
||||
@@ -108,8 +105,6 @@ class shellCacheLine(object):
|
||||
self.__init__(execs)
|
||||
def __hash__(self):
|
||||
return hash(self.execs)
|
||||
def __repr__(self):
|
||||
return str(self.execs)
|
||||
|
||||
class CodeParserCache(MultiProcessCache):
|
||||
cache_file_name = "bb_codeparser.dat"
|
||||
@@ -245,9 +240,6 @@ class PythonParser():
|
||||
self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
|
||||
|
||||
def parse_python(self, node):
|
||||
if not node or not node.strip():
|
||||
return
|
||||
|
||||
h = hash(str(node))
|
||||
|
||||
if h in codeparsercache.pythoncache:
|
||||
|
||||
@@ -267,12 +267,6 @@ class CommandsSync:
|
||||
features = params[0]
|
||||
command.cooker.setFeatures(features)
|
||||
|
||||
def unlockBitbake(self, command, params):
|
||||
"""
|
||||
Unlock bitbake.lock file
|
||||
"""
|
||||
command.cooker.unlockBitbake()
|
||||
|
||||
# although we change the internal state of the cooker, this is transparent since
|
||||
# we always take and leave the cooker in state.initial
|
||||
setFeatures.readonly = True
|
||||
|
||||
@@ -35,7 +35,7 @@ from contextlib import closing
|
||||
from functools import wraps
|
||||
from collections import defaultdict
|
||||
import bb, bb.exceptions, bb.command
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
|
||||
import Queue
|
||||
import signal
|
||||
import prserv.serv
|
||||
@@ -151,7 +151,9 @@ class BBCooker:
|
||||
|
||||
# Take a lock so only one copy of bitbake can run against a given build
|
||||
# directory at a time
|
||||
if not self.lockBitbake():
|
||||
lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
|
||||
self.lock = bb.utils.lockfile(lockfile, False, False)
|
||||
if not self.lock:
|
||||
bb.fatal("Only one copy of bitbake should be run against a build directory")
|
||||
try:
|
||||
self.lock.seek(0)
|
||||
@@ -384,7 +386,7 @@ class BBCooker:
|
||||
|
||||
replaced = False
|
||||
#do not save if nothing changed
|
||||
if str(val) == self.data.getVar(var, False):
|
||||
if str(val) == self.data.getVar(var):
|
||||
return
|
||||
|
||||
conf_files = self.data.varhistory.get_variable_files(var)
|
||||
@@ -396,7 +398,7 @@ class BBCooker:
|
||||
listval += "%s " % value
|
||||
val = listval
|
||||
|
||||
topdir = self.data.getVar("TOPDIR", False)
|
||||
topdir = self.data.getVar("TOPDIR")
|
||||
|
||||
#comment or replace operations made on var
|
||||
for conf_file in conf_files:
|
||||
@@ -451,7 +453,7 @@ class BBCooker:
|
||||
|
||||
def removeConfigurationVar(self, var):
|
||||
conf_files = self.data.varhistory.get_variable_files(var)
|
||||
topdir = self.data.getVar("TOPDIR", False)
|
||||
topdir = self.data.getVar("TOPDIR")
|
||||
|
||||
for conf_file in conf_files:
|
||||
if topdir in conf_file:
|
||||
@@ -491,7 +493,7 @@ class BBCooker:
|
||||
|
||||
def parseConfiguration(self):
|
||||
# Set log file verbosity
|
||||
verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
|
||||
verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
|
||||
if verboselogs:
|
||||
bb.msg.loggerVerboseLogs = True
|
||||
|
||||
@@ -611,7 +613,7 @@ class BBCooker:
|
||||
data.expandKeys(envdata)
|
||||
for e in envdata.keys():
|
||||
if data.getVarFlag( e, 'python', envdata ):
|
||||
logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, True))
|
||||
logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
|
||||
|
||||
|
||||
def buildTaskData(self, pkgs_to_build, task, abort):
|
||||
@@ -906,8 +908,8 @@ class BBCooker:
|
||||
for appends in appends_without_recipes
|
||||
for append in appends)
|
||||
msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
|
||||
warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
|
||||
False) or "no"
|
||||
warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
|
||||
self.data, False) or "no"
|
||||
if warn_only.lower() in ("1", "yes", "true"):
|
||||
bb.warn(msg)
|
||||
else:
|
||||
@@ -954,8 +956,8 @@ class BBCooker:
|
||||
# Generate a list of parsed configuration files by searching the files
|
||||
# listed in the __depends and __base_depends variables with a .conf suffix.
|
||||
conffiles = []
|
||||
dep_files = self.data.getVar('__base_depends', False) or []
|
||||
dep_files = dep_files + (self.data.getVar('__depends', False) or [])
|
||||
dep_files = self.data.getVar('__base_depends') or []
|
||||
dep_files = dep_files + (self.data.getVar('__depends') or [])
|
||||
|
||||
for f in dep_files:
|
||||
if f[0].endswith(".conf"):
|
||||
@@ -1172,12 +1174,9 @@ class BBCooker:
|
||||
"""
|
||||
Setup any variables needed before starting a build
|
||||
"""
|
||||
t = time.gmtime()
|
||||
if not self.data.getVar("BUILDNAME", False):
|
||||
self.data.setVar("BUILDNAME", "${DATE}${TIME}")
|
||||
self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
|
||||
self.data.setVar("DATE", time.strftime('%Y%m%d', t))
|
||||
self.data.setVar("TIME", time.strftime('%H%M%S', t))
|
||||
if not self.data.getVar("BUILDNAME"):
|
||||
self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
|
||||
self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
|
||||
|
||||
def matchFiles(self, bf):
|
||||
"""
|
||||
@@ -1276,7 +1275,7 @@ class BBCooker:
|
||||
taskdata = bb.taskdata.TaskData(self.configuration.abort)
|
||||
taskdata.add_provider(self.data, self.recipecache, item)
|
||||
|
||||
buildname = self.data.getVar("BUILDNAME", True)
|
||||
buildname = self.data.getVar("BUILDNAME")
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
|
||||
|
||||
# Execute the runqueue
|
||||
@@ -1304,8 +1303,8 @@ class BBCooker:
|
||||
return False
|
||||
|
||||
if not retval:
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.expanded_data)
|
||||
self.command.finishAsyncCommand(msg)
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.expanded_data)
|
||||
return False
|
||||
if retval is True:
|
||||
return True
|
||||
@@ -1337,19 +1336,18 @@ class BBCooker:
|
||||
return False
|
||||
|
||||
if not retval:
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
|
||||
self.command.finishAsyncCommand(msg)
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
|
||||
return False
|
||||
if retval is True:
|
||||
return True
|
||||
return retval
|
||||
|
||||
build.reset_cache()
|
||||
self.buildSetVars()
|
||||
|
||||
taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
|
||||
|
||||
buildname = self.data.getVar("BUILDNAME", False)
|
||||
buildname = self.data.getVar("BUILDNAME")
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)
|
||||
|
||||
rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
|
||||
@@ -1403,7 +1401,7 @@ class BBCooker:
|
||||
if base_image is None:
|
||||
imagefile.write("inherit core-image\n")
|
||||
else:
|
||||
topdir = self.data.getVar("TOPDIR", False)
|
||||
topdir = self.data.getVar("TOPDIR")
|
||||
if topdir in base_image:
|
||||
base_image = require_line.split()[1]
|
||||
imagefile.write("require " + base_image + "\n")
|
||||
@@ -1463,7 +1461,7 @@ class BBCooker:
|
||||
(filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
|
||||
|
||||
self.data.renameVar("__depends", "__base_depends")
|
||||
self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
|
||||
self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher)
|
||||
|
||||
self.parser = CookerParser(self, filelist, masked)
|
||||
self.parsecache_valid = True
|
||||
@@ -1478,11 +1476,6 @@ class BBCooker:
|
||||
self.handlePrefProviders()
|
||||
self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
|
||||
self.state = state.running
|
||||
|
||||
# Send an event listing all stamps reachable after parsing
|
||||
# which the metadata may use to clean up stale data
|
||||
event = bb.event.ReachableStamps(self.recipecache.stamp)
|
||||
bb.event.fire(event, self.expanded_data)
|
||||
return None
|
||||
|
||||
return True
|
||||
@@ -1545,19 +1538,6 @@ class BBCooker:
|
||||
def reset(self):
|
||||
self.initConfigurationData()
|
||||
|
||||
def lockBitbake(self):
|
||||
if not hasattr(self, 'lock'):
|
||||
self.lock = None
|
||||
if self.data:
|
||||
lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
|
||||
if lockfile:
|
||||
self.lock = bb.utils.lockfile(lockfile, False, False)
|
||||
return self.lock
|
||||
|
||||
def unlockBitbake(self):
|
||||
if hasattr(self, 'lock') and self.lock:
|
||||
bb.utils.unlockfile(self.lock)
|
||||
|
||||
def server_main(cooker, func, *args):
|
||||
cooker.pre_serve()
|
||||
|
||||
@@ -1817,6 +1797,8 @@ class Parser(multiprocessing.Process):
|
||||
finally:
|
||||
logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
|
||||
prof.dump_stats(logfile)
|
||||
bb.utils.process_profilelog(logfile)
|
||||
print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))
|
||||
|
||||
def realrun(self):
|
||||
if self.init:
|
||||
@@ -1886,7 +1868,6 @@ class CookerParser(object):
|
||||
self.current = 0
|
||||
self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
|
||||
multiprocessing.cpu_count())
|
||||
self.process_names = []
|
||||
|
||||
self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
|
||||
self.fromcache = []
|
||||
@@ -1922,7 +1903,6 @@ class CookerParser(object):
|
||||
for i in range(0, self.num_processes):
|
||||
parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
|
||||
parser.start()
|
||||
self.process_names.append(parser.name)
|
||||
self.processes.append(parser)
|
||||
|
||||
self.results = itertools.chain(self.results, self.parse_generator())
|
||||
@@ -1966,16 +1946,6 @@ class CookerParser(object):
|
||||
multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
|
||||
bb.codeparser.parser_cache_savemerge(self.cooker.data)
|
||||
bb.fetch.fetcher_parse_done(self.cooker.data)
|
||||
if self.cooker.configuration.profile:
|
||||
profiles = []
|
||||
for i in self.process_names:
|
||||
logfile = "profile-parse-%s.log" % i
|
||||
if os.path.exists(logfile):
|
||||
profiles.append(logfile)
|
||||
|
||||
pout = "profile-parse.log.processed"
|
||||
bb.utils.process_profilelog(profiles, pout = pout)
|
||||
print("Processed parsing statistics saved to %s" % (pout))
|
||||
|
||||
def load_cached(self):
|
||||
for filename, appends in self.fromcache:
|
||||
|
||||
@@ -269,11 +269,8 @@ class CookerDataBuilder(object):
|
||||
layers = (data.getVar('BBLAYERS', True) or "").split()
|
||||
|
||||
data = bb.data.createCopy(data)
|
||||
approved = bb.utils.approved_variables()
|
||||
for layer in layers:
|
||||
parselog.debug(2, "Adding layer %s", layer)
|
||||
if 'HOME' in approved and '~' in layer:
|
||||
layer = os.path.expanduser(layer)
|
||||
data.setVar('LAYERDIR', layer)
|
||||
data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
|
||||
data.expandVarref('LAYERDIR')
|
||||
@@ -301,15 +298,15 @@ class CookerDataBuilder(object):
|
||||
|
||||
# Nomally we only register event handlers at the end of parsing .bb files
|
||||
# We register any handlers we've found so far here...
|
||||
for var in data.getVar('__BBHANDLERS', False) or []:
|
||||
bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split())
|
||||
for var in data.getVar('__BBHANDLERS') or []:
|
||||
bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())
|
||||
|
||||
if data.getVar("BB_WORKERCONTEXT", False) is None:
|
||||
bb.fetch.fetcher_init(data)
|
||||
bb.codeparser.parser_cache_init(data)
|
||||
bb.event.fire(bb.event.ConfigParsed(), data)
|
||||
|
||||
if data.getVar("BB_INVALIDCONF", False) is True:
|
||||
if data.getVar("BB_INVALIDCONF") is True:
|
||||
data.setVar("BB_INVALIDCONF", False)
|
||||
self.parseConfigurationFiles(self.prefiles, self.postfiles)
|
||||
return
|
||||
|
||||
@@ -159,12 +159,13 @@ def expandKeys(alterdata, readdata = None):
|
||||
|
||||
# These two for loops are split for performance to maximise the
|
||||
# usefulness of the expand cache
|
||||
for key in sorted(todolist):
|
||||
|
||||
for key in todolist:
|
||||
ekey = todolist[key]
|
||||
newval = alterdata.getVar(ekey, 0)
|
||||
if newval is not None:
|
||||
if newval:
|
||||
val = alterdata.getVar(key, 0)
|
||||
if val is not None:
|
||||
if val is not None and newval is not None:
|
||||
bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
|
||||
alterdata.renameVar(key, ekey)
|
||||
|
||||
@@ -419,7 +420,7 @@ def generate_dependencies(d):
|
||||
deps = {}
|
||||
values = {}
|
||||
|
||||
tasklist = d.getVar('__BBTASKS', False) or []
|
||||
tasklist = d.getVar('__BBTASKS') or []
|
||||
for task in tasklist:
|
||||
deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
|
||||
newdeps = deps[task]
|
||||
|
||||
@@ -72,10 +72,6 @@ _eventfilter = None
|
||||
|
||||
def execute_handler(name, handler, event, d):
|
||||
event.data = d
|
||||
addedd = False
|
||||
if 'd' not in __builtins__:
|
||||
__builtins__['d'] = d
|
||||
addedd = True
|
||||
try:
|
||||
ret = handler(event)
|
||||
except (bb.parse.SkipRecipe, bb.BBHandledException):
|
||||
@@ -91,8 +87,6 @@ def execute_handler(name, handler, event, d):
|
||||
raise
|
||||
finally:
|
||||
del event.data
|
||||
if addedd:
|
||||
del __builtins__['d']
|
||||
|
||||
def fire_class_handlers(event, d):
|
||||
if isinstance(event, logging.LogRecord):
|
||||
@@ -503,16 +497,6 @@ class TargetsTreeGenerated(Event):
|
||||
Event.__init__(self)
|
||||
self._model = model
|
||||
|
||||
class ReachableStamps(Event):
|
||||
"""
|
||||
An event listing all stamps reachable after parsing
|
||||
which the metadata may use to clean up stale data
|
||||
"""
|
||||
|
||||
def __init__(self, stamps):
|
||||
Event.__init__(self)
|
||||
self.stamps = stamps
|
||||
|
||||
class FilesMatchingFound(Event):
|
||||
"""
|
||||
Event when a list of files matching the supplied pattern has
|
||||
|
||||
@@ -61,17 +61,6 @@ class BBFetchException(Exception):
|
||||
def __str__(self):
|
||||
return self.msg
|
||||
|
||||
class UntrustedUrl(BBFetchException):
|
||||
"""Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
|
||||
def __init__(self, url, message=''):
|
||||
if message:
|
||||
msg = message
|
||||
else:
|
||||
msg = "The URL: '%s' is not trusted and cannot be used" % url
|
||||
self.url = url
|
||||
BBFetchException.__init__(self, msg)
|
||||
self.args = (url,)
|
||||
|
||||
class MalformedUrl(BBFetchException):
|
||||
"""Exception raised when encountering an invalid url"""
|
||||
def __init__(self, url, message=''):
|
||||
@@ -720,7 +709,7 @@ def get_autorev(d):
|
||||
d.setVar('__BB_DONT_CACHE', '1')
|
||||
return "AUTOINC"
|
||||
|
||||
def get_srcrev(d, method_name='sortable_revision'):
|
||||
def get_srcrev(d):
|
||||
"""
|
||||
Return the revsion string, usually for use in the version string (PV) of the current package
|
||||
Most packages usually only have one SCM so we just pass on the call.
|
||||
@@ -729,9 +718,6 @@ def get_srcrev(d, method_name='sortable_revision'):
|
||||
|
||||
The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
|
||||
incremental, other code is then responsible for turning that into an increasing value (if needed)
|
||||
|
||||
A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
|
||||
that fetcher provides a method with the given name and the same signature as sortable_revision.
|
||||
"""
|
||||
|
||||
scms = []
|
||||
@@ -745,7 +731,7 @@ def get_srcrev(d, method_name='sortable_revision'):
|
||||
raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
|
||||
|
||||
if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
|
||||
autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
|
||||
autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
|
||||
if len(rev) > 10:
|
||||
rev = rev[:10]
|
||||
if autoinc:
|
||||
@@ -763,7 +749,7 @@ def get_srcrev(d, method_name='sortable_revision'):
|
||||
for scm in scms:
|
||||
ud = urldata[scm]
|
||||
for name in ud.names:
|
||||
autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
|
||||
autoinc, rev = ud.method.sortable_revision(ud, d, name)
|
||||
seenautoinc = seenautoinc or autoinc
|
||||
if len(rev) > 10:
|
||||
rev = rev[:10]
|
||||
@@ -797,8 +783,6 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
|
||||
'NO_PROXY', 'no_proxy',
|
||||
'ALL_PROXY', 'all_proxy',
|
||||
'GIT_PROXY_COMMAND',
|
||||
'GIT_SSL_CAINFO',
|
||||
'GIT_SMART_HTTP',
|
||||
'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
|
||||
'SOCKS5_USER', 'SOCKS5_PASSWD']
|
||||
|
||||
@@ -867,11 +851,6 @@ def build_mirroruris(origud, mirrors, ld):
|
||||
newuri = uri_replace(ud, find, replace, replacements, ld)
|
||||
if not newuri or newuri in uris or newuri == origud.url:
|
||||
continue
|
||||
|
||||
if not trusted_network(ld, newuri):
|
||||
logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
|
||||
continue
|
||||
|
||||
try:
|
||||
newud = FetchData(newuri, ld)
|
||||
newud.setup_localpath(ld)
|
||||
@@ -879,9 +858,7 @@ def build_mirroruris(origud, mirrors, ld):
|
||||
logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
|
||||
logger.debug(1, str(e))
|
||||
try:
|
||||
# setup_localpath of file:// urls may fail, we should still see
|
||||
# if mirrors of the url exist
|
||||
adduri(newud, uris, uds)
|
||||
ud.method.clean(ud, ld)
|
||||
except UnboundLocalError:
|
||||
pass
|
||||
continue
|
||||
@@ -907,12 +884,12 @@ def rename_bad_checksum(ud, suffix):
|
||||
bb.utils.movefile(ud.localpath, new_localpath)
|
||||
|
||||
|
||||
def try_mirror_url(fetch, origud, ud, ld, check = False):
|
||||
def try_mirror_url(origud, ud, ld, check = False):
|
||||
# Return of None or a value means we're finished
|
||||
# False means try another url
|
||||
try:
|
||||
if check:
|
||||
found = ud.method.checkstatus(fetch, ud, ld)
|
||||
found = ud.method.checkstatus(ud, ld)
|
||||
if found:
|
||||
return found
|
||||
return False
|
||||
@@ -975,7 +952,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
|
||||
pass
|
||||
return False
|
||||
|
||||
def try_mirrors(fetch, d, origud, mirrors, check = False):
|
||||
def try_mirrors(d, origud, mirrors, check = False):
|
||||
"""
|
||||
Try to use a mirrored version of the sources.
|
||||
This method will be automatically called before the fetchers go.
|
||||
@@ -989,46 +966,11 @@ def try_mirrors(fetch, d, origud, mirrors, check = False):
|
||||
uris, uds = build_mirroruris(origud, mirrors, ld)
|
||||
|
||||
for index, uri in enumerate(uris):
|
||||
ret = try_mirror_url(fetch, origud, uds[index], ld, check)
|
||||
ret = try_mirror_url(origud, uds[index], ld, check)
|
||||
if ret != False:
|
||||
return ret
|
||||
return None
|
||||
|
||||
def trusted_network(d, url):
|
||||
"""
|
||||
Use a trusted url during download if networking is enabled and
|
||||
BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
|
||||
Note: modifies SRC_URI & mirrors.
|
||||
"""
|
||||
if d.getVar('BB_NO_NETWORK', True) == "1":
|
||||
return True
|
||||
|
||||
pkgname = d.expand(d.getVar('PN', False))
|
||||
trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname)
|
||||
|
||||
if not trusted_hosts:
|
||||
trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
|
||||
|
||||
# Not enabled.
|
||||
if not trusted_hosts:
|
||||
return True
|
||||
|
||||
scheme, network, path, user, passwd, param = decodeurl(url)
|
||||
|
||||
if not network:
|
||||
return True
|
||||
|
||||
network = network.lower()
|
||||
|
||||
for host in trusted_hosts.split(" "):
|
||||
host = host.lower()
|
||||
if host.startswith("*.") and ("." + network).endswith(host[1:]):
|
||||
return True
|
||||
if host == network:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def srcrev_internal_helper(ud, d, name):
|
||||
"""
|
||||
Return:
|
||||
@@ -1473,7 +1415,7 @@ class FetchMethod(object):
|
||||
"""
|
||||
return True
|
||||
|
||||
def checkstatus(self, fetch, urldata, d):
|
||||
def checkstatus(self, urldata, d):
|
||||
"""
|
||||
Check the status of a URL
|
||||
Assumes localpath was called first
|
||||
@@ -1577,7 +1519,7 @@ class Fetch(object):
|
||||
elif m.try_premirror(ud, self.d):
|
||||
logger.debug(1, "Trying PREMIRRORS")
|
||||
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
|
||||
localpath = try_mirrors(self, self.d, ud, mirrors, False)
|
||||
localpath = try_mirrors(self.d, ud, mirrors, False)
|
||||
|
||||
if premirroronly:
|
||||
self.d.setVar("BB_NO_NETWORK", "1")
|
||||
@@ -1587,8 +1529,6 @@ class Fetch(object):
|
||||
firsterr = None
|
||||
if not localpath and ((not verify_donestamp(ud, self.d)) or m.need_update(ud, self.d)):
|
||||
try:
|
||||
if not trusted_network(self.d, ud.url):
|
||||
raise UntrustedUrl(ud.url)
|
||||
logger.debug(1, "Trying Upstream")
|
||||
m.download(ud, self.d)
|
||||
if hasattr(m, "build_mirror_data"):
|
||||
@@ -1616,7 +1556,7 @@ class Fetch(object):
|
||||
m.clean(ud, self.d)
|
||||
logger.debug(1, "Trying MIRRORS")
|
||||
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
|
||||
localpath = try_mirrors(self, self.d, ud, mirrors)
|
||||
localpath = try_mirrors (self.d, ud, mirrors)
|
||||
|
||||
if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
|
||||
if firsterr:
|
||||
@@ -1648,15 +1588,15 @@ class Fetch(object):
|
||||
logger.debug(1, "Testing URL %s", u)
|
||||
# First try checking uri, u, from PREMIRRORS
|
||||
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
|
||||
ret = try_mirrors(self, self.d, ud, mirrors, True)
|
||||
ret = try_mirrors(self.d, ud, mirrors, True)
|
||||
if not ret:
|
||||
# Next try checking from the original uri, u
|
||||
try:
|
||||
ret = m.checkstatus(self, ud, self.d)
|
||||
ret = m.checkstatus(ud, self.d)
|
||||
except:
|
||||
# Finally, try checking uri, u, from MIRRORS
|
||||
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
|
||||
ret = try_mirrors(self, self.d, ud, mirrors, True)
|
||||
ret = try_mirrors(self.d, ud, mirrors, True)
|
||||
|
||||
if not ret:
|
||||
raise FetchError("URL %s doesn't work" % u, u)
|
||||
@@ -1711,42 +1651,6 @@ class Fetch(object):
|
||||
if ud.lockfile:
|
||||
bb.utils.unlockfile(lf)
|
||||
|
||||
class FetchConnectionCache(object):
|
||||
"""
|
||||
A class which represents an container for socket connections.
|
||||
"""
|
||||
def __init__(self):
|
||||
self.cache = {}
|
||||
|
||||
def get_connection_name(self, host, port):
|
||||
return host + ':' + str(port)
|
||||
|
||||
def add_connection(self, host, port, connection):
|
||||
cn = self.get_connection_name(host, port)
|
||||
|
||||
if cn not in self.cache:
|
||||
self.cache[cn] = connection
|
||||
|
||||
def get_connection(self, host, port):
|
||||
connection = None
|
||||
|
||||
cn = self.get_connection_name(host, port)
|
||||
if cn in self.cache:
|
||||
connection = self.cache[cn]
|
||||
|
||||
return connection
|
||||
|
||||
def remove_connection(self, host, port):
|
||||
cn = self.get_connection_name(host, port)
|
||||
if cn in self.cache:
|
||||
self.cache[cn].close()
|
||||
del self.cache[cn]
|
||||
|
||||
def close_connections(self):
|
||||
for cn in self.cache.keys():
|
||||
self.cache[cn].close()
|
||||
del self.cache[cn]
|
||||
|
||||
from . import cvs
|
||||
from . import git
|
||||
from . import gitsm
|
||||
|
||||
@@ -9,7 +9,7 @@ Usage in the recipe:
|
||||
|
||||
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
|
||||
SRCREV = "EXAMPLE_CLEARCASE_TAG"
|
||||
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
|
||||
PV = "${@d.getVar("SRCREV").replace("/", "+")}"
|
||||
|
||||
The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
|
||||
|
||||
@@ -113,7 +113,7 @@ class ClearCase(FetchMethod):
|
||||
if data.getVar("SRCREV", d, True) == "INVALID":
|
||||
raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
|
||||
|
||||
ud.label = d.getVar("SRCREV", False)
|
||||
ud.label = d.getVar("SRCREV")
|
||||
ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
|
||||
|
||||
ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
|
||||
|
||||
@@ -178,6 +178,11 @@ class Git(FetchMethod):
|
||||
def download(self, ud, d):
|
||||
"""Fetch url"""
|
||||
|
||||
if ud.user:
|
||||
username = ud.user + '@'
|
||||
else:
|
||||
username = ""
|
||||
|
||||
ud.repochanged = not os.path.exists(ud.fullmirror)
|
||||
|
||||
# If the checkout doesn't exist and the mirror tarball does, extract it
|
||||
@@ -186,7 +191,7 @@ class Git(FetchMethod):
|
||||
os.chdir(ud.clonedir)
|
||||
runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
|
||||
|
||||
repourl = self._get_repo_url(ud)
|
||||
repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
|
||||
|
||||
# If the repo still doesn't exist, fallback to cloning it
|
||||
if not os.path.exists(ud.clonedir):
|
||||
@@ -241,7 +246,7 @@ class Git(FetchMethod):
|
||||
subdir = ud.parm.get("subpath", "")
|
||||
if subdir != "":
|
||||
readpathspec = ":%s" % (subdir)
|
||||
def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
|
||||
def_destsuffix = "%s/" % os.path.basename(subdir)
|
||||
else:
|
||||
readpathspec = ""
|
||||
def_destsuffix = "git/"
|
||||
@@ -272,10 +277,8 @@ class Git(FetchMethod):
|
||||
clonedir = indirectiondir
|
||||
|
||||
runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
|
||||
os.chdir(destdir)
|
||||
repourl = self._get_repo_url(ud)
|
||||
runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
|
||||
if not ud.nocheckout:
|
||||
os.chdir(destdir)
|
||||
if subdir != "":
|
||||
runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
|
||||
runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
|
||||
@@ -309,16 +312,6 @@ class Git(FetchMethod):
|
||||
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
|
||||
return output.split()[0] != "0"
|
||||
|
||||
def _get_repo_url(self, ud):
|
||||
"""
|
||||
Return the repository URL
|
||||
"""
|
||||
if ud.user:
|
||||
username = ud.user + '@'
|
||||
else:
|
||||
username = ""
|
||||
return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
|
||||
|
||||
def _revision_key(self, ud, d, name):
|
||||
"""
|
||||
Return a unique key for the url
|
||||
@@ -329,9 +322,13 @@ class Git(FetchMethod):
|
||||
"""
|
||||
Run git ls-remote with the specified search string
|
||||
"""
|
||||
repourl = self._get_repo_url(ud)
|
||||
cmd = "%s ls-remote %s %s" % \
|
||||
(ud.basecmd, repourl, search)
|
||||
if ud.user:
|
||||
username = ud.user + '@'
|
||||
else:
|
||||
username = ""
|
||||
|
||||
cmd = "%s ls-remote %s://%s%s%s %s" % \
|
||||
(ud.basecmd, ud.proto, username, ud.host, ud.path, search)
|
||||
if ud.proto.lower() != 'file':
|
||||
bb.fetch2.check_network_access(d, cmd)
|
||||
output = runfetchcmd(cmd, d, True)
|
||||
@@ -355,8 +352,7 @@ class Git(FetchMethod):
|
||||
for l in output.split('\n'):
|
||||
if s in l:
|
||||
return l.split()[0]
|
||||
raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
|
||||
(ud.unresolvedrev[name], ud.host+ud.path))
|
||||
raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output" % ud.unresolvedrev[name])
|
||||
|
||||
def latest_versionstring(self, ud, d):
|
||||
"""
|
||||
@@ -398,34 +394,10 @@ class Git(FetchMethod):
|
||||
def _build_revision(self, ud, d, name):
|
||||
return ud.revisions[name]
|
||||
|
||||
def gitpkgv_revision(self, ud, d, name):
|
||||
"""
|
||||
Return a sortable revision number by counting commits in the history
|
||||
Based on gitpkgv.bblass in meta-openembedded
|
||||
"""
|
||||
rev = self._build_revision(ud, d, name)
|
||||
localpath = ud.localpath
|
||||
rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
|
||||
if not os.path.exists(localpath):
|
||||
commits = None
|
||||
else:
|
||||
if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
|
||||
from pipes import quote
|
||||
commits = bb.fetch2.runfetchcmd(
|
||||
"git rev-list %s -- | wc -l" % (quote(rev)),
|
||||
d, quiet=True).strip().lstrip('0')
|
||||
if commits:
|
||||
open(rev_file, "w").write("%d\n" % int(commits))
|
||||
else:
|
||||
commits = open(rev_file, "r").readline(128).strip()
|
||||
if commits:
|
||||
return False, "%s+%s" % (commits, rev[:7])
|
||||
else:
|
||||
return True, str(rev)
|
||||
|
||||
def checkstatus(self, fetch, ud, d):
|
||||
def checkstatus(self, ud, d):
|
||||
fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
|
||||
try:
|
||||
self._lsremote(ud, d, "")
|
||||
runfetchcmd(fetchcmd, d, quiet=True)
|
||||
return True
|
||||
except FetchError:
|
||||
return False
|
||||
|
||||
@@ -109,7 +109,6 @@ class GitSM(Git):
|
||||
runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
|
||||
os.chdir(tmpclonedir)
|
||||
runfetchcmd(ud.basecmd + " reset --hard", d)
|
||||
runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
|
||||
runfetchcmd(ud.basecmd + " submodule init", d)
|
||||
runfetchcmd(ud.basecmd + " submodule update", d)
|
||||
self._set_relative_paths(tmpclonedir)
|
||||
|
||||
@@ -43,13 +43,6 @@ class Hg(FetchMethod):
|
||||
"""
|
||||
return ud.type in ['hg']
|
||||
|
||||
def supports_checksum(self, urldata):
|
||||
"""
|
||||
Don't require checksums for local archives created from
|
||||
repository checkouts.
|
||||
"""
|
||||
return False
|
||||
|
||||
def urldata_init(self, ud, d):
|
||||
"""
|
||||
init hg specific variable within url data
|
||||
@@ -59,12 +52,10 @@ class Hg(FetchMethod):
|
||||
|
||||
ud.module = ud.parm["module"]
|
||||
|
||||
if 'protocol' in ud.parm:
|
||||
ud.proto = ud.parm['protocol']
|
||||
elif not ud.host:
|
||||
ud.proto = 'file'
|
||||
else:
|
||||
ud.proto = "hg"
|
||||
# Create paths to mercurial checkouts
|
||||
relpath = self._strip_leading_slashes(ud.path)
|
||||
ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
|
||||
ud.moddir = os.path.join(ud.pkgdir, ud.module)
|
||||
|
||||
ud.setup_revisons(d)
|
||||
|
||||
@@ -73,19 +64,7 @@ class Hg(FetchMethod):
|
||||
elif not ud.revision:
|
||||
ud.revision = self.latest_revision(ud, d)
|
||||
|
||||
# Create paths to mercurial checkouts
|
||||
hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
|
||||
ud.host, ud.path.replace('/', '.'))
|
||||
ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
|
||||
ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
|
||||
|
||||
hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
|
||||
ud.pkgdir = os.path.join(hgdir, hgsrcname)
|
||||
ud.moddir = os.path.join(ud.pkgdir, ud.module)
|
||||
ud.localfile = ud.moddir
|
||||
ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"
|
||||
|
||||
ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
|
||||
ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
|
||||
|
||||
def need_update(self, ud, d):
|
||||
revTag = ud.parm.get('rev', 'tip')
|
||||
@@ -95,21 +74,14 @@ class Hg(FetchMethod):
|
||||
return True
|
||||
return False
|
||||
|
||||
def try_premirror(self, ud, d):
|
||||
# If we don't do this, updating an existing checkout with only premirrors
|
||||
# is not possible
|
||||
if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
|
||||
return True
|
||||
if os.path.exists(ud.moddir):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _buildhgcommand(self, ud, d, command):
|
||||
"""
|
||||
Build up an hg commandline based on ud
|
||||
command is "fetch", "update", "info"
|
||||
"""
|
||||
|
||||
basecmd = data.expand('${FETCHCMD_hg}', d)
|
||||
|
||||
proto = ud.parm.get('protocol', 'http')
|
||||
|
||||
host = ud.host
|
||||
@@ -126,7 +98,7 @@ class Hg(FetchMethod):
|
||||
hgroot = ud.user + "@" + host + ud.path
|
||||
|
||||
if command == "info":
|
||||
return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
|
||||
return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
|
||||
|
||||
options = [];
|
||||
|
||||
@@ -139,22 +111,22 @@ class Hg(FetchMethod):
|
||||
|
||||
if command == "fetch":
|
||||
if ud.user and ud.pswd:
|
||||
cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
|
||||
cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
|
||||
else:
|
||||
cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
|
||||
cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
|
||||
elif command == "pull":
|
||||
# do not pass options list; limiting pull to rev causes the local
|
||||
# repo not to contain it and immediately following "update" command
|
||||
# will crash
|
||||
if ud.user and ud.pswd:
|
||||
cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
|
||||
cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
|
||||
else:
|
||||
cmd = "%s pull" % (ud.basecmd)
|
||||
cmd = "%s pull" % (basecmd)
|
||||
elif command == "update":
|
||||
if ud.user and ud.pswd:
|
||||
cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
|
||||
cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
|
||||
else:
|
||||
cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
|
||||
cmd = "%s update -C %s" % (basecmd, " ".join(options))
|
||||
else:
|
||||
raise FetchError("Invalid hg command %s" % command, ud.url)
|
||||
|
||||
@@ -163,36 +135,18 @@ class Hg(FetchMethod):
|
||||
def download(self, ud, d):
|
||||
"""Fetch url"""
|
||||
|
||||
ud.repochanged = not os.path.exists(ud.fullmirror)
|
||||
|
||||
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||
|
||||
# If the checkout doesn't exist and the mirror tarball does, extract it
|
||||
if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
|
||||
bb.utils.mkdirhier(ud.pkgdir)
|
||||
os.chdir(ud.pkgdir)
|
||||
runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
|
||||
|
||||
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
|
||||
# Found the source, check whether need pull
|
||||
updatecmd = self._buildhgcommand(ud, d, "update")
|
||||
updatecmd = self._buildhgcommand(ud, d, "pull")
|
||||
logger.info("Update " + ud.url)
|
||||
# update sources there
|
||||
os.chdir(ud.moddir)
|
||||
logger.debug(1, "Running %s", updatecmd)
|
||||
try:
|
||||
runfetchcmd(updatecmd, d)
|
||||
except bb.fetch2.FetchError:
|
||||
# Runnning pull in the repo
|
||||
pullcmd = self._buildhgcommand(ud, d, "pull")
|
||||
logger.info("Pulling " + ud.url)
|
||||
# update sources there
|
||||
os.chdir(ud.moddir)
|
||||
logger.debug(1, "Running %s", pullcmd)
|
||||
bb.fetch2.check_network_access(d, pullcmd, ud.url)
|
||||
runfetchcmd(pullcmd, d)
|
||||
ud.repochanged = True
|
||||
bb.fetch2.check_network_access(d, updatecmd, ud.url)
|
||||
runfetchcmd(updatecmd, d)
|
||||
|
||||
# No source found, clone it.
|
||||
if not os.path.exists(ud.moddir):
|
||||
else:
|
||||
fetchcmd = self._buildhgcommand(ud, d, "fetch")
|
||||
logger.info("Fetch " + ud.url)
|
||||
# check out sources there
|
||||
@@ -209,12 +163,14 @@ class Hg(FetchMethod):
|
||||
logger.debug(1, "Running %s", updatecmd)
|
||||
runfetchcmd(updatecmd, d)
|
||||
|
||||
def clean(self, ud, d):
|
||||
""" Clean the hg dir """
|
||||
scmdata = ud.parm.get("scmdata", "")
|
||||
if scmdata == "keep":
|
||||
tar_flags = ""
|
||||
else:
|
||||
tar_flags = "--exclude '.hg' --exclude '.hgrags'"
|
||||
|
||||
bb.utils.remove(ud.localpath, True)
|
||||
bb.utils.remove(ud.fullmirror)
|
||||
bb.utils.remove(ud.fullmirror + ".done")
|
||||
os.chdir(ud.pkgdir)
|
||||
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
|
||||
|
||||
def supports_srcrev(self):
|
||||
return True
|
||||
@@ -235,41 +191,3 @@ class Hg(FetchMethod):
|
||||
Return a unique key for the url
|
||||
"""
|
||||
return "hg:" + ud.moddir
|
||||
|
||||
def build_mirror_data(self, ud, d):
|
||||
# Generate a mirror tarball if needed
|
||||
if ud.write_tarballs == "1" and (ud.repochanged or not os.path.exists(ud.fullmirror)):
|
||||
# it's possible that this symlink points to read-only filesystem with PREMIRROR
|
||||
if os.path.islink(ud.fullmirror):
|
||||
os.unlink(ud.fullmirror)
|
||||
|
||||
os.chdir(ud.pkgdir)
|
||||
logger.info("Creating tarball of hg repository")
|
||||
runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
|
||||
runfetchcmd("touch %s.done" % (ud.fullmirror), d)
|
||||
|
||||
def localpath(self, ud, d):
|
||||
return ud.pkgdir
|
||||
|
||||
def unpack(self, ud, destdir, d):
|
||||
"""
|
||||
Make a local clone or export for the url
|
||||
"""
|
||||
|
||||
revflag = "-r %s" % ud.revision
|
||||
subdir = ud.parm.get("destsuffix", ud.module)
|
||||
codir = "%s/%s" % (destdir, subdir)
|
||||
|
||||
scmdata = ud.parm.get("scmdata", "")
|
||||
if scmdata != "nokeep":
|
||||
if not os.access(os.path.join(codir, '.hg'), os.R_OK):
|
||||
logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
|
||||
runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
|
||||
logger.debug(2, "Unpack: updating source in '" + codir + "'")
|
||||
os.chdir(codir)
|
||||
runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
|
||||
runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
|
||||
else:
|
||||
logger.debug(2, "Unpack: extracting source to '" + codir + "'")
|
||||
os.chdir(ud.moddir)
|
||||
runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)
|
||||
|
||||
@@ -112,7 +112,7 @@ class Local(FetchMethod):
|
||||
|
||||
return True
|
||||
|
||||
def checkstatus(self, fetch, urldata, d):
|
||||
def checkstatus(self, urldata, d):
|
||||
"""
|
||||
Check the status of the url
|
||||
"""
|
||||
|
||||
@@ -48,7 +48,7 @@ class Perforce(FetchMethod):
|
||||
(user, pswd, host, port) = path.split('@')[0].split(":")
|
||||
path = path.split('@')[1]
|
||||
else:
|
||||
(host, port) = d.getVar('P4PORT', False).split(':')
|
||||
(host, port) = d.getVar('P4PORT').split(':')
|
||||
user = ""
|
||||
pswd = ""
|
||||
|
||||
@@ -123,7 +123,7 @@ class Perforce(FetchMethod):
|
||||
if depot.find('/...') != -1:
|
||||
path = depot[:depot.find('/...')]
|
||||
else:
|
||||
path = depot[:depot.rfind('/')]
|
||||
path = depot
|
||||
|
||||
module = parm.get('module', os.path.basename(path))
|
||||
|
||||
|
||||
@@ -100,7 +100,7 @@ class Wget(FetchMethod):
|
||||
|
||||
return True
|
||||
|
||||
def checkstatus(self, fetch, ud, d):
|
||||
def checkstatus(self, ud, d):
|
||||
|
||||
uri = ud.url.split(";")[0]
|
||||
fetchcmd = self.basecmd + " --spider '%s'" % uri
|
||||
|
||||
@@ -36,9 +36,10 @@ from bb import ui
|
||||
from bb import server
|
||||
from bb import cookerdata
|
||||
|
||||
__version__ = "1.26.0"
|
||||
logger = logging.getLogger("BitBake")
|
||||
|
||||
class BBMainException(Exception):
|
||||
class BBMainException(bb.BBHandledException):
|
||||
pass
|
||||
|
||||
def get_ui(config):
|
||||
@@ -82,8 +83,8 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
|
||||
|
||||
def parseCommandLine(self, argv=sys.argv):
|
||||
parser = optparse.OptionParser(
|
||||
version = "BitBake Build Tool Core version %s" % bb.__version__,
|
||||
usage = """%prog [options] [recipename/target recipe:do_task ...]
|
||||
version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
|
||||
usage = """%prog [options] [recipename/target ...]
|
||||
|
||||
Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
|
||||
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
|
||||
|
||||
@@ -81,7 +81,7 @@ def update_cache(f):
|
||||
def mark_dependency(d, f):
|
||||
if f.startswith('./'):
|
||||
f = "%s/%s" % (os.getcwd(), f[2:])
|
||||
deps = (d.getVar('__depends', False) or [])
|
||||
deps = (d.getVar('__depends') or [])
|
||||
s = (f, cached_mtime_noerror(f))
|
||||
if s not in deps:
|
||||
deps.append(s)
|
||||
@@ -89,7 +89,7 @@ def mark_dependency(d, f):
|
||||
|
||||
def check_dependency(d, f):
|
||||
s = (f, cached_mtime_noerror(f))
|
||||
deps = (d.getVar('__depends', False) or [])
|
||||
deps = (d.getVar('__depends') or [])
|
||||
return s in deps
|
||||
|
||||
def supports(fn, data):
|
||||
|
||||
@@ -85,7 +85,7 @@ class DataNode(AstNode):
|
||||
if 'flag' in self.groupd and self.groupd['flag'] != None:
|
||||
return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
|
||||
else:
|
||||
return data.getVar(key, False, noweakdefault=True)
|
||||
return data.getVar(key, noweakdefault=True)
|
||||
|
||||
def eval(self, data):
|
||||
groupd = self.groupd
|
||||
@@ -152,7 +152,7 @@ class MethodNode(AstNode):
|
||||
funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
|
||||
text = "def %s(d):\n" % (funcname) + text
|
||||
bb.methodpool.insert_method(funcname, text, self.filename)
|
||||
anonfuncs = data.getVar('__BBANONFUNCS', False) or []
|
||||
anonfuncs = data.getVar('__BBANONFUNCS') or []
|
||||
anonfuncs.append(funcname)
|
||||
data.setVar('__BBANONFUNCS', anonfuncs)
|
||||
data.setVar(funcname, text)
|
||||
@@ -184,7 +184,7 @@ class MethodFlagsNode(AstNode):
|
||||
self.m = m
|
||||
|
||||
def eval(self, data):
|
||||
if data.getVar(self.key, False):
|
||||
if data.getVar(self.key):
|
||||
# clean up old version of this piece of metadata, as its
|
||||
# flags could cause problems
|
||||
data.setVarFlag(self.key, 'python', None)
|
||||
@@ -209,10 +209,10 @@ class ExportFuncsNode(AstNode):
|
||||
for func in self.n:
|
||||
calledfunc = self.classname + "_" + func
|
||||
|
||||
if data.getVar(func, False) and not data.getVarFlag(func, 'export_func'):
|
||||
if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
|
||||
continue
|
||||
|
||||
if data.getVar(func, False):
|
||||
if data.getVar(func):
|
||||
data.setVarFlag(func, 'python', None)
|
||||
data.setVarFlag(func, 'func', None)
|
||||
|
||||
@@ -255,7 +255,7 @@ class BBHandlerNode(AstNode):
|
||||
self.hs = fns.split()
|
||||
|
||||
def eval(self, data):
|
||||
bbhands = data.getVar('__BBHANDLERS', False) or []
|
||||
bbhands = data.getVar('__BBHANDLERS') or []
|
||||
for h in self.hs:
|
||||
bbhands.append(h)
|
||||
data.setVarFlag(h, "handler", 1)
|
||||
@@ -315,22 +315,22 @@ def handleInherit(statements, filename, lineno, m):
|
||||
|
||||
def finalize(fn, d, variant = None):
|
||||
all_handlers = {}
|
||||
for var in d.getVar('__BBHANDLERS', False) or []:
|
||||
for var in d.getVar('__BBHANDLERS') or []:
|
||||
# try to add the handler
|
||||
bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split())
|
||||
bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())
|
||||
|
||||
bb.event.fire(bb.event.RecipePreFinalise(fn), d)
|
||||
|
||||
bb.data.expandKeys(d)
|
||||
bb.data.update_data(d)
|
||||
code = []
|
||||
for funcname in d.getVar("__BBANONFUNCS", False) or []:
|
||||
for funcname in d.getVar("__BBANONFUNCS") or []:
|
||||
code.append("%s(d)" % funcname)
|
||||
bb.utils.better_exec("\n".join(code), {"d": d})
|
||||
bb.data.update_data(d)
|
||||
|
||||
tasklist = d.getVar('__BBTASKS', False) or []
|
||||
deltasklist = d.getVar('__BBDELTASKS', False) or []
|
||||
tasklist = d.getVar('__BBTASKS') or []
|
||||
deltasklist = d.getVar('__BBDELTASKS') or []
|
||||
bb.build.add_tasks(tasklist, deltasklist, d)
|
||||
|
||||
bb.parse.siggen.finalise(fn, d, variant)
|
||||
|
||||
@@ -32,7 +32,7 @@ import bb.build, bb.utils
|
||||
from bb import data
|
||||
|
||||
from . import ConfHandler
|
||||
from .. import resolve_file, ast, logger, ParseError
|
||||
from .. import resolve_file, ast, logger
|
||||
from .ConfHandler import include, init
|
||||
|
||||
# For compatibility
|
||||
@@ -48,7 +48,7 @@ __def_regexp__ = re.compile( r"def\s+(\w+).*:" )
|
||||
__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
|
||||
|
||||
|
||||
__infunc__ = []
|
||||
__infunc__ = ""
|
||||
__inpython__ = False
|
||||
__body__ = []
|
||||
__classname__ = ""
|
||||
@@ -69,14 +69,15 @@ def supports(fn, d):
|
||||
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
|
||||
|
||||
def inherit(files, fn, lineno, d):
|
||||
__inherit_cache = d.getVar('__inherit_cache', False) or []
|
||||
__inherit_cache = d.getVar('__inherit_cache') or []
|
||||
files = d.expand(files).split()
|
||||
for file in files:
|
||||
if not os.path.isabs(file) and not file.endswith(".bbclass"):
|
||||
file = os.path.join('classes', '%s.bbclass' % file)
|
||||
|
||||
if not os.path.isabs(file):
|
||||
bbpath = d.getVar("BBPATH", True)
|
||||
dname = os.path.dirname(fn)
|
||||
bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
|
||||
abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
|
||||
for af in attempts:
|
||||
if af != abs_fn:
|
||||
@@ -89,7 +90,7 @@ def inherit(files, fn, lineno, d):
|
||||
__inherit_cache.append( file )
|
||||
d.setVar('__inherit_cache', __inherit_cache)
|
||||
include(fn, file, lineno, d, "inherit")
|
||||
__inherit_cache = d.getVar('__inherit_cache', False) or []
|
||||
__inherit_cache = d.getVar('__inherit_cache') or []
|
||||
|
||||
def get_statements(filename, absolute_filename, base_name):
|
||||
global cached_statements
|
||||
@@ -119,7 +120,7 @@ def get_statements(filename, absolute_filename, base_name):
|
||||
def handle(fn, d, include):
|
||||
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
|
||||
__body__ = []
|
||||
__infunc__ = []
|
||||
__infunc__ = ""
|
||||
__classname__ = ""
|
||||
__residue__ = []
|
||||
|
||||
@@ -129,13 +130,13 @@ def handle(fn, d, include):
|
||||
|
||||
if ext == ".bbclass":
|
||||
__classname__ = root
|
||||
__inherit_cache = d.getVar('__inherit_cache', False) or []
|
||||
__inherit_cache = d.getVar('__inherit_cache') or []
|
||||
if not fn in __inherit_cache:
|
||||
__inherit_cache.append(fn)
|
||||
d.setVar('__inherit_cache', __inherit_cache)
|
||||
|
||||
if include != 0:
|
||||
oldfile = d.getVar('FILE', False)
|
||||
oldfile = d.getVar('FILE')
|
||||
else:
|
||||
oldfile = None
|
||||
|
||||
@@ -148,7 +149,7 @@ def handle(fn, d, include):
|
||||
statements = get_statements(fn, abs_fn, base_name)
|
||||
|
||||
# DONE WITH PARSING... time to evaluate
|
||||
if ext != ".bbclass" and abs_fn != oldfile:
|
||||
if ext != ".bbclass":
|
||||
d.setVar('FILE', abs_fn)
|
||||
|
||||
try:
|
||||
@@ -158,15 +159,10 @@ def handle(fn, d, include):
|
||||
if include == 0:
|
||||
return { "" : d }
|
||||
|
||||
if __infunc__:
|
||||
raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
|
||||
if __residue__:
|
||||
raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % __residue__, fn)
|
||||
|
||||
if ext != ".bbclass" and include == 0:
|
||||
return ast.multi_finalize(fn, d)
|
||||
|
||||
if ext != ".bbclass" and oldfile and abs_fn != oldfile:
|
||||
if oldfile:
|
||||
d.setVar("FILE", oldfile)
|
||||
|
||||
return d
|
||||
@@ -176,8 +172,8 @@ def feeder(lineno, s, fn, root, statements):
|
||||
if __infunc__:
|
||||
if s == '}':
|
||||
__body__.append('')
|
||||
ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__)
|
||||
__infunc__ = []
|
||||
ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
|
||||
__infunc__ = ""
|
||||
__body__ = []
|
||||
else:
|
||||
__body__.append(s)
|
||||
@@ -221,8 +217,8 @@ def feeder(lineno, s, fn, root, statements):
|
||||
|
||||
m = __func_start_regexp__.match(s)
|
||||
if m:
|
||||
__infunc__ = [m.group("func") or "__anonymous", fn, lineno]
|
||||
ast.handleMethodFlags(statements, fn, lineno, __infunc__[0], m)
|
||||
__infunc__ = m.group("func") or "__anonymous"
|
||||
ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
|
||||
return
|
||||
|
||||
m = __def_regexp__.match(s)
|
||||
|
||||
@@ -58,7 +58,7 @@ __require_regexp__ = re.compile( r"require\s+(.+)" )
|
||||
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
|
||||
|
||||
def init(data):
|
||||
topdir = data.getVar('TOPDIR', False)
|
||||
topdir = data.getVar('TOPDIR')
|
||||
if not topdir:
|
||||
data.setVar('TOPDIR', os.getcwd())
|
||||
|
||||
@@ -97,6 +97,7 @@ def include(parentfn, fn, lineno, data, error_out):
|
||||
if error_out:
|
||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), parentfn, lineno)
|
||||
logger.debug(2, "CONF file '%s' not found", fn)
|
||||
bb.parse.mark_dependency(data, fn)
|
||||
|
||||
# We have an issue where a UI might want to enforce particular settings such as
|
||||
# an empty DISTRO variable. If configuration files do something like assigning
|
||||
@@ -112,7 +113,7 @@ def handle(fn, data, include):
|
||||
if include == 0:
|
||||
oldfile = None
|
||||
else:
|
||||
oldfile = data.getVar('FILE', False)
|
||||
oldfile = data.getVar('FILE')
|
||||
|
||||
abs_fn = resolve_file(fn, data)
|
||||
f = open(abs_fn, 'r')
|
||||
|
||||
@@ -1096,13 +1096,6 @@ class RunQueue:
|
||||
raise
|
||||
except SystemExit:
|
||||
raise
|
||||
except bb.BBHandledException:
|
||||
try:
|
||||
self.teardown_workers()
|
||||
except:
|
||||
pass
|
||||
self.state = runQueueComplete
|
||||
raise
|
||||
except:
|
||||
logger.error("An uncaught exception occured in runqueue, please see the failure below:")
|
||||
try:
|
||||
@@ -1161,14 +1154,9 @@ class RunQueue:
|
||||
sq_hash.append(self.rqdata.runq_hash[task])
|
||||
sq_taskname.append(taskname)
|
||||
sq_task.append(task)
|
||||
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
|
||||
try:
|
||||
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
|
||||
valid = bb.utils.better_eval(call, locs)
|
||||
# Handle version with no siginfo parameter
|
||||
except TypeError:
|
||||
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
|
||||
valid = bb.utils.better_eval(call, locs)
|
||||
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
|
||||
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
|
||||
valid = bb.utils.better_eval(call, locs)
|
||||
for v in valid:
|
||||
valid_new.add(sq_task[v])
|
||||
|
||||
@@ -1280,9 +1268,6 @@ class RunQueueExecute:
|
||||
if rq.fakeworkerpipe:
|
||||
rq.fakeworkerpipe.setrunqueueexec(self)
|
||||
|
||||
if self.number_tasks <= 0:
|
||||
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
|
||||
|
||||
def runqueue_process_waitpid(self, task, status):
|
||||
|
||||
# self.build_stamps[pid] may not exist when use shared work directory.
|
||||
@@ -1346,7 +1331,7 @@ class RunQueueExecute:
|
||||
taskname = self.rqdata.runq_task[depid]
|
||||
taskdata[dep] = [pn, taskname, fn]
|
||||
call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
|
||||
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
|
||||
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
|
||||
valid = bb.utils.better_eval(call, locs)
|
||||
return valid
|
||||
|
||||
@@ -1415,7 +1400,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||
|
||||
call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
|
||||
call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
|
||||
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
|
||||
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
|
||||
# Backwards compatibility with older versions without invalidtasks
|
||||
try:
|
||||
covered_remove = bb.utils.better_eval(call, locs)
|
||||
@@ -1580,12 +1565,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||
taskdep = self.rqdata.dataCache.task_deps[fn]
|
||||
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
|
||||
if not self.rq.fakeworker:
|
||||
try:
|
||||
self.rq.start_fakeworker(self)
|
||||
except OSError as exc:
|
||||
logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc)))
|
||||
self.rq.state = runQueueFailed
|
||||
return True
|
||||
self.rq.start_fakeworker(self)
|
||||
self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
|
||||
self.rq.fakeworker.stdin.flush()
|
||||
else:
|
||||
@@ -1840,7 +1820,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||
sq_taskname.append(taskname)
|
||||
sq_task.append(task)
|
||||
call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
|
||||
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
|
||||
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
|
||||
valid = bb.utils.better_eval(call, locs)
|
||||
|
||||
valid_new = stamppresent
|
||||
|
||||
@@ -144,9 +144,8 @@ class ProcessServer(Process, BaseImplServer):
|
||||
fds = fds + retval
|
||||
except SystemExit:
|
||||
raise
|
||||
except Exception as exc:
|
||||
if not isinstance(exc, bb.BBHandledException):
|
||||
logger.exception('Running idle function')
|
||||
except Exception:
|
||||
logger.exception('Running idle function')
|
||||
del self._idlefuns[function]
|
||||
self.quit = True
|
||||
|
||||
|
||||
@@ -419,16 +419,12 @@ def compare_sigfiles(a, b, recursecb = None):
|
||||
for f in removed:
|
||||
output.append("Dependency on checksum of file %s was removed" % (f))
|
||||
|
||||
|
||||
if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
|
||||
changed = ["Number of task dependencies changed"]
|
||||
else:
|
||||
changed = []
|
||||
for idx, task in enumerate(a_data['runtaskdeps']):
|
||||
a = a_data['runtaskdeps'][idx]
|
||||
b = b_data['runtaskdeps'][idx]
|
||||
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
|
||||
changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
|
||||
changed = []
|
||||
for idx, task in enumerate(a_data['runtaskdeps']):
|
||||
a = a_data['runtaskdeps'][idx]
|
||||
b = b_data['runtaskdeps'][idx]
|
||||
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
|
||||
changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
|
||||
|
||||
if changed:
|
||||
output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
|
||||
|
||||
@@ -24,30 +24,6 @@ import unittest
|
||||
import bb
|
||||
import bb.data
|
||||
import bb.parse
|
||||
import logging
|
||||
|
||||
class LogRecord():
|
||||
def __enter__(self):
|
||||
logs = []
|
||||
class LogHandler(logging.Handler):
|
||||
def emit(self, record):
|
||||
logs.append(record)
|
||||
logger = logging.getLogger("BitBake")
|
||||
handler = LogHandler()
|
||||
self.handler = handler
|
||||
logger.addHandler(handler)
|
||||
return logs
|
||||
def __exit__(self, type, value, traceback):
|
||||
logger = logging.getLogger("BitBake")
|
||||
logger.removeHandler(self.handler)
|
||||
return
|
||||
|
||||
def logContains(item, logs):
|
||||
for l in logs:
|
||||
m = l.getMessage()
|
||||
if item in m:
|
||||
return True
|
||||
return False
|
||||
|
||||
class DataExpansions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
@@ -134,12 +110,12 @@ class DataExpansions(unittest.TestCase):
|
||||
|
||||
def test_rename(self):
|
||||
self.d.renameVar("foo", "newfoo")
|
||||
self.assertEqual(self.d.getVar("newfoo", False), "value_of_foo")
|
||||
self.assertEqual(self.d.getVar("foo", False), None)
|
||||
self.assertEqual(self.d.getVar("newfoo"), "value_of_foo")
|
||||
self.assertEqual(self.d.getVar("foo"), None)
|
||||
|
||||
def test_deletion(self):
|
||||
self.d.delVar("foo")
|
||||
self.assertEqual(self.d.getVar("foo", False), None)
|
||||
self.assertEqual(self.d.getVar("foo"), None)
|
||||
|
||||
def test_keys(self):
|
||||
keys = self.d.keys()
|
||||
@@ -196,28 +172,28 @@ class TestMemoize(unittest.TestCase):
|
||||
def test_memoized(self):
|
||||
d = bb.data.init()
|
||||
d.setVar("FOO", "bar")
|
||||
self.assertTrue(d.getVar("FOO", False) is d.getVar("FOO", False))
|
||||
self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))
|
||||
|
||||
def test_not_memoized(self):
|
||||
d1 = bb.data.init()
|
||||
d2 = bb.data.init()
|
||||
d1.setVar("FOO", "bar")
|
||||
d2.setVar("FOO", "bar2")
|
||||
self.assertTrue(d1.getVar("FOO", False) is not d2.getVar("FOO", False))
|
||||
self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))
|
||||
|
||||
def test_changed_after_memoized(self):
|
||||
d = bb.data.init()
|
||||
d.setVar("foo", "value of foo")
|
||||
self.assertEqual(str(d.getVar("foo", False)), "value of foo")
|
||||
self.assertEqual(str(d.getVar("foo")), "value of foo")
|
||||
d.setVar("foo", "second value of foo")
|
||||
self.assertEqual(str(d.getVar("foo", False)), "second value of foo")
|
||||
self.assertEqual(str(d.getVar("foo")), "second value of foo")
|
||||
|
||||
def test_same_value(self):
|
||||
d = bb.data.init()
|
||||
d.setVar("foo", "value of")
|
||||
d.setVar("bar", "value of")
|
||||
self.assertEqual(d.getVar("foo", False),
|
||||
d.getVar("bar", False))
|
||||
self.assertEqual(d.getVar("foo"),
|
||||
d.getVar("bar"))
|
||||
|
||||
class TestConcat(unittest.TestCase):
|
||||
def setUp(self):
|
||||
@@ -325,19 +301,6 @@ class TestOverrides(unittest.TestCase):
|
||||
bb.data.update_data(self.d)
|
||||
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
|
||||
|
||||
class TestKeyExpansion(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
self.d.setVar("FOO", "foo")
|
||||
self.d.setVar("BAR", "foo")
|
||||
|
||||
def test_keyexpand(self):
|
||||
self.d.setVar("VAL_${FOO}", "A")
|
||||
self.d.setVar("VAL_${BAR}", "B")
|
||||
with LogRecord() as logs:
|
||||
bb.data.expandKeys(self.d)
|
||||
self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
|
||||
self.assertEqual(self.d.getVar("VAL_foo", True), "A")
|
||||
|
||||
class TestFlags(unittest.TestCase):
|
||||
def setUp(self):
|
||||
|
||||
@@ -315,7 +315,6 @@ class URITest(unittest.TestCase):
|
||||
class FetcherTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.origdir = os.getcwd()
|
||||
self.d = bb.data.init()
|
||||
self.tempdir = tempfile.mkdtemp()
|
||||
self.dldir = os.path.join(self.tempdir, "download")
|
||||
@@ -327,7 +326,6 @@ class FetcherTest(unittest.TestCase):
|
||||
self.d.setVar("PERSISTENT_DIR", persistdir)
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(self.origdir)
|
||||
bb.utils.prunedir(self.tempdir)
|
||||
|
||||
class MirrorUriTest(FetcherTest):
|
||||
@@ -393,18 +391,6 @@ class MirrorUriTest(FetcherTest):
|
||||
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
|
||||
self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
|
||||
|
||||
def test_mirror_of_mirror(self):
|
||||
# Test if mirror of a mirror works
|
||||
mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
|
||||
mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
|
||||
fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
|
||||
mirrors = bb.fetch2.mirror_from_string(mirrorvar)
|
||||
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
|
||||
self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz',
|
||||
'file:///someotherpath/downloads/bitbake-1.0.tar.gz',
|
||||
'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
|
||||
'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
|
||||
|
||||
|
||||
class FetcherLocalTest(FetcherTest):
|
||||
def setUp(self):
|
||||
@@ -490,19 +476,6 @@ class FetcherNetworkTest(FetcherTest):
|
||||
fetcher.download()
|
||||
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
|
||||
|
||||
def test_fetch_mirror_of_mirror(self):
|
||||
self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
|
||||
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
|
||||
fetcher.download()
|
||||
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
|
||||
|
||||
def test_fetch_file_mirror_of_mirror(self):
|
||||
self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
|
||||
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
|
||||
os.mkdir(self.dldir + "/some2where")
|
||||
fetcher.download()
|
||||
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
|
||||
|
||||
def test_fetch_premirror(self):
|
||||
self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
|
||||
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
|
||||
@@ -574,43 +547,6 @@ class FetcherNetworkTest(FetcherTest):
|
||||
os.chdir(os.path.dirname(self.unpackdir))
|
||||
fetcher.unpack(self.unpackdir)
|
||||
|
||||
def test_trusted_network(self):
|
||||
# Ensure trusted_network returns False when the host IS in the list.
|
||||
url = "git://Someserver.org/foo;rev=1"
|
||||
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
|
||||
self.assertTrue(bb.fetch.trusted_network(self.d, url))
|
||||
|
||||
def test_wild_trusted_network(self):
|
||||
# Ensure trusted_network returns true when the *.host IS in the list.
|
||||
url = "git://Someserver.org/foo;rev=1"
|
||||
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
|
||||
self.assertTrue(bb.fetch.trusted_network(self.d, url))
|
||||
|
||||
def test_prefix_wild_trusted_network(self):
|
||||
# Ensure trusted_network returns true when the prefix matches *.host.
|
||||
url = "git://git.Someserver.org/foo;rev=1"
|
||||
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
|
||||
self.assertTrue(bb.fetch.trusted_network(self.d, url))
|
||||
|
||||
def test_two_prefix_wild_trusted_network(self):
|
||||
# Ensure trusted_network returns true when the prefix matches *.host.
|
||||
url = "git://something.git.Someserver.org/foo;rev=1"
|
||||
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
|
||||
self.assertTrue(bb.fetch.trusted_network(self.d, url))
|
||||
|
||||
def test_untrusted_network(self):
|
||||
# Ensure trusted_network returns False when the host is NOT in the list.
|
||||
url = "git://someserver.org/foo;rev=1"
|
||||
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
|
||||
self.assertFalse(bb.fetch.trusted_network(self.d, url))
|
||||
|
||||
def test_wild_untrusted_network(self):
|
||||
# Ensure trusted_network returns False when the host is NOT in the list.
|
||||
url = "git://*.someserver.org/foo;rev=1"
|
||||
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
|
||||
self.assertFalse(bb.fetch.trusted_network(self.d, url))
|
||||
|
||||
|
||||
class URLHandle(unittest.TestCase):
|
||||
|
||||
datatable = {
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Test for lib/bb/parse/
|
||||
#
|
||||
# Copyright (C) 2015 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import tempfile
|
||||
import logging
|
||||
import bb
|
||||
import os
|
||||
|
||||
logger = logging.getLogger('BitBake.TestParse')
|
||||
|
||||
import bb.parse
|
||||
import bb.data
|
||||
import bb.siggen
|
||||
|
||||
class ParseTest(unittest.TestCase):
|
||||
|
||||
testfile = """
|
||||
A = "1"
|
||||
B = "2"
|
||||
do_install() {
|
||||
echo "hello"
|
||||
}
|
||||
|
||||
C = "3"
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
bb.parse.siggen = bb.siggen.init(self.d)
|
||||
|
||||
def parsehelper(self, content):
|
||||
|
||||
f = tempfile.NamedTemporaryFile(suffix = ".bb")
|
||||
f.write(content)
|
||||
f.flush()
|
||||
os.chdir(os.path.dirname(f.name))
|
||||
return f
|
||||
|
||||
def test_parse_simple(self):
|
||||
f = self.parsehelper(self.testfile)
|
||||
d = bb.parse.handle(f.name, self.d)['']
|
||||
self.assertEqual(d.getVar("A", True), "1")
|
||||
self.assertEqual(d.getVar("B", True), "2")
|
||||
self.assertEqual(d.getVar("C", True), "3")
|
||||
|
||||
def test_parse_incomplete_function(self):
|
||||
testfileB = self.testfile.replace("}", "")
|
||||
f = self.parsehelper(testfileB)
|
||||
with self.assertRaises(bb.parse.ParseError):
|
||||
d = bb.parse.handle(f.name, self.d)['']
|
||||
@@ -21,8 +21,6 @@
|
||||
|
||||
import unittest
|
||||
import bb
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
class VerCmpString(unittest.TestCase):
|
||||
|
||||
@@ -90,289 +88,3 @@ class VerCmpString(unittest.TestCase):
|
||||
|
||||
# Check that clearly invalid operator raises an exception
|
||||
self.assertRaises(bb.utils.VersionStringException, bb.utils.vercmp_string_op, '0', '0', '$')
|
||||
|
||||
|
||||
class Path(unittest.TestCase):
|
||||
def test_unsafe_delete_path(self):
|
||||
checkitems = [('/', True),
|
||||
('//', True),
|
||||
('///', True),
|
||||
(os.getcwd().count(os.sep) * ('..' + os.sep), True),
|
||||
(os.environ.get('HOME', '/home/test'), True),
|
||||
('/home/someone', True),
|
||||
('/home/other/', True),
|
||||
('/home/other/subdir', False),
|
||||
('', False)]
|
||||
for arg1, correctresult in checkitems:
|
||||
result = bb.utils._check_unsafe_delete_path(arg1)
|
||||
self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))
|
||||
|
||||
|
||||
class EditMetadataFile(unittest.TestCase):
|
||||
_origfile = """
|
||||
# A comment
|
||||
HELLO = "oldvalue"
|
||||
|
||||
THIS = "that"
|
||||
|
||||
# Another comment
|
||||
NOCHANGE = "samevalue"
|
||||
OTHER = 'anothervalue'
|
||||
|
||||
MULTILINE = "a1 \\
|
||||
a2 \\
|
||||
a3"
|
||||
|
||||
MULTILINE2 := " \\
|
||||
b1 \\
|
||||
b2 \\
|
||||
b3 \\
|
||||
"
|
||||
|
||||
|
||||
MULTILINE3 = " \\
|
||||
c1 \\
|
||||
c2 \\
|
||||
c3 \\
|
||||
"
|
||||
|
||||
do_functionname() {
|
||||
command1 ${VAL1} ${VAL2}
|
||||
command2 ${VAL3} ${VAL4}
|
||||
}
|
||||
"""
|
||||
def _testeditfile(self, varvalues, compareto, dummyvars=None):
|
||||
if dummyvars is None:
|
||||
dummyvars = []
|
||||
with tempfile.NamedTemporaryFile('w', delete=False) as tf:
|
||||
tf.write(self._origfile)
|
||||
tf.close()
|
||||
try:
|
||||
varcalls = []
|
||||
def handle_file(varname, origvalue, op, newlines):
|
||||
self.assertIn(varname, varvalues, 'Callback called for variable %s not in the list!' % varname)
|
||||
self.assertNotIn(varname, dummyvars, 'Callback called for variable %s in dummy list!' % varname)
|
||||
varcalls.append(varname)
|
||||
return varvalues[varname]
|
||||
|
||||
bb.utils.edit_metadata_file(tf.name, varvalues.keys(), handle_file)
|
||||
with open(tf.name) as f:
|
||||
modfile = f.readlines()
|
||||
# Ensure the output matches the expected output
|
||||
self.assertEqual(compareto.splitlines(True), modfile)
|
||||
# Ensure the callback function was called for every variable we asked for
|
||||
# (plus allow testing behaviour when a requested variable is not present)
|
||||
self.assertEqual(sorted(varvalues.keys()), sorted(varcalls + dummyvars))
|
||||
finally:
|
||||
os.remove(tf.name)
|
||||
|
||||
|
||||
def test_edit_metadata_file_nochange(self):
|
||||
# Test file doesn't get modified with nothing to do
|
||||
self._testeditfile({}, self._origfile)
|
||||
# Test file doesn't get modified with only dummy variables
|
||||
self._testeditfile({'DUMMY1': ('should_not_set', None, 0, True),
|
||||
'DUMMY2': ('should_not_set_again', None, 0, True)}, self._origfile, dummyvars=['DUMMY1', 'DUMMY2'])
|
||||
# Test file doesn't get modified with some the same values
|
||||
self._testeditfile({'THIS': ('that', None, 0, True),
|
||||
'OTHER': ('anothervalue', None, 0, True),
|
||||
'MULTILINE3': (' c1 c2 c3', None, 4, False)}, self._origfile)
|
||||
|
||||
def test_edit_metadata_file_1(self):
|
||||
|
||||
newfile1 = """
|
||||
# A comment
|
||||
HELLO = "newvalue"
|
||||
|
||||
THIS = "that"
|
||||
|
||||
# Another comment
|
||||
NOCHANGE = "samevalue"
|
||||
OTHER = 'anothervalue'
|
||||
|
||||
MULTILINE = "a1 \\
|
||||
a2 \\
|
||||
a3"
|
||||
|
||||
MULTILINE2 := " \\
|
||||
b1 \\
|
||||
b2 \\
|
||||
b3 \\
|
||||
"
|
||||
|
||||
|
||||
MULTILINE3 = " \\
|
||||
c1 \\
|
||||
c2 \\
|
||||
c3 \\
|
||||
"
|
||||
|
||||
do_functionname() {
|
||||
command1 ${VAL1} ${VAL2}
|
||||
command2 ${VAL3} ${VAL4}
|
||||
}
|
||||
"""
|
||||
self._testeditfile({'HELLO': ('newvalue', None, 4, True)}, newfile1)
|
||||
|
||||
|
||||
def test_edit_metadata_file_2(self):
|
||||
|
||||
newfile2 = """
|
||||
# A comment
|
||||
HELLO = "oldvalue"
|
||||
|
||||
THIS = "that"
|
||||
|
||||
# Another comment
|
||||
NOCHANGE = "samevalue"
|
||||
OTHER = 'anothervalue'
|
||||
|
||||
MULTILINE = " \\
|
||||
d1 \\
|
||||
d2 \\
|
||||
d3 \\
|
||||
"
|
||||
|
||||
MULTILINE2 := " \\
|
||||
b1 \\
|
||||
b2 \\
|
||||
b3 \\
|
||||
"
|
||||
|
||||
|
||||
MULTILINE3 = "nowsingle"
|
||||
|
||||
do_functionname() {
|
||||
command1 ${VAL1} ${VAL2}
|
||||
command2 ${VAL3} ${VAL4}
|
||||
}
|
||||
"""
|
||||
self._testeditfile({'MULTILINE': (['d1','d2','d3'], None, 4, False),
|
||||
'MULTILINE3': ('nowsingle', None, 4, True),
|
||||
'NOTPRESENT': (['a', 'b'], None, 4, False)}, newfile2, dummyvars=['NOTPRESENT'])
|
||||
|
||||
|
||||
def test_edit_metadata_file_3(self):
|
||||
|
||||
newfile3 = """
|
||||
# A comment
|
||||
HELLO = "oldvalue"
|
||||
|
||||
# Another comment
|
||||
NOCHANGE = "samevalue"
|
||||
OTHER = "yetanothervalue"
|
||||
|
||||
MULTILINE = "e1 \\
|
||||
e2 \\
|
||||
e3 \\
|
||||
"
|
||||
|
||||
MULTILINE2 := "f1 \\
|
||||
\tf2 \\
|
||||
\t"
|
||||
|
||||
|
||||
MULTILINE3 = " \\
|
||||
c1 \\
|
||||
c2 \\
|
||||
c3 \\
|
||||
"
|
||||
|
||||
do_functionname() {
|
||||
othercommand_one a b c
|
||||
othercommand_two d e f
|
||||
}
|
||||
"""
|
||||
|
||||
self._testeditfile({'do_functionname()': (['othercommand_one a b c', 'othercommand_two d e f'], None, 4, False),
|
||||
'MULTILINE2': (['f1', 'f2'], None, '\t', True),
|
||||
'MULTILINE': (['e1', 'e2', 'e3'], None, -1, True),
|
||||
'THIS': (None, None, 0, False),
|
||||
'OTHER': ('yetanothervalue', None, 0, True)}, newfile3)
|
||||
|
||||
|
||||
def test_edit_metadata_file_4(self):
|
||||
|
||||
newfile4 = """
|
||||
# A comment
|
||||
HELLO = "oldvalue"
|
||||
|
||||
THIS = "that"
|
||||
|
||||
# Another comment
|
||||
OTHER = 'anothervalue'
|
||||
|
||||
MULTILINE = "a1 \\
|
||||
a2 \\
|
||||
a3"
|
||||
|
||||
MULTILINE2 := " \\
|
||||
b1 \\
|
||||
b2 \\
|
||||
b3 \\
|
||||
"
|
||||
|
||||
|
||||
"""
|
||||
|
||||
self._testeditfile({'NOCHANGE': (None, None, 0, False),
|
||||
'MULTILINE3': (None, None, 0, False),
|
||||
'THIS': ('that', None, 0, False),
|
||||
'do_functionname()': (None, None, 0, False)}, newfile4)
|
||||
|
||||
|
||||
def test_edit_metadata(self):
|
||||
newfile5 = """
|
||||
# A comment
|
||||
HELLO = "hithere"
|
||||
|
||||
# A new comment
|
||||
THIS += "that"
|
||||
|
||||
# Another comment
|
||||
NOCHANGE = "samevalue"
|
||||
OTHER = 'anothervalue'
|
||||
|
||||
MULTILINE = "a1 \\
|
||||
a2 \\
|
||||
a3"
|
||||
|
||||
MULTILINE2 := " \\
|
||||
b1 \\
|
||||
b2 \\
|
||||
b3 \\
|
||||
"
|
||||
|
||||
|
||||
MULTILINE3 = " \\
|
||||
c1 \\
|
||||
c2 \\
|
||||
c3 \\
|
||||
"
|
||||
|
||||
NEWVAR = "value"
|
||||
|
||||
do_functionname() {
|
||||
command1 ${VAL1} ${VAL2}
|
||||
command2 ${VAL3} ${VAL4}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def handle_var(varname, origvalue, op, newlines):
|
||||
if varname == 'THIS':
|
||||
newlines.append('# A new comment\n')
|
||||
elif varname == 'do_functionname()':
|
||||
newlines.append('NEWVAR = "value"\n')
|
||||
newlines.append('\n')
|
||||
valueitem = varvalues.get(varname, None)
|
||||
if valueitem:
|
||||
return valueitem
|
||||
else:
|
||||
return (origvalue, op, 0, True)
|
||||
|
||||
varvalues = {'HELLO': ('hithere', None, 0, True), 'THIS': ('that', '+=', 0, True)}
|
||||
varlist = ['HELLO', 'THIS', 'do_functionname()']
|
||||
(updated, newlines) = bb.utils.edit_metadata(self._origfile.splitlines(True), varlist, handle_var)
|
||||
self.assertTrue(updated, 'List should be updated but isn\'t')
|
||||
self.assertEqual(newlines, newfile5.splitlines(True))
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import datetime
|
||||
import sys
|
||||
import bb
|
||||
import re
|
||||
@@ -23,7 +24,6 @@ import ast
|
||||
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
|
||||
|
||||
from django.utils import timezone
|
||||
import toaster.toastermain.settings as toaster_django_settings
|
||||
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
|
||||
from toaster.orm.models import Target_Image_File, BuildArtifact
|
||||
@@ -31,9 +31,6 @@ from toaster.orm.models import Variable, VariableHistory
|
||||
from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
|
||||
from toaster.orm.models import Task_Dependency, Package_Dependency
|
||||
from toaster.orm.models import Recipe_Dependency
|
||||
|
||||
from toaster.orm.models import Project
|
||||
|
||||
from bb.msg import BBLogFormatter as format
|
||||
from django.db import models
|
||||
from pprint import pformat
|
||||
@@ -41,7 +38,7 @@ import logging
|
||||
|
||||
from django.db import transaction, connection
|
||||
|
||||
logger = logging.getLogger("ToasterLogger")
|
||||
logger = logging.getLogger("BitBake")
|
||||
|
||||
|
||||
class NotExisting(Exception):
|
||||
@@ -106,7 +103,7 @@ class ORMWrapper(object):
|
||||
|
||||
return vars(self)[dictname][key]
|
||||
|
||||
def create_build_object(self, build_info, brbe, project_id):
|
||||
def create_build_object(self, build_info, brbe):
|
||||
assert 'machine' in build_info
|
||||
assert 'distro' in build_info
|
||||
assert 'distro_version' in build_info
|
||||
@@ -115,42 +112,7 @@ class ORMWrapper(object):
|
||||
assert 'build_name' in build_info
|
||||
assert 'bitbake_version' in build_info
|
||||
|
||||
prj = None
|
||||
buildrequest = None
|
||||
if brbe is not None: # this build was triggered by a request from a user
|
||||
logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
|
||||
from bldcontrol.models import BuildEnvironment, BuildRequest
|
||||
br, be = brbe.split(":")
|
||||
buildrequest = BuildRequest.objects.get(pk = br)
|
||||
prj = buildrequest.project
|
||||
|
||||
elif project_id is not None: # this build was triggered by an external system for a specific project
|
||||
logger.debug(1, "buildinfohelper: project is %s" % prj)
|
||||
prj = Project.objects.get(pk = project_id)
|
||||
|
||||
else: # this build was triggered by a legacy system, or command line interactive mode
|
||||
prj, created = Project.objects.get_or_create(pk=0, name="Default Project")
|
||||
logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
|
||||
|
||||
|
||||
if buildrequest is not None:
|
||||
build = buildrequest.build
|
||||
logger.info("Updating existing build, with %s" % build_info)
|
||||
build.machine=build_info['machine']
|
||||
build.distro=build_info['distro']
|
||||
build.distro_version=build_info['distro_version']
|
||||
started_on=build_info['started_on']
|
||||
completed_on=build_info['started_on']
|
||||
build.cooker_log_path=build_info['cooker_log_path']
|
||||
build.build_name=build_info['build_name']
|
||||
build.bitbake_version=build_info['bitbake_version']
|
||||
build.save()
|
||||
|
||||
Target.objects.filter(build = build).delete()
|
||||
|
||||
else:
|
||||
build = Build.objects.create(
|
||||
project = prj,
|
||||
build = Build.objects.create(
|
||||
machine=build_info['machine'],
|
||||
distro=build_info['distro'],
|
||||
distro_version=build_info['distro_version'],
|
||||
@@ -161,11 +123,17 @@ class ORMWrapper(object):
|
||||
bitbake_version=build_info['bitbake_version'])
|
||||
|
||||
logger.debug(1, "buildinfohelper: build is created %s" % build)
|
||||
if brbe is not None:
|
||||
logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
|
||||
from bldcontrol.models import BuildEnvironment, BuildRequest
|
||||
br, be = brbe.split(":")
|
||||
|
||||
if buildrequest is not None:
|
||||
buildrequest = BuildRequest.objects.get(pk = br)
|
||||
buildrequest.build = build
|
||||
buildrequest.save()
|
||||
|
||||
build.project_id = buildrequest.project_id
|
||||
build.save()
|
||||
return build
|
||||
|
||||
def create_target_objects(self, target_info):
|
||||
@@ -190,7 +158,10 @@ class ORMWrapper(object):
|
||||
if errors or taskfailures:
|
||||
outcome = Build.FAILED
|
||||
|
||||
build.completed_on = timezone.now()
|
||||
build.completed_on = datetime.datetime.now()
|
||||
build.timespent = int((build.completed_on - build.started_on).total_seconds())
|
||||
build.errors_no = errors
|
||||
build.warnings_no = warnings
|
||||
build.outcome = outcome
|
||||
build.save()
|
||||
|
||||
@@ -250,12 +221,12 @@ class ORMWrapper(object):
|
||||
def get_update_recipe_object(self, recipe_information, must_exist = False):
|
||||
assert 'layer_version' in recipe_information
|
||||
assert 'file_path' in recipe_information
|
||||
assert 'pathflags' in recipe_information
|
||||
|
||||
assert not recipe_information['file_path'].startswith("/") # we should have layer-relative paths at all times
|
||||
if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
|
||||
recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")
|
||||
|
||||
recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
|
||||
file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])
|
||||
file_path=recipe_information['file_path'])
|
||||
if created and must_exist:
|
||||
raise NotExisting("Recipe object created when expected to exist", recipe_information)
|
||||
|
||||
@@ -276,15 +247,13 @@ class ORMWrapper(object):
|
||||
assert 'branch' in layer_version_information
|
||||
assert 'commit' in layer_version_information
|
||||
assert 'priority' in layer_version_information
|
||||
assert 'local_path' in layer_version_information
|
||||
|
||||
layer_version_object, created = Layer_Version.objects.get_or_create(
|
||||
build = build_obj,
|
||||
layer = layer_obj,
|
||||
branch = layer_version_information['branch'],
|
||||
commit = layer_version_information['commit'],
|
||||
priority = layer_version_information['priority'],
|
||||
local_path = layer_version_information['local_path'],
|
||||
priority = layer_version_information['priority']
|
||||
)
|
||||
|
||||
self.layer_version_objects.append(layer_version_object)
|
||||
@@ -293,11 +262,13 @@ class ORMWrapper(object):
|
||||
|
||||
def get_update_layer_object(self, layer_information, brbe):
|
||||
assert 'name' in layer_information
|
||||
assert 'local_path' in layer_information
|
||||
assert 'layer_index_url' in layer_information
|
||||
|
||||
if brbe is None:
|
||||
layer_object, created = Layer.objects.get_or_create(
|
||||
name=layer_information['name'],
|
||||
local_path=layer_information['local_path'],
|
||||
layer_index_url=layer_information['layer_index_url'])
|
||||
return layer_object
|
||||
else:
|
||||
@@ -326,6 +297,7 @@ class ORMWrapper(object):
|
||||
for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
|
||||
if pl.layercommit.layer.vcs_url == brl.giturl :
|
||||
layer = pl.layercommit.layer
|
||||
layer.local_path = layer_information['local_path']
|
||||
layer.save()
|
||||
return layer
|
||||
|
||||
@@ -667,7 +639,6 @@ class BuildInfoHelper(object):
|
||||
self.has_build_history = has_build_history
|
||||
self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
|
||||
self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
|
||||
self.project = self.server.runCommand(["getVariable", "TOASTER_PROJECT"])[0]
|
||||
logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
|
||||
|
||||
|
||||
@@ -686,8 +657,8 @@ class BuildInfoHelper(object):
|
||||
build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
|
||||
build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
|
||||
build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
|
||||
build_info['started_on'] = timezone.now()
|
||||
build_info['completed_on'] = timezone.now()
|
||||
build_info['started_on'] = datetime.datetime.now()
|
||||
build_info['completed_on'] = datetime.datetime.now()
|
||||
build_info['cooker_log_path'] = self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]
|
||||
build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
|
||||
build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
|
||||
@@ -716,12 +687,12 @@ class BuildInfoHelper(object):
|
||||
if self.brbe is None:
|
||||
def _slkey_interactive(layer_version):
|
||||
assert isinstance(layer_version, Layer_Version)
|
||||
return len(layer_version.local_path)
|
||||
return len(layer_version.layer.local_path)
|
||||
|
||||
# Heuristics: we always match recipe to the deepest layer path in the discovered layers
|
||||
for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
|
||||
# we can match to the recipe file path
|
||||
if path.startswith(lvo.local_path):
|
||||
if path.startswith(lvo.layer.local_path):
|
||||
return lvo
|
||||
|
||||
else:
|
||||
@@ -747,33 +718,21 @@ class BuildInfoHelper(object):
|
||||
return lvo
|
||||
|
||||
#if we get here, we didn't read layers correctly; dump whatever information we have on the error log
|
||||
logger.warn("Could not match layer version for recipe path %s : %s" % (path, self.orm_wrapper.layer_version_objects))
|
||||
logger.error("Could not match layer version for recipe path %s : %s" % (path, self.orm_wrapper.layer_version_objects))
|
||||
|
||||
#mockup the new layer
|
||||
unknown_layer, created = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", layer_index_url="")
|
||||
unknown_layer, created = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", local_path="/", layer_index_url="")
|
||||
unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
|
||||
|
||||
# append it so we don't run into this error again and again
|
||||
self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)
|
||||
|
||||
return unknown_layer_version_obj
|
||||
|
||||
def _get_recipe_information_from_taskfile(self, taskfile):
|
||||
localfilepath = taskfile.split(":")[-1]
|
||||
filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
|
||||
layer_version_obj = self._get_layer_version_for_path(localfilepath)
|
||||
|
||||
|
||||
|
||||
recipe_info = {}
|
||||
recipe_info['layer_version'] = layer_version_obj
|
||||
recipe_info['file_path'] = localfilepath
|
||||
recipe_info['pathflags'] = filepath_flags
|
||||
|
||||
if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
|
||||
recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
|
||||
else:
|
||||
raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
|
||||
recipe_info['file_path'] = taskfile
|
||||
|
||||
return recipe_info
|
||||
|
||||
@@ -825,7 +784,6 @@ class BuildInfoHelper(object):
|
||||
for layer in layerinfos:
|
||||
try:
|
||||
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
|
||||
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
|
||||
except NotExisting as nee:
|
||||
logger.warn("buildinfohelper: cannot identify layer exception:%s " % nee)
|
||||
|
||||
@@ -834,7 +792,7 @@ class BuildInfoHelper(object):
|
||||
assert '_pkgs' in vars(event)
|
||||
build_information = self._get_build_information()
|
||||
|
||||
build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)
|
||||
build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe)
|
||||
|
||||
self.internal_state['build'] = build_obj
|
||||
|
||||
@@ -856,31 +814,6 @@ class BuildInfoHelper(object):
|
||||
|
||||
# Save build configuration
|
||||
data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
|
||||
|
||||
# convert the paths from absolute to relative to either the build directory or layer checkouts
|
||||
path_prefixes = []
|
||||
|
||||
if self.brbe is not None:
|
||||
br_id, be_id = self.brbe.split(":")
|
||||
from bldcontrol.models import BuildEnvironment, BuildRequest
|
||||
be = BuildEnvironment.objects.get(pk = be_id)
|
||||
path_prefixes.append(be.builddir)
|
||||
|
||||
for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
|
||||
path_prefixes.append(layer.local_path)
|
||||
|
||||
# we strip the prefixes
|
||||
for k in data:
|
||||
if not bool(data[k]['func']):
|
||||
for vh in data[k]['history']:
|
||||
if not 'documentation.conf' in vh['file']:
|
||||
abs_file_name = vh['file']
|
||||
for pp in path_prefixes:
|
||||
if abs_file_name.startswith(pp + "/"):
|
||||
vh['file']=abs_file_name[len(pp + "/"):]
|
||||
break
|
||||
|
||||
# save the variables
|
||||
self.orm_wrapper.save_build_variables(build_obj, data)
|
||||
|
||||
return self.brbe
|
||||
@@ -963,8 +896,8 @@ class BuildInfoHelper(object):
|
||||
|
||||
recipe_information = self._get_recipe_information_from_taskfile(taskfile)
|
||||
try:
|
||||
if recipe_information['file_path'].startswith(recipe_information['layer_version'].local_path):
|
||||
recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].local_path):].lstrip("/")
|
||||
if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
|
||||
recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")
|
||||
|
||||
recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
|
||||
file_path__endswith = recipe_information['file_path'],
|
||||
@@ -1055,7 +988,7 @@ class BuildInfoHelper(object):
|
||||
mevent.taskhash = taskhash
|
||||
task_information = self._get_task_information(mevent,recipe)
|
||||
|
||||
task_information['start_time'] = timezone.now()
|
||||
task_information['start_time'] = datetime.datetime.now()
|
||||
task_information['outcome'] = Task.OUTCOME_NA
|
||||
task_information['sstate_checksum'] = taskhash
|
||||
task_information['sstate_result'] = Task.SSTATE_MISS
|
||||
@@ -1115,9 +1048,8 @@ class BuildInfoHelper(object):
|
||||
self.internal_state['recipes'] = {}
|
||||
for pn in event._depgraph['pn']:
|
||||
|
||||
file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
|
||||
pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
|
||||
layer_version_obj = self._get_layer_version_for_path(file_name)
|
||||
file_name = event._depgraph['pn'][pn]['filename']
|
||||
layer_version_obj = self._get_layer_version_for_path(file_name.split(":")[-1])
|
||||
|
||||
assert layer_version_obj is not None
|
||||
|
||||
@@ -1147,13 +1079,6 @@ class BuildInfoHelper(object):
|
||||
recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
|
||||
|
||||
recipe_info['file_path'] = file_name
|
||||
recipe_info['pathflags'] = pathflags
|
||||
|
||||
if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
|
||||
recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
|
||||
else:
|
||||
raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
|
||||
|
||||
recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
|
||||
recipe.is_image = False
|
||||
if 'inherits' in event._depgraph['pn'][pn].keys():
|
||||
@@ -1230,7 +1155,6 @@ class BuildInfoHelper(object):
|
||||
)
|
||||
|
||||
def _store_build_done(self, errorcode):
|
||||
logger.info("Build exited with errorcode %d", errorcode)
|
||||
br_id, be_id = self.brbe.split(":")
|
||||
from bldcontrol.models import BuildEnvironment, BuildRequest
|
||||
be = BuildEnvironment.objects.get(pk = be_id)
|
||||
@@ -1250,7 +1174,7 @@ class BuildInfoHelper(object):
|
||||
mockevent.levelno = format.ERROR
|
||||
mockevent.msg = text
|
||||
mockevent.pathname = '-- None'
|
||||
mockevent.lineno = LogMessage.ERROR
|
||||
mockevent.lineno = -1
|
||||
self.store_log_event(mockevent)
|
||||
|
||||
def store_log_exception(self, text, backtrace = ""):
|
||||
@@ -1274,12 +1198,13 @@ class BuildInfoHelper(object):
|
||||
if not 'backlog' in self.internal_state:
|
||||
self.internal_state['backlog'] = []
|
||||
self.internal_state['backlog'].append(event)
|
||||
return
|
||||
else: # we're under Toaster control, the build is already created
|
||||
else: # we're under Toaster control, post the errors to the build request
|
||||
from bldcontrol.models import BuildRequest, BRError
|
||||
br, be = self.brbe.split(":")
|
||||
buildrequest = BuildRequest.objects.get(pk = br)
|
||||
self.internal_state['build'] = buildrequest.build
|
||||
brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)
|
||||
|
||||
return
|
||||
|
||||
if 'build' in self.internal_state and 'backlog' in self.internal_state:
|
||||
# if we have a backlog of events, do our best to save them here
|
||||
@@ -1288,7 +1213,7 @@ class BuildInfoHelper(object):
|
||||
logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
|
||||
self.store_log_event(tempevent)
|
||||
else:
|
||||
logger.info("buildinfohelper: All events saved")
|
||||
logger.error("buildinfohelper: Events not saved: %s" % self.internal_state['backlog'])
|
||||
del self.internal_state['backlog']
|
||||
|
||||
log_information = {}
|
||||
@@ -1297,15 +1222,14 @@ class BuildInfoHelper(object):
|
||||
log_information['level'] = LogMessage.ERROR
|
||||
elif event.levelno == format.WARNING:
|
||||
log_information['level'] = LogMessage.WARNING
|
||||
elif event.levelno == -2: # toaster self-logging
|
||||
log_information['level'] = -2
|
||||
elif event.levelno == -1: # toaster self-logging
|
||||
log_information['level'] = -1
|
||||
else:
|
||||
log_information['level'] = LogMessage.INFO
|
||||
|
||||
log_information['message'] = event.msg
|
||||
log_information['pathname'] = event.pathname
|
||||
log_information['lineno'] = event.lineno
|
||||
logger.info("Logging error 2: %s" % log_information)
|
||||
self.orm_wrapper.create_logmessage(log_information)
|
||||
|
||||
def close(self, errorcode):
|
||||
|
||||
@@ -309,7 +309,7 @@ class Parameters:
|
||||
|
||||
def hob_conf_filter(fn, data):
|
||||
if fn.endswith("/local.conf"):
|
||||
distro = data.getVar("DISTRO_HOB", False)
|
||||
distro = data.getVar("DISTRO_HOB")
|
||||
if distro:
|
||||
if distro != "defaultsetup":
|
||||
data.setVar("DISTRO", distro)
|
||||
@@ -320,13 +320,13 @@ def hob_conf_filter(fn, data):
|
||||
"BB_NUMBER_THREADS_HOB", "PARALLEL_MAKE_HOB", "DL_DIR_HOB", \
|
||||
"SSTATE_DIR_HOB", "SSTATE_MIRRORS_HOB", "INCOMPATIBLE_LICENSE_HOB"]
|
||||
for key in keys:
|
||||
var_hob = data.getVar(key, False)
|
||||
var_hob = data.getVar(key)
|
||||
if var_hob:
|
||||
data.setVar(key.split("_HOB")[0], var_hob)
|
||||
return
|
||||
|
||||
if fn.endswith("/bblayers.conf"):
|
||||
layers = data.getVar("BBLAYERS_HOB", False)
|
||||
layers = data.getVar("BBLAYERS_HOB")
|
||||
if layers:
|
||||
data.setVar("BBLAYERS", layers)
|
||||
return
|
||||
|
||||
@@ -310,7 +310,6 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
errors = 0
|
||||
warnings = 0
|
||||
taskfailures = []
|
||||
locktries = 10
|
||||
|
||||
termfilter = tf(main, helper, console, errconsole, format)
|
||||
atexit.register(termfilter.finish)
|
||||
@@ -538,25 +537,6 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
_, error = server.runCommand(["stateForceShutdown"])
|
||||
main.shutdown = 2
|
||||
try:
|
||||
topdir, error = server.runCommand(["getVariable", "TOPDIR"])
|
||||
if error:
|
||||
logger.warn("Unable to get the value of TOPDIR variable: %s" % error)
|
||||
else:
|
||||
lockfile = "%s/bitbake.lock" % topdir
|
||||
_, error = server.runCommand(["unlockBitbake"])
|
||||
if error:
|
||||
logger.warn("Unable to unlock the file %s" % lockfile)
|
||||
else:
|
||||
while locktries:
|
||||
lf = bb.utils.lockfile(lockfile, False, False)
|
||||
if not lf:
|
||||
time.sleep(1)
|
||||
locktries -=1
|
||||
else:
|
||||
bb.utils.unlockfile(lf)
|
||||
break
|
||||
if not locktries:
|
||||
logger.warn("Knotty could not lock the file ${TOPDIR}/bitbake.lock, probably locked by cooker and not unlocked yet. Immediate bitbake commands may failed")
|
||||
summary = ""
|
||||
if taskfailures:
|
||||
summary += pluralise("\nSummary: %s task failed:",
|
||||
|
||||
@@ -43,7 +43,7 @@ import xmlrpclib
|
||||
|
||||
featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
|
||||
|
||||
logger = logging.getLogger("ToasterLogger")
|
||||
logger = logging.getLogger("BitBake")
|
||||
interactive = sys.stdout.isatty()
|
||||
|
||||
|
||||
@@ -66,6 +66,7 @@ def _log_settings_from_server(server):
|
||||
|
||||
|
||||
def main(server, eventHandler, params ):
|
||||
|
||||
helper = uihelper.BBUIHelper()
|
||||
|
||||
console = logging.StreamHandler(sys.stdout)
|
||||
@@ -87,7 +88,7 @@ def main(server, eventHandler, params ):
|
||||
|
||||
if not params.observe_only:
|
||||
logger.error("ToasterUI can only work in observer mode")
|
||||
return 1
|
||||
return
|
||||
|
||||
|
||||
main.shutdown = 0
|
||||
@@ -140,12 +141,10 @@ def main(server, eventHandler, params ):
|
||||
continue
|
||||
|
||||
if isinstance(event, logging.LogRecord):
|
||||
if event.levelno == -1:
|
||||
event.levelno = format.ERROR
|
||||
|
||||
buildinfohelper.store_log_event(event)
|
||||
if event.levelno >= format.ERROR:
|
||||
errors = errors + 1
|
||||
return_value = 1
|
||||
elif event.levelno == format.WARNING:
|
||||
warnings = warnings + 1
|
||||
# For "normal" logging conditions, don't show note logs from tasks
|
||||
@@ -159,6 +158,7 @@ def main(server, eventHandler, params ):
|
||||
|
||||
if isinstance(event, bb.build.TaskFailed):
|
||||
buildinfohelper.update_and_store_task(event)
|
||||
return_value = 1
|
||||
logfile = event.logfile
|
||||
if logfile and os.path.exists(logfile):
|
||||
bb.error("Logfile of failure stored in: %s" % logfile)
|
||||
@@ -188,6 +188,7 @@ def main(server, eventHandler, params ):
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.NoProvider):
|
||||
return_value = 1
|
||||
errors = errors + 1
|
||||
if event._runtime:
|
||||
r = "R"
|
||||
@@ -237,19 +238,27 @@ def main(server, eventHandler, params ):
|
||||
if isinstance(event, (bb.event.TreeDataPreparationStarted, bb.event.TreeDataPreparationCompleted)):
|
||||
continue
|
||||
|
||||
if isinstance(event, (bb.event.BuildCompleted, bb.command.CommandFailed)):
|
||||
if isinstance(event, (bb.event.BuildCompleted)):
|
||||
continue
|
||||
|
||||
errorcode = 0
|
||||
if isinstance(event, (bb.command.CommandCompleted,
|
||||
bb.command.CommandFailed,
|
||||
bb.command.CommandExit)):
|
||||
errorcode = 0
|
||||
if (isinstance(event, bb.command.CommandFailed)):
|
||||
event.levelno = format.ERROR
|
||||
event.msg = "Command Failed " + event.error
|
||||
event.pathname = ""
|
||||
event.lineno = 0
|
||||
buildinfohelper.store_log_event(event)
|
||||
errors += 1
|
||||
errorcode = 1
|
||||
logger.error("Command execution failed: %s", event.error)
|
||||
|
||||
# update the build info helper on BuildCompleted, not on CommandXXX
|
||||
buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
|
||||
buildinfohelper.close(errorcode)
|
||||
# mark the log output; controllers may kill the toasterUI after seeing this log
|
||||
logger.info("ToasterUI build done 1, brbe: %s" % buildinfohelper.brbe )
|
||||
logger.info("ToasterUI build done")
|
||||
|
||||
# we start a new build info
|
||||
if buildinfohelper.brbe is not None:
|
||||
@@ -263,14 +272,6 @@ def main(server, eventHandler, params ):
|
||||
taskfailures = []
|
||||
buildinfohelper = BuildInfoHelper(server, build_history_enabled)
|
||||
|
||||
logger.info("ToasterUI build done 2")
|
||||
continue
|
||||
|
||||
if isinstance(event, (bb.command.CommandCompleted,
|
||||
bb.command.CommandFailed,
|
||||
bb.command.CommandExit)):
|
||||
errorcode = 0
|
||||
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.MetadataEvent):
|
||||
@@ -315,7 +316,6 @@ def main(server, eventHandler, params ):
|
||||
continue
|
||||
|
||||
logger.error("Unknown event: %s", event)
|
||||
return_value += 1
|
||||
|
||||
except EnvironmentError as ioerror:
|
||||
# ignore interrupted io
|
||||
@@ -344,13 +344,10 @@ def main(server, eventHandler, params ):
|
||||
except Exception as ce:
|
||||
logger.error("CRITICAL - Failed to to save toaster exception to the database: %s" % str(ce))
|
||||
|
||||
# make sure we return with an error
|
||||
return_value += 1
|
||||
pass
|
||||
|
||||
if interrupted:
|
||||
if return_value == 0:
|
||||
return_value += 1
|
||||
return_value = 1
|
||||
|
||||
logger.warn("Return value is %d", return_value)
|
||||
return return_value
|
||||
|
||||
@@ -601,30 +601,11 @@ def build_environment(d):
|
||||
if export:
|
||||
os.environ[var] = d.getVar(var, True) or ""
|
||||
|
||||
def _check_unsafe_delete_path(path):
|
||||
"""
|
||||
Basic safeguard against recursively deleting something we shouldn't. If it returns True,
|
||||
the caller should raise an exception with an appropriate message.
|
||||
NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
|
||||
with potentially disastrous results.
|
||||
"""
|
||||
extra = ''
|
||||
# HOME might not be /home/something, so in case we can get it, check against it
|
||||
homedir = os.environ.get('HOME', '')
|
||||
if homedir:
|
||||
extra = '|%s' % homedir
|
||||
if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
|
||||
return True
|
||||
return False
|
||||
|
||||
def remove(path, recurse=False):
|
||||
"""Equivalent to rm -f or rm -rf"""
|
||||
if not path:
|
||||
return
|
||||
if recurse:
|
||||
for name in glob.glob(path):
|
||||
if _check_unsafe_delete_path(path):
|
||||
raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
|
||||
# shutil.rmtree(name) would be ideal but its too slow
|
||||
subprocess.call(['rm', '-rf'] + glob.glob(path))
|
||||
return
|
||||
@@ -638,8 +619,6 @@ def remove(path, recurse=False):
|
||||
def prunedir(topdir):
|
||||
# Delete everything reachable from the directory named in 'topdir'.
|
||||
# CAUTION: This is dangerous!
|
||||
if _check_unsafe_delete_path(topdir):
|
||||
raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
|
||||
for root, dirs, files in os.walk(topdir, topdown = False):
|
||||
for name in files:
|
||||
os.remove(os.path.join(root, name))
|
||||
@@ -908,17 +887,11 @@ def cpu_count():
|
||||
def nonblockingfd(fd):
|
||||
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
|
||||
|
||||
def process_profilelog(fn, pout = None):
|
||||
# Either call with a list of filenames and set pout or a filename and optionally pout.
|
||||
if not pout:
|
||||
pout = fn + '.processed'
|
||||
pout = open(pout, 'w')
|
||||
def process_profilelog(fn):
|
||||
pout = open(fn + '.processed', 'w')
|
||||
|
||||
import pstats
|
||||
if isinstance(fn, list):
|
||||
p = pstats.Stats(*fn, stream=pout)
|
||||
else:
|
||||
p = pstats.Stats(fn, stream=pout)
|
||||
p = pstats.Stats(fn, stream=pout)
|
||||
p.sort_stats('time')
|
||||
p.print_stats()
|
||||
p.print_callers()
|
||||
@@ -969,207 +942,77 @@ def exec_flat_python_func(func, *args, **kwargs):
|
||||
bb.utils.better_exec(comp, context, code, '<string>')
|
||||
return context['retval']
|
||||
|
||||
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
|
||||
"""Edit lines from a recipe or config file and modify one or more
|
||||
specified variable values set in the file using a specified callback
|
||||
function. Lines are expected to have trailing newlines.
|
||||
Parameters:
|
||||
meta_lines: lines from the file; can be a list or an iterable
|
||||
(e.g. file pointer)
|
||||
variables: a list of variable names to look for. Functions
|
||||
may also be specified, but must be specified with '()' at
|
||||
the end of the name. Note that the function doesn't have
|
||||
any intrinsic understanding of _append, _prepend, _remove,
|
||||
or overrides, so these are considered as part of the name.
|
||||
These values go into a regular expression, so regular
|
||||
expression syntax is allowed.
|
||||
varfunc: callback function called for every variable matching
|
||||
one of the entries in the variables parameter. The function
|
||||
should take four arguments:
|
||||
varname: name of variable matched
|
||||
origvalue: current value in file
|
||||
op: the operator (e.g. '+=')
|
||||
newlines: list of lines up to this point. You can use
|
||||
this to prepend lines before this variable setting
|
||||
if you wish.
|
||||
and should return a three-element tuple:
|
||||
newvalue: new value to substitute in, or None to drop
|
||||
the variable setting entirely. (If the removal
|
||||
results in two consecutive blank lines, one of the
|
||||
blank lines will also be dropped).
|
||||
newop: the operator to use - if you specify None here,
|
||||
the original operation will be used.
|
||||
indent: number of spaces to indent multi-line entries,
|
||||
or -1 to indent up to the level of the assignment
|
||||
and opening quote, or a string to use as the indent.
|
||||
minbreak: True to allow the first element of a
|
||||
multi-line value to continue on the same line as
|
||||
the assignment, False to indent before the first
|
||||
element.
|
||||
match_overrides: True to match items with _overrides on the end,
|
||||
False otherwise
|
||||
Returns a tuple:
|
||||
updated:
|
||||
True if changes were made, False otherwise.
|
||||
newlines:
|
||||
Lines after processing
|
||||
"""
|
||||
|
||||
var_res = {}
|
||||
if match_overrides:
|
||||
override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
|
||||
else:
|
||||
override_re = ''
|
||||
for var in variables:
|
||||
if var.endswith('()'):
|
||||
var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
|
||||
else:
|
||||
var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
|
||||
|
||||
updated = False
|
||||
varset_start = ''
|
||||
varlines = []
|
||||
newlines = []
|
||||
in_var = None
|
||||
full_value = ''
|
||||
var_end = ''
|
||||
|
||||
def handle_var_end():
|
||||
prerun_newlines = newlines[:]
|
||||
op = varset_start[len(in_var):].strip()
|
||||
(newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
|
||||
changed = (prerun_newlines != newlines)
|
||||
|
||||
if newvalue is None:
|
||||
# Drop the value
|
||||
return True
|
||||
elif newvalue != full_value or (newop not in [None, op]):
|
||||
if newop not in [None, op]:
|
||||
# Callback changed the operator
|
||||
varset_new = "%s %s" % (in_var, newop)
|
||||
else:
|
||||
varset_new = varset_start
|
||||
|
||||
if isinstance(indent, (int, long)):
|
||||
if indent == -1:
|
||||
indentspc = ' ' * (len(varset_new) + 2)
|
||||
else:
|
||||
indentspc = ' ' * indent
|
||||
else:
|
||||
indentspc = indent
|
||||
if in_var.endswith('()'):
|
||||
# A function definition
|
||||
if isinstance(newvalue, list):
|
||||
newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
|
||||
else:
|
||||
if not newvalue.startswith('\n'):
|
||||
newvalue = '\n' + newvalue
|
||||
if not newvalue.endswith('\n'):
|
||||
newvalue = newvalue + '\n'
|
||||
newlines.append('%s {%s}\n' % (varset_new, newvalue))
|
||||
else:
|
||||
# Normal variable
|
||||
if isinstance(newvalue, list):
|
||||
if not newvalue:
|
||||
# Empty list -> empty string
|
||||
newlines.append('%s ""\n' % varset_new)
|
||||
elif minbreak:
|
||||
# First item on first line
|
||||
if len(newvalue) == 1:
|
||||
newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
|
||||
else:
|
||||
newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
|
||||
for item in newvalue[1:]:
|
||||
newlines.append('%s%s \\\n' % (indentspc, item))
|
||||
newlines.append('%s"\n' % indentspc)
|
||||
else:
|
||||
# No item on first line
|
||||
newlines.append('%s " \\\n' % varset_new)
|
||||
for item in newvalue:
|
||||
newlines.append('%s%s \\\n' % (indentspc, item))
|
||||
newlines.append('%s"\n' % indentspc)
|
||||
else:
|
||||
newlines.append('%s "%s"\n' % (varset_new, newvalue))
|
||||
return True
|
||||
else:
|
||||
# Put the old lines back where they were
|
||||
newlines.extend(varlines)
|
||||
# If newlines was touched by the function, we'll need to return True
|
||||
return changed
|
||||
|
||||
checkspc = False
|
||||
|
||||
for line in meta_lines:
|
||||
if in_var:
|
||||
value = line.rstrip()
|
||||
varlines.append(line)
|
||||
if in_var.endswith('()'):
|
||||
full_value += '\n' + value
|
||||
else:
|
||||
full_value += value[:-1]
|
||||
if value.endswith(var_end):
|
||||
if in_var.endswith('()'):
|
||||
if full_value.count('{') - full_value.count('}') >= 0:
|
||||
continue
|
||||
full_value = full_value[:-1]
|
||||
if handle_var_end():
|
||||
updated = True
|
||||
checkspc = True
|
||||
in_var = None
|
||||
else:
|
||||
skip = False
|
||||
for (varname, var_re) in var_res.iteritems():
|
||||
res = var_re.match(line)
|
||||
if res:
|
||||
isfunc = varname.endswith('()')
|
||||
if isfunc:
|
||||
splitvalue = line.split('{', 1)
|
||||
var_end = '}'
|
||||
else:
|
||||
var_end = res.groups()[-1]
|
||||
splitvalue = line.split(var_end, 1)
|
||||
varset_start = splitvalue[0].rstrip()
|
||||
value = splitvalue[1].rstrip()
|
||||
if not isfunc and value.endswith('\\'):
|
||||
value = value[:-1]
|
||||
full_value = value
|
||||
varlines = [line]
|
||||
in_var = res.group(1)
|
||||
if isfunc:
|
||||
in_var += '()'
|
||||
if value.endswith(var_end):
|
||||
full_value = full_value[:-1]
|
||||
if handle_var_end():
|
||||
updated = True
|
||||
checkspc = True
|
||||
in_var = None
|
||||
skip = True
|
||||
break
|
||||
if not skip:
|
||||
if checkspc:
|
||||
checkspc = False
|
||||
if newlines[-1] == '\n' and line == '\n':
|
||||
# Squash blank line if there are two consecutive blanks after a removal
|
||||
continue
|
||||
newlines.append(line)
|
||||
return (updated, newlines)
|
||||
|
||||
|
||||
def edit_metadata_file(meta_file, variables, varfunc):
|
||||
def edit_metadata_file(meta_file, variables, func):
|
||||
"""Edit a recipe or config file and modify one or more specified
|
||||
variable values set in the file using a specified callback function.
|
||||
The file is only written to if the value(s) actually change.
|
||||
This is basically the file version of edit_metadata(), see that
|
||||
function's description for parameter/usage information.
|
||||
Returns True if the file was written to, False otherwise.
|
||||
"""
|
||||
var_res = {}
|
||||
for var in variables:
|
||||
var_res[var] = re.compile(r'^%s[ \t]*[?+]*=' % var)
|
||||
|
||||
updated = False
|
||||
varset_start = ''
|
||||
newlines = []
|
||||
in_var = None
|
||||
full_value = ''
|
||||
|
||||
def handle_var_end():
|
||||
(newvalue, indent, minbreak) = func(in_var, full_value)
|
||||
if newvalue != full_value:
|
||||
if isinstance(newvalue, list):
|
||||
intentspc = ' ' * indent
|
||||
if minbreak:
|
||||
# First item on first line
|
||||
if len(newvalue) == 1:
|
||||
newlines.append('%s "%s"\n' % (varset_start, newvalue[0]))
|
||||
else:
|
||||
newlines.append('%s "%s\\\n' % (varset_start, newvalue[0]))
|
||||
for item in newvalue[1:]:
|
||||
newlines.append('%s%s \\\n' % (intentspc, item))
|
||||
newlines.append('%s"\n' % indentspc)
|
||||
else:
|
||||
# No item on first line
|
||||
newlines.append('%s " \\\n' % varset_start)
|
||||
for item in newvalue:
|
||||
newlines.append('%s%s \\\n' % (intentspc, item))
|
||||
newlines.append('%s"\n' % intentspc)
|
||||
else:
|
||||
newlines.append('%s "%s"\n' % (varset_start, newvalue))
|
||||
return True
|
||||
return False
|
||||
|
||||
with open(meta_file, 'r') as f:
|
||||
(updated, newlines) = edit_metadata(f, variables, varfunc)
|
||||
for line in f:
|
||||
if in_var:
|
||||
value = line.rstrip()
|
||||
full_value += value[:-1]
|
||||
if value.endswith('"') or value.endswith("'"):
|
||||
if handle_var_end():
|
||||
updated = True
|
||||
in_var = None
|
||||
else:
|
||||
matched = False
|
||||
for (varname, var_re) in var_res.iteritems():
|
||||
if var_re.match(line):
|
||||
splitvalue = line.split('"', 1)
|
||||
varset_start = splitvalue[0].rstrip()
|
||||
value = splitvalue[1].rstrip()
|
||||
if value.endswith('\\'):
|
||||
value = value[:-1]
|
||||
full_value = value
|
||||
if value.endswith('"') or value.endswith("'"):
|
||||
if handle_var_end():
|
||||
updated = True
|
||||
else:
|
||||
in_var = varname
|
||||
matched = True
|
||||
break
|
||||
if not matched:
|
||||
newlines.append(line)
|
||||
if updated:
|
||||
with open(meta_file, 'w') as f:
|
||||
f.writelines(newlines)
|
||||
return updated
|
||||
|
||||
|
||||
def edit_bblayers_conf(bblayers_conf, add, remove):
|
||||
"""Edit bblayers.conf, adding and/or removing layers"""
|
||||
@@ -1198,7 +1041,7 @@ def edit_bblayers_conf(bblayers_conf, add, remove):
|
||||
# Need to use a list here because we can't set non-local variables from a callback in python 2.x
|
||||
bblayercalls = []
|
||||
|
||||
def handle_bblayers(varname, origvalue, op, newlines):
|
||||
def handle_bblayers(varname, origvalue):
|
||||
bblayercalls.append(varname)
|
||||
updated = False
|
||||
bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
|
||||
@@ -1222,9 +1065,9 @@ def edit_bblayers_conf(bblayers_conf, add, remove):
|
||||
notadded.append(addlayer)
|
||||
|
||||
if updated:
|
||||
return (bblayers, None, 2, False)
|
||||
return (bblayers, 2, False)
|
||||
else:
|
||||
return (origvalue, None, 2, False)
|
||||
return (origvalue, 2, False)
|
||||
|
||||
edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)
|
||||
|
||||
@@ -1233,19 +1076,3 @@ def edit_bblayers_conf(bblayers_conf, add, remove):
|
||||
|
||||
return (notadded, notremoved)
|
||||
|
||||
|
||||
def get_file_layer(filename, d):
|
||||
"""Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
|
||||
collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
|
||||
collection_res = {}
|
||||
for collection in collections:
|
||||
collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''
|
||||
|
||||
# Use longest path so we handle nested layers
|
||||
matchlen = 0
|
||||
match = None
|
||||
for collection, regex in collection_res.iteritems():
|
||||
if len(regex) > matchlen and re.match(regex, filename):
|
||||
matchlen = len(regex)
|
||||
match = collection
|
||||
return match
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import os,sys,logging
|
||||
import signal, time
|
||||
import signal, time, atexit, threading
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
import xmlrpclib
|
||||
import threading
|
||||
import Queue
|
||||
|
||||
|
||||
@@ -190,9 +190,6 @@ class BuildEnvironmentController(object):
|
||||
"""
|
||||
raise Exception("Must override BE release")
|
||||
|
||||
def triggerBuild(self, bitbake, layers, variables, targets):
|
||||
raise Exception("Must override BE release")
|
||||
|
||||
class ShellCmdException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@@ -113,25 +113,19 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
# get the file length; we need to detect the _last_ start of the toaster UI, not the first
|
||||
toaster_ui_log_filelength = 0
|
||||
if os.path.exists(toaster_ui_log_filepath):
|
||||
with open(toaster_ui_log_filepath, "w") as f:
|
||||
with open(toaster_ui_log_filepath, "r") as f:
|
||||
f.seek(0, 2) # jump to the end
|
||||
toaster_ui_log_filelength = f.tell()
|
||||
|
||||
cmd = "bash -c \"source %s/oe-init-build-env %s 2>&1 >toaster_server.log && bitbake --read %s/conf/toaster-pre.conf --postread %s/conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0 2>&1 >>toaster_server.log \"" % (self.pokydirname, self.be.builddir, self.be.builddir, self.be.builddir)
|
||||
|
||||
cmd = "bash -c \"source %s/oe-init-build-env %s 2>&1 >toaster_server.log && bitbake --read conf/toaster-pre.conf --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0 2>&1 >toaster_server.log && DATABASE_URL=%s BBSERVER=0.0.0.0:-1 daemon -d -i -D %s -o toaster_ui.log -- %s --observe-only -u toasterui &\"" % (self.pokydirname, self.be.builddir,
|
||||
self.dburl, self.be.builddir, own_bitbake)
|
||||
port = "-1"
|
||||
logger.debug("localhostbecontroller: starting builder \n%s\n" % cmd)
|
||||
|
||||
cmdoutput = self._shellcmd(cmd)
|
||||
with open(self.be.builddir + "/toaster_server.log", "r") as f:
|
||||
for i in f.readlines():
|
||||
if i.startswith("Bitbake server address"):
|
||||
port = i.split(" ")[-1]
|
||||
logger.debug("localhostbecontroller: Found bitbake server port %s" % port)
|
||||
|
||||
cmd = "bash -c \"source %s/oe-init-build-env-memres -1 %s && DATABASE_URL=%s %s --observe-only -u toasterui --remote-server=0.0.0.0:-1 -t xmlrpc\"" % (self.pokydirname, self.be.builddir, self.dburl, own_bitbake)
|
||||
with open(toaster_ui_log_filepath, "a+") as f:
|
||||
p = subprocess.Popen(cmd, cwd = self.be.builddir, shell=True, stdout=f, stderr=f)
|
||||
for i in cmdoutput.split("\n"):
|
||||
if i.startswith("Bitbake server address"):
|
||||
port = i.split(" ")[-1]
|
||||
logger.debug("localhostbecontroller: Found bitbake server port %s" % port)
|
||||
|
||||
def _toaster_ui_started(filepath, filepos = 0):
|
||||
if not os.path.exists(filepath):
|
||||
@@ -139,7 +133,7 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
with open(filepath, "r") as f:
|
||||
f.seek(filepos)
|
||||
for line in f:
|
||||
if line.startswith("Bitbake server started on demand"):
|
||||
if line.startswith("NOTE: ToasterUI waiting for events"):
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -153,9 +147,8 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
retries += 1
|
||||
|
||||
if not started:
|
||||
toaster_ui_log = open(os.path.join(self.be.builddir, "toaster_ui.log"), "r").read()
|
||||
toaster_server_log = open(os.path.join(self.be.builddir, "toaster_server.log"), "r").read()
|
||||
raise BuildSetupException("localhostbecontroller: Bitbake server did not start in 5 seconds, aborting (Error: '%s' '%s')" % (toaster_ui_log, toaster_server_log))
|
||||
raise BuildSetupException("localhostbecontroller: Bitbake server did not start in 5 seconds, aborting (Error: '%s' '%s')" % (cmdoutput, toaster_server_log))
|
||||
|
||||
logger.debug("localhostbecontroller: Started bitbake server")
|
||||
|
||||
@@ -317,25 +310,3 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
import shutil
|
||||
shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
|
||||
assert not os.path.exists(self.be.builddir)
|
||||
|
||||
|
||||
def triggerBuild(self, bitbake, layers, variables, targets):
|
||||
# set up the buid environment with the needed layers
|
||||
self.setLayers(bitbake, layers)
|
||||
self.writeConfFile("conf/toaster-pre.conf", variables)
|
||||
self.writeConfFile("conf/toaster.conf", raw = "INHERIT+=\"toaster buildhistory\"")
|
||||
|
||||
# get the bb server running with the build req id and build env id
|
||||
bbctrl = self.getBBController()
|
||||
|
||||
# trigger the build command
|
||||
task = reduce(lambda x, y: x if len(y)== 0 else y, map(lambda y: y.task, targets))
|
||||
if len(task) == 0:
|
||||
task = None
|
||||
|
||||
bbctrl.build(list(map(lambda x:x.target, targets)), task)
|
||||
|
||||
logger.debug("localhostbecontroller: Build launched, exiting. Follow build logs at %s/toaster_ui.log" % self.be.builddir)
|
||||
|
||||
# disconnect from the server
|
||||
bbctrl.disconnect()
|
||||
|
||||
@@ -2,7 +2,7 @@ from django.core.management.base import NoArgsCommand, CommandError
|
||||
from django.db import transaction
|
||||
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException
|
||||
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
|
||||
from orm.models import ToasterSetting, Build
|
||||
from orm.models import ToasterSetting
|
||||
import os
|
||||
|
||||
def DN(path):
|
||||
@@ -61,7 +61,7 @@ class Command(NoArgsCommand):
|
||||
return DN(self._find_first_path_for_file(DN(self.guesspath), "bblayers.conf", 4))
|
||||
|
||||
|
||||
def _verify_artifact_storage_dir(self):
|
||||
def handle(self, **options):
|
||||
# verify that we have a settings for downloading artifacts
|
||||
while ToasterSetting.objects.filter(name="ARTIFACTS_STORAGE_DIR").count() == 0:
|
||||
guessedpath = os.getcwd() + "/toaster_build_artifacts/"
|
||||
@@ -78,10 +78,7 @@ class Command(NoArgsCommand):
|
||||
else:
|
||||
raise ose
|
||||
ToasterSetting.objects.create(name="ARTIFACTS_STORAGE_DIR", value=artifacts_storage_dir)
|
||||
return 0
|
||||
|
||||
|
||||
def _verify_build_environment(self):
|
||||
self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
|
||||
# refuse to start if we have no build environments
|
||||
while BuildEnvironment.objects.count() == 0:
|
||||
@@ -166,8 +163,6 @@ class Command(NoArgsCommand):
|
||||
conffilepath, error = subprocess.Popen('bash -c ". '+os.path.join(dirname, ".templateconf")+'; echo \"\$TEMPLATECONF\""', shell=True, stdout=subprocess.PIPE).communicate()
|
||||
conffilepath = os.path.join(conffilepath.strip(), "toasterconf.json")
|
||||
candidatefilepath = os.path.join(dirname, conffilepath)
|
||||
if "toaster_cloned" in candidatefilepath:
|
||||
continue
|
||||
if os.path.exists(candidatefilepath):
|
||||
config_files.append(candidatefilepath)
|
||||
|
||||
@@ -202,16 +197,12 @@ class Command(NoArgsCommand):
|
||||
|
||||
while (_verify_be()):
|
||||
pass
|
||||
return 0
|
||||
|
||||
def _verify_default_settings(self):
|
||||
# verify that default settings are there
|
||||
if ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').count() != 1:
|
||||
ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').delete()
|
||||
ToasterSetting.objects.get_or_create(name = 'DEFAULT_RELEASE', value = '')
|
||||
return 0
|
||||
|
||||
def _verify_builds_in_progress(self):
|
||||
# we are just starting up. we must not have any builds in progress, or build environments taken
|
||||
for b in BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS):
|
||||
BRError.objects.create(req = b, errtype = "toaster", errmsg = "Toaster found this build IN PROGRESS while Toaster started up. This is an inconsistent state, and the build was marked as failed")
|
||||
@@ -220,19 +211,4 @@ class Command(NoArgsCommand):
|
||||
|
||||
BuildEnvironment.objects.update(lock = BuildEnvironment.LOCK_FREE)
|
||||
|
||||
# also mark "In Progress builds as failures"
|
||||
from django.utils import timezone
|
||||
Build.objects.filter(outcome = Build.IN_PROGRESS).update(outcome = Build.FAILED, completed_on = timezone.now())
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
|
||||
def handle(self, **options):
|
||||
retval = 0
|
||||
retval += self._verify_artifact_storage_dir()
|
||||
retval += self._verify_build_environment()
|
||||
retval += self._verify_default_settings()
|
||||
retval += self._verify_builds_in_progress()
|
||||
|
||||
return retval
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
from django.db import transaction
|
||||
from orm.models import Build, ToasterSetting, LogMessage, Target
|
||||
from orm.models import Build, ToasterSetting
|
||||
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException, BuildSetupException
|
||||
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError, BRVariable
|
||||
import os
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger("ToasterScheduler")
|
||||
logger = logging.getLogger("toaster")
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
args = ""
|
||||
@@ -35,7 +35,7 @@ class Command(NoArgsCommand):
|
||||
# select the build environment and the request to build
|
||||
br = self._selectBuildRequest()
|
||||
except IndexError as e:
|
||||
#logger.debug("runbuilds: No build request")
|
||||
# logger.debug("runbuilds: No build request")
|
||||
return
|
||||
try:
|
||||
bec = self._selectBuildEnvironment()
|
||||
@@ -50,16 +50,33 @@ class Command(NoArgsCommand):
|
||||
|
||||
# write the build identification variable
|
||||
BRVariable.objects.create(req = br, name="TOASTER_BRBE", value="%d:%d" % (br.pk, bec.be.pk))
|
||||
|
||||
# let the build request know where it is being executed
|
||||
br.environment = bec.be
|
||||
br.save()
|
||||
|
||||
# this triggers an async build
|
||||
bec.triggerBuild(br.brbitbake_set.all(), br.brlayer_set.all(), br.brvariable_set.all(), br.brtarget_set.all())
|
||||
# set up the buid environment with the needed layers
|
||||
bec.setLayers(br.brbitbake_set.all(), br.brlayer_set.all())
|
||||
bec.writeConfFile("conf/toaster-pre.conf", br.brvariable_set.all())
|
||||
bec.writeConfFile("conf/toaster.conf", raw = "INHERIT+=\"toaster buildhistory\"")
|
||||
|
||||
# get the bb server running with the build req id and build env id
|
||||
bbctrl = bec.getBBController()
|
||||
|
||||
# trigger the build command
|
||||
task = reduce(lambda x, y: x if len(y)== 0 else y, map(lambda y: y.task, br.brtarget_set.all()))
|
||||
if len(task) == 0:
|
||||
task = None
|
||||
bbctrl.build(list(map(lambda x:x.target, br.brtarget_set.all())), task)
|
||||
|
||||
logger.debug("runbuilds: Build launched, exiting. Follow build logs at %s/toaster_ui.log" % bec.be.builddir)
|
||||
# disconnect from the server
|
||||
bbctrl.disconnect()
|
||||
|
||||
# cleanup to be performed by toaster when the deed is done
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("runbuilds: Error launching build %s" % e)
|
||||
logger.error("runbuilds: Error executing shell command %s" % e)
|
||||
traceback.print_exc(e)
|
||||
if "[Errno 111] Connection refused" in str(e):
|
||||
# Connection refused, read toaster_server.out
|
||||
@@ -108,43 +125,7 @@ class Command(NoArgsCommand):
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
# environments locked for more than 30 seconds - they should be unlocked
|
||||
BuildEnvironment.objects.filter(buildrequest__state__in=[BuildRequest.REQ_FAILED, BuildRequest.REQ_COMPLETED]).filter(lock=BuildEnvironment.LOCK_LOCK).filter(updated__lt = timezone.now() - timedelta(seconds = 30)).update(lock = BuildEnvironment.LOCK_FREE)
|
||||
|
||||
|
||||
# update all Builds that failed to start
|
||||
|
||||
for br in BuildRequest.objects.filter(state = BuildRequest.REQ_FAILED, build__outcome = Build.IN_PROGRESS):
|
||||
# transpose the launch errors in ToasterExceptions
|
||||
br.build.outcome = Build.FAILED
|
||||
for brerror in br.brerror_set.all():
|
||||
logger.debug("Saving error %s" % brerror)
|
||||
LogMessage.objects.create(build = br.build, level = LogMessage.EXCEPTION, message = brerror.errmsg)
|
||||
br.build.save()
|
||||
|
||||
# we don't have a true build object here; hence, toasterui didn't have a change to release the BE lock
|
||||
br.environment.lock = BuildEnvironment.LOCK_FREE
|
||||
br.environment.save()
|
||||
|
||||
|
||||
|
||||
# update all BuildRequests without a build created
|
||||
for br in BuildRequest.objects.filter(build = None):
|
||||
br.build = Build.objects.create(project = br.project, completed_on = br.updated, started_on = br.created)
|
||||
br.build.outcome = BuildRequest.REQ_FAILED
|
||||
try:
|
||||
br.build.machine = br.brvariable_set.get(name='MACHINE').value
|
||||
except BRVariable.DoesNotExist:
|
||||
pass
|
||||
br.save()
|
||||
# transpose target information
|
||||
for brtarget in br.brtarget_set.all():
|
||||
Target.objects.create(build = br.build, target= brtarget.target)
|
||||
# transpose the launch errors in ToasterExceptions
|
||||
for brerror in br.brerror_set.all():
|
||||
LogMessage.objects.create(build = br.build, level = LogMessage.EXCEPTION, message = brerror.errmsg)
|
||||
|
||||
br.build.save()
|
||||
pass
|
||||
BuildEnvironment.objects.filter(lock=BuildEnvironment.LOCK_LOCK).filter(updated__lt = timezone.now() - timedelta(seconds = 30)).update(lock = BuildEnvironment.LOCK_FREE)
|
||||
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
|
||||
@@ -125,9 +125,6 @@ class BuildRequest(models.Model):
|
||||
def get_machine(self):
|
||||
return self.brvariable_set.get(name="MACHINE").value
|
||||
|
||||
def __str__(self):
|
||||
return "%s %s" % (self.project, self.get_state_display())
|
||||
|
||||
# These tables specify the settings for running an actual build.
|
||||
# They MUST be kept in sync with the tables in orm.models.Project*
|
||||
|
||||
@@ -159,6 +156,3 @@ class BRError(models.Model):
|
||||
errtype = models.CharField(max_length=100)
|
||||
errmsg = models.TextField()
|
||||
traceback = models.TextField()
|
||||
|
||||
def __str__(self):
|
||||
return "%s (%s)" % (self.errmsg, self.req)
|
||||
|
||||
@@ -31,9 +31,6 @@ from toastermain import settings
|
||||
|
||||
from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException
|
||||
|
||||
class NotImplementedException(Exception):
|
||||
pass
|
||||
|
||||
def DN(path):
|
||||
return "/".join(path.split("/")[0:-1])
|
||||
|
||||
@@ -128,7 +125,7 @@ class SSHBEController(BuildEnvironmentController):
|
||||
# set layers in the layersource
|
||||
|
||||
|
||||
raise NotImplementedException("Not implemented: SSH setLayers")
|
||||
raise Exception("Not implemented: SSH setLayers")
|
||||
# 3. configure the build environment, so we have a conf/bblayers.conf
|
||||
assert self.pokydirname is not None
|
||||
self._setupBE()
|
||||
@@ -156,24 +153,3 @@ class SSHBEController(BuildEnvironmentController):
|
||||
import shutil
|
||||
shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
|
||||
assert not self._pathexists(self.be.builddir)
|
||||
|
||||
def triggerBuild(self, bitbake, layers, variables, targets):
|
||||
# set up the buid environment with the needed layers
|
||||
self.setLayers(bitbake, layers)
|
||||
self.writeConfFile("conf/toaster-pre.conf", )
|
||||
self.writeConfFile("conf/toaster.conf", raw = "INHERIT+=\"toaster buildhistory\"")
|
||||
|
||||
# get the bb server running with the build req id and build env id
|
||||
bbctrl = self.getBBController()
|
||||
|
||||
# trigger the build command
|
||||
task = reduce(lambda x, y: x if len(y)== 0 else y, map(lambda y: y.task, targets))
|
||||
if len(task) == 0:
|
||||
task = None
|
||||
|
||||
bbctrl.build(list(map(lambda x:x.target, targets)), task)
|
||||
|
||||
logger.debug("localhostbecontroller: Build launched, exiting. Follow build logs at %s/toaster_ui.log" % self.be.builddir)
|
||||
|
||||
# disconnect from the server
|
||||
bbctrl.disconnect()
|
||||
|
||||
@@ -7,7 +7,7 @@ Replace this with more appropriate tests for your application.
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from bldcontrol.bbcontroller import BitbakeController, BuildSetupException
|
||||
from bldcontrol.bbcontroller import BitbakeController
|
||||
from bldcontrol.localhostbecontroller import LocalhostBEController
|
||||
from bldcontrol.sshbecontroller import SSHBEController
|
||||
from bldcontrol.models import BuildEnvironment, BuildRequest
|
||||
@@ -15,7 +15,6 @@ from bldcontrol.management.commands.runbuilds import Command
|
||||
|
||||
import socket
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
# standard poky data hardcoded for testing
|
||||
BITBAKE_LAYERS = [type('bitbake_info', (object,), { "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "", "commit": "HEAD"})]
|
||||
@@ -30,17 +29,6 @@ POKY_LAYERS = [
|
||||
# we have an abstract test class designed to ensure that the controllers use a single interface
|
||||
# specific controller tests only need to override the _getBuildEnvironment() method
|
||||
|
||||
test_sourcedir = os.getenv("TTS_SOURCE_DIR")
|
||||
test_builddir = os.getenv("TTS_BUILD_DIR")
|
||||
test_address = os.getenv("TTS_TEST_ADDRESS", "localhost")
|
||||
|
||||
if test_sourcedir == None or test_builddir == None or test_address == None:
|
||||
raise Exception("Please set TTTS_SOURCE_DIR, TTS_BUILD_DIR and TTS_TEST_ADDRESS")
|
||||
|
||||
# The bb server will expect a toaster-pre.conf file to exist. If it doesn't exit then we make
|
||||
# an empty one here.
|
||||
open(test_builddir + 'conf/toaster-pre.conf', 'a').close()
|
||||
|
||||
class BEControllerTests(object):
|
||||
|
||||
def _serverForceStop(self, bc):
|
||||
@@ -48,53 +36,28 @@ class BEControllerTests(object):
|
||||
self.assertTrue(err == '', "bitbake server pid %s not stopped" % err)
|
||||
|
||||
def test_serverStartAndStop(self):
|
||||
from bldcontrol.sshbecontroller import NotImplementedException
|
||||
obe = self._getBuildEnvironment()
|
||||
bc = self._getBEController(obe)
|
||||
try:
|
||||
# setting layers, skip any layer info
|
||||
bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS)
|
||||
except NotImplementedException, e:
|
||||
print "Test skipped due to command not implemented yet"
|
||||
return True
|
||||
# We are ok with the exception as we're handling the git already exists
|
||||
except BuildSetupException:
|
||||
pass
|
||||
bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS) # setting layers, skip any layer info
|
||||
|
||||
bc.pokydirname = test_sourcedir
|
||||
bc.islayerset = True
|
||||
|
||||
hostname = test_address.split("@")[-1]
|
||||
hostname = self.test_address.split("@")[-1]
|
||||
|
||||
# test start server and stop
|
||||
bc.startBBServer()
|
||||
|
||||
self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, int(bc.be.bbport))), "Server not answering")
|
||||
self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Port already occupied")
|
||||
bc.startBBServer("0:0")
|
||||
self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Server not answering")
|
||||
|
||||
bc.stopBBServer()
|
||||
self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, int(bc.be.bbport))), "Server not stopped")
|
||||
self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Server not stopped")
|
||||
|
||||
self._serverForceStop(bc)
|
||||
|
||||
def test_getBBController(self):
|
||||
from bldcontrol.sshbecontroller import NotImplementedException
|
||||
obe = self._getBuildEnvironment()
|
||||
bc = self._getBEController(obe)
|
||||
layerSet = False
|
||||
try:
|
||||
# setting layers, skip any layer info
|
||||
layerSet = bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS)
|
||||
except NotImplementedException:
|
||||
print "Test skipped due to command not implemented yet"
|
||||
return True
|
||||
# We are ok with the exception as we're handling the git already exists
|
||||
except BuildSetupException:
|
||||
pass
|
||||
bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS) # setting layers, skip any layer info
|
||||
|
||||
bc.pokydirname = test_sourcedir
|
||||
bc.islayerset = True
|
||||
|
||||
bbc = bc.getBBController()
|
||||
bbc = bc.getBBController("%d:%d" % (-1, obe.pk))
|
||||
self.assertTrue(isinstance(bbc, BitbakeController))
|
||||
bc.stopBBServer()
|
||||
|
||||
@@ -103,15 +66,19 @@ class BEControllerTests(object):
|
||||
class LocalhostBEControllerTests(TestCase, BEControllerTests):
|
||||
def __init__(self, *args):
|
||||
super(LocalhostBEControllerTests, self).__init__(*args)
|
||||
|
||||
# hardcoded for Alex's machine; since the localhost BE is machine-dependent,
|
||||
# I found no good way to abstractize this
|
||||
self.test_sourcedir = "/home/ddalex/ssd/yocto"
|
||||
self.test_builddir = "/home/ddalex/ssd/yocto/build"
|
||||
self.test_address = "localhost"
|
||||
|
||||
def _getBuildEnvironment(self):
|
||||
return BuildEnvironment.objects.create(
|
||||
lock = BuildEnvironment.LOCK_FREE,
|
||||
betype = BuildEnvironment.TYPE_LOCAL,
|
||||
address = test_address,
|
||||
sourcedir = test_sourcedir,
|
||||
builddir = test_builddir )
|
||||
address = self.test_address,
|
||||
sourcedir = self.test_sourcedir,
|
||||
builddir = self.test_builddir )
|
||||
|
||||
def _getBEController(self, obe):
|
||||
return LocalhostBEController(obe)
|
||||
@@ -119,20 +86,25 @@ class LocalhostBEControllerTests(TestCase, BEControllerTests):
|
||||
class SSHBEControllerTests(TestCase, BEControllerTests):
|
||||
def __init__(self, *args):
|
||||
super(SSHBEControllerTests, self).__init__(*args)
|
||||
self.test_address = "ddalex-desktop.local"
|
||||
# hardcoded for ddalex-desktop.local machine; since the localhost BE is machine-dependent,
|
||||
# I found no good way to abstractize this
|
||||
self.test_sourcedir = "/home/ddalex/ssd/yocto"
|
||||
self.test_builddir = "/home/ddalex/ssd/yocto/build"
|
||||
|
||||
def _getBuildEnvironment(self):
|
||||
return BuildEnvironment.objects.create(
|
||||
lock = BuildEnvironment.LOCK_FREE,
|
||||
betype = BuildEnvironment.TYPE_SSH,
|
||||
address = test_address,
|
||||
sourcedir = test_sourcedir,
|
||||
builddir = test_builddir )
|
||||
address = self.test_address,
|
||||
sourcedir = self.test_sourcedir,
|
||||
builddir = self.test_builddir )
|
||||
|
||||
def _getBEController(self, obe):
|
||||
return SSHBEController(obe)
|
||||
|
||||
def test_pathExists(self):
|
||||
obe = BuildEnvironment.objects.create(betype = BuildEnvironment.TYPE_SSH, address= test_address)
|
||||
obe = BuildEnvironment.objects.create(betype = BuildEnvironment.TYPE_SSH, address= self.test_address)
|
||||
sbc = SSHBEController(obe)
|
||||
self.assertTrue(sbc._pathexists("/"))
|
||||
self.assertFalse(sbc._pathexists("/.deadbeef"))
|
||||
@@ -157,7 +129,7 @@ class RunBuildsCommandTests(TestCase):
|
||||
self.assertRaises(IndexError, command._selectBuildEnvironment)
|
||||
|
||||
def test_br_select(self):
|
||||
from orm.models import Project, Release, BitbakeVersion, Branch
|
||||
from orm.models import Project, Release, BitbakeVersion
|
||||
p = Project.objects.create_project("test", Release.objects.get_or_create(name = "HEAD", bitbake_version = BitbakeVersion.objects.get_or_create(name="HEAD", branch=Branch.objects.get_or_create(name="HEAD"))[0])[0])
|
||||
obr = BuildRequest.objects.create(state = BuildRequest.REQ_QUEUED, project = p)
|
||||
command = Command()
|
||||
|
||||
44
bitbake/lib/toaster/bldviewer/api.py
Normal file
44
bitbake/lib/toaster/bldviewer/api.py
Normal file
@@ -0,0 +1,44 @@
|
||||
#
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from django.conf.urls import patterns, include, url
|
||||
|
||||
|
||||
urlpatterns = patterns('bldviewer.views',
|
||||
url(r'^builds$', 'model_explorer', {'model_name':'build'}, name='builds'),
|
||||
url(r'^targets$', 'model_explorer', {'model_name':'target'}, name='targets'),
|
||||
url(r'^target_files$', 'model_explorer', {'model_name':'target_file'}, name='target_file'),
|
||||
url(r'^target_image_file$', 'model_explorer', {'model_name':'target_image_file'}, name='target_image_file'),
|
||||
url(r'^tasks$', 'model_explorer', {'model_name':'task'}, name='task'),
|
||||
url(r'^task_dependencies$', 'model_explorer', {'model_name':'task_dependency'}, name='task_dependencies'),
|
||||
url(r'^packages$', 'model_explorer', {'model_name':'package'}, name='package'),
|
||||
url(r'^package_dependencies$', 'model_explorer', {'model_name':'package_dependency'}, name='package_dependency'),
|
||||
url(r'^target_packages$', 'model_explorer', {'model_name':'target_installed_package'}, name='target_packages'),
|
||||
url(r'^target_installed_packages$', 'model_explorer', {'model_name':'target_installed_package'}, name='target_installed_package'),
|
||||
url(r'^package_files$', 'model_explorer', {'model_name':'build_file'}, name='build_file'),
|
||||
url(r'^layers$', 'model_explorer', {'model_name':'layer'}, name='layer'),
|
||||
url(r'^layerversions$', 'model_explorer', {'model_name':'layerversion'}, name='layerversion'),
|
||||
url(r'^recipes$', 'model_explorer', {'model_name':'recipe'}, name='recipe'),
|
||||
url(r'^recipe_dependencies$', 'model_explorer', {'model_name':'recipe_dependency'}, name='recipe_dependencies'),
|
||||
url(r'^variables$', 'model_explorer', {'model_name':'variable'}, name='variables'),
|
||||
url(r'^variableshistory$', 'model_explorer', {'model_name':'variablehistory'}, name='variablehistory'),
|
||||
url(r'^logmessages$', 'model_explorer', {'model_name':'logmessage'}, name='logmessages'),
|
||||
)
|
||||
4797
bitbake/lib/toaster/bldviewer/static/css/bootstrap.css
vendored
Normal file
4797
bitbake/lib/toaster/bldviewer/static/css/bootstrap.css
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1982
bitbake/lib/toaster/bldviewer/static/js/bootstrap.js
vendored
Normal file
1982
bitbake/lib/toaster/bldviewer/static/js/bootstrap.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
6
bitbake/lib/toaster/bldviewer/static/js/jquery-2.0.3.js
vendored
Normal file
6
bitbake/lib/toaster/bldviewer/static/js/jquery-2.0.3.js
vendored
Normal file
File diff suppressed because one or more lines are too long
30
bitbake/lib/toaster/bldviewer/templates/simple_base.html
Normal file
30
bitbake/lib/toaster/bldviewer/templates/simple_base.html
Normal file
@@ -0,0 +1,30 @@
|
||||
<!DOCTYPE html>
|
||||
{% load static %}
|
||||
<html>
|
||||
<head>
|
||||
<title>Toaster Simple Explorer</title>
|
||||
<script src="{% static 'js/jquery-2.0.3.js' %}">
|
||||
</script>
|
||||
<script src="{% static 'js/bootstrap.js' %}">
|
||||
</script>
|
||||
<link href="{% static 'css/bootstrap.css' %}" rel="stylesheet" type="text/css">
|
||||
</head>
|
||||
|
||||
<body style="height: 100%">
|
||||
<div style="width:100%; height: 100%; position:absolute">
|
||||
<div style="width: 100%; height: 3em" class="nav">
|
||||
<ul class="nav nav-tabs">
|
||||
<li><a href="{% url "simple-all-builds" %}">All Builds</a></li>
|
||||
<li><a href="{% url "simple-all-layers" %}">All Layers</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div style="overflow-y:scroll; width: 100%; position: absolute; top: 3em; bottom:70px ">
|
||||
{% block pagecontent %}
|
||||
{% endblock %}
|
||||
</div>
|
||||
<div class="navbar" style="position: absolute; bottom: 0; width:100%"><br/>About Toaster | Yocto Project </div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
{% extends "simple_basetable.html" %}
|
||||
|
||||
{% block pagename %}
|
||||
<ul class="nav nav-tabs" style="display: inline-block">
|
||||
<li><a>Build {{build.target_set.all|join:" "}} at {{build.started_on}} : </a></li>
|
||||
<li><a href="{% url "simple-task" build.id %}"> Tasks </a></li>
|
||||
<li><a href="{% url "simple-bpackage" build.id %}"> Build Packages </a></li>
|
||||
{% for t in build.target_set.all %}
|
||||
{% if t.is_image %}
|
||||
<li><a href="{% url "simple-tpackage" build.id t.pk %}"> Packages for {{t.target}} </a> </li>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
<li><a href="{% url "simple-configuration" build.id %}"> Configuration </a> </li>
|
||||
</ul>
|
||||
<h1>Toaster - Build {% block pagetitle %} {% endblock %}</h1>
|
||||
{% endblock %}
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
{% extends "simple_base.html" %}
|
||||
|
||||
{% block pagecontent %}
|
||||
<script>
|
||||
function showhideTableColumn(i, sh) {
|
||||
if (sh)
|
||||
$('td:nth-child('+i+'),th:nth-child('+i+')').show();
|
||||
else
|
||||
$('td:nth-child('+i+'),th:nth-child('+i+')').hide();
|
||||
}
|
||||
|
||||
|
||||
function filterTableRows(test) {
|
||||
if (test.length > 0) {
|
||||
var r = test.split(/[ ,]+/).map(function (e) { return new RegExp(e, 'i') });
|
||||
$('tr.data').map( function (i, el) {
|
||||
(! r.map(function (j) { return j.test($(el).html())}).reduce(function (c, p) { return c && p;} )) ? $(el).hide() : $(el).show();
|
||||
});
|
||||
} else
|
||||
{
|
||||
$('tr.data').show();
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<div style="margin-bottom: 0.5em">
|
||||
|
||||
{% block pagename %}
|
||||
{% endblock %}
|
||||
<div align="left" style="display:inline-block; width: 40%; margin-left: 2em"> Filter: <input type="search" id="filterstring" style="width: 80%" onkeyup="filterTableRows($('#filterstring').val())" autocomplete="off">
|
||||
</div>
|
||||
{% if hideshowcols %}
|
||||
<div align="right" style="display: inline-block; width: 40%">Show/Hide columns:
|
||||
{% for i in hideshowcols %}
|
||||
<span>{{i.name}} <input type="checkbox" id="ct{{i.name}}" onchange="showhideTableColumn({{i.order}}, $('#ct{{i.name}}').is(':checked'))" checked autocomplete="off"></span> |
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div style="display: block; float:right; margin-left: auto; margin-right:5em"><span class="pagination" style="vertical-align: top; margin-right: 3em">Showing {{objects.start_index}} to {{objects.end_index}} out of {{objects.paginator.count}} entries. </span>
|
||||
<ul class="pagination" style="display: block-inline">
|
||||
{%if objects.has_previous %}
|
||||
<li><a href="?page={{objects.previous_page_number}}">«</a></li>
|
||||
{%else%}
|
||||
<li class="disabled"><a href="#">«</a></li>
|
||||
{%endif%}
|
||||
{% for i in objects.page_range %}
|
||||
<li{%if i == objects.number %} class="active" {%endif%}><a href="?page={{i}}">{{i}}</a></li>
|
||||
{% endfor %}
|
||||
{%if objects.has_next%}
|
||||
<li><a href="?page={{objects.next_page_number}}">»</a></li>
|
||||
{%else%}
|
||||
<li class="disabled"><a href="#">»</a></li>
|
||||
{%endif%}
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<table class="table table-striped table-condensed" style="width:95%">
|
||||
{% block pagetable %}
|
||||
{% endblock %}
|
||||
</table>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
24
bitbake/lib/toaster/bldviewer/templates/simple_bfile.html
Normal file
24
bitbake/lib/toaster/bldviewer/templates/simple_bfile.html
Normal file
@@ -0,0 +1,24 @@
|
||||
{% extends "simple_basebuildpage.html" %}
|
||||
|
||||
{% block pagetitle %}Files for package {{objects.0.bpackage.name}} {% endblock %}
|
||||
{% block pagetable %}
|
||||
{% if not objects %}
|
||||
<p>No files were recorded for this package!</p>
|
||||
{% else %}
|
||||
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Size (Bytes)</th>
|
||||
</tr>
|
||||
|
||||
{% for file in objects %}
|
||||
|
||||
<tr class="data">
|
||||
<td>{{file.path}}</td>
|
||||
<td>{{file.size}}</td>
|
||||
|
||||
{% endfor %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
44
bitbake/lib/toaster/bldviewer/templates/simple_bpackage.html
Normal file
44
bitbake/lib/toaster/bldviewer/templates/simple_bpackage.html
Normal file
@@ -0,0 +1,44 @@
|
||||
{% extends "simple_basebuildpage.html" %}
|
||||
|
||||
{% block pagetitle %}Packages{% endblock %}
|
||||
{% block pagetable %}
|
||||
{% if not objects %}
|
||||
<p>No packages were recorded for this target!</p>
|
||||
{% else %}
|
||||
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Version</th>
|
||||
<th>Recipe</th>
|
||||
<th>Summary</th>
|
||||
<th>Section</th>
|
||||
<th>Description</th>
|
||||
<th>Size on host disk (Bytes)</th>
|
||||
<th>License</th>
|
||||
<th>Dependencies List (all)</th>
|
||||
</tr>
|
||||
|
||||
{% for package in objects %}
|
||||
|
||||
<tr class="data">
|
||||
<td><a name="#{{package.name}}" href="{% url "simple-bfile" build.pk package.pk %}">{{package.name}} ({{package.filelist_bpackage.count}} files)</a></td>
|
||||
<td>{{package.version}}-{{package.revision}}</td>
|
||||
<td>{%if package.recipe%}<a href="{% url "simple-layer_versions_recipes" package.recipe.layer_version_id %}#{{package.recipe.name}}">{{package.recipe.name}}</a>{{package.package_name}}</a>{%endif%}</td>
|
||||
|
||||
<td>{{package.summary}}</td>
|
||||
<td>{{package.section}}</td>
|
||||
<td>{{package.description}}</td>
|
||||
<td>{{package.size}}</td>
|
||||
<td>{{package.license}}</td>
|
||||
<td>
|
||||
<div style="height: 3em; overflow:auto">
|
||||
{% for bpd in package.package_dependencies_source.all %}
|
||||
{{bpd.dep_type}}: {{bpd.depends_on.name}} <br/>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</td>
|
||||
{% endfor %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
43
bitbake/lib/toaster/bldviewer/templates/simple_build.html
Normal file
43
bitbake/lib/toaster/bldviewer/templates/simple_build.html
Normal file
@@ -0,0 +1,43 @@
|
||||
{% extends "simple_basetable.html" %}
|
||||
|
||||
{% block pagename %}
|
||||
<h1>Toaster - Builds</h1>
|
||||
{% endblock %}
|
||||
|
||||
{% block pagetable %}
|
||||
|
||||
{% load simple_projecttags %}
|
||||
<tr>
|
||||
<th>Outcome</th>
|
||||
<th>Started On</th>
|
||||
<th>Completed On</th>
|
||||
<th>Target</th>
|
||||
<th>Machine</th>
|
||||
<th>Time</th>
|
||||
<th>Errors</th>
|
||||
<th>Warnings</th>
|
||||
<th>Output</th>
|
||||
<th>Log</th>
|
||||
<th>Bitbake Version</th>
|
||||
<th>Build Name</th>
|
||||
</tr>
|
||||
{% for build in objects %}
|
||||
<tr class="data">
|
||||
<td><a href="{% url "simple-configuration" build.id %}">{{build.get_outcome_display}}</a></td>
|
||||
<td>{{build.started_on}}</td>
|
||||
<td>{{build.completed_on}}</td>
|
||||
<td>{% for t in build.target_set.all %}{%if t.is_image %}<a href="{% url "simple-tpackage" build.id t.id %}">{% endif %}{{t.target}}{% if t.is_image %}</a>{% endif %}<br/>{% endfor %}</td>
|
||||
<td>{{build.machine}}</td>
|
||||
<td>{% time_difference build.started_on build.completed_on %}</td>
|
||||
<td>{{build.errors_no}}:{% if build.errors_no %}{% for error in logs %}{% if error.build == build %}{% if error.level == 2 %}<p>{{error.message}}</p>{% endif %}{% endif %}{% endfor %}{% else %}None{% endif %}</td>
|
||||
<td>{{build.warnings_no}}:{% if build.warnings_no %}{% for warning in logs %}{% if warning.build == build %}{% if warning.level == 1 %}<p>{{warning.message}}</p>{% endif %}{% endif %}{% endfor %}{% else %}None{% endif %}</td>
|
||||
<td>TBD: determine image file list</td>
|
||||
<td>{{build.cooker_log_path}}</td>
|
||||
<td>{{build.bitbake_version}}</td>
|
||||
<td>{{build.build_name}}</td>
|
||||
</tr>
|
||||
|
||||
{% endfor %}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
{% extends "simple_basebuildpage.html" %}
|
||||
|
||||
{% block pagetitle %}Configuration{% endblock %}
|
||||
{% block pagetable %}
|
||||
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Description</th>
|
||||
<th>Definition history</th>
|
||||
<th>Value</th>
|
||||
</tr>
|
||||
|
||||
{% for variable in objects %}
|
||||
|
||||
<tr class="data">
|
||||
<td>{{variable.variable_name}}</td>
|
||||
<td>{% if variable.description %}{{variable.description}}{% endif %}</td>
|
||||
<td>{% for vh in variable.variablehistory_set.all %}{{vh.operation}} in {{vh.file_name}}:{{vh.line_number}}<br/>{%endfor%}</td>
|
||||
<td>{{variable.variable_value}}</td>
|
||||
{% endfor %}
|
||||
|
||||
{% endblock %}
|
||||
34
bitbake/lib/toaster/bldviewer/templates/simple_layer.html
Normal file
34
bitbake/lib/toaster/bldviewer/templates/simple_layer.html
Normal file
@@ -0,0 +1,34 @@
|
||||
{% extends "simple_basetable.html" %}
|
||||
|
||||
{% block pagename %}
|
||||
<h1>Toaster - Layers</h1>
|
||||
{% endblock %}
|
||||
|
||||
{% block pagetable %}
|
||||
{% load simple_projecttags %}
|
||||
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Local Path</th>
|
||||
<th>Layer Index URL</th>
|
||||
<th>Known Versions</th>
|
||||
</tr>
|
||||
|
||||
{% for layer in objects %}
|
||||
|
||||
<tr class="data">
|
||||
<td>{{layer.name}}</td>
|
||||
<td>{{layer.local_path}}</td>
|
||||
<td><a href='{{layer.layer_index_url}}'>{{layer.layer_index_url}}</a></td>
|
||||
<td><table>
|
||||
{% for lv in layer.versions %}
|
||||
<tr><td>
|
||||
<a href="{% url "simple-layer_versions_recipes" lv.id %}">({{lv.priority}}){{lv.branch}}:{{lv.commit}} ({{lv.count}} recipes)</a>
|
||||
</td></tr>
|
||||
{% endfor %}
|
||||
</table></td>
|
||||
</tr>
|
||||
|
||||
{% endfor %}
|
||||
|
||||
{% endblock %}
|
||||
36
bitbake/lib/toaster/bldviewer/templates/simple_package.html
Normal file
36
bitbake/lib/toaster/bldviewer/templates/simple_package.html
Normal file
@@ -0,0 +1,36 @@
|
||||
{% extends "simple_basebuildpage.html" %}
|
||||
|
||||
{% block pagetable %}
|
||||
{% if not objects %}
|
||||
<p>No packages were recorded for this target!</p>
|
||||
{% else %}
|
||||
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Version</th>
|
||||
<th>Size (Bytes)</th>
|
||||
<th>Recipe</th>
|
||||
<th>Depends on</th>
|
||||
</tr>
|
||||
|
||||
{% for package in objects %}
|
||||
|
||||
<tr class="data">
|
||||
<td><a name="#{{package.name}}">{{package.name}}</a></td>
|
||||
<td>{{package.version}}</td>
|
||||
<td>{{package.size}}</td>
|
||||
<td>{%if package.recipe %}<a name="{{package.recipe.name}}.{{package.package_name}}">
|
||||
<a href="{% url "simple-layer_versions_recipes" package.recipe.layer_version_id %}#{{package.recipe.name}}">{{package.recipe.name}}</a>{{package.package_name}}</a>{%endif%}</td>
|
||||
<td>
|
||||
<div style="height: 4em; overflow:auto">
|
||||
{% for d in package.package_dependencies_source.all %}
|
||||
<a href="#{{d.name}}">{{d.depends_on.name}}</a><br/>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</td>
|
||||
|
||||
{% endfor %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
50
bitbake/lib/toaster/bldviewer/templates/simple_recipe.html
Normal file
50
bitbake/lib/toaster/bldviewer/templates/simple_recipe.html
Normal file
@@ -0,0 +1,50 @@
|
||||
{% extends "simple_basetable.html" %}
|
||||
|
||||
{% block pagename %}
|
||||
<ul class="nav nav-tabs" style="display: inline-block">
|
||||
<li><a>Layer {{layer_version.layer.name}} : {{layer_version.branch}} : {{layer_version.commit}} : {{layer_version.priority}}</a></li>
|
||||
</ul>
|
||||
<h1>Toaster - Recipes for a Layer</h1>
|
||||
{% endblock %}
|
||||
|
||||
{% block pagetable %}
|
||||
{% load simple_projecttags %}
|
||||
|
||||
<tr>
|
||||
</tr>
|
||||
<th>Name</th>
|
||||
<th>Version</th>
|
||||
<th>Summary</th>
|
||||
<th>Description</th>
|
||||
<th>Section</th>
|
||||
<th>License</th>
|
||||
<th>Homepage</th>
|
||||
<th>Bugtracker</th>
|
||||
<th>File_path</th>
|
||||
<th style="width: 30em">Recipe Dependency</th>
|
||||
|
||||
|
||||
{% for recipe in objects %}
|
||||
|
||||
<tr class="data">
|
||||
<td><a name="{{recipe.name}}">{{recipe.name}}</a></td>
|
||||
<td>{{recipe.version}}</td>
|
||||
<td>{{recipe.summary}}</td>
|
||||
<td>{{recipe.description}}</td>
|
||||
<td>{{recipe.section}}</td>
|
||||
<td>{{recipe.license}}</td>
|
||||
<td>{{recipe.homepage}}</td>
|
||||
<td>{{recipe.bugtracker}}</td>
|
||||
<td>{{recipe.file_path}}</td>
|
||||
<td>
|
||||
<div style="height: 5em; overflow:auto">
|
||||
{% for rr in recipe.r_dependencies_recipe.all %}
|
||||
<a href="#{{rr.depends_on.name}}">{{rr.depends_on.name}}</a><br/>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
{% endfor %}
|
||||
|
||||
{% endblock %}
|
||||
71
bitbake/lib/toaster/bldviewer/templates/simple_task.html
Normal file
71
bitbake/lib/toaster/bldviewer/templates/simple_task.html
Normal file
@@ -0,0 +1,71 @@
|
||||
{% extends "simple_basebuildpage.html" %}
|
||||
|
||||
{% block pagetitle %}Tasks{% endblock %}
|
||||
{% block pagetable %}
|
||||
{% if not objects %}
|
||||
<p>No tasks were executed in this build!</p>
|
||||
{% else %}
|
||||
|
||||
<tr>
|
||||
<th>Order</th>
|
||||
<th>Task</th>
|
||||
<th>Recipe Version</th>
|
||||
<th>Task Type</th>
|
||||
<th>Checksum</th>
|
||||
<th>Outcome</th>
|
||||
<th>Message</th>
|
||||
<th>Time</th>
|
||||
<th>CPU usage</th>
|
||||
<th>Disk I/O</th>
|
||||
<th>Script type</th>
|
||||
<th>Filesystem</th>
|
||||
<th>Depends</th>
|
||||
</tr>
|
||||
|
||||
{% for task in objects %}
|
||||
|
||||
<tr class="data">
|
||||
<td>{{task.order}}</td>
|
||||
<td><a name="{{task.recipe.name}}.{{task.task_name}}">
|
||||
<a href="{% url "simple-layer_versions_recipes" task.recipe.layer_version_id %}#{{task.recipe.name}}">{{task.recipe.name}}</a>.{{task.task_name}}</a></td>
|
||||
<td>{{task.recipe.version}}</td>
|
||||
|
||||
{% if task.task_executed %}
|
||||
<td>Executed</td>
|
||||
{% else %}
|
||||
<td>Not Executed</td>
|
||||
{% endif %}
|
||||
|
||||
<td>{{task.sstate_checksum}}</td>
|
||||
<td>{{task.get_outcome_display}}{% if task.provider %}</br>(by <a href="#{{task.provider.recipe.name}}.{{task.provider.task_name}}">{{task.provider.recipe.name}}.{{task.provider.task_name}}</a>){% endif %}
|
||||
{% if task.outcome == task.OUTCOME_CACHED %}{% for t in task.get_related_setscene %}
|
||||
<br/>({{t.task_name}} {{t.get_outcome_display}})
|
||||
{% endfor %}{%endif%}
|
||||
</td>
|
||||
<td><p>{{task.message}}</td>
|
||||
<td>{{task.elapsed_time}}</td>
|
||||
<td>{{task.cpu_usage}}</td>
|
||||
<td>{{task.disk_io}}</td>
|
||||
<td>{{task.get_script_type_display}}</td>
|
||||
<td> <table>
|
||||
<tr><td> Recipe</td><td><a target="_fileview" href="file:///{{task.recipe.file_path}}">{{task.recipe.file_path}}</a></td></tr>
|
||||
<tr><td> Source</td><td><a target="_fileview" href="file:///{{task.file_name}}">{{task.file_name}}:{{task.line_number}}</a></td></tr>
|
||||
<tr><td> Workdir</td><td><a target="_fileview" href="file:///{{task.work_directory}}">{{task.work_directory}}</a></td></tr>
|
||||
<tr><td> Log</td><td><a target="_fileview" href="file:///{{task.logfile}}">{{task.logfile}}</a><br/></td></tr>
|
||||
</table>
|
||||
</td>
|
||||
<td>
|
||||
<div style="height: 3em; overflow:auto">
|
||||
{% for tt in task.task_dependencies_task.all %}
|
||||
<a href="#{{tt.depends_on.recipe.name}}.{{tt.depends_on.task_name}}">
|
||||
{{tt.depends_on.recipe.name}}.{{tt.depends_on.task_name}}</a><br/>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
{% endfor %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
@@ -0,0 +1,29 @@
|
||||
#
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from datetime import datetime
|
||||
from django import template
|
||||
|
||||
register = template.Library()
|
||||
|
||||
@register.simple_tag
|
||||
def time_difference(start_time, end_time):
|
||||
return end_time - start_time
|
||||
345
bitbake/lib/toaster/bldviewer/tests.py
Normal file
345
bitbake/lib/toaster/bldviewer/tests.py
Normal file
@@ -0,0 +1,345 @@
|
||||
"""
|
||||
This file demonstrates writing tests using the unittest module. These will pass
|
||||
when you run "manage.py test".
|
||||
|
||||
Replace this with more appropriate tests for your application.
|
||||
"""
|
||||
from django.test import TestCase
|
||||
from django.test.client import Client
|
||||
from django.db.models import Count, Q
|
||||
from orm.models import Target, Recipe, Recipe_Dependency, Layer_Version, Target_Installed_Package
|
||||
from orm.models import Build, Task, Layer, Package, Package_File, LogMessage, Variable, VariableHistory
|
||||
import json, os, re, urllib, shlex
|
||||
|
||||
|
||||
class Tests(TestCase):
|
||||
# fixtures = ['orm_views_testdata.json']
|
||||
|
||||
def setUp(self):
|
||||
raise Exception("The %s test data is not longer valid, tests disabled" % __name__)
|
||||
|
||||
def test_builds(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/builds')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
self.assertTrue(fields["machine"] == "qemux86")
|
||||
self.assertTrue(fields["distro"] == "poky")
|
||||
self.assertTrue(fields["image_fstypes"] == "tar.bz2 ext3")
|
||||
self.assertTrue(fields["bitbake_version"] == "1.21.1")
|
||||
self.assertTrue("1.5+snapshot-" in fields["distro_version"])
|
||||
self.assertEqual(fields["outcome"], 0)
|
||||
self.assertEqual(fields["errors_no"], 0)
|
||||
log_path = "/tmp/log/cooker/qemux86/"
|
||||
self.assertTrue(log_path in fields["cooker_log_path"])
|
||||
self.assertTrue(".log" in fields["cooker_log_path"])
|
||||
|
||||
def test_targets(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/targets')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
self.assertTrue(fields["is_image"] == True)
|
||||
self.assertTrue(fields["target"] == "core-image-minimal")
|
||||
|
||||
def test_tasks(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/tasks')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
recipe_id = self.get_recipes_id("pseudo-native")
|
||||
print recipe_id
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["build"] == 1 and fields["task_name"] == "do_populate_lic_setscene" and fields["recipe"] == recipe_id and fields["task_executed"] == True:
|
||||
self.assertTrue(fields["message"] == "recipe pseudo-native-1.5.1-r4: task do_populate_lic_setscene: Succeeded")
|
||||
self.assertTrue(fields["cpu_usage"] == "6.3")
|
||||
self.assertTrue(fields["disk_io"] == 124)
|
||||
self.assertTrue(fields["script_type"] == 2)
|
||||
self.assertTrue(fields["path_to_sstate_obj"] == "")
|
||||
self.assertTrue(fields["elapsed_time"] == "0.103494")
|
||||
self.assertTrue("tmp/work/i686-linux/pseudo-native/1.5.1-r4/temp/log.do_populate_lic_setscene.5867" in fields["logfile"])
|
||||
self.assertTrue(fields["sstate_result"] == 0)
|
||||
self.assertTrue(fields["outcome"] == 0)
|
||||
if fields["build"] == 1 and fields["task_name"] == "do_populate_lic" and fields["recipe"] == recipe_id and fields["task_executed"] == True:
|
||||
self.assertTrue(fields["cpu_usage"] == None)
|
||||
self.assertTrue(fields["disk_io"] == None)
|
||||
self.assertTrue(fields["script_type"] == 2)
|
||||
self.assertTrue(fields["path_to_sstate_obj"] == "")
|
||||
self.assertTrue(fields["elapsed_time"] == "0")
|
||||
self.assertTrue(fields["logfile"], None)
|
||||
self.assertTrue(fields["sstate_result"] == 3)
|
||||
self.assertTrue(fields["outcome"] == 2)
|
||||
|
||||
def test_layers(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/layers')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["name"] == "meta-yocto-bsp":
|
||||
self.assertTrue(fields["local_path"].endswith("meta-yocto-bsp"))
|
||||
self.assertTrue(fields["layer_index_url"] == "http://layers.openembedded.org/layerindex/layer/meta-yocto-bsp/")
|
||||
elif fields["name"] == "meta":
|
||||
self.assertTrue(fields["local_path"].endswith("/meta"))
|
||||
self.assertTrue(fields["layer_index_url"] == "http://layers.openembedded.org/layerindex/layer/openembedded-core/")
|
||||
elif fields["name"] == "meta-yocto":
|
||||
self.assertTrue(fields["local_path"].endswith("/meta-yocto"))
|
||||
self.assertTrue(fields["layer_index_url"] == "http://layers.openembedded.org/layerindex/layer/meta-yocto/")
|
||||
|
||||
def test_layerversions(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/layerversions')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
layer_id = self.get_layer_id("meta")
|
||||
find = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["layer"] == layer_id:
|
||||
find = True
|
||||
self.assertTrue(fields["build"] == 1)
|
||||
self.assertTrue(fields["priority"] == 5)
|
||||
self.assertTrue(fields["branch"] == "master")
|
||||
self.assertTrue(find == True)
|
||||
|
||||
def test_recipes(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/recipes')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
find = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["name"] == "busybox":
|
||||
find = True
|
||||
self.assertTrue(fields["version"] == "1.21.1-r0")
|
||||
self.assertTrue(fields["license"] == "GPLv2 & bzip2")
|
||||
self.assertTrue(fields["file_path"].endswith("/meta/recipes-core/busybox/busybox_1.21.1.bb"))
|
||||
self.assertTrue(fields["summary"] == "Tiny versions of many common UNIX utilities in a single small executable.")
|
||||
self.assertTrue(fields["description"] == "BusyBox combines tiny versions of many common UNIX utilities into a single small executable. It provides minimalist replacements for most of the utilities you usually find in GNU fileutils, shellutils, etc. The utilities in BusyBox generally have fewer options than their full-featured GNU cousins; however, the options that are included provide the expected functionality and behave very much like their GNU counterparts. BusyBox provides a fairly complete POSIX environment for any small or embedded system.")
|
||||
self.assertTrue(fields["bugtracker"] == "https://bugs.busybox.net/")
|
||||
self.assertTrue(fields["homepage"] == "http://www.busybox.net")
|
||||
self.assertTrue(fields["section"] == "base")
|
||||
self.assertTrue(find == True)
|
||||
|
||||
def test_task_dependencies(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/task_dependencies')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
ids = self.get_task_id()
|
||||
do_install = ids["do_install"]
|
||||
do_compile = ids["do_compile"]
|
||||
entry = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["task"] == do_install and fields["depends_on"] == do_compile:
|
||||
entry = True
|
||||
self.assertTrue(entry == True)
|
||||
|
||||
def test_target_installed_package(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/target_installed_packages')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
package = self.get_package_id("udev-utils")
|
||||
find = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["package"] == package:
|
||||
self.assertTrue(fields["target"], 1)
|
||||
find = True
|
||||
self.assertTrue(find, True)
|
||||
|
||||
def test_packages(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/packages')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(response['count'] > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["name"] == "base-files-dev":
|
||||
self.assertTrue(fields["license"] == "GPLv2")
|
||||
self.assertTrue(fields["description"] == "The base-files package creates the basic system directory structure and provides a small set of key configuration files for the system. This package contains symbolic links, header files, and related items necessary for software development.")
|
||||
self.assertTrue(fields["summary"] == "Miscellaneous files for the base system. - Development files")
|
||||
self.assertTrue(fields["version"] == "3.0.14")
|
||||
self.assertTrue(fields["build"] == 1)
|
||||
self.assertTrue(fields["section"] == "devel")
|
||||
self.assertTrue(fields["revision"] == "r73")
|
||||
self.assertTrue(fields["size"] == 0)
|
||||
self.assertTrue(fields["installed_size"] == 0)
|
||||
self.assertTrue(self.get_recipe_name(fields["recipe"]) == "base-files")
|
||||
|
||||
def test_package_dependencies(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/package_dependencies')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
build_package = self.get_package_id("busybox")
|
||||
build_package_id = self.get_package_id("busybox-syslog")
|
||||
entry = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
self.assertTrue(fields["target"] == 1)
|
||||
if fields["package"] == build_package and fields["dep_type"] == 7 and fields["depends_on"] == build_package_id:
|
||||
entry = True
|
||||
self.assertTrue(entry == True)
|
||||
|
||||
def test_recipe_dependencies(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/recipe_dependencies')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
depends_on = self.get_recipes_id("autoconf-native")
|
||||
recipe = self.get_recipes_id("ncurses")
|
||||
entry = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["recipe"] == recipe and fields["depends_on"] == depends_on and fields["dep_type"] == 0:
|
||||
entry = True
|
||||
self.assertTrue(entry == True)
|
||||
|
||||
def test_package_files(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/package_files')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
build_package = self.get_package_id("base-files")
|
||||
entry = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["path"] == "/etc/motd" and fields["package"] == build_package and fields["size"] == 0:
|
||||
entry = True
|
||||
self.assertTrue(entry == True)
|
||||
|
||||
def test_Variable(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/variables')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
self.assertTrue(fields["build"] == 1)
|
||||
if fields["variable_name"] == "USRBINPATH":
|
||||
self.assertTrue(fields["variable_value"] == "/usr/bin")
|
||||
self.assertTrue(fields["changed"] == False)
|
||||
self.assertTrue(fields["description"] == "")
|
||||
if fields["variable_name"] == "PREFERRED_PROVIDER_virtual/libx11":
|
||||
self.assertTrue(fields["variable_value"] == "libx11")
|
||||
self.assertTrue(fields["changed"] == False)
|
||||
self.assertTrue(fields["description"] == "If multiple recipes provide an item, this variable determines which recipe should be given preference.")
|
||||
if fields["variable_name"] == "base_libdir_nativesdk":
|
||||
self.assertTrue(fields["variable_value"] == "/lib")
|
||||
|
||||
def test_VariableHistory(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/variableshistory')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
variable_id = self.get_variable_id("STAGING_INCDIR_NATIVE")
|
||||
find = False
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["variable"] == variable_id:
|
||||
find = True
|
||||
self.assertTrue(fields["file_name"] == "conf/bitbake.conf")
|
||||
self.assertTrue(fields["operation"] == "set")
|
||||
self.assertTrue(fields["line_number"] == 358)
|
||||
self.assertTrue(find == True)
|
||||
|
||||
def get_task_id(self):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/tasks')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["recipe"] == 7 and fields["task_name"] == "do_install":
|
||||
do_install = item["pk"]
|
||||
if fields["recipe"] == 7 and fields["task_name"] == "do_compile":
|
||||
do_compile = item["pk"]
|
||||
result = {}
|
||||
result["do_install"] = do_install
|
||||
result["do_compile"] = do_compile
|
||||
return result
|
||||
|
||||
def get_recipes_id(self, value):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/recipes')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["name"] == value:
|
||||
return item["pk"]
|
||||
return None
|
||||
|
||||
def get_recipe_name(self, value):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/recipes')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if item["pk"] == value:
|
||||
return fields["name"]
|
||||
return None
|
||||
|
||||
def get_layer_id(self, value):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/layers')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["name"] == value:
|
||||
return item["pk"]
|
||||
return None
|
||||
|
||||
def get_package_id(self, field):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/packages')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(response['count'] > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["name"] == field:
|
||||
return item["pk"]
|
||||
return None
|
||||
|
||||
def get_variable_id(self, field):
|
||||
client = Client()
|
||||
resp = client.get('http://localhost:8000/api/1.0/variables')
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
response = json.loads(resp.content)
|
||||
self.assertTrue(len(json.loads(response['list'])) > 0)
|
||||
for item in json.loads(response['list']):
|
||||
fields = item['fields']
|
||||
if fields["variable_name"] == field:
|
||||
return item["pk"]
|
||||
return None
|
||||
35
bitbake/lib/toaster/bldviewer/urls.py
Normal file
35
bitbake/lib/toaster/bldviewer/urls.py
Normal file
@@ -0,0 +1,35 @@
|
||||
#
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from django.conf.urls import patterns, include, url
|
||||
from django.views.generic import RedirectView
|
||||
|
||||
urlpatterns = patterns('bldviewer.views',
|
||||
url(r'^builds/$', 'build', name='simple-all-builds'),
|
||||
url(r'^build/(?P<build_id>\d+)/task/$', 'task', name='simple-task'),
|
||||
url(r'^build/(?P<build_id>\d+)/packages/$', 'bpackage', name='simple-bpackage'),
|
||||
url(r'^build/(?P<build_id>\d+)/package/(?P<package_id>\d+)/files/$', 'bfile', name='simple-bfile'),
|
||||
url(r'^build/(?P<build_id>\d+)/target/(?P<target_id>\d+)/packages/$', 'tpackage', name='simple-tpackage'),
|
||||
url(r'^build/(?P<build_id>\d+)/configuration/$', 'configuration', name='simple-configuration'),
|
||||
url(r'^layers/$', 'layer', name='simple-all-layers'),
|
||||
url(r'^layerversions/(?P<layerversion_id>\d+)/recipes/.*$', 'layer_versions_recipes', name='simple-layer_versions_recipes'),
|
||||
url(r'^$', RedirectView.as_view( url= 'builds/')),
|
||||
)
|
||||
287
bitbake/lib/toaster/bldviewer/views.py
Normal file
287
bitbake/lib/toaster/bldviewer/views.py
Normal file
@@ -0,0 +1,287 @@
|
||||
#
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import operator
|
||||
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import render
|
||||
from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe, LogMessage, Variable, Target_Installed_Package
|
||||
from orm.models import Task_Dependency, Recipe_Dependency, Package, Package_File, Package_Dependency
|
||||
from orm.models import Target_Installed_Package, VariableHistory, Target_Image_File, Target_File
|
||||
from django.views.decorators.cache import cache_control
|
||||
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
|
||||
|
||||
|
||||
def _build_page_range(paginator, index = 1):
|
||||
try:
|
||||
page = paginator.page(index)
|
||||
except PageNotAnInteger:
|
||||
page = paginator.page(1)
|
||||
except EmptyPage:
|
||||
page = paginator.page(paginator.num_pages)
|
||||
|
||||
page.page_range = [page.number]
|
||||
crt_range = 0
|
||||
for i in range(1,5):
|
||||
if (page.number + i) <= paginator.num_pages:
|
||||
page.page_range = page.page_range + [ page.number + i]
|
||||
crt_range +=1
|
||||
if (page.number - i) > 0:
|
||||
page.page_range = [page.number -i] + page.page_range
|
||||
crt_range +=1
|
||||
if crt_range == 4:
|
||||
break
|
||||
return page
|
||||
|
||||
@cache_control(no_store=True)
|
||||
def build(request):
|
||||
template = 'simple_build.html'
|
||||
logs = LogMessage.objects.all()
|
||||
|
||||
build_info = _build_page_range(Paginator(Build.objects.order_by("-id"), 10),request.GET.get('page', 1))
|
||||
|
||||
context = {'objects': build_info, 'logs': logs ,
|
||||
'hideshowcols' : [
|
||||
{'name': 'Output', 'order':10},
|
||||
{'name': 'Log', 'order':11},
|
||||
]}
|
||||
|
||||
return render(request, template, context)
|
||||
|
||||
|
||||
def _find_task_revdep(task):
|
||||
tp = []
|
||||
for p in Task_Dependency.objects.filter(depends_on=task):
|
||||
tp.append(p.task);
|
||||
return tp
|
||||
|
||||
def _find_task_provider(task):
|
||||
task_revdeps = _find_task_revdep(task)
|
||||
for tr in task_revdeps:
|
||||
if tr.outcome != Task.OUTCOME_COVERED:
|
||||
return tr
|
||||
for tr in task_revdeps:
|
||||
trc = _find_task_provider(tr)
|
||||
if trc is not None:
|
||||
return trc
|
||||
return None
|
||||
|
||||
def task(request, build_id):
|
||||
template = 'simple_task.html'
|
||||
|
||||
tasks = _build_page_range(Paginator(Task.objects.filter(build=build_id, order__gt=0), 100),request.GET.get('page', 1))
|
||||
|
||||
for t in tasks:
|
||||
if t.outcome == Task.OUTCOME_COVERED:
|
||||
t.provider = _find_task_provider(t)
|
||||
|
||||
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects': tasks}
|
||||
|
||||
return render(request, template, context)
|
||||
|
||||
def configuration(request, build_id):
|
||||
template = 'simple_configuration.html'
|
||||
variables = _build_page_range(Paginator(Variable.objects.filter(build=build_id), 50), request.GET.get('page', 1))
|
||||
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : variables}
|
||||
return render(request, template, context)
|
||||
|
||||
def bpackage(request, build_id):
|
||||
template = 'simple_bpackage.html'
|
||||
packages = Package.objects.filter(build = build_id)
|
||||
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : packages}
|
||||
return render(request, template, context)
|
||||
|
||||
def bfile(request, build_id, package_id):
|
||||
template = 'simple_bfile.html'
|
||||
files = Package_File.objects.filter(package = package_id)
|
||||
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : files}
|
||||
return render(request, template, context)
|
||||
|
||||
def tpackage(request, build_id, target_id):
|
||||
template = 'simple_package.html'
|
||||
packages = map(lambda x: x.package, list(Target_Installed_Package.objects.filter(target=target_id)))
|
||||
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : packages}
|
||||
return render(request, template, context)
|
||||
|
||||
def layer(request):
|
||||
template = 'simple_layer.html'
|
||||
layer_info = Layer.objects.all()
|
||||
|
||||
for li in layer_info:
|
||||
li.versions = Layer_Version.objects.filter(layer = li)
|
||||
for liv in li.versions:
|
||||
liv.count = Recipe.objects.filter(layer_version__id = liv.id).count()
|
||||
|
||||
context = {'objects': layer_info}
|
||||
|
||||
return render(request, template, context)
|
||||
|
||||
|
||||
def layer_versions_recipes(request, layerversion_id):
|
||||
template = 'simple_recipe.html'
|
||||
recipes = Recipe.objects.filter(layer_version__id = layerversion_id)
|
||||
|
||||
context = {'objects': recipes,
|
||||
'layer_version' : Layer_Version.objects.filter( id = layerversion_id )[0]
|
||||
}
|
||||
|
||||
return render(request, template, context)
|
||||
|
||||
#### API
|
||||
|
||||
import json
|
||||
from django.core import serializers
|
||||
from django.http import HttpResponse, HttpResponseBadRequest
|
||||
|
||||
|
||||
def model_explorer(request, model_name):
|
||||
|
||||
DESCENDING = 'desc'
|
||||
response_data = {}
|
||||
model_mapping = {
|
||||
'build': Build,
|
||||
'target': Target,
|
||||
'target_file': Target_File,
|
||||
'target_image_file': Target_Image_File,
|
||||
'task': Task,
|
||||
'task_dependency': Task_Dependency,
|
||||
'package': Package,
|
||||
'layer': Layer,
|
||||
'layerversion': Layer_Version,
|
||||
'recipe': Recipe,
|
||||
'recipe_dependency': Recipe_Dependency,
|
||||
'package': Package,
|
||||
'package_dependency': Package_Dependency,
|
||||
'target_installed_package': Target_Installed_Package,
|
||||
'build_file': Package_File,
|
||||
'variable': Variable,
|
||||
'variablehistory': VariableHistory,
|
||||
'logmessage': LogMessage,
|
||||
}
|
||||
|
||||
if model_name not in model_mapping.keys():
|
||||
return HttpResponseBadRequest()
|
||||
|
||||
model = model_mapping[model_name]
|
||||
|
||||
try:
|
||||
limit = int(request.GET.get('limit', 0))
|
||||
except ValueError:
|
||||
limit = 0
|
||||
|
||||
try:
|
||||
offset = int(request.GET.get('offset', 0))
|
||||
except ValueError:
|
||||
offset = 0
|
||||
|
||||
ordering_string, invalid = _validate_input(request.GET.get('orderby', ''),
|
||||
model)
|
||||
if invalid:
|
||||
return HttpResponseBadRequest()
|
||||
|
||||
filter_string, invalid = _validate_input(request.GET.get('filter', ''),
|
||||
model)
|
||||
if invalid:
|
||||
return HttpResponseBadRequest()
|
||||
|
||||
search_term = request.GET.get('search', '')
|
||||
|
||||
if filter_string:
|
||||
filter_terms = _get_filtering_terms(filter_string)
|
||||
try:
|
||||
queryset = model.objects.filter(**filter_terms)
|
||||
except ValueError:
|
||||
queryset = []
|
||||
else:
|
||||
queryset = model.objects.all()
|
||||
|
||||
if search_term:
|
||||
queryset = _get_search_results(search_term, queryset, model)
|
||||
|
||||
if ordering_string and queryset:
|
||||
column, order = ordering_string.split(':')
|
||||
if order.lower() == DESCENDING:
|
||||
queryset = queryset.order_by('-' + column)
|
||||
else:
|
||||
queryset = queryset.order_by(column)
|
||||
|
||||
if offset and limit:
|
||||
queryset = queryset[offset:(offset+limit)]
|
||||
elif offset:
|
||||
queryset = queryset[offset:]
|
||||
elif limit:
|
||||
queryset = queryset[:limit]
|
||||
|
||||
if queryset:
|
||||
response_data['count'] = queryset.count()
|
||||
else:
|
||||
response_data['count'] = 0
|
||||
|
||||
response_data['list'] = serializers.serialize('json', queryset)
|
||||
|
||||
return HttpResponse(json.dumps(response_data),
|
||||
content_type='application/json')
|
||||
|
||||
def _get_filtering_terms(filter_string):
|
||||
|
||||
search_terms = filter_string.split(":")
|
||||
keys = search_terms[0].split(',')
|
||||
values = search_terms[1].split(',')
|
||||
|
||||
return dict(zip(keys, values))
|
||||
|
||||
def _validate_input(input, model):
|
||||
|
||||
invalid = 0
|
||||
|
||||
if input:
|
||||
input_list = input.split(":")
|
||||
|
||||
# Check we have only one colon
|
||||
if len(input_list) != 2:
|
||||
invalid = 1
|
||||
return None, invalid
|
||||
|
||||
# Check we have an equal number of terms both sides of the colon
|
||||
if len(input_list[0].split(',')) != len(input_list[1].split(',')):
|
||||
invalid = 1
|
||||
return None, invalid
|
||||
|
||||
# Check we are looking for a valid field
|
||||
valid_fields = model._meta.get_all_field_names()
|
||||
for field in input_list[0].split(','):
|
||||
if field not in valid_fields:
|
||||
invalid = 1
|
||||
return None, invalid
|
||||
|
||||
return input, invalid
|
||||
|
||||
def _get_search_results(search_term, queryset, model):
|
||||
search_objects = []
|
||||
for st in search_term.split(" "):
|
||||
q_map = map(lambda x: Q(**{x+'__icontains': st}),
|
||||
model.search_allowed_fields)
|
||||
|
||||
search_objects.append(reduce(operator.or_, q_map))
|
||||
search_object = reduce(operator.and_, search_objects)
|
||||
queryset = queryset.filter(search_object)
|
||||
|
||||
return queryset
|
||||
@@ -1,6 +0,0 @@
|
||||
contrib directory for toaster
|
||||
|
||||
This directory holds code that works with Toaster, without being an integral part of the Toaster project.
|
||||
It is intended for testing code, testing fixtures, tools for Toaster, etc.
|
||||
|
||||
NOTE: This directory is NOT a Python module.
|
||||
@@ -1,10 +0,0 @@
|
||||
*.pyc
|
||||
*.swp
|
||||
*.swo
|
||||
*.kpf
|
||||
*.egg-info/
|
||||
.idea
|
||||
.tox
|
||||
tmp/
|
||||
dist/
|
||||
.DS_Store
|
||||
@@ -1,50 +0,0 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.7"
|
||||
- "3.4"
|
||||
services:
|
||||
- mysql
|
||||
- postgresql
|
||||
env:
|
||||
- DJANGO=1.4 DB=sqlite
|
||||
- DJANGO=1.4 DB=mysql
|
||||
- DJANGO=1.4 DB=postgres
|
||||
- DJANGO=1.5 DB=sqlite
|
||||
- DJANGO=1.5 DB=mysql
|
||||
- DJANGO=1.5 DB=postgres
|
||||
- DJANGO=1.6 DB=sqlite
|
||||
- DJANGO=1.6 DB=mysql
|
||||
- DJANGO=1.6 DB=postgres
|
||||
- DJANGO=1.7 DB=sqlite
|
||||
- DJANGO=1.7 DB=mysql
|
||||
- DJANGO=1.7 DB=postgres
|
||||
|
||||
matrix:
|
||||
exclude:
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.4 DB=sqlite
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.4 DB=mysql
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.4 DB=postgres
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.5 DB=sqlite
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.5 DB=mysql
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.5 DB=postgres
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.6 DB=mysql
|
||||
- python: "3.4"
|
||||
env: DJANGO=1.7 DB=mysql
|
||||
|
||||
before_script:
|
||||
- mysql -e 'create database aggregation;'
|
||||
- psql -c 'create database aggregation;' -U postgres
|
||||
install:
|
||||
- pip install six
|
||||
- if [ "$DB" == "mysql" ]; then pip install mysql-python; fi
|
||||
- if [ "$DB" == "postgres" ]; then pip install psycopg2; fi
|
||||
- pip install -q Django==$DJANGO --use-mirrors
|
||||
script:
|
||||
- ./runtests.py --settings=tests.test_$DB
|
||||
@@ -1,21 +0,0 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2012 Henrique Bastos
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
@@ -1,156 +0,0 @@
|
||||
Django Aggregate If: Condition aggregates for Django
|
||||
====================================================
|
||||
|
||||
.. image:: https://travis-ci.org/henriquebastos/django-aggregate-if.png?branch=master
|
||||
:target: https://travis-ci.org/henriquebastos/django-aggregate-if
|
||||
:alt: Test Status
|
||||
|
||||
.. image:: https://landscape.io/github/henriquebastos/django-aggregate-if/master/landscape.png
|
||||
:target: https://landscape.io/github/henriquebastos/django-aggregate-if/master
|
||||
:alt: Code Helth
|
||||
|
||||
.. image:: https://pypip.in/v/django-aggregate-if/badge.png
|
||||
:target: https://crate.io/packages/django-aggregate-if/
|
||||
:alt: Latest PyPI version
|
||||
|
||||
.. image:: https://pypip.in/d/django-aggregate-if/badge.png
|
||||
:target: https://crate.io/packages/django-aggregate-if/
|
||||
:alt: Number of PyPI downloads
|
||||
|
||||
*Aggregate-if* adds conditional aggregates to Django.
|
||||
|
||||
Conditional aggregates can help you reduce the ammount of queries to obtain
|
||||
aggregated information, like statistics for example.
|
||||
|
||||
Imagine you have a model ``Offer`` like this one:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Offer(models.Model):
|
||||
sponsor = models.ForeignKey(User)
|
||||
price = models.DecimalField(max_digits=9, decimal_places=2)
|
||||
status = models.CharField(max_length=30)
|
||||
expire_at = models.DateField(null=True, blank=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
OPEN = "OPEN"
|
||||
REVOKED = "REVOKED"
|
||||
PAID = "PAID"
|
||||
|
||||
Let's say you want to know:
|
||||
|
||||
#. How many offers exists in total;
|
||||
#. How many of them are OPEN, REVOKED or PAID;
|
||||
#. How much money was offered in total;
|
||||
#. How much money is in OPEN, REVOKED and PAID offers;
|
||||
|
||||
To get these informations, you could query:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from django.db.models import Count, Sum
|
||||
|
||||
Offer.objects.count()
|
||||
Offer.objects.filter(status=Offer.OPEN).aggregate(Count('pk'))
|
||||
Offer.objects.filter(status=Offer.REVOKED).aggregate(Count('pk'))
|
||||
Offer.objects.filter(status=Offer.PAID).aggregate(Count('pk'))
|
||||
Offer.objects.aggregate(Sum('price'))
|
||||
Offer.objects.filter(status=Offer.OPEN).aggregate(Sum('price'))
|
||||
Offer.objects.filter(status=Offer.REVOKED).aggregate(Sum('price'))
|
||||
Offer.objects.filter(status=Offer.PAID).aggregate(Sum('price'))
|
||||
|
||||
In this case, **8 queries** were needed to retrieve the desired information.
|
||||
|
||||
With conditional aggregates you can get it all with only **1 query**:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from django.db.models import Q
|
||||
from aggregate_if import Count, Sum
|
||||
|
||||
Offer.objects.aggregate(
|
||||
pk__count=Count('pk'),
|
||||
pk__open__count=Count('pk', only=Q(status=Offer.OPEN)),
|
||||
pk__revoked__count=Count('pk', only=Q(status=Offer.REVOKED)),
|
||||
pk__paid__count=Count('pk', only=Q(status=Offer.PAID)),
|
||||
pk__sum=Sum('price'),
|
||||
pk__open__sum=Sum('price', only=Q(status=Offer.OPEN)),
|
||||
pk__revoked__sum=Sum('price'), only=Q(status=Offer.REVOKED)),
|
||||
pk__paid__sum=Sum('price'), only=Q(status=Offer.PAID))
|
||||
)
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
*Aggregate-if* works with Django 1.4, 1.5, 1.6 and 1.7.
|
||||
|
||||
To install it, simply:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ pip install django-aggregate-if
|
||||
|
||||
Inspiration
|
||||
-----------
|
||||
|
||||
There is a 5 years old `ticket 11305`_ that will (*hopefully*) implement this feature into
|
||||
Django 1.8.
|
||||
|
||||
Using Django 1.6, I still wanted to avoid creating custom queries for very simple
|
||||
conditional aggregations. So I've cherry picked those ideas and others from the
|
||||
internet and built this library.
|
||||
|
||||
This library uses the same API and tests proposed on `ticket 11305`_, so when the
|
||||
new feature is available you can easily replace ``django-aggregate-if``.
|
||||
|
||||
Limitations
|
||||
-----------
|
||||
|
||||
Conditions involving joins with aliases are not supported yet. If you want to
|
||||
help adding this feature, you're welcome to check the `first issue`_.
|
||||
|
||||
Contributors
|
||||
------------
|
||||
|
||||
* `Henrique Bastos <http://github.com/henriquebastos>`_
|
||||
* `Iuri de Silvio <https://github.com/iurisilvio>`_
|
||||
* `Hampus Stjernhav <https://github.com/champ>`_
|
||||
* `Bradley Martsberger <https://github.com/martsberger>`_
|
||||
* `Markus Bertheau <https://github.com/mbertheau>`_
|
||||
* `end0 <https://github.com/end0>`_
|
||||
* `Scott Sexton <https://github.com/scottsexton>`_
|
||||
* `Mauler <https://github.com/mauler>`_
|
||||
* `trbs <https://github.com/trbs>`_
|
||||
|
||||
Changelog
|
||||
---------
|
||||
|
||||
0.5
|
||||
- Support for Django 1.7
|
||||
|
||||
0.4
|
||||
- Use tox to run tests.
|
||||
- Add support for Django 1.6.
|
||||
- Add support for Python3.
|
||||
- The ``only`` parameter now freely supports joins independent of the main query.
|
||||
- Adds support for alias relabeling permitting excludes and updates with aggregates filtered on remote foreign key relations.
|
||||
|
||||
0.3.1
|
||||
- Fix quotation escaping.
|
||||
- Fix boolean casts on Postgres.
|
||||
|
||||
0.2
|
||||
- Fix postgres issue with LIKE conditions.
|
||||
|
||||
0.1
|
||||
- Initial release.
|
||||
|
||||
|
||||
License
|
||||
=======
|
||||
|
||||
The MIT License.
|
||||
|
||||
.. _ticket 11305: https://code.djangoproject.com/ticket/11305
|
||||
.. _first issue: https://github.com/henriquebastos/django-aggregate-if/issues/1
|
||||
@@ -1,164 +0,0 @@
|
||||
# coding: utf-8
|
||||
'''
|
||||
Implements conditional aggregates.
|
||||
|
||||
This code was based on the work of others found on the internet:
|
||||
|
||||
1. http://web.archive.org/web/20101115170804/http://www.voteruniverse.com/Members/jlantz/blog/conditional-aggregates-in-django
|
||||
2. https://code.djangoproject.com/ticket/11305
|
||||
3. https://groups.google.com/forum/?fromgroups=#!topic/django-users/cjzloTUwmS0
|
||||
4. https://groups.google.com/forum/?fromgroups=#!topic/django-users/vVprMpsAnPo
|
||||
'''
|
||||
from __future__ import unicode_literals
|
||||
from django.utils import six
|
||||
import django
|
||||
from django.db.models.aggregates import Aggregate as DjangoAggregate
|
||||
from django.db.models.sql.aggregates import Aggregate as DjangoSqlAggregate
|
||||
|
||||
|
||||
VERSION = django.VERSION[:2]
|
||||
|
||||
|
||||
class SqlAggregate(DjangoSqlAggregate):
|
||||
conditional_template = '%(function)s(CASE WHEN %(condition)s THEN %(field)s ELSE null END)'
|
||||
|
||||
def __init__(self, col, source=None, is_summary=False, condition=None, **extra):
|
||||
super(SqlAggregate, self).__init__(col, source, is_summary, **extra)
|
||||
self.condition = condition
|
||||
|
||||
def relabel_aliases(self, change_map):
|
||||
if VERSION < (1, 7):
|
||||
super(SqlAggregate, self).relabel_aliases(change_map)
|
||||
if self.has_condition:
|
||||
condition_change_map = dict((k, v) for k, v in \
|
||||
change_map.items() if k in self.condition.query.alias_map
|
||||
)
|
||||
self.condition.query.change_aliases(condition_change_map)
|
||||
|
||||
def relabeled_clone(self, change_map):
|
||||
self.relabel_aliases(change_map)
|
||||
return super(SqlAggregate, self).relabeled_clone(change_map)
|
||||
|
||||
def as_sql(self, qn, connection):
|
||||
if self.has_condition:
|
||||
self.sql_template = self.conditional_template
|
||||
self.extra['condition'] = self._condition_as_sql(qn, connection)
|
||||
|
||||
return super(SqlAggregate, self).as_sql(qn, connection)
|
||||
|
||||
@property
|
||||
def has_condition(self):
|
||||
# Warning: bool(QuerySet) will hit the database
|
||||
return self.condition is not None
|
||||
|
||||
def _condition_as_sql(self, qn, connection):
|
||||
'''
|
||||
Return sql for condition.
|
||||
'''
|
||||
def escape(value):
|
||||
if isinstance(value, bool):
|
||||
value = str(int(value))
|
||||
if isinstance(value, six.string_types):
|
||||
# Escape params used with LIKE
|
||||
if '%' in value:
|
||||
value = value.replace('%', '%%')
|
||||
# Escape single quotes
|
||||
if "'" in value:
|
||||
value = value.replace("'", "''")
|
||||
# Add single quote to text values
|
||||
value = "'" + value + "'"
|
||||
return value
|
||||
|
||||
sql, param = self.condition.query.where.as_sql(qn, connection)
|
||||
param = map(escape, param)
|
||||
|
||||
return sql % tuple(param)
|
||||
|
||||
|
||||
class SqlSum(SqlAggregate):
|
||||
sql_function = 'SUM'
|
||||
|
||||
|
||||
class SqlCount(SqlAggregate):
|
||||
is_ordinal = True
|
||||
sql_function = 'COUNT'
|
||||
sql_template = '%(function)s(%(distinct)s%(field)s)'
|
||||
conditional_template = '%(function)s(%(distinct)sCASE WHEN %(condition)s THEN %(field)s ELSE null END)'
|
||||
|
||||
def __init__(self, col, distinct=False, **extra):
|
||||
super(SqlCount, self).__init__(col, distinct=distinct and 'DISTINCT ' or '', **extra)
|
||||
|
||||
|
||||
class SqlAvg(SqlAggregate):
|
||||
is_computed = True
|
||||
sql_function = 'AVG'
|
||||
|
||||
|
||||
class SqlMax(SqlAggregate):
|
||||
sql_function = 'MAX'
|
||||
|
||||
|
||||
class SqlMin(SqlAggregate):
|
||||
sql_function = 'MIN'
|
||||
|
||||
|
||||
class Aggregate(DjangoAggregate):
|
||||
def __init__(self, lookup, only=None, **extra):
|
||||
super(Aggregate, self).__init__(lookup, **extra)
|
||||
self.only = only
|
||||
self.condition = None
|
||||
|
||||
def _get_fields_from_Q(self, q):
|
||||
fields = []
|
||||
for child in q.children:
|
||||
if hasattr(child, 'children'):
|
||||
fields.extend(self._get_fields_from_Q(child))
|
||||
else:
|
||||
fields.append(child)
|
||||
return fields
|
||||
|
||||
def add_to_query(self, query, alias, col, source, is_summary):
|
||||
if self.only:
|
||||
self.condition = query.model._default_manager.filter(self.only)
|
||||
for child in self._get_fields_from_Q(self.only):
|
||||
field_list = child[0].split('__')
|
||||
# Pop off the last field if it's a query term ('gte', 'contains', 'isnull', etc.)
|
||||
if field_list[-1] in query.query_terms:
|
||||
field_list.pop()
|
||||
# setup_joins have different returns in Django 1.5 and 1.6, but the order of what we need remains.
|
||||
result = query.setup_joins(field_list, query.model._meta, query.get_initial_alias(), None)
|
||||
join_list = result[3]
|
||||
|
||||
fname = 'promote_alias_chain' if VERSION < (1, 5) else 'promote_joins'
|
||||
args = (join_list, True) if VERSION < (1, 7) else (join_list,)
|
||||
|
||||
promote = getattr(query, fname)
|
||||
promote(*args)
|
||||
|
||||
aggregate = self.sql_klass(col, source=source, is_summary=is_summary, condition=self.condition, **self.extra)
|
||||
query.aggregates[alias] = aggregate
|
||||
|
||||
|
||||
class Sum(Aggregate):
|
||||
name = 'Sum'
|
||||
sql_klass = SqlSum
|
||||
|
||||
|
||||
class Count(Aggregate):
|
||||
name = 'Count'
|
||||
sql_klass = SqlCount
|
||||
|
||||
|
||||
class Avg(Aggregate):
|
||||
name = 'Avg'
|
||||
sql_klass = SqlAvg
|
||||
|
||||
|
||||
class Max(Aggregate):
|
||||
name = 'Max'
|
||||
sql_klass = SqlMax
|
||||
|
||||
|
||||
class Min(Aggregate):
|
||||
name = 'Min'
|
||||
sql_klass = SqlMin
|
||||
@@ -1,48 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = OptionParser()
|
||||
parser.add_option('-s', '--settings', help='Define settings.')
|
||||
parser.add_option('-t', '--unittest', help='Define which test to run. Default all.')
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if not options.settings:
|
||||
parser.print_help()
|
||||
sys.exit(1)
|
||||
|
||||
if not options.unittest:
|
||||
options.unittest = ['aggregation']
|
||||
|
||||
return options
|
||||
|
||||
|
||||
def get_runner(settings_module):
|
||||
'''
|
||||
Asks Django for the TestRunner defined in settings or the default one.
|
||||
'''
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
|
||||
|
||||
import django
|
||||
from django.test.utils import get_runner
|
||||
from django.conf import settings
|
||||
|
||||
if hasattr(django, 'setup'):
|
||||
django.setup()
|
||||
|
||||
return get_runner(settings)
|
||||
|
||||
|
||||
def runtests():
|
||||
options = parse_args()
|
||||
TestRunner = get_runner(options.settings)
|
||||
runner = TestRunner(verbosity=1, interactive=True, failfast=False)
|
||||
sys.exit(runner.run_tests([]))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
runtests()
|
||||
@@ -1,33 +0,0 @@
|
||||
# coding: utf-8
|
||||
from setuptools import setup
|
||||
import os
|
||||
|
||||
|
||||
setup(name='django-aggregate-if',
|
||||
version='0.5',
|
||||
description='Conditional aggregates for Django, just like the famous SumIf in Excel.',
|
||||
long_description=open(os.path.join(os.path.dirname(__file__), "README.rst")).read(),
|
||||
author="Henrique Bastos", author_email="henrique@bastos.net",
|
||||
license="MIT",
|
||||
py_modules=['aggregate_if'],
|
||||
install_requires=[
|
||||
'six>=1.6.1',
|
||||
],
|
||||
zip_safe=False,
|
||||
platforms='any',
|
||||
include_package_data=True,
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Framework :: Django',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: MIT License',
|
||||
'Natural Language :: English',
|
||||
'Operating System :: OS Independent',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Topic :: Database',
|
||||
'Topic :: Software Development :: Libraries',
|
||||
],
|
||||
url='http://github.com/henriquebastos/django-aggregate-if/',
|
||||
)
|
||||
@@ -1,198 +0,0 @@
|
||||
[tox]
|
||||
envlist =
|
||||
py27-django1.4-sqlite,
|
||||
py27-django1.4-postgres,
|
||||
py27-django1.4-mysql,
|
||||
|
||||
py27-django1.5-sqlite,
|
||||
py27-django1.5-postgres,
|
||||
py27-django1.5-mysql,
|
||||
|
||||
py27-django1.6-sqlite,
|
||||
py27-django1.6-postgres,
|
||||
py27-django1.6-mysql,
|
||||
|
||||
py27-django1.7-sqlite,
|
||||
py27-django1.7-postgres,
|
||||
py27-django1.7-mysql,
|
||||
|
||||
py34-django1.6-sqlite,
|
||||
py34-django1.6-postgres,
|
||||
#py34-django1.6-mysql
|
||||
|
||||
py34-django1.7-sqlite,
|
||||
py34-django1.7-postgres,
|
||||
#py34-django1.7-mysql
|
||||
|
||||
[testenv]
|
||||
whitelist_externals=
|
||||
mysql
|
||||
psql
|
||||
|
||||
# Python 2.7
|
||||
# Django 1.4
|
||||
[testenv:py27-django1.4-sqlite]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.4
|
||||
commands = python runtests.py --settings tests.test_sqlite
|
||||
|
||||
[testenv:py27-django1.4-postgres]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.4
|
||||
psycopg2
|
||||
commands =
|
||||
psql -c 'create database aggregation;' postgres
|
||||
python runtests.py --settings tests.test_postgres
|
||||
psql -c 'drop database aggregation;' postgres
|
||||
|
||||
[testenv:py27-django1.4-mysql]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.4
|
||||
mysql-python
|
||||
commands =
|
||||
mysql -e 'create database aggregation;'
|
||||
python runtests.py --settings tests.test_mysql
|
||||
mysql -e 'drop database aggregation;'
|
||||
|
||||
# Django 1.5
|
||||
[testenv:py27-django1.5-sqlite]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.5
|
||||
commands = python runtests.py --settings tests.test_sqlite
|
||||
|
||||
[testenv:py27-django1.5-postgres]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.5
|
||||
psycopg2
|
||||
commands =
|
||||
psql -c 'create database aggregation;' postgres
|
||||
python runtests.py --settings tests.test_postgres
|
||||
psql -c 'drop database aggregation;' postgres
|
||||
|
||||
[testenv:py27-django1.5-mysql]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.5
|
||||
mysql-python
|
||||
commands =
|
||||
mysql -e 'create database aggregation;'
|
||||
python runtests.py --settings tests.test_mysql
|
||||
mysql -e 'drop database aggregation;'
|
||||
|
||||
# Django 1.6
|
||||
[testenv:py27-django1.6-sqlite]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.6
|
||||
commands = python runtests.py --settings tests.test_sqlite
|
||||
|
||||
[testenv:py27-django1.6-postgres]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.6
|
||||
psycopg2
|
||||
commands =
|
||||
psql -c 'create database aggregation;' postgres
|
||||
python runtests.py --settings tests.test_postgres
|
||||
psql -c 'drop database aggregation;' postgres
|
||||
|
||||
[testenv:py27-django1.6-mysql]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.6
|
||||
mysql-python
|
||||
commands =
|
||||
mysql -e 'create database aggregation;'
|
||||
python runtests.py --settings tests.test_mysql
|
||||
mysql -e 'drop database aggregation;'
|
||||
|
||||
|
||||
# Python 2.7 and Django 1.7
|
||||
[testenv:py27-django1.7-sqlite]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.7
|
||||
commands = python runtests.py --settings tests.test_sqlite
|
||||
|
||||
[testenv:py27-django1.7-postgres]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.7
|
||||
psycopg2
|
||||
commands =
|
||||
psql -c 'create database aggregation;' postgres
|
||||
python runtests.py --settings tests.test_postgres
|
||||
psql -c 'drop database aggregation;' postgres
|
||||
|
||||
[testenv:py27-django1.7-mysql]
|
||||
basepython = python2.7
|
||||
deps =
|
||||
django==1.7
|
||||
mysql-python
|
||||
commands =
|
||||
mysql -e 'create database aggregation;'
|
||||
python runtests.py --settings tests.test_mysql
|
||||
mysql -e 'drop database aggregation;'
|
||||
|
||||
|
||||
# Python 3.4
|
||||
# Django 1.6
|
||||
[testenv:py34-django1.6-sqlite]
|
||||
basepython = python3.4
|
||||
deps =
|
||||
django==1.6
|
||||
commands = python runtests.py --settings tests.test_sqlite
|
||||
|
||||
[testenv:py34-django1.6-postgres]
|
||||
basepython = python3.4
|
||||
deps =
|
||||
django==1.6
|
||||
psycopg2
|
||||
commands =
|
||||
psql -c 'create database aggregation;' postgres
|
||||
python runtests.py --settings tests.test_postgres
|
||||
psql -c 'drop database aggregation;' postgres
|
||||
|
||||
[testenv:py34-django1.6-mysql]
|
||||
basepython = python3.4
|
||||
deps =
|
||||
django==1.6
|
||||
mysql-python3
|
||||
commands =
|
||||
mysql -e 'create database aggregation;'
|
||||
python runtests.py --settings tests.test_mysql
|
||||
mysql -e 'drop database aggregation;'
|
||||
|
||||
|
||||
# Python 3.4
|
||||
# Django 1.7
|
||||
[testenv:py34-django1.7-sqlite]
|
||||
basepython = python3.4
|
||||
deps =
|
||||
django==1.7
|
||||
commands = python runtests.py --settings tests.test_sqlite
|
||||
|
||||
[testenv:py34-django1.7-postgres]
|
||||
basepython = python3.4
|
||||
deps =
|
||||
django==1.7
|
||||
psycopg2
|
||||
commands =
|
||||
psql -c 'create database aggregation;' postgres
|
||||
python runtests.py --settings tests.test_postgres
|
||||
psql -c 'drop database aggregation;' postgres
|
||||
|
||||
[testenv:py34-django1.7-mysql]
|
||||
basepython = python3.4
|
||||
deps =
|
||||
django==1.7
|
||||
mysql-python3
|
||||
commands =
|
||||
mysql -e 'create database aggregation;'
|
||||
python runtests.py --settings tests.test_mysql
|
||||
mysql -e 'drop database aggregation;'
|
||||
@@ -1,41 +0,0 @@
|
||||
|
||||
Toaster Testing Framework
|
||||
Yocto Project
|
||||
|
||||
|
||||
Rationale
|
||||
------------
|
||||
As Toaster contributions grow with the number of people that contribute code, verifying each patch prior to submitting upstream becomes a hard-to-scale problem for humans. We devised this system in order to run patch-level validation, trying to eliminate common problems from submitted patches, in an automated fashion.
|
||||
|
||||
The Toaster Testing Framework is a set of Python scripts that provides an extensible way to write smoke and regression tests that will be run on each patch set sent for review on the toaster mailing list.
|
||||
|
||||
|
||||
Usage
|
||||
------------
|
||||
There are three main executable scripts in this directory.
|
||||
* runner.py is designed to be run from the command line. It requires, as mandatory parameter, a branch name on poky-contrib, branch which contains the patches to be tested. The program will auto-discover the available tests residing in this directory by looking for unittest classes, and will run the tests on the branch dumping the output to the standard output. Optionally, it can take parameters inhibiting the branch checkout, or specifying a single test to be run, for debugging purposes.
|
||||
* launcher.py is designed to be run from a crontab or similar scheduling mechanism. It looks up a backlog file containing branches-to-test (named tasks in the source code), selects the first one in FIFO manner, and launches runner.py on it. It will wait for completion, and email the standard output and standard error dumps from the runner.py execution
|
||||
* recv.py is an email receiver, designed to be called as a pipe from a .forward file. It is used to monitor a mailing list, for example, and add tasks to the backlog based on review requests coming on the mailing list.
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
As a prerequisite, we expect a functioning email system on a machine with Python 2.
|
||||
|
||||
The broad steps to installation
|
||||
* set up the .forward on the receiving email account to pipe to the recv.py file
|
||||
* edit config.py and settings.json to alter for local installation settings
|
||||
* on email receive, verify backlog.txt to see that the tasks are received and marked for processing
|
||||
* execute launcher.py in command line to verify that a test occurs with no problems, and that the outgoing email is delivered
|
||||
* add launcher.py to the crontab (or similar scheduler) so it runs periodically
|
||||
|
||||
|
||||
|
||||
Contribute
|
||||
------------
|
||||
What we need are tests. Add your own tests to either tests.py file, or to a new file.
|
||||
Use "config.logger" to write logs that will make it to email.
|
||||
|
||||
Commonly used code should be going to shellutils, and configuration to config.py.
|
||||
|
||||
Contribute code by emailing patches to the list: toaster@yoctoproject.org (membership required)
|
||||
@@ -1,9 +0,0 @@
|
||||
We need to implement tests:
|
||||
|
||||
automated link checker; currently
|
||||
$ linkchecker -t 1000 -F csv http://localhost:8000/
|
||||
|
||||
integrate the w3c-validation service; currently
|
||||
$ python urlcheck.py
|
||||
|
||||
|
||||
@@ -1,91 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2015 Alexandru Damian for Intel Corp.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
# This is the configuration/single module for tts
|
||||
# everything that would be a global variable goes here
|
||||
|
||||
import os, sys, logging
|
||||
import socket
|
||||
|
||||
# Directory (relative to the current working dir) where per-run logs go.
LOGDIR = "log"
# Local installation settings (JSON) live next to this module.
SETTINGS_FILE = os.path.join(os.path.dirname(__file__), "settings.json")
# Scratch checkouts are created as <TEST_DIR_NAME>.<pid> under the workdir.
TEST_DIR_NAME = "tts_testdir"

# PID of this process; used to namespace log files and test directories.
OWN_PID = os.getpid()

# Base URL of the W3C validation service used by the HTML compliance tests.
W3C_VALIDATOR = "http://icarus.local/w3c-validator/check?doctype=HTML5&uri="

#TODO assign port dynamically
# Fixed port the Toaster instance under test listens on.
TOASTER_PORT=56789
|
||||
|
||||
#we parse the w3c URL to know where to connect
|
||||
|
||||
import urlparse
|
||||
|
||||
def get_public_ip(url="http://icarus.local/w3c-validator/check?doctype=HTML5&uri="):
    """Return the local IP address used to reach *url*'s host.

    Connecting a UDP socket "towards" the target host makes the kernel pick
    the outgoing interface; no traffic is actually sent.  The default is the
    same W3C validator endpoint as the W3C_VALIDATOR constant above (the
    original body re-hardcoded that URL instead of reusing it).

    :param url: endpoint whose host determines the outgoing interface.
    :return: local IP address as a string.
    """
    p = urlparse.urlparse(url)
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # Port 80 is only a placeholder when the URL carries no explicit port.
        # NOTE(review): p.netloc still includes ":port" when one is present in
        # the URL — fine for the default URL, confirm before passing others.
        s.connect(( p.netloc, 80 if p.port is None else p.port))
        return s.getsockname()[0]
    finally:
        # BUGFIX: release the socket even when connect() raises.
        s.close()
|
||||
|
||||
# Externally reachable base URL of the Toaster under test; built from the
# public-facing IP so the (remote) W3C validator can fetch our pages.
TOASTER_BASEURL="http://%s:%d/" % (get_public_ip(), TOASTER_PORT)


# Sender and recipient for the emailed test reports.
OWN_EMAIL_ADDRESS = "Toaster Testing Framework <alexandru.damian@intel.com>"
REPORT_EMAIL_ADDRESS = "alexandru.damian@intel.com"
|
||||
|
||||
# make sure we have the basic logging infrastructure
# Shared logger for the whole framework; tests write through config.logger.
logger = logging.getLogger("toastertest")
__console = logging.StreamHandler(sys.stdout)
__console.setFormatter(logging.Formatter("%(asctime)s %(levelname)s: %(message)s"))
logger.addHandler(__console)
logger.setLevel(logging.DEBUG)


# singleton file names
# Lock file prefix; shellutils.mk_lock_filename() appends a per-script suffix.
LOCKFILE="/tmp/ttf.lock"
# FIFO backlog of branches to test, one "name|state" entry per line.
BACKLOGFILE=os.path.join(os.path.dirname(__file__), "backlog.txt")
|
||||
|
||||
# task states
|
||||
def enum(*sequential, **named):
    """Build a simple enumeration type.

    Positional names are numbered 0..n-1; keyword arguments supply explicit
    values.  The generated class also carries a ``reverse_mapping`` dict
    from value back to name.

    :return: a new class with one attribute per enum member.
    """
    enums = dict(zip(sequential, range(len(sequential))), **named)
    # BUGFIX: dict.iteritems() is Python-2 only; .items() behaves the same
    # here and works on both Python 2 and 3.
    reverse = dict((value, key) for key, value in enums.items())
    enums['reverse_mapping'] = reverse
    return type('Enum', (), enums)
|
||||
|
||||
|
||||
class TASKS:
    """Task lifecycle states for the backlog file, with the allowed
    forward transitions PENDING -> INPROGRESS -> DONE."""

    PENDING = "PENDING"
    INPROGRESS = "INPROGRESS"
    DONE = "DONE"

    @staticmethod
    def next_task(task):
        """Return the state following *task*; raise for DONE or unknown."""
        transitions = {
            TASKS.PENDING: TASKS.INPROGRESS,
            TASKS.INPROGRESS: TASKS.DONE,
        }
        if task not in transitions:
            raise Exception("Invalid next task state for %s" % task)
        return transitions[task]
|
||||
|
||||
# TTS specific
|
||||
CONTRIB_REPO = "git@git.yoctoproject.org:poky-contrib"
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2015 Alexandru Damian for Intel Corp.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
# Program to run the next task listed from the backlog.txt; designed to be
|
||||
# run from crontab.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys, os, config, shellutils
|
||||
from shellutils import ShellCmdException
|
||||
|
||||
# Import smtplib for the actual sending function
|
||||
import smtplib
|
||||
|
||||
# Import the email modules we'll need
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
DEBUG=True
|
||||
|
||||
def _take_lockfile():
    """Acquire this script's singleton lock file; return its handle
    (or None when another instance already holds it)."""
    lock_name = shellutils.mk_lock_filename()
    return shellutils.lockfile(lock_name)
|
||||
|
||||
|
||||
def read_next_task_by_state(task_state, task_name = None):
    """Pop the first backlog entry in *task_state* and advance its state.

    The backlog holds one "name|state" entry per line, FIFO.  The file is
    rewritten through a .tmp rename; only the first matching entry
    (optionally restricted to *task_name*) has its state advanced via
    config.TASKS.next_task(); every other line is copied through unchanged.

    :param task_state: state to look for (a config.TASKS.* value).
    :param task_name: if given, only match this specific task.
    :return: the matched task name, or None when the backlog is missing or
             holds no matching entry.
    """
    if not os.path.exists(os.path.join(os.path.dirname(__file__), config.BACKLOGFILE)):
        return None
    os.rename(config.BACKLOGFILE, config.BACKLOGFILE + ".tmp")
    task = None
    with open(config.BACKLOGFILE + ".tmp", "r") as f_in:
        with open(config.BACKLOGFILE, "w") as f_out:
            for line in f_in.readlines():
                if task is None:
                    fields = line.strip().split("|", 2)
                    # BUGFIX: guard against blank/malformed lines, which used
                    # to raise IndexError on fields[1] and abort with the
                    # backlog still renamed to .tmp.
                    if len(fields) >= 2 and fields[1] == task_state:
                        if task_name is None or task_name == fields[0]:
                            task = fields[0]
                            print("Updating %s %s to %s" % (task, task_state, config.TASKS.next_task(task_state)))
                            line = "%s|%s\n" % (task, config.TASKS.next_task(task_state))
                f_out.write(line)
    os.remove(config.BACKLOGFILE + ".tmp")
    return task
|
||||
|
||||
def send_report(task_name, plaintext, errtext = None):
    """Email the smoke-test results for *task_name* via the local SMTP server.

    Without *errtext* the body is just *plaintext*; otherwise both the
    stdout and stderr dumps are included in one message.
    """
    if errtext is None:
        body = plaintext
    else:
        stdout_part = "" if plaintext is None else plaintext
        body = "--STDOUT dump--\n\n%s\n\n--STDERR dump--\n\n%s" % (stdout_part, errtext)
    msg = MIMEText(body)

    msg['Subject'] = "[review-request] %s - smoke test results" % task_name
    msg['From'] = config.OWN_EMAIL_ADDRESS
    msg['To'] = config.REPORT_EMAIL_ADDRESS

    smtp = smtplib.SMTP("localhost")
    smtp.sendmail(config.OWN_EMAIL_ADDRESS, [config.REPORT_EMAIL_ADDRESS], msg.as_string())
    smtp.quit()
|
||||
|
||||
if __name__ == "__main__":
    # We don't do anything if another instance of us is already running.
    lf = _take_lockfile()

    if lf is None:
        if DEBUG:
            print("Concurrent script in progress, exiting")
        sys.exit(1)

    # Take the oldest PENDING branch from the backlog (marks it INPROGRESS).
    next_task = read_next_task_by_state(config.TASKS.PENDING)
    if next_task is not None:
        print("Next task is", next_task)
        errtext = None
        out = None
        try:
            out = shellutils.run_shell_cmd("%s %s" % (os.path.join(os.path.dirname(__file__), "runner.py"), next_task))
        except ShellCmdException as e:
            # BUGFIX: print() is not printf — the original passed the format
            # string and the exception as two separate arguments.
            print("Failed while running the test runner: %s" % e)
            errtext = str(e)
        # Mail both dumps, then advance the task INPROGRESS -> DONE.
        send_report(next_task, out, errtext)
        read_next_task_by_state(config.TASKS.INPROGRESS, next_task)
    else:
        print("No task")

    shellutils.unlockfile(lf)
|
||||
@@ -1,51 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2015 Alexandru Damian for Intel Corp.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
# Program to receive review requests by email and log tasks to backlog.txt
|
||||
# Designed to be run by the email system from a .forward file:
|
||||
#
|
||||
# cat .forward
|
||||
# |[full/path]/recv.py
|
||||
|
||||
from __future__ import print_function
|
||||
import sys, os, config, shellutils
|
||||
from shellutils import ShellCmdException
|
||||
|
||||
from email.parser import Parser
|
||||
|
||||
def recv_mail(datastring):
    """Parse a raw RFC-2822 message and return its Subject header
    (None when the message carries no Subject)."""
    message = Parser().parsestr(datastring)
    return message['subject']
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Serialize concurrent mail deliveries; block until the lock is ours.
    lf = shellutils.lockfile(shellutils.mk_lock_filename(), retry = True)

    subject = recv_mail(sys.stdin.read())

    words = subject.split()
    if "[review-request]" in words:
        # The branch name is the token right after the [review-request] tag.
        task_name = words[words.index("[review-request]") + 1]
        backlog_path = os.path.join(os.path.dirname(__file__), config.BACKLOGFILE)
        with open(backlog_path, "a") as fout:
            fout.write("%s|%s\n" % (task_name, config.TASKS.PENDING))

    shellutils.unlockfile(lf)
|
||||
|
||||
@@ -1,200 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2015 Alexandru Damian for Intel Corp.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
# This is the main test execution controller. It is designed to be run
|
||||
# manually from the command line, or to be called from a different program
|
||||
# that schedules test execution.
|
||||
#
|
||||
# Execute runner.py -h for help.
|
||||
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import optparse
|
||||
import sys, os
|
||||
import unittest, inspect, importlib
|
||||
import logging, pprint, json
|
||||
|
||||
from shellutils import *
|
||||
|
||||
import config
|
||||
|
||||
# we also log to a file, in addition to console, because our output is important
# Per-run log file, named after our PID, under this module's log/ directory.
__log_file_name =os.path.join(os.path.dirname(__file__),"log/tts_%d.log" % config.OWN_PID)
mkdirhier(os.path.dirname(__log_file_name))
__log_file = open(__log_file_name, "w")
__file_handler = logging.StreamHandler(__log_file)
__file_handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s: %(message)s"))

# Everything logged through config.logger now also lands in the file.
config.logger.addHandler(__file_handler)


# set up log directory
# NOTE(review): the except clause only re-raises, so it adds no handling;
# os.mkdir would propagate its OSError anyway.
try:
    if not os.path.exists(config.LOGDIR):
        os.mkdir(config.LOGDIR)
    else:
        if not os.path.isdir(config.LOGDIR):
            raise Exception("Expected log dir '%s' is not actually a directory." % config.LOGDIR)
except OSError as e:
    raise e
|
||||
|
||||
# creates the under-test-branch as a separate directory
|
||||
# creates the under-test-branch as a separate directory
def set_up_test_branch(settings, branch_name):
    """Create a scratch work tree for *branch_name* and return its path.

    A per-PID directory is seeded with the .git of the configured local
    clone; the contrib remote is ensured and fetched, and the branch is
    recreated from it with a hard reset.

    :param settings: parsed settings.json dict; uses 'workdir' and 'localclone'.
    :param branch_name: branch on the contrib repository to test.
    :return: path of the newly created test directory.
    :raises Exception: if the test directory already exists.
    """
    testdir = "%s/%s.%d" % (settings['workdir'], config.TEST_DIR_NAME, config.OWN_PID)

    # creates the host dir
    if os.path.exists(testdir):
        # BUGFIX: original message read "'%s'is already there" (missing space).
        raise Exception("Test dir '%s' is already there, aborting" % testdir)
    os.mkdir(testdir)

    # copies over the .git from the localclone
    run_shell_cmd("cp -a '%s'/.git '%s'" % (settings['localclone'], testdir))

    # add the remote if it doesn't exist
    crt_remotes = run_shell_cmd("git remote -v", cwd = testdir)
    remotes = [word for line in crt_remotes.split("\n") for word in line.split()]
    if not config.CONTRIB_REPO in remotes:
        remote_name = "tts_contrib"
        run_shell_cmd("git remote add %s %s" % (remote_name, config.CONTRIB_REPO), cwd = testdir)
    else:
        # In `git remote -v` output the remote name precedes its URL.
        remote_name = remotes[remotes.index(config.CONTRIB_REPO) - 1]

    # do the fetch
    run_shell_cmd("git fetch %s -p" % remote_name, cwd=testdir)

    # do the checkout: drop any stale local branch, recreate it from the
    # remote, and reset to a pristine state.
    run_shell_cmd("git checkout origin/master && git branch -D %s; git checkout %s/%s -b %s && git reset --hard" % (branch_name,remote_name,branch_name,branch_name), cwd=testdir)

    return testdir
|
||||
|
||||
|
||||
def __search_for_tests():
    # we find all classes that can run, and run them
    # Auto-discover unittest.TestCase subclasses in the .py files that live
    # next to this script; returns (module_name, class_name) pairs.
    tests = []
    for dir_name, dirs_list, files_list in os.walk(os.path.dirname(os.path.abspath(__file__))):
        for f in [f[:-3] for f in files_list if f.endswith(".py") and not f.startswith("__init__")]:
            config.logger.debug("Inspecting module %s", f)
            # NOTE(review): importing executes each candidate module's
            # top-level code as a side effect.
            current_module = importlib.import_module(f)
            crtclass_names = vars(current_module)
            for v in crtclass_names:
                t = crtclass_names[v]
                if isinstance(t, type(unittest.TestCase)) and issubclass(t, unittest.TestCase):
                    tests.append((f,v))
        # Only the top-level directory is scanned; stop the walk here.
        break
    return tests
|
||||
|
||||
|
||||
# boilerplate to self discover tests and run them
|
||||
# boilerplate to self discover tests and run them
def execute_tests(dir_under_test, testname):
    # Run either one named "module.Class" test or everything discovered,
    # from inside *dir_under_test*; returns the number of failures.

    if testname is not None and "." in testname:
        tests = []
        tests.append(tuple(testname.split(".", 2)))
    else:
        tests = __search_for_tests()

    # let's move to the directory under test
    crt_dir = os.getcwd()
    os.chdir(dir_under_test)

    # execute each module
    try:
        config.logger.debug("Discovered test clases: %s" % pprint.pformat(tests))
        suite = unittest.TestSuite()
        loader = unittest.TestLoader()
        result = unittest.TestResult()
        for m,t in tests:
            suite.addTest(loader.loadTestsFromName("%s.%s" % (m,t)))
        config.logger.info("Running %d test(s)", suite.countTestCases())
        suite.run(result)

        # NOTE(review): these map() calls rely on Python 2's eager map; under
        # Python 3 they are lazy and would never log anything.
        if len(result.errors) > 0:
            map(lambda x: config.logger.error("Exception on test: %s" % pprint.pformat(x)), result.errors)

        if len(result.failures) > 0:
            map(lambda x: config.logger.error("Failed test: %s:\n%s\n" % (pprint.pformat(x[0]), "\n".join(["-- %s" % x for x in eval(pprint.pformat(x[1])).split("\n")]))), result.failures)

        config.logger.info("Test results: %d ran, %d errors, %d failures" % (result.testsRun, len(result.errors), len(result.failures)))

    except Exception as e:
        import traceback
        config.logger.error("Exception while running test. Tracedump: \n%s", traceback.format_exc(e))
    finally:
        # Always restore the caller's working directory.
        os.chdir(crt_dir)
    # NOTE(review): if the try body raised before `result` was bound (e.g. in
    # pprint.pformat above), this line would raise NameError — confirm.
    return len(result.failures)
|
||||
|
||||
# verify that we had a branch-under-test name as parameter
|
||||
# verify that we had a branch-under-test name as parameter
def validate_args():
    """Parse the command line; require one positional branch name.

    :return: the (options, args) pair from optparse.
    :raises Exception: when no branch name was given.
    """
    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog [options] branch_under_test")

    parser.add_option("-t", "--test-dir", dest="testdir", default=None,
                      help="Use specified directory to run tests, inhibits the checkout.")
    parser.add_option("-s", "--single", dest="singletest", default=None,
                      help="Run only the specified test")

    options, args = parser.parse_args()
    if not args:
        raise Exception("Please specify the branch to run on. Use option '-h' when in doubt.")
    return (options, args)
|
||||
|
||||
|
||||
|
||||
|
||||
# load the configuration options
|
||||
# load the configuration options
def read_settings():
    """Load and return the settings dict from config.SETTINGS_FILE (JSON).

    :raises Exception: when the settings file is missing or not a regular file.
    """
    # os.path.isfile() already implies existence.
    if not os.path.isfile(config.SETTINGS_FILE):
        # BUGFIX: corrected "openend" typo in the error message.
        raise Exception("Config file '%s' cannot be opened" % config.SETTINGS_FILE)
    # BUGFIX: the original open(...).read() leaked the file handle.
    with open(config.SETTINGS_FILE, "r") as f:
        return json.load(f)
|
||||
|
||||
|
||||
# cleanup !
|
||||
# cleanup !
def clean_up(testdir):
    """Delete the scratch test directory created for this run."""
    run_shell_cmd("rm -rf -- '%s'" % testdir)
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: check out (or reuse) the branch under test, run the
    # discovered tests in it, and exit with the failure count.
    (options, args) = validate_args()

    settings = read_settings()
    need_cleanup = False

    testdir = None
    # Default to 1 so setup failures still yield a non-zero exit status.
    no_failures = 1
    try:
        if options.testdir is not None and os.path.exists(options.testdir):
            # -t given: run in an existing directory, skip the checkout.
            testdir = os.path.abspath(options.testdir)
            config.logger.info("No checkout, using %s" % testdir)
        else:
            need_cleanup = True
            testdir = set_up_test_branch(settings, args[0]) # we expect a branch name as first argument

        config.testdir = testdir # we let tests know where to run
        no_failures = execute_tests(testdir, options.singletest)

    except ShellCmdException as e :
        import traceback
        # NOTE(review): traceback.format_exc() takes a *limit*, not an
        # exception — passing `e` here is a Python-2 quirk; confirm intent.
        config.logger.error("Error while setting up testing. Traceback: \n%s" % traceback.format_exc(e))
    finally:
        # Only remove directories we created ourselves.
        if need_cleanup and testdir is not None:
            clean_up(testdir)

    sys.exit(no_failures)
|
||||
@@ -1,20 +0,0 @@
|
||||
import config
|
||||
|
||||
# Code testing section
|
||||
def _code_test():
|
||||
def callback_writeeventlog(opt, opt_str, value, parser):
|
||||
if len(parser.rargs) < 1 or parser.rargs[0].startswith("-"):
|
||||
value = ""
|
||||
else:
|
||||
value = parser.rargs[0]
|
||||
del parser.rargs[0]
|
||||
|
||||
setattr(parser.values, opt.dest, value)
|
||||
|
||||
parser = optparse.OptionParser()
|
||||
parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file.",
|
||||
action = "callback", callback=callback_writeeventlog, dest = "writeeventlog")
|
||||
|
||||
options, targets = parser.parse_args(sys.argv)
|
||||
|
||||
print (options, targets)
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"repo": "git@git.yoctoproject.org:poky-contrib",
|
||||
"localclone": "/home/ddalex/ssd/yocto/poky",
|
||||
"workdir": "/home/ddalex/ssd/yocto"
|
||||
}
|
||||
@@ -1,139 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2015 Alexandru Damian for Intel Corp.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
# Utilities shared by tests and other common bits of code.
|
||||
|
||||
import sys, os, subprocess, fcntl, errno
|
||||
import config
|
||||
from config import logger
|
||||
|
||||
|
||||
# License warning; this code is copied from the BitBake project, file bitbake/lib/bb/utils.py
|
||||
# The code is originally licensed GPL-2.0, and we redistribute it under still GPL-2.0
|
||||
|
||||
# End of copy is marked with #ENDOFCOPY marker
|
||||
|
||||
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """

    try:
        os.makedirs(directory)
    except OSError as e:
        # Ignore "already exists"; anything else is re-raised.
        if e.errno != errno.EEXIST:
            # BUGFIX: bare `raise` preserves the original traceback,
            # unlike the original `raise e`.
            raise
|
||||
|
||||
def lockfile(name, shared=False, retry=True):
    """
    Use the file fn as a lock file, return when the lock has been acquired.
    Returns a variable to pass to unlockfile().

    # shared=True takes a read (LOCK_SH) lock; retry=False makes the
    # attempt non-blocking and returns None when the lock is held elsewhere.
    """
    config.logger.debug("take lockfile %s" % name)
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    # A non-writable directory means we could never clean up the lock file.
    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    # Same inode on disk: our lock is the live lock file.
                    return lf
            # Lock file was replaced/removed under us — retry from scratch.
            lf.close()
        except Exception:
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
|
||||
|
||||
def unlockfile(lf):
    """Release a lock handle returned by lockfile() and remove the file."""
    fd = lf.fileno()
    # Promote a shared lock to exclusive before unlinking, so the file is
    # only removed while nobody else holds it; failures here are expected
    # and deliberately ignored (best-effort cleanup).
    try:
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(fd, fcntl.LOCK_UN)
    lf.close()
|
||||
|
||||
#ENDOFCOPY
|
||||
|
||||
|
||||
def mk_lock_filename():
    """Build this script's lock-file path: config.LOCKFILE plus a suffix
    derived from our own file name with its dotted parts reversed."""
    base = os.path.basename(__file__)
    suffix = ".%s" % ".".join(reversed(base.split(".")))
    return config.LOCKFILE + suffix
|
||||
|
||||
|
||||
|
||||
class ShellCmdException(Exception):
    """Raised by run_shell_cmd() when the command exits with a non-zero status;
    carries the command line and its captured output."""
    pass
|
||||
|
||||
def run_shell_cmd(command, cwd = None):
    """Run *command* through the shell in *cwd* (default: current directory).

    :return: the command's captured stdout on success.
    :raises ShellCmdException: on a non-zero exit code, with the command
        line plus its stderr (or stdout when stderr is empty).
    """
    if cwd is None:
        cwd = os.getcwd()

    config.logger.debug("_shellcmd: (%s) %s" % (cwd, command))
    p = subprocess.Popen(command, cwd = cwd, shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out,err) = p.communicate()
    p.wait()
    if not p.returncode:
        return out

    # Prefer stderr for the report, falling back to stdout when empty.
    detail = out if len(err) == 0 else err
    err = "command: %s \n%s" % (command, detail)
    config.logger.warn("_shellcmd: error \n%s\n%s" % (out, err))
    raise ShellCmdException(err)
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2015 Alexandru Damian for Intel Corp.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
# Test definitions. The runner will look for and auto-discover the tests
|
||||
# no matter what they file are they in, as long as they are in the same directory
|
||||
# as this file.
|
||||
|
||||
import unittest
|
||||
from shellutils import *
|
||||
|
||||
import pexpect
|
||||
import sys, os, signal, time
|
||||
|
||||
class TestPyCompilable(unittest.TestCase):
|
||||
''' Verifies that all Python files are syntactically correct '''
|
||||
def test_compile_file(self):
|
||||
try:
|
||||
out = run_shell_cmd("find . -name *py -type f -print0 | xargs -0 -n1 -P20 python -m py_compile", config.testdir)
|
||||
except ShellCmdException as e:
|
||||
self.fail("Error compiling python files: %s" % (e))
|
||||
except Exception as e:
|
||||
self.fail("Unknown error: %s" % e)
|
||||
|
||||
|
||||
class TestPySystemStart(unittest.TestCase):
|
||||
''' Attempts to start Toaster, verify that it is succesfull, and stop it '''
|
||||
def setUp(self):
|
||||
run_shell_cmd("bash -c 'rm -f build/*log'")
|
||||
|
||||
def test_start_interactive_mode(self):
|
||||
try:
|
||||
run_shell_cmd("bash -c 'source %s/oe-init-build-env && source toaster start webport=%d && source toaster stop'" % (config.testdir, config.TOASTER_PORT), config.testdir)
|
||||
except ShellCmdException as e:
|
||||
self.fail("Failed starting interactive mode: %s" % (e))
|
||||
|
||||
def test_start_managed_mode(self):
|
||||
try:
|
||||
run_shell_cmd("%s/bitbake/bin/toaster webport=%d nobrowser & sleep 10 && curl http://localhost:%d/ && kill -2 %1" % (config.testdir, config.TOASTER_PORT, config.TOASTER_PORT), config.testdir)
|
||||
pass
|
||||
except ShellCmdException as e:
|
||||
self.fail("Failed starting managed mode: %s" % (e))
|
||||
|
||||
class TestHTML5Compliance(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.origdir = os.getcwd()
|
||||
self.crtdir = os.path.dirname(config.testdir)
|
||||
os.chdir(self.crtdir)
|
||||
if not os.path.exists(os.path.join(self.crtdir, "toaster.sqlite")):
|
||||
run_shell_cmd("%s/bitbake/lib/toaster/manage.py syncdb --noinput" % config.testdir)
|
||||
run_shell_cmd("%s/bitbake/lib/toaster/manage.py migrate orm" % config.testdir)
|
||||
run_shell_cmd("%s/bitbake/lib/toaster/manage.py migrate bldcontrol" % config.testdir)
|
||||
run_shell_cmd("%s/bitbake/lib/toaster/manage.py loadconf %s/meta-yocto/conf/toasterconf.json" % (config.testdir, config.testdir))
|
||||
|
||||
setup = pexpect.spawn("%s/bitbake/lib/toaster/manage.py checksettings" % config.testdir)
|
||||
setup.logfile = sys.stdout
|
||||
setup.expect(r".*or type the full path to a different directory: ")
|
||||
setup.sendline('')
|
||||
setup.sendline('')
|
||||
setup.expect(r".*or type the full path to a different directory: ")
|
||||
setup.sendline('')
|
||||
setup.expect(r"Enter your option: ")
|
||||
setup.sendline('0')
|
||||
|
||||
self.child = pexpect.spawn("%s/bitbake/bin/toaster webport=%d nobrowser" % (config.testdir, config.TOASTER_PORT))
|
||||
self.child.logfile=sys.stdout
|
||||
self.child.expect("Toaster is now running. You can stop it with Ctrl-C")
|
||||
|
||||
def test_html5_compliance(self):
|
||||
import urllist, urlcheck
|
||||
results = {}
|
||||
for url in urllist.URLS:
|
||||
results[url] = urlcheck.validate_html5(config.TOASTER_BASEURL + url)
|
||||
|
||||
failed = []
|
||||
for url in results:
|
||||
if results[url][1] != 0:
|
||||
failed.append((url, results[url]))
|
||||
|
||||
|
||||
self.assertTrue(len(failed)== 0, "Not all URLs validate: \n%s " % "\n".join(map(lambda x: "".join(str(x)),failed)))
|
||||
|
||||
#(config.TOASTER_BASEURL + url, status, errors, warnings))
|
||||
|
||||
def tearDown(self):
|
||||
while self.child.isalive():
|
||||
self.child.kill(signal.SIGINT)
|
||||
time.sleep(1)
|
||||
os.chdir(self.origdir)
|
||||
# if os.path.exists(os.path.join(self.crtdir, "toaster.sqlite")):
|
||||
# os.remove(os.path.join(self.crtdir, "toaster.sqlite"))
|
||||
@@ -1,87 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright
|
||||
|
||||
# DESCRIPTION
|
||||
# This is script for running all selected toaster cases on
|
||||
# selected web browsers manifested in toaster_test.cfg.
|
||||
|
||||
# 1. How to start toaster in yocto:
|
||||
# $ source poky/oe-init-build-env
|
||||
# $ source toaster start
|
||||
# $ bitbake core-image-minimal
|
||||
|
||||
# 2. How to install selenium on Ubuntu:
|
||||
# $ sudo apt-get install scrot python-pip
|
||||
# $ sudo pip install selenium
|
||||
|
||||
# 3. How to install selenium addon in firefox:
|
||||
# Download the lastest firefox addon from http://release.seleniumhq.org/selenium-ide/
|
||||
# Then install it. You can also install firebug and firepath addon
|
||||
|
||||
# 4. How to start writing a new case:
|
||||
# All you need to do is to implement the function test_xxx() and pile it on.
|
||||
|
||||
# 5. How to test with Chrome browser
|
||||
# Download/install chrome on host
|
||||
# Download chromedriver from https://code.google.com/p/chromedriver/downloads/list according to your host type
|
||||
# put chromedriver in PATH, (e.g. /usr/bin/, bear in mind to chmod)
|
||||
# For windows host, you may put chromedriver.exe in the same directory as chrome.exe
|
||||
|
||||
|
||||
import unittest, time, re, sys, getopt, os, logging, platform
|
||||
import ConfigParser
|
||||
import subprocess
|
||||
|
||||
|
||||
class toaster_run_all():
|
||||
def __init__(self):
|
||||
# in case this script is called from other directory
|
||||
os.chdir(os.path.abspath(sys.path[0]))
|
||||
self.starttime = time.strptime(time.ctime())
|
||||
self.parser = ConfigParser.SafeConfigParser()
|
||||
found = self.parser.read('toaster_test.cfg')
|
||||
self.host_os = platform.system().lower()
|
||||
self.run_all_cases()
|
||||
self.collect_log()
|
||||
|
||||
def get_test_cases(self):
|
||||
# we have config groups for different os type in toaster_test.cfg
|
||||
cases_to_run = eval(self.parser.get('toaster_test_' + self.host_os, 'test_cases'))
|
||||
return cases_to_run
|
||||
|
||||
|
||||
def run_all_cases(self):
|
||||
cases_temp = self.get_test_cases()
|
||||
for case in cases_temp:
|
||||
single_case_cmd = "python -m unittest toaster_automation_test.toaster_cases.test_" + str(case)
|
||||
print single_case_cmd
|
||||
subprocess.call(single_case_cmd, shell=True)
|
||||
|
||||
def collect_log(self):
|
||||
"""
|
||||
the log files are temporarily stored in ./log/tmp/..
|
||||
After all cases are done, they should be transfered to ./log/$TIMESTAMP/
|
||||
"""
|
||||
def comple(number):
|
||||
if number < 10:
|
||||
return str(0) + str(number)
|
||||
else:
|
||||
return str(number)
|
||||
now = self.starttime
|
||||
now_str = comple(now.tm_year) + comple(now.tm_mon) + comple(now.tm_mday) + \
|
||||
comple(now.tm_hour) + comple(now.tm_min) + comple(now.tm_sec)
|
||||
log_dir = os.path.abspath(sys.path[0]) + os.sep + 'log' + os.sep + now_str
|
||||
log_tmp_dir = os.path.abspath(sys.path[0]) + os.sep + 'log' + os.sep + 'tmp'
|
||||
try:
|
||||
os.renames(log_tmp_dir, log_dir)
|
||||
except OSError :
|
||||
logging.error(" Cannot create log dir(timestamp) under log, please check your privilege")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
toaster_run_all()
|
||||
|
||||
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,21 +0,0 @@
|
||||
# Configuration file for toaster_test
|
||||
# Sorted by different host type
|
||||
|
||||
# test browser could be: firefox; chrome; ie(still under development)
|
||||
# logging_level could be: CRITICAL; ERROR; WARNING; INFO; DEBUG; NOTSET
|
||||
|
||||
|
||||
[toaster_test_linux]
|
||||
toaster_url = 'http://127.0.0.1:8000'
|
||||
test_browser = 'firefox'
|
||||
test_cases = [946]
|
||||
logging_level = 'INFO'
|
||||
|
||||
|
||||
[toaster_test_windows]
|
||||
toaster_url = 'http://127.0.0.1:8000'
|
||||
test_browser = ['ie', 'firefox', 'chrome']
|
||||
test_cases = [901, 902, 903]
|
||||
logging_level = 'DEBUG'
|
||||
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
|
||||
import httplib2
|
||||
import time
|
||||
|
||||
import config
|
||||
import urllist
|
||||
|
||||
|
||||
# TODO: turn to a test
|
||||
def validate_html5(url):
|
||||
h = httplib2.Http(".cache")
|
||||
status = "Failed"
|
||||
errors = -1
|
||||
warnings = -1
|
||||
|
||||
# TODO: the w3c-validator must be a configurable setting
|
||||
urlrequest = config.W3C_VALIDATOR+url
|
||||
try:
|
||||
resp, content = h.request(urlrequest, "HEAD")
|
||||
if resp['x-w3c-validator-status'] != "Abort":
|
||||
status = resp['x-w3c-validator-status']
|
||||
errors = int(resp['x-w3c-validator-errors'])
|
||||
warnings = int(resp['x-w3c-validator-warnings'])
|
||||
except Exception as e:
|
||||
config.logger.warn("Failed validation call: %s" % e.__str__())
|
||||
return (status, errors, warnings)
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("Testing %s with %s" % (config.TOASTER_BASEURL, config.W3C_VALIDATOR))
|
||||
|
||||
def print_validation(url):
|
||||
status, errors, warnings = validate_html5(url)
|
||||
config.logger.error("url %s is %s\terrors %s warnings %s (check at %s)" % (url, status, errors, warnings, config.W3C_VALIDATOR+url))
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
print_validation(sys.argv[1])
|
||||
else:
|
||||
for url in urllist.URLS:
|
||||
print_validation(config.TOASTER_BASEURL+url)
|
||||
@@ -1,48 +0,0 @@
|
||||
import config
|
||||
|
||||
URLS = [
|
||||
'toastergui/landing/',
|
||||
'toastergui/builds/',
|
||||
'toastergui/build/1',
|
||||
'toastergui/build/1/tasks/',
|
||||
'toastergui/build/1/tasks/1/',
|
||||
'toastergui/build/1/task/1',
|
||||
'toastergui/build/1/recipes/',
|
||||
'toastergui/build/1/recipe/1/active_tab/1',
|
||||
'toastergui/build/1/recipe/1',
|
||||
'toastergui/build/1/recipe_packages/1',
|
||||
'toastergui/build/1/packages/',
|
||||
'toastergui/build/1/package/1',
|
||||
'toastergui/build/1/package_built_dependencies/1',
|
||||
'toastergui/build/1/package_included_detail/1/1',
|
||||
'toastergui/build/1/package_included_dependencies/1/1',
|
||||
'toastergui/build/1/package_included_reverse_dependencies/1/1',
|
||||
'toastergui/build/1/target/1',
|
||||
'toastergui/build/1/target/1/targetpkg',
|
||||
'toastergui/dentries/build/1/target/1',
|
||||
'toastergui/build/1/target/1/dirinfo',
|
||||
'toastergui/build/1/target/1/dirinfo_filepath/_/bin/bash',
|
||||
'toastergui/build/1/configuration',
|
||||
'toastergui/build/1/configvars',
|
||||
'toastergui/build/1/buildtime',
|
||||
'toastergui/build/1/cpuusage',
|
||||
'toastergui/build/1/diskio',
|
||||
'toastergui/build/1/target/1/packagefile/1',
|
||||
'toastergui/newproject/',
|
||||
'toastergui/projects/',
|
||||
'toastergui/project/',
|
||||
'toastergui/project/1',
|
||||
'toastergui/project/1/configuration',
|
||||
'toastergui/project/1/builds/',
|
||||
'toastergui/project/1/layers/',
|
||||
'toastergui/project/1/layer/1',
|
||||
'toastergui/project/1/layer/',
|
||||
'toastergui/project/1/importlayer',
|
||||
'toastergui/project/1/targets/',
|
||||
'toastergui/project/1/machines/',
|
||||
'toastergui/xhr_configvaredit/1',
|
||||
'toastergui/xhr_importlayer/',
|
||||
'toastergui/xhr_updatelayer/',
|
||||
'toastergui/project/1/buildrequest/1',
|
||||
'toastergui/',
|
||||
]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user