Mirror of https://git.yoctoproject.org/poky (synced 2026-01-30 21:38:43 +01:00)

Compare commits: 2.2_M2...uninative- (347 commits)
@@ -10,4 +10,6 @@ Foundation and individual contributors.

* Twitter typeahead.js redistributed under the MIT license. Note that the JS source has one small modification, so the full unminified file is currently included to make it obvious where this is.

* jsrender is redistributed under the MIT license.

* QUnit is redistributed under the MIT license.
@@ -59,7 +59,8 @@ webserverStartAll()
echo "Failed migrations, aborting system start" 1>&2
return $retval
fi

# Make sure that checksettings can pick up any value for TEMPLATECONF
export TEMPLATECONF
$MANAGE checksettings --traceback || retval=1

if [ $retval -eq 1 ]; then
@@ -100,7 +101,6 @@ stop_system()
fi
webserverKillAll
# unset exported variables
unset TOASTER_CONF
unset TOASTER_DIR
unset BITBAKE_UI
unset BBBASEDIR
@@ -153,23 +153,9 @@ if [ -n "$TEMPLATECONF" ]; then
if [ -d "$OEROOT/$TEMPLATECONF" ]; then
TEMPLATECONF="$OEROOT/$TEMPLATECONF"
fi
if [ ! -d "$TEMPLATECONF" ]; then
echo >&2 "Error: '$TEMPLATECONF' must be a directory containing toasterconf.json"
return 1
fi
fi
fi

if [ "$TOASTER_CONF" = "" ]; then
TOASTER_CONF="$TEMPLATECONF/toasterconf.json"
export TOASTER_CONF=$(python3 -c "import os; print(os.path.realpath('$TOASTER_CONF'))")
fi

if [ ! -f $TOASTER_CONF ]; then
echo "$TOASTER_CONF configuration file not found. Set TOASTER_CONF to specify file or fix .templateconf"
return 1
fi

# this defines the dir toaster will use for
# 1) clones of layers (in _toaster_clones )
# 2) the build dir (in build)
@@ -203,6 +189,10 @@ for param in $*; do
ADDR_PORT="localhost:$PORT"
fi
;;
--help)
echo "$HELP"
return 0
;;
*)
echo "$HELP"
return 1
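The script above resolves TOASTER_CONF by falling back to toasterconf.json inside TEMPLATECONF and then normalising it through an inline python3 call to os.path.realpath. Below is a minimal Python sketch of that resolution step; the helper name is illustrative and not part of the script itself.

```python
import os

def resolve_toaster_conf(templateconf, toaster_conf=""):
    """Mirror the shell logic: default to toasterconf.json under TEMPLATECONF,
    then normalise to an absolute path with symlinks resolved."""
    if not toaster_conf:
        toaster_conf = os.path.join(templateconf, "toasterconf.json")
    toaster_conf = os.path.realpath(toaster_conf)
    if not os.path.isfile(toaster_conf):
        raise FileNotFoundError("%s configuration file not found" % toaster_conf)
    return toaster_conf
```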
@@ -648,26 +648,7 @@
recipe and <filename>taskB</filename> for the second
recipe:
<literallayout class='monospaced'>
$ bitbake myfirstrecipe recipe:do_taskA mysecondrecipe recipe:do_taskB
</literallayout>
</para>
</section>

<section id='executing-a-list-of-task-and-recipe-combinations'>
<title>Executing a List of Task and Recipe Combinations</title>

<para>
The BitBake command line supports specifying different
tasks for individual targets when you specify multiple
targets.
For example, suppose you had two targets (or recipes)
<filename>myfirstrecipe</filename> and
<filename>mysecondrecipe</filename> and you needed
BitBake to run <filename>taskA</filename> for the first
recipe and <filename>taskB</filename> for the second
recipe:
<literallayout class='monospaced'>
$ bitbake myfirstrecipe recipe:do_taskA mysecondrecipe recipe:do_taskB
$ bitbake myfirstrecipe:do_taskA mysecondrecipe:do_taskB
</literallayout>
</para>
</section>
@@ -50,22 +50,52 @@
<title>Variable Expansion</title>

<para>
BitBake supports variables referencing one another's
contents using a syntax that is similar to shell scripting.
Following is an example that results in <filename>A</filename>
containing "aval" and <filename>B</filename> evaluating to
"preavalpost" based on that current value of
<filename>A</filename>.
Variables can reference the contents of other variables
using a syntax that is similar to variable expansion in
Bourne shells.
The following assignments
result in A containing "aval" and B evaluating to "preavalpost".
<literallayout class='monospaced'>
A = "aval"
B = "pre${A}post"
</literallayout>
You should realize that whenever <filename>B</filename> is
referenced, its evaluation will depend on the state of
<filename>A</filename> at that time.
Thus, later evaluations of <filename>B</filename> in the
previous example could result in different values
depending on the value of <filename>A</filename>.
<note>
Unlike in Bourne shells, the curly braces are mandatory:
Only <filename>${FOO}</filename> and not
<filename>$FOO</filename> is recognized as an expansion of
<filename>FOO</filename>.
</note>
The "=" operator does not immediately expand variable
references in the right-hand side.
Instead, expansion is deferred until the variable assigned to
is actually used.
The result depends on the current values of the referenced
variables.
The following example should clarify this behavior:
<literallayout class='monospaced'>
A = "${B} baz"
B = "${C} bar"
C = "foo"
*At this point, ${A} equals "foo bar baz"*
C = "qux"
*At this point, ${A} equals "qux bar baz"*
B = "norf"
*At this point, ${A} equals "norf baz"*
</literallayout>
Contrast this behavior with the
<link linkend='immediate-variable-expansion'>immediate variable expansion</link>
operator (i.e. ":=").
</para>

<para>
If the variable expansion syntax is used on a variable that
does not exist, the string is kept as is.
For example, given the following assignment,
<filename>BAR</filename> expands to the literal string
"${FOO}" as long as <filename>FOO</filename> does not exist.
<literallayout class='monospaced'>
BAR = "${FOO}"
</literallayout>
</para>
</section>
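The deferred-expansion example above can be reproduced with BitBake's datastore API. This is a sketch of the documented behaviour and assumes a BitBake checkout with bitbake/lib on PYTHONPATH; it is not part of the manual text.

```python
import bb.data

d = bb.data.init()
d.setVar("A", "${B} baz")
d.setVar("B", "${C} bar")
d.setVar("C", "foo")
print(d.getVar("A", True))   # "foo bar baz" -- references are resolved at read time

d.setVar("C", "qux")
print(d.getVar("A", True))   # "qux bar baz"

d.setVar("B", "norf")
print(d.getVar("A", True))   # "norf baz"
```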
@@ -259,6 +289,60 @@
"789 123456" and <filename>FOO2</filename> becomes
"ghi abcdef".
</para>

<para>
Like "_append" and "_prepend", "_remove"
is deferred until after parsing completes.
</para>
</section>

<section id='override-style-operation-advantages'>
<title>Override Style Operation Advantages</title>

<para>
An advantage of the override style operations
"_append", "_prepend", and "_remove" as compared to the
"+=" and "=+" operators is that the override style
operators provide guaranteed operations.
For example, consider a class <filename>foo.bbclass</filename>
that needs to add the value "val" to the variable
<filename>FOO</filename>, and a recipe that uses
<filename>foo.bbclass</filename> as follows:
<literallayout class='monospaced'>
inherit foo

FOO = "initial"
</literallayout>
If <filename>foo.bbclass</filename> uses the "+=" operator,
as follows, then the final value of <filename>FOO</filename>
will be "initial", which is not what is desired:
<literallayout class='monospaced'>
FOO += "val"
</literallayout>
If, on the other hand, <filename>foo.bbclass</filename>
uses the "_append" operator, then the final value of
<filename>FOO</filename> will be "initial val", as intended:
<literallayout class='monospaced'>
FOO_append = " val"
</literallayout>
<note>
It is never necessary to use "+=" together with "_append".
The following sequence of assignments appends "barbaz" to
<filename>FOO</filename>:
<literallayout class='monospaced'>
FOO_append = "bar"
FOO_append = "baz"
</literallayout>
The only effect of changing the second assignment in the
previous example to use "+=" would be to add a space before
"baz" in the appended value (due to how the "+=" operator
works).
</note>
Another advantage of the override style operations is that
you can combine them with other overrides as described in the
"<link linkend='conditional-syntax-overrides'>Conditional Syntax (Overrides)</link>"
section.
</para>
</section>
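The ordering hazard described above can also be seen from the datastore API. The sketch below only approximates what the parser does for "+=", since the real override-style handling is applied at parse time; it assumes bitbake/lib on PYTHONPATH.

```python
import bb.data

d = bb.data.init()
# foo.bbclass is parsed first: FOO is still unset when the "+=" style append runs
d.appendVar("FOO", " val")      # roughly what FOO += "val" amounts to at this point
# the recipe body is parsed afterwards and assigns over the top
d.setVar("FOO", "initial")
print(d.getVar("FOO", True))    # "initial" -- the appended "val" is lost
```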
<section id='variable-flag-syntax'>
@@ -277,8 +361,7 @@
You can define, append, and prepend values to variable flags.
All the standard syntax operations previously mentioned work
for variable flags except for override style syntax
(i.e. <filename>_prepend</filename>, <filename>_append</filename>,
and <filename>_remove</filename>).
(i.e. "_prepend", "_append", and "_remove").
</para>

<para>
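Variable flags are also reachable from Python through setVarFlag and getVarFlag on the datastore. A small sketch, again assuming a BitBake checkout on PYTHONPATH:

```python
import bb.data

d = bb.data.init()
d.setVar("FOO", "value")
d.setVarFlag("FOO", "doc", "An example variable")   # equivalent to FOO[doc] = "..."
print(d.getVarFlag("FOO", "doc"))                   # "An example variable"
```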
@@ -330,6 +413,21 @@
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
</literallayout>
<note>
Inline Python expressions work just like variable expansions
insofar as the "=" and ":=" operators are concerned.
Given the following assignment, <filename>foo()</filename>
is called each time <filename>FOO</filename> is expanded:
<literallayout class='monospaced'>
FOO = "${@foo()}"
</literallayout>
Contrast this with the following immediate assignment, where
<filename>foo()</filename> is only called once, while the
assignment is parsed:
<literallayout class='monospaced'>
FOO := "${@foo()}"
</literallayout>
</note>
</para>
</section>
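The difference between "=" and ":=" around inline Python can be imitated with the datastore API: getVar() expands, and therefore evaluates the expression, on every read, while expanding once up front freezes the result. A hedged sketch, assuming bitbake/lib on PYTHONPATH:

```python
import bb.data

d = bb.data.init()

d.setVar("FOO", "${@5 * 12}")
print(d.getVar("FOO", True))          # "60" -- the inline expression runs at expansion time

# Roughly what ":=" achieves: expand once while "parsing", store the literal result
d.setVar("BAR", d.expand("${@5 * 12}"))
print(d.getVar("BAR", False))         # "60" already, no expression left to evaluate
```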
@@ -545,15 +643,15 @@
OVERRIDES = "foo"
A = "Y"
A_foo_append = "Z"
A_foo_append += "X"
A_foo_append = "X"
</literallayout>
For this case, before any overrides are resolved,
<filename>A</filename> is set to "Y" using an immediate assignment.
After this immediate assignment, <filename>A_foo</filename> is set
to "Z", and then further appended with
"X" leaving the variable set to "Z X".
"X" leaving the variable set to "ZX".
Finally, applying the override for "foo" results in the conditional
variable <filename>A</filename> becoming "Z X" (i.e.
variable <filename>A</filename> becoming "ZX" (i.e.
<filename>A</filename> is replaced with <filename>A_foo</filename>).
</para>
@@ -572,7 +670,7 @@
Initially, <filename>A</filename> is set to "1 45" because
of the three statements that use immediate operators.
After these assignments are made, BitBake applies the
<filename>_append</filename> operations.
"_append" operations.
Those operations result in <filename>A</filename> becoming "1 4523".
</para>
</section>
@@ -1976,6 +1976,27 @@
The <filename>PROVIDES</filename> statement results in
the "libav" recipe also being known as "libpostproc".
</para>

<para>
In addition to providing recipes under alternate names,
the <filename>PROVIDES</filename> mechanism is also used
to implement virtual targets.
A virtual target is a name that corresponds to some
particular functionality (e.g. a Linux kernel).
Recipes that provide the functionality in question list the
virtual target in <filename>PROVIDES</filename>.
Recipes that depend on the functionality in question can
include the virtual target in
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
to leave the choice of provider open.
</para>

<para>
Conventionally, virtual targets have names on the form
"virtual/function" (e.g. "virtual/kernel").
The slash is simply part of the name and has no
syntactical significance.
</para>
</glossdef>
</glossentry>
@@ -385,39 +385,44 @@ exit $ret
else:
bb.warn('%s: invalid task progress varflag value "%s", ignoring' % (func, progress))

fifobuffer = bytearray()
def readfifo(data):
lines = data.split(b'\0')
for line in lines:
# Just skip empty commands
if not line:
continue
splitval = line.split(b' ', 1)
cmd = splitval[0].decode("utf-8")
if len(splitval) > 1:
value = splitval[1].decode("utf-8")
nonlocal fifobuffer
fifobuffer.extend(data)
while fifobuffer:
message, token, nextmsg = fifobuffer.partition(b"\00")
if token:
splitval = message.split(b' ', 1)
cmd = splitval[0].decode("utf-8")
if len(splitval) > 1:
value = splitval[1].decode("utf-8")
else:
value = ''
if cmd == 'bbplain':
bb.plain(value)
elif cmd == 'bbnote':
bb.note(value)
elif cmd == 'bbwarn':
bb.warn(value)
elif cmd == 'bberror':
bb.error(value)
elif cmd == 'bbfatal':
# The caller will call exit themselves, so bb.error() is
# what we want here rather than bb.fatal()
bb.error(value)
elif cmd == 'bbfatal_log':
bb.error(value, forcelog=True)
elif cmd == 'bbdebug':
splitval = value.split(' ', 1)
level = int(splitval[0])
value = splitval[1]
bb.debug(level, value)
else:
bb.warn("Unrecognised command '%s' on FIFO" % cmd)
fifobuffer = nextmsg
else:
value = ''
if cmd == 'bbplain':
bb.plain(value)
elif cmd == 'bbnote':
bb.note(value)
elif cmd == 'bbwarn':
bb.warn(value)
elif cmd == 'bberror':
bb.error(value)
elif cmd == 'bbfatal':
# The caller will call exit themselves, so bb.error() is
# what we want here rather than bb.fatal()
bb.error(value)
elif cmd == 'bbfatal_log':
bb.error(value, forcelog=True)
elif cmd == 'bbdebug':
splitval = value.split(' ', 1)
level = int(splitval[0])
value = splitval[1]
bb.debug(level, value)
else:
bb.warn("Unrecognised command '%s' on FIFO" % cmd)
break

tempdir = d.getVar('T', True)
fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
if os.path.exists(fifopath):
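The replacement code above buffers raw FIFO reads in a bytearray and only dispatches complete NUL-terminated messages, so a command split across two reads is no longer lost. Below is a standalone sketch of that framing; the handler and message names are illustrative rather than the actual BitBake logging commands.

```python
fifobuffer = bytearray()

def handle_command(cmd, value):
    print("command=%r value=%r" % (cmd, value))

def readfifo(data):
    """Accumulate bytes; dispatch every complete message terminated by b'\\0'."""
    global fifobuffer
    fifobuffer.extend(data)
    while fifobuffer:
        message, token, nextmsg = fifobuffer.partition(b"\x00")
        if not token:
            break                       # partial message, wait for more data
        if not message:                 # skip empty commands
            fifobuffer = nextmsg
            continue
        splitval = message.split(b' ', 1)
        cmd = splitval[0].decode("utf-8")
        value = splitval[1].decode("utf-8") if len(splitval) > 1 else ''
        handle_command(cmd, value)
        fifobuffer = nextmsg

readfifo(b"note first message\x00warn second ")   # second message is incomplete...
readfifo(b"message\x00")                          # ...and is dispatched once finished
```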
@@ -280,72 +280,74 @@ class Cache(object):
|
||||
cache_ok = True
|
||||
if self.caches_array:
|
||||
for cache_class in self.caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
cache_ok = cache_ok and os.path.exists(cachefile)
|
||||
cache_class.init_cacheData(self)
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
cache_ok = cache_ok and os.path.exists(cachefile)
|
||||
cache_class.init_cacheData(self)
|
||||
if cache_ok:
|
||||
self.load_cachefile()
|
||||
elif os.path.isfile(self.cachefile):
|
||||
logger.info("Out of date cache found, rebuilding...")
|
||||
|
||||
def load_cachefile(self):
|
||||
# Firstly, using core cache file information for
|
||||
# valid checking
|
||||
with open(self.cachefile, "rb") as cachefile:
|
||||
pickled = pickle.Unpickler(cachefile)
|
||||
try:
|
||||
cache_ver = pickled.load()
|
||||
bitbake_ver = pickled.load()
|
||||
except Exception:
|
||||
logger.info('Invalid cache, rebuilding...')
|
||||
return
|
||||
|
||||
if cache_ver != __cache_version__:
|
||||
logger.info('Cache version mismatch, rebuilding...')
|
||||
return
|
||||
elif bitbake_ver != bb.__version__:
|
||||
logger.info('Bitbake version mismatch, rebuilding...')
|
||||
return
|
||||
|
||||
|
||||
cachesize = 0
|
||||
previous_progress = 0
|
||||
previous_percent = 0
|
||||
|
||||
# Calculate the correct cachesize of all those cache files
|
||||
for cache_class in self.caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
with open(cachefile, "rb") as cachefile:
|
||||
cachesize += os.fstat(cachefile.fileno()).st_size
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
with open(cachefile, "rb") as cachefile:
|
||||
cachesize += os.fstat(cachefile.fileno()).st_size
|
||||
|
||||
bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
|
||||
|
||||
for cache_class in self.caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
with open(cachefile, "rb") as cachefile:
|
||||
pickled = pickle.Unpickler(cachefile)
|
||||
while cachefile:
|
||||
try:
|
||||
key = pickled.load()
|
||||
value = pickled.load()
|
||||
except Exception:
|
||||
break
|
||||
if key in self.depends_cache:
|
||||
self.depends_cache[key].append(value)
|
||||
else:
|
||||
self.depends_cache[key] = [value]
|
||||
# only fire events on even percentage boundaries
|
||||
current_progress = cachefile.tell() + previous_progress
|
||||
current_percent = 100 * current_progress / cachesize
|
||||
if current_percent > previous_percent:
|
||||
previous_percent = current_percent
|
||||
bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
|
||||
self.data)
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
with open(cachefile, "rb") as cachefile:
|
||||
pickled = pickle.Unpickler(cachefile)
|
||||
# Check cache version information
|
||||
try:
|
||||
cache_ver = pickled.load()
|
||||
bitbake_ver = pickled.load()
|
||||
except Exception:
|
||||
logger.info('Invalid cache, rebuilding...')
|
||||
return
|
||||
|
||||
previous_progress += current_progress
|
||||
if cache_ver != __cache_version__:
|
||||
logger.info('Cache version mismatch, rebuilding...')
|
||||
return
|
||||
elif bitbake_ver != bb.__version__:
|
||||
logger.info('Bitbake version mismatch, rebuilding...')
|
||||
return
|
||||
|
||||
# Load the rest of the cache file
|
||||
current_progress = 0
|
||||
while cachefile:
|
||||
try:
|
||||
key = pickled.load()
|
||||
value = pickled.load()
|
||||
except Exception:
|
||||
break
|
||||
if not isinstance(key, str):
|
||||
bb.warn("%s from extras cache is not a string?" % key)
|
||||
break
|
||||
if not isinstance(value, RecipeInfoCommon):
|
||||
bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
|
||||
break
|
||||
|
||||
if key in self.depends_cache:
|
||||
self.depends_cache[key].append(value)
|
||||
else:
|
||||
self.depends_cache[key] = [value]
|
||||
# only fire events on even percentage boundaries
|
||||
current_progress = cachefile.tell() + previous_progress
|
||||
current_percent = 100 * current_progress / cachesize
|
||||
if current_percent > previous_percent:
|
||||
previous_percent = current_percent
|
||||
bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
|
||||
self.data)
|
||||
|
||||
previous_progress += current_progress
|
||||
|
||||
# Note: depends cache number is corresponding to the parsing file numbers.
|
||||
# The same file has several caches, still regarded as one item in the cache
|
||||
@@ -395,6 +397,7 @@ class Cache(object):
|
||||
@classmethod
|
||||
def parse(cls, filename, appends, configdata, caches_array):
|
||||
"""Parse the specified filename, returning the recipe information"""
|
||||
logger.debug(1, "Parsing %s", filename)
|
||||
infos = []
|
||||
datastores = cls.load_bbfile(filename, appends, configdata)
|
||||
depends = []
|
||||
@@ -408,9 +411,8 @@ class Cache(object):
|
||||
|
||||
info_array = []
|
||||
for cache_class in caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
info = cache_class(filename, data)
|
||||
info_array.append(info)
|
||||
info = cache_class(filename, data)
|
||||
info_array.append(info)
|
||||
infos.append((virtualfn, info_array))
|
||||
|
||||
return infos
|
||||
@@ -432,7 +434,6 @@ class Cache(object):
|
||||
virtualfn = self.realfn2virtual(filename, variant)
|
||||
infos.append((virtualfn, self.depends_cache[virtualfn]))
|
||||
else:
|
||||
logger.debug(1, "Parsing %s", filename)
|
||||
return self.parse(filename, appends, configdata, self.caches_array)
|
||||
|
||||
return cached, infos
|
||||
@@ -556,6 +557,9 @@ class Cache(object):
|
||||
if virtualfn not in self.depends_cache:
|
||||
logger.debug(2, "Cache: %s is not cached", virtualfn)
|
||||
invalid = True
|
||||
elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
|
||||
logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
|
||||
invalid = True
|
||||
|
||||
# If any one of the variants is not present, mark as invalid for all
|
||||
if invalid:
|
||||
@@ -597,30 +601,19 @@ class Cache(object):
|
||||
logger.debug(2, "Cache is clean, not saving.")
|
||||
return
|
||||
|
||||
file_dict = {}
|
||||
pickler_dict = {}
|
||||
for cache_class in self.caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
cache_class_name = cache_class.__name__
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
file_dict[cache_class_name] = open(cachefile, "wb")
|
||||
pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)
|
||||
|
||||
pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
|
||||
pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
|
||||
cache_class_name = cache_class.__name__
|
||||
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
|
||||
with open(cachefile, "wb") as f:
|
||||
p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
|
||||
p.dump(__cache_version__)
|
||||
p.dump(bb.__version__)
|
||||
|
||||
try:
|
||||
for key, info_array in self.depends_cache.items():
|
||||
for info in info_array:
|
||||
if isinstance(info, RecipeInfoCommon):
|
||||
cache_class_name = info.__class__.__name__
|
||||
pickler_dict[cache_class_name].dump(key)
|
||||
pickler_dict[cache_class_name].dump(info)
|
||||
finally:
|
||||
for cache_class in self.caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
cache_class_name = cache_class.__name__
|
||||
file_dict[cache_class_name].close()
|
||||
for key, info_array in self.depends_cache.items():
|
||||
for info in info_array:
|
||||
if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
|
||||
p.dump(key)
|
||||
p.dump(info)
|
||||
|
||||
del self.depends_cache
|
||||
|
||||
@@ -652,8 +645,7 @@ class Cache(object):
|
||||
|
||||
info_array = []
|
||||
for cache_class in self.caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
info_array.append(cache_class(realfn, data))
|
||||
info_array.append(cache_class(realfn, data))
|
||||
self.add_info(file_name, info_array, cacheData, parsed)
|
||||
|
||||
@staticmethod
|
||||
@@ -721,8 +713,9 @@ class CacheData(object):
|
||||
def __init__(self, caches_array):
|
||||
self.caches_array = caches_array
|
||||
for cache_class in self.caches_array:
|
||||
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
|
||||
cache_class.init_cacheData(self)
|
||||
if not issubclass(cache_class, RecipeInfoCommon):
|
||||
bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
|
||||
cache_class.init_cacheData(self)
|
||||
|
||||
# Direct cache variables
|
||||
self.task_queues = {}
|
||||
|
||||
@@ -120,13 +120,15 @@ class FileChecksumCache(MultiProcessCache):
checksums.extend(checksum_dir(f))
else:
checksum = checksum_file(f)
checksums.append((f, checksum))
if checksum:
checksums.append((f, checksum))
elif os.path.isdir(pth):
if not os.path.islink(pth):
checksums.extend(checksum_dir(pth))
else:
checksum = checksum_file(pth)
checksums.append((pth, checksum))
if checksum:
checksums.append((pth, checksum))

checksums.sort(key=operator.itemgetter(1))
return checksums
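The change above only records entries whose checksum could actually be computed. Here is a standalone sketch of that filtering; checksum_file below is a stand-in with the same "None on failure" contract, not the real helper used by the cache.

```python
import hashlib
import operator

def checksum_file(path):
    """Return an md5 hex digest, or None if the file cannot be read."""
    try:
        with open(path, "rb") as f:
            return hashlib.md5(f.read()).hexdigest()
    except OSError:
        return None

def checksum_files(paths):
    checksums = []
    for f in paths:
        checksum = checksum_file(f)
        if checksum:                    # skip files that disappeared or are unreadable
            checksums.append((f, checksum))
    checksums.sort(key=operator.itemgetter(1))
    return checksums
```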
@@ -1235,6 +1235,7 @@ class BBCooker:
"""
Build the file matching regexp buildfile
"""
bb.event.fire(bb.event.BuildInit(), self.expanded_data)

# Too many people use -b because they think it's how you normally
# specify a target to be built, so show a warning
@@ -1377,6 +1378,9 @@ class BBCooker:
if not task.startswith("do_"):
task = "do_%s" % task

packages = ["%s:%s" % (target, task) for target in targets]
bb.event.fire(bb.event.BuildInit(packages), self.expanded_data)

taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

buildname = self.data.getVar("BUILDNAME", False)

@@ -382,7 +382,11 @@ class BuildBase(Event):

class BuildInit(BuildBase):
"""buildFile or buildTargets was invoked"""
def __init__(self, p=[]):
name = None
BuildBase.__init__(self, name, p)

class BuildStarted(BuildBase, OperationStarted):
"""bbmake build run started"""
@@ -664,7 +664,7 @@ def verify_donestamp(ud, d, origud=None):
# as an upgrade path from the previous done stamp file format.
if checksums != precomputed_checksums:
with open(ud.donestamp, "wb") as cachefile:
p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
p = pickle.Pickler(cachefile, 2)
p.dump(checksums)
return True
except ChecksumError as e:
@@ -698,7 +698,7 @@ def update_stamp(ud, d):
checksums = verify_checksum(ud, d)
# Store the checksums for later re-verification against the recipe
with open(ud.donestamp, "wb") as cachefile:
p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
p = pickle.Pickler(cachefile, 2)
p.dump(checksums)
except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the
@@ -832,7 +832,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None):
output = "output:\n%s" % e.stderr
else:
output = "no output"
error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
except bb.process.CmdError as e:
error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
if not success:
@@ -1431,7 +1431,7 @@ class FetchMethod(object):
if urlpath.find("/") != -1:
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
cmd = 'cp -fpPR %s %s' % (file, destdir)
cmd = 'cp -fpPRH %s %s' % (file, destdir)

if not cmd:
return
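The first two hunks pin the done-stamp pickle to protocol 2 rather than pickle.HIGHEST_PROTOCOL, a choice that keeps the stamp readable from Python 2 as well, since protocol 2 is the newest protocol Python 2 understands. A small hedged sketch of writing and re-reading such a stamp; the file name is illustrative.

```python
import pickle

def write_donestamp(path, checksums):
    with open(path, "wb") as cachefile:
        p = pickle.Pickler(cachefile, 2)   # protocol 2: loadable from Python 2 and 3
        p.dump(checksums)

def read_donestamp(path):
    with open(path, "rb") as cachefile:
        return pickle.Unpickler(cachefile).load()

write_donestamp("example.done", {"md5sum": "d41d8cd98f00b204e9800998ecf8427e"})
print(read_donestamp("example.done"))
```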
@@ -106,7 +106,7 @@ class Bzr(FetchMethod):
if scmdata == "keep":
tar_flags = ""
else:
tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"

# tar them up to a defined filename
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])

@@ -147,7 +147,7 @@ class Cvs(FetchMethod):
if scmdata == "keep":
tar_flags = ""
else:
tar_flags = "--exclude 'CVS'"
tar_flags = "--exclude='CVS'"

# tar them up to a defined filename
if 'fullpath' in ud.parm:

@@ -66,7 +66,7 @@ class GitANNEX(Git):

os.chdir(ud.destdir)
try:
runfetchcmd("%s annex sync" % (ud.basecmd), d)
runfetchcmd("%s annex init" % (ud.basecmd), d)
except bb.fetch.FetchError:
pass

@@ -83,7 +83,7 @@ class Repo(FetchMethod):
if scmdata == "keep":
tar_flags = ""
else:
tar_flags = "--exclude '.repo' --exclude '.git'"
tar_flags = "--exclude='.repo' --exclude='.git'"

# Create a cache
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)

@@ -150,7 +150,7 @@ class Svn(FetchMethod):
if scmdata == "keep":
tar_flags = ""
else:
tar_flags = "--exclude '.svn'"
tar_flags = "--exclude='.svn'"

os.chdir(ud.pkgdir)
# tar them up to a defined filename
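Each fetcher hunk above rewrites tar's exclude options from the two-word --exclude 'pattern' form to the single-token --exclude='pattern' form. Below is a hedged sketch of building the same kind of tarball command from Python; the function name and the use of shlex/subprocess are illustrative, since the real fetchers hand the string to runfetchcmd.

```python
import shlex
import subprocess

def make_scm_tarball(tarball, tree, scmdata="remove"):
    # Same shape as the fetcher code: optionally strip SCM metadata from the tarball
    tar_flags = "" if scmdata == "keep" else "--exclude='.svn'"
    cmd = "tar %s -czf %s %s" % (tar_flags, tarball, tree)
    # shlex.split keeps --exclude='.svn' together as a single argument
    subprocess.check_call(shlex.split(cmd))

# make_scm_tarball("/tmp/source.tar.gz", "checkout-dir")
```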
@@ -131,14 +131,14 @@ class SQLTable(collections.MutableMapping):
return [row[1] for row in data]

def values(self):
return list(self.values())
return list(self.itervalues())

def itervalues(self):
data = self._execute("SELECT value FROM %s;" % self.table)
return (row[0] for row in data)

def items(self):
return list(self.items())
return list(self.iteritems())

def iteritems(self):
return self._execute("SELECT * FROM %s;" % self.table)
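The hunk above fixes values() and items() calling themselves, which recursed forever, by delegating to the iterator helpers instead. A minimal sketch of the corrected pattern, with a plain list standing in for the SQL-backed storage:

```python
class SQLTableSketch:
    """Minimal illustration of the fix: the list-returning accessors must call
    the generator helpers, not themselves."""
    def __init__(self, rows):
        self._rows = rows                    # stand-in for the SQL-backed table

    def itervalues(self):
        return (value for _, value in self._rows)

    def iteritems(self):
        return iter(self._rows)

    def values(self):
        return list(self.itervalues())       # was list(self.values()) -> infinite recursion

    def items(self):
        return list(self.iteritems())        # was list(self.items()) -> infinite recursion

t = SQLTableSketch([("a", 1), ("b", 2)])
print(t.values())   # [1, 2]
print(t.items())    # [('a', 1), ('b', 2)]
```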
@@ -234,10 +234,13 @@ class MultiStageProcessProgressReporter(MultiStageProgressReporter):
"""
def __init__(self, d, processname, stage_weights, debug=False):
self._processname = processname
self._started = False
MultiStageProgressReporter.__init__(self, d, stage_weights, debug)

def start(self):
bb.event.fire(bb.event.ProcessStarted(self._processname, 100), self._data)
if not self._started:
bb.event.fire(bb.event.ProcessStarted(self._processname, 100), self._data)
self._started = True

def _fire_progress(self, taskprogress):
if taskprogress == 0:
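The added _started flag makes start() idempotent, so the ProcessStarted event is emitted at most once even if start() is invoked again. A small sketch of that guard; fire_event here is a stand-in for the real event-firing call.

```python
class ProcessProgressSketch:
    """Illustration of the guard added above: emit the 'started' event only once."""
    def __init__(self, processname, fire_event):
        self._processname = processname
        self._fire_event = fire_event        # stand-in for the real event dispatcher
        self._started = False

    def start(self):
        if not self._started:
            self._fire_event("ProcessStarted", self._processname, 100)
            self._started = True

    def finish(self):
        self._fire_event("ProcessFinished", self._processname)

reporter = ProcessProgressSketch("Loading cache", print)
reporter.start()
reporter.start()   # second call is a no-op; only one ProcessStarted is emitted
```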
@@ -293,11 +293,11 @@ class SignatureGeneratorBasic(SignatureGenerator):

computed_basehash = calc_basehash(data)
if computed_basehash != self.basehash[k]:
bb.error("Basehash mismatch %s verses %s for %s" % (computed_basehash, self.basehash[k], k))
bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
if runtime and k in self.taskhash:
computed_taskhash = calc_taskhash(data)
if computed_taskhash != self.taskhash[k]:
bb.error("Taskhash mismatch %s verses %s for %s" % (computed_taskhash, self.taskhash[k], k))
bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))

def dump_sigs(self, dataCache, options):
@@ -150,55 +150,33 @@ class ORMWrapper(object):
|
||||
# pylint: disable=bad-continuation
|
||||
# we do not follow the python conventions for continuation indentation due to long lines here
|
||||
|
||||
def create_build_object(self, build_info, brbe, project_id):
|
||||
assert 'machine' in build_info
|
||||
assert 'distro' in build_info
|
||||
assert 'distro_version' in build_info
|
||||
assert 'started_on' in build_info
|
||||
assert 'cooker_log_path' in build_info
|
||||
assert 'build_name' in build_info
|
||||
assert 'bitbake_version' in build_info
|
||||
|
||||
def get_or_create_build_object(self, brbe):
|
||||
prj = None
|
||||
buildrequest = None
|
||||
if brbe is not None: # this build was triggered by a request from a user
|
||||
if brbe is not None:
|
||||
# Toaster-triggered build
|
||||
logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
|
||||
br, _ = brbe.split(":")
|
||||
buildrequest = BuildRequest.objects.get(pk = br)
|
||||
buildrequest = BuildRequest.objects.get(pk=br)
|
||||
prj = buildrequest.project
|
||||
|
||||
elif project_id is not None: # this build was triggered by an external system for a specific project
|
||||
logger.debug(1, "buildinfohelper: project is %s" % prj)
|
||||
prj = Project.objects.get(pk = project_id)
|
||||
|
||||
else: # this build was triggered by a legacy system, or command line interactive mode
|
||||
else:
|
||||
# CLI build
|
||||
prj = Project.objects.get_or_create_default_project()
|
||||
logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
|
||||
|
||||
|
||||
if buildrequest is not None:
|
||||
# reuse existing Build object
|
||||
build = buildrequest.build
|
||||
logger.info("Updating existing build, with %s", build_info)
|
||||
build.project = prj
|
||||
build.machine=build_info['machine']
|
||||
build.distro=build_info['distro']
|
||||
build.distro_version=build_info['distro_version']
|
||||
build.cooker_log_path=build_info['cooker_log_path']
|
||||
build.build_name=build_info['build_name']
|
||||
build.bitbake_version=build_info['bitbake_version']
|
||||
build.save()
|
||||
|
||||
else:
|
||||
# create new Build object
|
||||
now = timezone.now()
|
||||
build = Build.objects.create(
|
||||
project = prj,
|
||||
machine=build_info['machine'],
|
||||
distro=build_info['distro'],
|
||||
distro_version=build_info['distro_version'],
|
||||
started_on=build_info['started_on'],
|
||||
completed_on=build_info['started_on'],
|
||||
cooker_log_path=build_info['cooker_log_path'],
|
||||
build_name=build_info['build_name'],
|
||||
bitbake_version=build_info['bitbake_version'])
|
||||
project=prj,
|
||||
started_on=now,
|
||||
completed_on=now,
|
||||
build_name='')
|
||||
|
||||
logger.debug(1, "buildinfohelper: build is created %s" % build)
|
||||
|
||||
@@ -208,6 +186,11 @@ class ORMWrapper(object):
|
||||
|
||||
return build
|
||||
|
||||
def update_build(self, build, data_dict):
|
||||
for key in data_dict:
|
||||
setattr(build, key, data_dict[key])
|
||||
build.save()
|
||||
|
||||
@staticmethod
|
||||
def get_or_create_targets(target_info):
|
||||
"""
|
||||
@@ -230,7 +213,7 @@ class ORMWrapper(object):
|
||||
result.append(obj)
|
||||
return result
|
||||
|
||||
def update_build_object(self, build, errors, warnings, taskfailures):
|
||||
def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
|
||||
assert isinstance(build,Build)
|
||||
assert isinstance(errors, int)
|
||||
assert isinstance(warnings, int)
|
||||
@@ -393,7 +376,7 @@ class ORMWrapper(object):
|
||||
layer_copy, c = Layer_Version.objects.get_or_create(
|
||||
build=build_obj,
|
||||
layer=layer_obj.layer,
|
||||
up_branch=layer_obj.up_branch,
|
||||
release=layer_obj.release,
|
||||
branch=layer_version_information['branch'],
|
||||
commit=layer_version_information['commit'],
|
||||
local_path=layer_version_information['local_path'],
|
||||
@@ -436,13 +419,24 @@ class ORMWrapper(object):
|
||||
assert 'name' in layer_information
|
||||
assert 'layer_index_url' in layer_information
|
||||
|
||||
# From command line builds we have no brbe as the request is directly
|
||||
# from bitbake
|
||||
if brbe is None:
|
||||
layer_object, _ = Layer.objects.get_or_create(
|
||||
name=layer_information['name'],
|
||||
layer_index_url=layer_information['layer_index_url'])
|
||||
# If we don't have git commit sha then we're using a non-git
|
||||
# layer so set the layer_source_dir to identify it as such
|
||||
if not layer_information['version']['commit']:
|
||||
local_source_dir = layer_information["local_path"]
|
||||
else:
|
||||
local_source_dir = None
|
||||
|
||||
layer_object, _ = \
|
||||
Layer.objects.get_or_create(
|
||||
name=layer_information['name'],
|
||||
local_source_dir=local_source_dir,
|
||||
layer_index_url=layer_information['layer_index_url'])
|
||||
|
||||
return layer_object
|
||||
else:
|
||||
# we are under managed mode; we must match the layer used in the Project Layer
|
||||
br_id, be_id = brbe.split(":")
|
||||
|
||||
# find layer by checkout path;
|
||||
@@ -467,6 +461,11 @@ class ORMWrapper(object):
|
||||
if brl.layer_version:
|
||||
return brl.layer_version
|
||||
|
||||
# This might be a local layer (i.e. no git info) so try
|
||||
# matching local_source_dir
|
||||
if brl.local_source_dir and brl.local_source_dir == layer_information["local_path"]:
|
||||
return brl.layer_version
|
||||
|
||||
# we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
|
||||
#logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
|
||||
|
||||
@@ -625,8 +624,8 @@ class ORMWrapper(object):
|
||||
Recipe,
|
||||
name=built_recipe.name,
|
||||
layer_version__build=None,
|
||||
layer_version__up_branch=
|
||||
built_recipe.layer_version.up_branch,
|
||||
layer_version__release=
|
||||
built_recipe.layer_version.release,
|
||||
file_path=built_recipe.file_path,
|
||||
version=built_recipe.version
|
||||
)
|
||||
@@ -915,22 +914,55 @@ class BuildInfoHelper(object):
|
||||
###################
|
||||
## methods to convert event/external info into objects that the ORM layer uses
|
||||
|
||||
def _ensure_build(self):
|
||||
"""
|
||||
Ensure the current build object exists and is up to date with
|
||||
data on the bitbake server
|
||||
"""
|
||||
if not 'build' in self.internal_state or not self.internal_state['build']:
|
||||
# create the Build object
|
||||
self.internal_state['build'] = \
|
||||
self.orm_wrapper.get_or_create_build_object(self.brbe)
|
||||
|
||||
def _get_build_information(self, build_log_path):
|
||||
build = self.internal_state['build']
|
||||
|
||||
# update missing fields on the Build object with found data
|
||||
build_info = {}
|
||||
build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
|
||||
build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
|
||||
build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
|
||||
build_info['started_on'] = timezone.now()
|
||||
build_info['completed_on'] = timezone.now()
|
||||
build_info['cooker_log_path'] = build_log_path
|
||||
build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
|
||||
build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
|
||||
build_info['project'] = self.project = self.server.runCommand(["getVariable", "TOASTER_PROJECT"])[0]
|
||||
return build_info
|
||||
|
||||
# set to True if at least one field is going to be set
|
||||
changed = False
|
||||
|
||||
if not build.build_name:
|
||||
build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
|
||||
|
||||
# only reset the build name if the one on the server is actually
|
||||
# a valid value for the build_name field
|
||||
if build_name != None:
|
||||
build_info['build_name'] = build_name
|
||||
changed = True
|
||||
|
||||
if not build.machine:
|
||||
build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
|
||||
changed = True
|
||||
|
||||
if not build.distro:
|
||||
build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
|
||||
changed = True
|
||||
|
||||
if not build.distro_version:
|
||||
build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
|
||||
changed = True
|
||||
|
||||
if not build.bitbake_version:
|
||||
build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
|
||||
changed = True
|
||||
|
||||
if changed:
|
||||
self.orm_wrapper.update_build(self.internal_state['build'], build_info)
|
||||
|
||||
def _get_task_information(self, event, recipe):
|
||||
assert 'taskname' in vars(event)
|
||||
self._ensure_build()
|
||||
|
||||
task_information = {}
|
||||
task_information['build'] = self.internal_state['build']
|
||||
@@ -945,8 +977,9 @@ class BuildInfoHelper(object):
|
||||
return task_information
|
||||
|
||||
def _get_layer_version_for_path(self, path):
|
||||
self._ensure_build()
|
||||
|
||||
assert path.startswith("/")
|
||||
assert 'build' in self.internal_state
|
||||
|
||||
def _slkey_interactive(layer_version):
|
||||
assert isinstance(layer_version, Layer_Version)
|
||||
@@ -957,6 +990,9 @@ class BuildInfoHelper(object):
|
||||
# we can match to the recipe file path
|
||||
if path.startswith(lvo.local_path):
|
||||
return lvo
|
||||
if lvo.layer.local_source_dir and \
|
||||
path.startswith(lvo.layer.local_source_dir):
|
||||
return lvo
|
||||
|
||||
#if we get here, we didn't read layers correctly; dump whatever information we have on the error log
|
||||
logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
|
||||
@@ -990,6 +1026,8 @@ class BuildInfoHelper(object):
|
||||
return recipe_info
|
||||
|
||||
def _get_path_information(self, task_object):
|
||||
self._ensure_build()
|
||||
|
||||
assert isinstance(task_object, Task)
|
||||
build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
|
||||
build_stats_path = []
|
||||
@@ -1032,17 +1070,31 @@ class BuildInfoHelper(object):
|
||||
except NotExisting as nee:
|
||||
logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)
|
||||
|
||||
def store_started_build(self):
|
||||
self._ensure_build()
|
||||
|
||||
def store_started_build(self, event, build_log_path):
|
||||
def save_build_log_file_path(self, build_log_path):
|
||||
self._ensure_build()
|
||||
|
||||
if not self.internal_state['build'].cooker_log_path:
|
||||
data_dict = {'cooker_log_path': build_log_path}
|
||||
self.orm_wrapper.update_build(self.internal_state['build'], data_dict)
|
||||
|
||||
def save_build_targets(self, event):
|
||||
self._ensure_build()
|
||||
|
||||
# create target information
|
||||
assert '_pkgs' in vars(event)
|
||||
build_information = self._get_build_information(build_log_path)
|
||||
target_information = {}
|
||||
target_information['targets'] = event._pkgs
|
||||
target_information['build'] = self.internal_state['build']
|
||||
|
||||
# Update brbe and project as they can be changed for every build
|
||||
self.project = build_information['project']
|
||||
self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
|
||||
|
||||
build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)
|
||||
def save_build_layers_and_variables(self):
|
||||
self._ensure_build()
|
||||
|
||||
self.internal_state['build'] = build_obj
|
||||
build_obj = self.internal_state['build']
|
||||
|
||||
# save layer version information for this build
|
||||
if not 'lvs' in self.internal_state:
|
||||
@@ -1053,13 +1105,6 @@ class BuildInfoHelper(object):
|
||||
|
||||
del self.internal_state['lvs']
|
||||
|
||||
# create target information
|
||||
target_information = {}
|
||||
target_information['targets'] = event._pkgs
|
||||
target_information['build'] = build_obj
|
||||
|
||||
self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
|
||||
|
||||
# Save build configuration
|
||||
data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
|
||||
|
||||
@@ -1090,9 +1135,48 @@ class BuildInfoHelper(object):
|
||||
|
||||
return self.brbe
|
||||
|
||||
def set_recipes_to_parse(self, num_recipes):
|
||||
"""
|
||||
Set the number of recipes which need to be parsed for this build.
|
||||
This is set the first time ParseStarted is received by toasterui.
|
||||
"""
|
||||
self._ensure_build()
|
||||
self.internal_state['build'].recipes_to_parse = num_recipes
|
||||
self.internal_state['build'].save()
|
||||
|
||||
def set_recipes_parsed(self, num_recipes):
|
||||
"""
|
||||
Set the number of recipes parsed so far for this build; this is updated
|
||||
each time a ParseProgress or ParseCompleted event is received by
|
||||
toasterui.
|
||||
"""
|
||||
self._ensure_build()
|
||||
if num_recipes <= self.internal_state['build'].recipes_to_parse:
|
||||
self.internal_state['build'].recipes_parsed = num_recipes
|
||||
self.internal_state['build'].save()
|
||||
|
||||
def update_target_image_file(self, event):
|
||||
evdata = BuildInfoHelper._get_data_from_event(event)
|
||||
|
||||
for t in self.internal_state['targets']:
|
||||
if t.is_image == True:
|
||||
output_files = list(evdata.keys())
|
||||
for output in output_files:
|
||||
if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
|
||||
self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
|
||||
|
||||
def update_artifact_image_file(self, event):
|
||||
self._ensure_build()
|
||||
evdata = BuildInfoHelper._get_data_from_event(event)
|
||||
for artifact_path in evdata.keys():
|
||||
self.orm_wrapper.save_artifact_information(
|
||||
self.internal_state['build'], artifact_path,
|
||||
evdata[artifact_path])
|
||||
|
||||
def update_build_information(self, event, errors, warnings, taskfailures):
|
||||
if 'build' in self.internal_state:
|
||||
self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
|
||||
self._ensure_build()
|
||||
self.orm_wrapper.update_build_stats_and_outcome(
|
||||
self.internal_state['build'], errors, warnings, taskfailures)
|
||||
|
||||
def store_started_task(self, event):
|
||||
assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
|
||||
@@ -1135,6 +1219,7 @@ class BuildInfoHelper(object):
|
||||
|
||||
|
||||
def store_tasks_stats(self, event):
|
||||
self._ensure_build()
|
||||
task_data = BuildInfoHelper._get_data_from_event(event)
|
||||
|
||||
for (task_file, task_name, task_stats, recipe_name) in task_data:
|
||||
@@ -1230,6 +1315,8 @@ class BuildInfoHelper(object):
|
||||
|
||||
|
||||
def store_target_package_data(self, event):
|
||||
self._ensure_build()
|
||||
|
||||
# for all image targets
|
||||
for target in self.internal_state['targets']:
|
||||
if target.is_image:
|
||||
@@ -1263,10 +1350,9 @@ class BuildInfoHelper(object):
|
||||
note that this only gets called for command line builds which are
|
||||
interrupted, so it doesn't touch any BuildRequest objects
|
||||
"""
|
||||
build = self.internal_state['build']
|
||||
if build:
|
||||
build.outcome = Build.CANCELLED
|
||||
build.save()
|
||||
self._ensure_build()
|
||||
self.internal_state['build'].outcome = Build.CANCELLED
|
||||
self.internal_state['build'].save()
|
||||
|
||||
def store_dependency_information(self, event):
|
||||
assert '_depgraph' in vars(event)
|
||||
@@ -1412,6 +1498,8 @@ class BuildInfoHelper(object):
|
||||
|
||||
|
||||
def store_build_package_information(self, event):
|
||||
self._ensure_build()
|
||||
|
||||
package_info = BuildInfoHelper._get_data_from_event(event)
|
||||
self.orm_wrapper.save_build_package_information(
|
||||
self.internal_state['build'],
|
||||
@@ -1427,6 +1515,10 @@ class BuildInfoHelper(object):
|
||||
|
||||
def _store_build_done(self, errorcode):
|
||||
logger.info("Build exited with errorcode %d", errorcode)
|
||||
|
||||
if not self.brbe:
|
||||
return
|
||||
|
||||
br_id, be_id = self.brbe.split(":")
|
||||
be = BuildEnvironment.objects.get(pk = be_id)
|
||||
be.lock = BuildEnvironment.LOCK_LOCK
|
||||
@@ -1448,7 +1540,6 @@ class BuildInfoHelper(object):
|
||||
br.state = BuildRequest.REQ_FAILED
|
||||
br.save()
|
||||
|
||||
|
||||
def store_log_error(self, text):
|
||||
mockevent = MockEvent()
|
||||
mockevent.levelno = formatter.ERROR
|
||||
@@ -1467,24 +1558,22 @@ class BuildInfoHelper(object):
|
||||
|
||||
|
||||
def store_log_event(self, event):
|
||||
self._ensure_build()
|
||||
|
||||
if event.levelno < formatter.WARNING:
|
||||
return
|
||||
|
||||
if 'args' in vars(event):
|
||||
event.msg = event.msg % event.args
|
||||
|
||||
if not 'build' in self.internal_state:
|
||||
if self.brbe is None:
|
||||
if not 'backlog' in self.internal_state:
|
||||
self.internal_state['backlog'] = []
|
||||
self.internal_state['backlog'].append(event)
|
||||
return
|
||||
else: # we're under Toaster control, the build is already created
|
||||
br, _ = self.brbe.split(":")
|
||||
buildrequest = BuildRequest.objects.get(pk = br)
|
||||
self.internal_state['build'] = buildrequest.build
|
||||
# early return for CLI builds
|
||||
if self.brbe is None:
|
||||
if not 'backlog' in self.internal_state:
|
||||
self.internal_state['backlog'] = []
|
||||
self.internal_state['backlog'].append(event)
|
||||
return
|
||||
|
||||
if 'build' in self.internal_state and 'backlog' in self.internal_state:
|
||||
if 'backlog' in self.internal_state:
|
||||
# if we have a backlog of events, do our best to save them here
|
||||
if len(self.internal_state['backlog']):
|
||||
tempevent = self.internal_state['backlog'].pop()
|
||||
@@ -1813,18 +1902,12 @@ class BuildInfoHelper(object):
|
||||
sdk_target)
|
||||
|
||||
def close(self, errorcode):
|
||||
if self.brbe is not None:
|
||||
self._store_build_done(errorcode)
|
||||
self._store_build_done(errorcode)
|
||||
|
||||
if 'backlog' in self.internal_state:
|
||||
if 'build' in self.internal_state:
|
||||
# we save missed events in the database for the current build
|
||||
tempevent = self.internal_state['backlog'].pop()
|
||||
self.store_log_event(tempevent)
|
||||
else:
|
||||
# we have no build, and we still have events; something amazingly wrong happend
|
||||
for event in self.internal_state['backlog']:
|
||||
logger.error("UNSAVED log: %s", event.msg)
|
||||
# we save missed events in the database for the current build
|
||||
tempevent = self.internal_state['backlog'].pop()
|
||||
self.store_log_event(tempevent)
|
||||
|
||||
if not connection.features.autocommits_when_autocommit_is_off:
|
||||
transaction.set_autocommit(True)
|
||||
@@ -1833,3 +1916,7 @@ class BuildInfoHelper(object):
|
||||
# being incorrectly attached to the previous Toaster-triggered build;
|
||||
# see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
|
||||
self.brbe = None
|
||||
|
||||
# unset the internal Build object to prevent it being reused for the
|
||||
# next build
|
||||
self.internal_state['build'] = None
|
||||
|
||||
@@ -40,9 +40,9 @@ logger = logging.getLogger("BitBake")
|
||||
interactive = sys.stdout.isatty()
|
||||
|
||||
class BBProgress(progressbar.ProgressBar):
|
||||
def __init__(self, msg, maxval, widgets=None):
|
||||
def __init__(self, msg, maxval, widgets=None, extrapos=-1):
|
||||
self.msg = msg
|
||||
self.extrapos = -1
|
||||
self.extrapos = extrapos
|
||||
if not widgets:
|
||||
widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
|
||||
progressbar.ETA()]
|
||||
@@ -69,15 +69,16 @@ class BBProgress(progressbar.ProgressBar):
|
||||
self.widgets[0] = msg
|
||||
|
||||
def setextra(self, extra):
|
||||
if extra:
|
||||
extrastr = str(extra)
|
||||
if extrastr[0] != ' ':
|
||||
extrastr = ' ' + extrastr
|
||||
if extrastr[-1] != ' ':
|
||||
extrastr += ' '
|
||||
else:
|
||||
extrastr = ' '
|
||||
self.widgets[self.extrapos] = extrastr
|
||||
if self.extrapos > -1:
|
||||
if extra:
|
||||
extrastr = str(extra)
|
||||
if extrastr[0] != ' ':
|
||||
extrastr = ' ' + extrastr
|
||||
if extrastr[-1] != ' ':
|
||||
extrastr += ' '
|
||||
else:
|
||||
extrastr = ' '
|
||||
self.widgets[self.extrapos] = extrastr
|
||||
|
||||
def _need_update(self):
|
||||
# We always want the bar to print when update() is called
|
||||
@@ -241,10 +242,10 @@ class TerminalFilter(object):
|
||||
start_time = activetasks[t].get("starttime", None)
|
||||
if not pbar or pbar.bouncing != (progress < 0):
|
||||
if progress < 0:
|
||||
pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100, widgets=[progressbar.BouncingSlider()])
|
||||
pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100, widgets=[progressbar.BouncingSlider(), ''], extrapos=2)
|
||||
pbar.bouncing = True
|
||||
else:
|
||||
pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100)
|
||||
pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100, widgets=[progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=4)
|
||||
pbar.bouncing = False
|
||||
activetasks[t]["progressbar"] = pbar
|
||||
tasks.append((pbar, progress, rate, start_time))
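A minimal usage sketch of the new extrapos handling (hypothetical task name and rate string; it assumes the progressbar module already imported by knotty.py and mirrors the extrapos=2 call above, where the trailing '' widget appears to be the slot that setextra() overwrites):

    # sketch only: same widget list and extrapos value as the bouncing-slider branch above
    pbar = BBProgress("0: do_fetch (pid 1234) ", 100,
                      widgets=[progressbar.BouncingSlider(), ''], extrapos=2)
    pbar.start()              # standard ProgressBar API; prints the initial bar
    pbar.setextra("4.2MB/s")  # replaces self.widgets[self.extrapos] since extrapos > -1
    pbar.update(50)           # redraws the bar with the extra text in place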
|
||||
|
||||
@@ -102,6 +102,7 @@ _evt_list = [
|
||||
"bb.command.CommandExit",
|
||||
"bb.command.CommandFailed",
|
||||
"bb.cooker.CookerExit",
|
||||
"bb.event.BuildInit",
|
||||
"bb.event.BuildCompleted",
|
||||
"bb.event.BuildStarted",
|
||||
"bb.event.CacheLoadCompleted",
|
||||
@@ -115,6 +116,7 @@ _evt_list = [
|
||||
"bb.event.NoProvider",
|
||||
"bb.event.ParseCompleted",
|
||||
"bb.event.ParseProgress",
|
||||
"bb.event.ParseStarted",
|
||||
"bb.event.RecipeParsed",
|
||||
"bb.event.SanityCheck",
|
||||
"bb.event.SanityCheckPassed",
|
||||
@@ -231,19 +233,35 @@ def main(server, eventHandler, params):
|
||||
# pylint: disable=protected-access
|
||||
# the code will look into the protected variables of the event; no easy way around this
|
||||
|
||||
# we treat ParseStarted as the first event of toaster-triggered
|
||||
# builds; that way we get the Build Configuration included in the log
|
||||
# and any errors that occur before BuildStarted is fired
|
||||
if isinstance(event, bb.event.ParseStarted):
|
||||
if not (build_log and build_log_file_path):
|
||||
build_log, build_log_file_path = _open_build_log(log_dir)
|
||||
|
||||
buildinfohelper.store_started_build()
|
||||
buildinfohelper.save_build_log_file_path(build_log_file_path)
|
||||
buildinfohelper.set_recipes_to_parse(event.total)
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.BuildStarted):
|
||||
# create a build object in buildinfohelper from either BuildInit
|
||||
# (if available) or BuildStarted (for jethro and previous versions)
|
||||
if isinstance(event, (bb.event.BuildStarted, bb.event.BuildInit)):
|
||||
if not (build_log and build_log_file_path):
|
||||
build_log, build_log_file_path = _open_build_log(log_dir)
|
||||
|
||||
buildinfohelper.store_started_build(event, build_log_file_path)
|
||||
buildinfohelper.save_build_targets(event)
|
||||
buildinfohelper.save_build_log_file_path(build_log_file_path)
|
||||
|
||||
# get additional data from BuildStarted
|
||||
if isinstance(event, bb.event.BuildStarted):
|
||||
buildinfohelper.save_build_layers_and_variables()
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.ParseProgress):
|
||||
buildinfohelper.set_recipes_parsed(event.current)
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.ParseCompleted):
|
||||
buildinfohelper.set_recipes_parsed(event.total)
|
||||
continue
|
||||
|
||||
if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
|
||||
@@ -289,10 +307,6 @@ def main(server, eventHandler, params):
|
||||
# timing and error information from the parsing phase in Toaster
|
||||
if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
|
||||
continue
|
||||
if isinstance(event, bb.event.ParseProgress):
|
||||
continue
|
||||
if isinstance(event, bb.event.ParseCompleted):
|
||||
continue
|
||||
if isinstance(event, bb.event.CacheLoadStarted):
|
||||
continue
|
||||
if isinstance(event, bb.event.CacheLoadProgress):
|
||||
|
||||
@@ -375,6 +375,12 @@ def _print_exception(t, value, tb, realfile, text, context):
|
||||
level = level + 1
|
||||
|
||||
error.append("Exception: %s" % ''.join(exception))
|
||||
|
||||
# If the exception is from spawning a task, let's be helpful and display
|
||||
# the output (which hopefully includes stderr).
|
||||
if isinstance(value, subprocess.CalledProcessError):
|
||||
error.append("Subprocess output:")
|
||||
error.append(value.output.decode("utf-8", errors="ignore"))
|
||||
finally:
|
||||
logger.error("\n".join(error))
|
||||
|
||||
|
||||
@@ -1,33 +1,33 @@
|
||||
from django.contrib import admin
|
||||
from django.contrib.admin.filters import RelatedFieldListFilter
|
||||
from orm.models import BitbakeVersion, Release, LayerSource, ToasterSetting
|
||||
from django.forms.widgets import Textarea
|
||||
from orm.models import BitbakeVersion, Release, ToasterSetting, Layer_Version
|
||||
from django import forms
|
||||
import django.db.models as models
|
||||
|
||||
from django.contrib.admin import widgets, helpers
|
||||
|
||||
class LayerSourceAdmin(admin.ModelAdmin):
|
||||
pass
|
||||
|
||||
class BitbakeVersionAdmin(admin.ModelAdmin):
|
||||
|
||||
# we override the formfield for db URLField because of broken URL validation
|
||||
# we override the formfield for db URLField
|
||||
# because of broken URL validation
|
||||
|
||||
def formfield_for_dbfield(self, db_field, **kwargs):
|
||||
if isinstance(db_field, models.fields.URLField):
|
||||
return forms.fields.CharField()
|
||||
return super(BitbakeVersionAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
||||
|
||||
return super(BitbakeVersionAdmin, self).formfield_for_dbfield(
|
||||
db_field, **kwargs)
|
||||
|
||||
|
||||
class ReleaseAdmin(admin.ModelAdmin):
|
||||
pass
|
||||
|
||||
|
||||
class ToasterSettingAdmin(admin.ModelAdmin):
|
||||
pass
|
||||
|
||||
admin.site.register(LayerSource, LayerSourceAdmin)
|
||||
|
||||
class LayerVersionsAdmin(admin.ModelAdmin):
|
||||
pass
|
||||
|
||||
admin.site.register(Layer_Version, LayerVersionsAdmin)
|
||||
admin.site.register(BitbakeVersion, BitbakeVersionAdmin)
|
||||
admin.site.register(Release, ReleaseAdmin)
|
||||
admin.site.register(ToasterSetting, ToasterSettingAdmin)
|
||||
|
||||
@@ -89,6 +89,10 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
""" a word of attention: by convention, the first layer for any build will be poky! """
|
||||
|
||||
assert self.be.sourcedir is not None
|
||||
|
||||
layerlist = []
|
||||
nongitlayerlist = []
|
||||
|
||||
# set layers in the layersource
|
||||
|
||||
# 1. get a list of repos with branches, and map dirpaths for each layer
|
||||
@@ -102,6 +106,13 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
# as it's generated by us later on if needed
|
||||
if CustomImageRecipe.LAYER_NAME in layer.name:
|
||||
continue
|
||||
|
||||
# If we have local layers then we don't need to clone them
|
||||
# For local layers giturl will be empty
|
||||
if not layer.giturl:
|
||||
nongitlayerlist.append(layer.layer_version.layer.local_source_dir)
|
||||
continue
|
||||
|
||||
if not (layer.giturl, layer.commit) in gitrepos:
|
||||
gitrepos[(layer.giturl, layer.commit)] = []
|
||||
gitrepos[(layer.giturl, layer.commit)].append( (layer.name, layer.dirpath) )
|
||||
@@ -131,7 +142,6 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
|
||||
logger.info("Using pre-checked out source for layer %s", cached_layers)
|
||||
|
||||
layerlist = []
|
||||
|
||||
|
||||
# 3. checkout the repositories
|
||||
@@ -245,6 +255,7 @@ class LocalhostBEController(BuildEnvironmentController):
|
||||
layerlist.append(layerpath)
|
||||
|
||||
self.islayerset = True
|
||||
layerlist.extend(nongitlayerlist)
|
||||
return layerlist
|
||||
|
||||
def readServerLogFile(self):
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
from django.db import transaction
|
||||
|
||||
from django.core.management import call_command
|
||||
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException
|
||||
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
|
||||
from orm.models import ToasterSetting, Build
|
||||
from orm.models import ToasterSetting, Build, Layer
|
||||
|
||||
import os
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
|
||||
def DN(path):
|
||||
if path is None:
|
||||
@@ -21,39 +26,6 @@ class Command(NoArgsCommand):
|
||||
super(Command, self).__init__(*args, **kwargs)
|
||||
self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
|
||||
|
||||
def _find_first_path_for_file(self, startdirectory, filename, level=0):
|
||||
if level < 0:
|
||||
return None
|
||||
dirs = []
|
||||
for i in os.listdir(startdirectory):
|
||||
j = os.path.join(startdirectory, i)
|
||||
if os.path.isfile(j):
|
||||
if i == filename:
|
||||
return startdirectory
|
||||
elif os.path.isdir(j):
|
||||
dirs.append(j)
|
||||
for j in dirs:
|
||||
ret = self._find_first_path_for_file(j, filename, level - 1)
|
||||
if ret is not None:
|
||||
return ret
|
||||
return None
|
||||
|
||||
def _recursive_list_directories(self, startdirectory, level=0):
|
||||
if level < 0:
|
||||
return []
|
||||
dirs = []
|
||||
try:
|
||||
for i in os.listdir(startdirectory):
|
||||
j = os.path.join(startdirectory, i)
|
||||
if os.path.isdir(j):
|
||||
dirs.append(j)
|
||||
except OSError:
|
||||
pass
|
||||
for j in dirs:
|
||||
dirs = dirs + self._recursive_list_directories(j, level - 1)
|
||||
return dirs
|
||||
|
||||
|
||||
def _verify_build_environment(self):
|
||||
# provide a local build env. This will be extended later to include non local
|
||||
if BuildEnvironment.objects.count() == 0:
|
||||
@@ -94,30 +66,58 @@ class Command(NoArgsCommand):
|
||||
print("\n -- Validation: The build directory must to be set to an absolute path.")
|
||||
is_changed = _update_builddir()
|
||||
|
||||
|
||||
if is_changed:
|
||||
print("\nBuild configuration saved")
|
||||
be.save()
|
||||
return True
|
||||
|
||||
|
||||
if be.needs_import:
|
||||
try:
|
||||
config_file = os.environ.get('TOASTER_CONF')
|
||||
print("\nImporting file: %s" % config_file)
|
||||
from .loadconf import Command as LoadConfigCommand
|
||||
print("Loading default settings")
|
||||
call_command("loaddata", "settings")
|
||||
template_conf = os.environ.get("TEMPLATECONF", "")
|
||||
|
||||
if "poky" in template_conf:
|
||||
print("Loading poky configuration")
|
||||
call_command("loaddata", "poky")
|
||||
else:
|
||||
print("Loading OE-Core configuration")
|
||||
call_command("loaddata", "oe-core")
|
||||
if template_conf:
|
||||
oe_core_path = os.path.realpath(template_conf +
|
||||
"/../")
|
||||
else:
|
||||
print("TEMPLATECONF not found. You may have to"
|
||||
" manually configure layer paths")
|
||||
oe_core_path = input("Please enter the path of"
|
||||
" your openembedded-core "
|
||||
"layer: ")
|
||||
# Update the layer instances of openembedded-core
|
||||
for layer in Layer.objects.filter(
|
||||
name="openembedded-core"):
|
||||
layer.local_source_dir = oe_core_path
|
||||
layer.save()
|
||||
|
||||
# Import the custom fixture if it's present
|
||||
with warnings.catch_warnings():
|
||||
warnings.filterwarnings(
|
||||
action="ignore",
|
||||
message="^.*No fixture named.*$")
|
||||
print("Importing custom settings if present")
|
||||
call_command("loaddata", "custom")
|
||||
|
||||
LoadConfigCommand()._import_layer_config(config_file)
|
||||
# we run lsupdates after config update
|
||||
print("\nLayer configuration imported. Updating information from the layer sources, please wait.\nYou can re-update any time later by running bitbake/lib/toaster/manage.py lsupdates")
|
||||
from django.core.management import call_command
|
||||
print("\nFetching information from the layer index, "
|
||||
"please wait.\nYou can re-update any time later "
|
||||
"by running bitbake/lib/toaster/manage.py "
|
||||
"lsupdates\n")
|
||||
call_command("lsupdates")
|
||||
|
||||
# we don't look for any other config files
|
||||
return is_changed
|
||||
except Exception as e:
|
||||
print("Failure while trying to import the toaster config file %s: %s" %\
|
||||
(config_file, e))
|
||||
print("Failure while trying to setup toaster: %s"
|
||||
% e)
|
||||
traceback.print_exc()
|
||||
|
||||
return is_changed
|
||||
|
||||
@@ -1,183 +0,0 @@
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from orm.models import LayerSource, ToasterSetting, Branch, Layer, Layer_Version
|
||||
from orm.models import BitbakeVersion, Release, ReleaseDefaultLayer, ReleaseLayerSourcePriority
|
||||
from django.db import IntegrityError
|
||||
import os
|
||||
|
||||
from .checksettings import DN
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("toaster")
|
||||
|
||||
def _reduce_canon_path(path):
|
||||
components = []
|
||||
for c in path.split("/"):
|
||||
if c == "..":
|
||||
del components[-1]
|
||||
elif c == ".":
|
||||
pass
|
||||
else:
|
||||
components.append(c)
|
||||
if len(components) < 2:
|
||||
components.append('')
|
||||
return "/".join(components)
|
||||
|
||||
def _get_id_for_sourcetype(s):
|
||||
for i in LayerSource.SOURCE_TYPE:
|
||||
if s == i[1]:
|
||||
return i[0]
|
||||
raise Exception("Could not find definition for sourcetype '%s'. Valid source types are %s" % (str(s), ', '.join(map(lambda x: "'%s'" % x[1], LayerSource.SOURCE_TYPE ))))
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Loads a toasterconf.json file in the database"
|
||||
args = "filepath"
|
||||
|
||||
|
||||
|
||||
def _import_layer_config(self, filepath):
|
||||
if not os.path.exists(filepath) or not os.path.isfile(filepath):
|
||||
raise Exception("Failed to find toaster config file %s ." % filepath)
|
||||
|
||||
import json
|
||||
data = json.loads(open(filepath, "r").read())
|
||||
|
||||
# verify config file validity before updating settings
|
||||
for i in ['bitbake', 'releases', 'defaultrelease', 'config', 'layersources']:
|
||||
assert i in data
|
||||
|
||||
def _read_git_url_from_local_repository(address):
|
||||
url = None
|
||||
# we detect the remote name at runtime
|
||||
import subprocess
|
||||
(remote, remote_name) = address.split(":", 1)
|
||||
cmd = subprocess.Popen("git remote -v", shell=True, cwd = os.path.dirname(filepath), stdout=subprocess.PIPE, stderr = subprocess.PIPE)
|
||||
(out,err) = cmd.communicate()
|
||||
if cmd.returncode != 0:
|
||||
logging.warning("Error while importing layer vcs_url: git error: %s" % err)
|
||||
for line in out.decode('utf-8').split("\n"):
|
||||
try:
|
||||
(name, path) = line.split("\t", 1)
|
||||
if name == remote_name:
|
||||
url = path.split(" ")[0]
|
||||
break
|
||||
except ValueError:
|
||||
pass
|
||||
if url == None:
|
||||
logging.warning("Error while looking for remote \"%s\" in \"%s\"" % (remote_name, out))
|
||||
return url
|
||||
|
||||
|
||||
# import bitbake data
|
||||
for bvi in data['bitbake']:
|
||||
bvo, created = BitbakeVersion.objects.get_or_create(name=bvi['name'])
|
||||
if bvi['giturl'].startswith("remote:"):
|
||||
bvo.giturl = _read_git_url_from_local_repository(bvi['giturl'])
|
||||
if bvo.giturl is None:
|
||||
logger.error("The toaster config file references the local git repo, but Toaster cannot detect it.\nYour local configuration for bitbake version %s is invalid. Make sure that the toasterconf.json file is correct." % bvi['name'])
|
||||
|
||||
if bvo.giturl is None:
|
||||
bvo.giturl = bvi['giturl']
|
||||
bvo.branch = bvi['branch']
|
||||
bvo.dirpath = bvi['dirpath']
|
||||
bvo.save()
|
||||
|
||||
# set the layer sources
|
||||
for lsi in data['layersources']:
|
||||
assert 'sourcetype' in lsi
|
||||
assert 'apiurl' in lsi
|
||||
assert 'name' in lsi
|
||||
assert 'branches' in lsi
|
||||
|
||||
|
||||
if _get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX or lsi['apiurl'].startswith("/"):
|
||||
apiurl = lsi['apiurl']
|
||||
else:
|
||||
apiurl = _reduce_canon_path(os.path.join(DN(os.path.abspath(filepath)), lsi['apiurl']))
|
||||
|
||||
assert ((_get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX) or apiurl.startswith("/")), (lsi['sourcetype'],apiurl)
|
||||
|
||||
try:
|
||||
ls, created = LayerSource.objects.get_or_create(sourcetype = _get_id_for_sourcetype(lsi['sourcetype']), apiurl = apiurl)
|
||||
ls.name = lsi['name']
|
||||
ls.save()
|
||||
except IntegrityError as e:
|
||||
logger.warning("IntegrityError %s \nWhile setting name %s for layer source %s " % (e, lsi['name'], ls))
|
||||
|
||||
|
||||
layerbranches = []
|
||||
for branchname in lsi['branches']:
|
||||
bo, created = Branch.objects.get_or_create(layer_source = ls, name = branchname)
|
||||
layerbranches.append(bo)
|
||||
|
||||
if 'layers' in lsi:
|
||||
for layerinfo in lsi['layers']:
|
||||
lo, created = Layer.objects.get_or_create(layer_source = ls, name = layerinfo['name'])
|
||||
if layerinfo['local_path'].startswith("/"):
|
||||
lo.local_path = layerinfo['local_path']
|
||||
else:
|
||||
lo.local_path = _reduce_canon_path(os.path.join(ls.apiurl, layerinfo['local_path']))
|
||||
|
||||
if not os.path.exists(lo.local_path):
|
||||
logger.error("Local layer path %s must exists. Are you trying to import a layer that does not exist ? Check your local toasterconf.json" % lo.local_path)
|
||||
|
||||
if layerinfo['vcs_url'].startswith("remote:"):
|
||||
lo.vcs_url = _read_git_url_from_local_repository(layerinfo['vcs_url'])
|
||||
if lo.vcs_url is None:
|
||||
logger.error("The toaster config file references the local git repo, but Toaster cannot detect it.\nYour local configuration for layer %s is invalid. Make sure that the toasterconf.json file is correct." % layerinfo['name'])
|
||||
|
||||
if lo.vcs_url is None:
|
||||
lo.vcs_url = layerinfo['vcs_url']
|
||||
|
||||
if 'layer_index_url' in layerinfo:
|
||||
lo.layer_index_url = layerinfo['layer_index_url']
|
||||
lo.save()
|
||||
|
||||
for branch in layerbranches:
|
||||
lvo, created = Layer_Version.objects.get_or_create(layer_source = ls,
|
||||
up_branch = branch,
|
||||
commit = branch.name,
|
||||
layer = lo)
|
||||
lvo.dirpath = layerinfo['dirpath']
|
||||
lvo.save()
|
||||
# set releases
|
||||
for ri in data['releases']:
|
||||
bvo = BitbakeVersion.objects.get(name = ri['bitbake'])
|
||||
assert bvo is not None
|
||||
|
||||
ro, created = Release.objects.get_or_create(name = ri['name'], bitbake_version = bvo, branch_name = ri['branch'])
|
||||
ro.description = ri['description']
|
||||
ro.helptext = ri['helptext']
|
||||
ro.save()
|
||||
|
||||
# save layer source priority for release
|
||||
for ls_name in ri['layersourcepriority'].keys():
|
||||
rlspo, created = ReleaseLayerSourcePriority.objects.get_or_create(release = ro, layer_source = LayerSource.objects.get(name=ls_name))
|
||||
rlspo.priority = ri['layersourcepriority'][ls_name]
|
||||
rlspo.save()
|
||||
|
||||
for dli in ri['defaultlayers']:
|
||||
# find layers with the same name
|
||||
ReleaseDefaultLayer.objects.get_or_create( release = ro, layer_name = dli)
|
||||
|
||||
# set default release
|
||||
if ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").count() > 0:
|
||||
ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").update(value = data['defaultrelease'])
|
||||
else:
|
||||
ToasterSetting.objects.create(name = "DEFAULT_RELEASE", value = data['defaultrelease'])
|
||||
|
||||
# set default config variables
|
||||
for configname in data['config']:
|
||||
if ToasterSetting.objects.filter(name = "DEFCONF_" + configname).count() > 0:
|
||||
ToasterSetting.objects.filter(name = "DEFCONF_" + configname).update(value = data['config'][configname])
|
||||
else:
|
||||
ToasterSetting.objects.create(name = "DEFCONF_" + configname, value = data['config'][configname])
|
||||
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if len(args) == 0:
|
||||
raise CommandError("Need a path to the toasterconf.json file")
|
||||
filepath = args[0]
|
||||
self._import_layer_config(filepath)
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('bldcontrol', '0004_auto_20160523_1446'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='buildrequest',
|
||||
name='state',
|
||||
field=models.IntegerField(choices=[(0, 'created'), (1, 'queued'), (2, 'in progress'), (3, 'failed'), (4, 'deleted'), (5, 'cancelling'), (6, 'completed'), (7, 'archive')], default=0),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('bldcontrol', '0005_reorder_buildrequest_states'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='brlayer',
|
||||
name='local_source_dir',
|
||||
field=models.CharField(max_length=254, null=True),
|
||||
),
|
||||
]
|
||||
@@ -63,20 +63,20 @@ class BuildRequest(models.Model):
|
||||
REQ_CREATED = 0
|
||||
REQ_QUEUED = 1
|
||||
REQ_INPROGRESS = 2
|
||||
REQ_COMPLETED = 3
|
||||
REQ_FAILED = 4
|
||||
REQ_DELETED = 5
|
||||
REQ_CANCELLING = 6
|
||||
REQ_FAILED = 3
|
||||
REQ_DELETED = 4
|
||||
REQ_CANCELLING = 5
|
||||
REQ_COMPLETED = 6
|
||||
REQ_ARCHIVE = 7
|
||||
|
||||
REQUEST_STATE = (
|
||||
(REQ_CREATED, "created"),
|
||||
(REQ_QUEUED, "queued"),
|
||||
(REQ_INPROGRESS, "in progress"),
|
||||
(REQ_COMPLETED, "completed"),
|
||||
(REQ_FAILED, "failed"),
|
||||
(REQ_DELETED, "deleted"),
|
||||
(REQ_CANCELLING, "cancelling"),
|
||||
(REQ_COMPLETED, "completed"),
|
||||
(REQ_ARCHIVE, "archive"),
|
||||
)
|
||||
|
||||
@@ -91,7 +91,7 @@ class BuildRequest(models.Model):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(BuildRequest, self).__init__(*args, **kwargs)
|
||||
# Save the old state incase it's about to be modified
|
||||
# Save the old state in case it's about to be modified
|
||||
self.old_state = self.state
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
@@ -130,6 +130,7 @@ class BRLayer(models.Model):
|
||||
req = models.ForeignKey(BuildRequest)
|
||||
name = models.CharField(max_length = 100)
|
||||
giturl = models.CharField(max_length = 254)
|
||||
local_source_dir = models.CharField(max_length=254, null=True)
|
||||
commit = models.CharField(max_length = 254)
|
||||
dirpath = models.CharField(max_length = 254)
|
||||
layer_version = models.ForeignKey(Layer_Version, null=True)
|
||||
|
||||
@@ -139,22 +139,3 @@ class RunBuildsCommandTests(TestCase):
|
||||
self.assertTrue(br.state == BuildRequest.REQ_INPROGRESS, "Request is not updated")
|
||||
# no more selections possible here
|
||||
self.assertRaises(IndexError, command._selectBuildRequest)
|
||||
|
||||
|
||||
class UtilityTests(TestCase):
|
||||
def test_reduce_path(self):
|
||||
from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype
|
||||
|
||||
self.assertTrue( _reduce_canon_path("/") == "/")
|
||||
self.assertTrue( _reduce_canon_path("/home/..") == "/")
|
||||
self.assertTrue( _reduce_canon_path("/home/../ana") == "/ana")
|
||||
self.assertTrue( _reduce_canon_path("/home/../ana/..") == "/")
|
||||
self.assertTrue( _reduce_canon_path("/home/ana/mihai/../maria") == "/home/ana/maria")
|
||||
|
||||
def test_get_id_for_sorucetype(self):
|
||||
from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype
|
||||
self.assertTrue( _get_id_for_sourcetype("layerindex") == 1)
|
||||
self.assertTrue( _get_id_for_sourcetype("local") == 0)
|
||||
self.assertTrue( _get_id_for_sourcetype("imported") == 2)
|
||||
with self.assertRaises(Exception):
|
||||
_get_id_for_sourcetype("unknown")
|
||||
|
||||
bitbake/lib/toaster/orm/fixtures/README (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
# Fixtures directory
|
||||
|
||||
Fixtures are data dumps that can be loaded into Toaster's database to provide
|
||||
configuration and data.
|
||||
|
||||
This directory contains the fixtures that are loaded the first time you start Toaster.
|
||||
This is to provide useful default values and metadata to Toaster.
|
||||
|
||||
- settings.xml This contains Toaster-wide settings, such as the default values for
|
||||
certain bitbake variables.
|
||||
|
||||
- poky.xml This is the default release data for supported poky-based setups
|
||||
|
||||
- oe-core.xml This is the default release data for supported oe-core-based setups
|
||||
|
||||
# Custom data/configuration
|
||||
|
||||
- custom.xml
|
||||
|
||||
To add custom initial data/configuration to Toaster, place a file called
|
||||
"custom.xml" in this directory. If present it will be loaded into the database.
|
||||
We suggest using this to overlay any configuration that has already been loaded.
|
||||
All objects loaded with the same primary keys overwrite the existing data.
|
||||
Data can be provided in XML, JSON and, if YAML support is installed, YAML format.
|
||||
|
||||
# To load data at any point in time
|
||||
|
||||
Use the Django management command manage.py loaddata <your fixture file>.
|
||||
For further information see the Django command documentation at:
|
||||
https://docs.djangoproject.com/en/1.8/ref/django-admin/#django-admin-loaddata
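As a rough illustration, the same thing can be done programmatically from within Toaster's Django environment (for example from a manage.py shell); the fixture name "custom" below is only an example and assumes a custom.xml file is present in this directory:

    # run inside Toaster's Django environment (e.g. "manage.py shell")
    # so the database settings are already configured
    from django.core.management import call_command

    # equivalent to: manage.py loaddata custom
    call_command("loaddata", "custom")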
|
||||
bitbake/lib/toaster/orm/fixtures/oe-core.xml (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<django-objects version="1.0">
|
||||
<!-- Bitbake versions which correspond to the metadata release -->
|
||||
<object model="orm.bitbakeversion" pk="1">
|
||||
<field type="CharField" name="name">master</field>
|
||||
<field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
|
||||
<field type="CharField" name="branch">master</field>
|
||||
<field type="CharField" name="dirpath">bitbake</field>
|
||||
</object>
|
||||
<object model="orm.bitbakeversion" pk="2">
|
||||
<field type="CharField" name="name">HEAD</field>
|
||||
<field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
|
||||
<field type="CharField" name="branch">HEAD</field>
|
||||
<field type="CharField" name="dirpath">bitbake</field>
|
||||
</object>
|
||||
|
||||
<!-- Releases available -->
|
||||
<object model="orm.release" pk="1">
|
||||
<field type="CharField" name="name">master</field>
|
||||
<field type="CharField" name="description">Openembedded master</field>
|
||||
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
|
||||
<field type="CharField" name="branch_name">master</field>
|
||||
<field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch, where active development takes place. This is not a stable branch, so your builds might not work as expected.</field>
|
||||
</object>
|
||||
<object model="orm.release" pk="2">
|
||||
<field type="CharField" name="name">local</field>
|
||||
<field type="CharField" name="description">Local Openembedded</field>
|
||||
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">2</field>
|
||||
<field type="CharField" name="branch_name">HEAD</field>
|
||||
<field type="TextField" name="helptext">Toaster will run your builds with the version of OpenEmbedded that you have cloned or downloaded to your computer.</field>
|
||||
</object>
|
||||
|
||||
<!-- Default layers for each release -->
|
||||
<object model="orm.releasedefaultlayer" pk="1">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">1</field>
|
||||
<field type="CharField" name="layer_name">openembedded-core</field>
|
||||
</object>
|
||||
<object model="orm.releasedefaultlayer" pk="4">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="layer_name">openembedded-core</field>
|
||||
</object>
|
||||
|
||||
<!-- TYPE_LOCAL = 0 Layers for the Local release -->
|
||||
<object model="orm.layer" pk="1">
|
||||
<field type="CharField" name="name">openembedded-core</field>
|
||||
<field type="CharField" name="layer_index_url"></field>
|
||||
<field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field>
|
||||
</object>
|
||||
<object model="orm.layer_version" pk="1">
|
||||
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
|
||||
<field type="IntegerField" name="layer_source">0</field>
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="branch">HEAD</field>
|
||||
<field type="CharField" name="commit">HEAD</field>
|
||||
</object>
|
||||
|
||||
</django-objects>
|
||||
bitbake/lib/toaster/orm/fixtures/poky.xml (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<django-objects version="1.0">
|
||||
<!-- Bitbake versions which correspond to the metadata release -->
|
||||
<object model="orm.bitbakeversion" pk="1">
|
||||
<field type="CharField" name="name">master</field>
|
||||
<field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
|
||||
<field type="CharField" name="branch">master</field>
|
||||
<field type="CharField" name="dirpath">bitbake</field>
|
||||
</object>
|
||||
<object model="orm.bitbakeversion" pk="2">
|
||||
<field type="CharField" name="name">HEAD</field>
|
||||
<field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
|
||||
<field type="CharField" name="branch">HEAD</field>
|
||||
<field type="CharField" name="dirpath">bitbake</field>
|
||||
</object>
|
||||
|
||||
<!-- Releases available -->
|
||||
<object model="orm.release" pk="1">
|
||||
<field type="CharField" name="name">master</field>
|
||||
<field type="CharField" name="description">Yocto Project master</field>
|
||||
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
|
||||
<field type="CharField" name="branch_name">master</field>
|
||||
<field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project master branch</a>, where active development takes place. This is not a stable branch, so your builds might not work as expected.</field>
|
||||
</object>
|
||||
<object model="orm.release" pk="2">
|
||||
<field type="CharField" name="name">local</field>
|
||||
<field type="CharField" name="description">Local Yocto Project</field>
|
||||
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">2</field>
|
||||
<field type="CharField" name="branch_name">HEAD</field>
|
||||
<field type="TextField" name="helptext">Toaster will run your builds with the version of the Yocto Project you have cloned or downloaded to your computer.</field>
|
||||
</object>
|
||||
|
||||
<!-- Default layers for each release -->
|
||||
<object model="orm.releasedefaultlayer" pk="1">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">1</field>
|
||||
<field type="CharField" name="layer_name">openembedded-core</field>
|
||||
</object>
|
||||
<object model="orm.releasedefaultlayer" pk="2">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">1</field>
|
||||
<field type="CharField" name="layer_name">meta-poky</field>
|
||||
</object>
|
||||
<object model="orm.releasedefaultlayer" pk="3">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">1</field>
|
||||
<field type="CharField" name="layer_name">meta-yocto-bsp</field>
|
||||
</object>
|
||||
<object model="orm.releasedefaultlayer" pk="4">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="layer_name">openembedded-core</field>
|
||||
</object>
|
||||
<object model="orm.releasedefaultlayer" pk="5">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="layer_name">meta-poky</field>
|
||||
</object>
|
||||
<object model="orm.releasedefaultlayer" pk="6">
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="layer_name">meta-yocto-bsp</field>
|
||||
</object>
|
||||
|
||||
<!-- Layers for the Local release
|
||||
layersource TYPE_LOCAL = 0
|
||||
-->
|
||||
<object model="orm.layer" pk="1">
|
||||
<field type="CharField" name="name">openembedded-core</field>
|
||||
<field type="CharField" name="layer_index_url"></field>
|
||||
<field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
|
||||
</object>
|
||||
<object model="orm.layer_version" pk="1">
|
||||
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
|
||||
<field type="IntegerField" name="layer_source">0</field>
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="branch">HEAD</field>
|
||||
<field type="CharField" name="commit">HEAD</field>
|
||||
<field type="CharField" name="dirpath">meta</field>
|
||||
</object>
|
||||
|
||||
|
||||
<object model="orm.layer" pk="2">
|
||||
<field type="CharField" name="name">meta-poky</field>
|
||||
<field type="CharField" name="layer_index_url"></field>
|
||||
<field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
|
||||
</object>
|
||||
<object model="orm.layer_version" pk="2">
|
||||
<field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
|
||||
<field type="IntegerField" name="layer_source">0</field>
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="branch">HEAD</field>
|
||||
<field type="CharField" name="commit">HEAD</field>
|
||||
<field type="CharField" name="dirpath">meta-poky</field>
|
||||
</object>
|
||||
|
||||
|
||||
<object model="orm.layer" pk="3">
|
||||
<field type="CharField" name="name">meta-yocto-bsp</field>
|
||||
<field type="CharField" name="layer_index_url"></field>
|
||||
<field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
|
||||
</object>
|
||||
<object model="orm.layer_version" pk="3">
|
||||
<field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
|
||||
<field type="IntegerField" name="layer_source">0</field>
|
||||
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
|
||||
<field type="CharField" name="branch">HEAD</field>
|
||||
<field type="CharField" name="commit">HEAD</field>
|
||||
<field type="CharField" name="dirpath">meta-yocto-bsp</field>
|
||||
</object>
|
||||
</django-objects>
|
||||
bitbake/lib/toaster/orm/fixtures/settings.xml (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<django-objects version="1.0">
|
||||
<!-- Default project settings -->
|
||||
<object model="orm.toastersetting" pk="1">
|
||||
<field type="CharField" name="name">DEFAULT_RELEASE</field>
|
||||
<field type="CharField" name="value">master</field>
|
||||
</object>
|
||||
<object model="orm.toastersetting" pk="2">
|
||||
<field type="CharField" name="name">DEFCONF_PACKAGE_CLASSES</field>
|
||||
<field type="CharField" name="value">package_rpm</field>
|
||||
</object>
|
||||
<object model="orm.toastersetting" pk="3">
|
||||
<field type="CharField" name="name">DEFCONF_MACHINE</field>
|
||||
<field type="CharField" name="value">qemux86</field>
|
||||
</object>
|
||||
<object model="orm.toastersetting" pk="4">
|
||||
<field type="CharField" name="name">DEFCONF_SSTATE_DIR</field>
|
||||
<field type="CharField" name="value">${TOPDIR}/../sstate-cache</field>
|
||||
</object>
|
||||
<object model="orm.toastersetting" pk="5">
|
||||
<field type="CharField" name="name">DEFCONF_IMAGE_INSTALL_append</field>
|
||||
<field type="CharField" name="value"></field>
|
||||
</object>
|
||||
<object model="orm.toastersetting" pk="6">
|
||||
<field type="CharField" name="name">DEFCONF_IMAGE_FSTYPES</field>
|
||||
<field type="CharField" name="value">ext3 jffs2 tar.bz2</field>
|
||||
</object>
|
||||
<object model="orm.toastersetting" pk="7">
|
||||
<field type="CharField" name="name">DEFCONF_DISTRO</field>
|
||||
<field type="CharField" name="value">poky</field>
|
||||
</object>
|
||||
<object model="orm.toastersetting" pk="8">
|
||||
<field type="CharField" name="name">DEFCONF_DL_DIR</field>
|
||||
<field type="CharField" name="value">${TOPDIR}/../downloads</field>
|
||||
</object>
|
||||
</django-objects>
|
||||
@@ -1,12 +1,328 @@
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
from orm.models import LayerSource
|
||||
#
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from django.core.management.base import NoArgsCommand
|
||||
|
||||
from orm.models import LayerSource, Layer, Release, Layer_Version
|
||||
from orm.models import LayerVersionDependency, Machine, Recipe
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import json
|
||||
import logging
|
||||
import threading
|
||||
import time
|
||||
logger = logging.getLogger("toaster")
|
||||
|
||||
DEFAULT_LAYERINDEX_SERVER = "http://layers.openembedded.org/layerindex/api/"
|
||||
|
||||
|
||||
class Spinner(threading.Thread):
|
||||
""" A simple progress spinner to indicate download/parsing is happening"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Spinner, self).__init__(*args, **kwargs)
|
||||
self.setDaemon(True)
|
||||
self.signal = True
|
||||
|
||||
def run(self):
|
||||
os.system('setterm -cursor off')
|
||||
while self.signal:
|
||||
for char in ["/", "-", "\\", "|"]:
|
||||
sys.stdout.write("\r" + char)
|
||||
sys.stdout.flush()
|
||||
time.sleep(0.25)
|
||||
os.system('setterm -cursor on')
|
||||
|
||||
def stop(self):
|
||||
self.signal = False
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
args = ""
|
||||
help = "Updates locally cached information from all LayerSources"
|
||||
args = ""
|
||||
help = "Updates locally cached information from a layerindex server"
|
||||
|
||||
def mini_progress(self, what, i, total):
|
||||
i = i + 1
|
||||
pec = (float(i)/float(total))*100
|
||||
|
||||
sys.stdout.write("\rUpdating %s %d%%" %
|
||||
(what,
|
||||
pec))
|
||||
sys.stdout.flush()
|
||||
if int(pec) == 100:
|
||||
sys.stdout.write("\n")
|
||||
sys.stdout.flush()
|
||||
|
||||
def update(self):
|
||||
"""
|
||||
Fetches layer, recipe and machine information from a layerindex
|
||||
server
|
||||
"""
|
||||
os.system('setterm -cursor off')
|
||||
|
||||
self.apiurl = DEFAULT_LAYERINDEX_SERVER
|
||||
|
||||
assert self.apiurl is not None
|
||||
try:
|
||||
from urllib.request import urlopen, URLError
|
||||
from urllib.parse import urlparse
|
||||
except ImportError:
|
||||
from urllib2 import urlopen, URLError
|
||||
from urlparse import urlparse
|
||||
|
||||
proxy_settings = os.environ.get("http_proxy", None)
|
||||
oe_core_layer = 'openembedded-core'
|
||||
|
||||
def _get_json_response(apiurl=DEFAULT_LAYERINDEX_SERVER):
|
||||
http_progress = Spinner()
|
||||
http_progress.start()
|
||||
|
||||
_parsedurl = urlparse(apiurl)
|
||||
path = _parsedurl.path
|
||||
|
||||
# logger.debug("Fetching %s", apiurl)
|
||||
try:
|
||||
res = urlopen(apiurl)
|
||||
except URLError as e:
|
||||
raise Exception("Failed to read %s: %s" % (path, e.reason))
|
||||
|
||||
parsed = json.loads(res.read().decode('utf-8'))
|
||||
|
||||
http_progress.stop()
|
||||
return parsed
|
||||
|
||||
# verify we can get the basic api
|
||||
try:
|
||||
apilinks = _get_json_response()
|
||||
except Exception as e:
|
||||
import traceback
|
||||
if proxy_settings is not None:
|
||||
logger.info("EE: Using proxy %s" % proxy_settings)
|
||||
logger.warning("EE: could not connect to %s, skipping update:"
|
||||
"%s\n%s" % (self.apiurl, e, traceback.format_exc()))
|
||||
return
|
||||
|
||||
# update branches; only those that we already have names listed in the
|
||||
# Releases table
|
||||
whitelist_branch_names = [rel.branch_name
|
||||
for rel in Release.objects.all()]
|
||||
if len(whitelist_branch_names) == 0:
|
||||
raise Exception("Failed to make list of branches to fetch")
|
||||
|
||||
logger.info("Fetching metadata releases for %s",
|
||||
" ".join(whitelist_branch_names))
|
||||
|
||||
branches_info = _get_json_response(apilinks['branches'] +
|
||||
"?filter=name:%s"
|
||||
% "OR".join(whitelist_branch_names))
|
||||
|
||||
# Map the layer index branches to toaster releases
|
||||
li_branch_id_to_toaster_release = {}
|
||||
|
||||
total = len(branches_info)
|
||||
for i, branch in enumerate(branches_info):
|
||||
li_branch_id_to_toaster_release[branch['id']] = \
|
||||
Release.objects.get(name=branch['name'])
|
||||
self.mini_progress("Releases", i, total)
|
||||
|
||||
# keep track of the layerindex (li) id mappings so that
|
||||
# layer_versions can be created for these layers later on
|
||||
li_layer_id_to_toaster_layer_id = {}
|
||||
|
||||
logger.info("Fetching layers")
|
||||
|
||||
layers_info = _get_json_response(apilinks['layerItems'])
|
||||
|
||||
total = len(layers_info)
|
||||
for i, li in enumerate(layers_info):
|
||||
# Special case for the openembedded-core layer
|
||||
if li['name'] == oe_core_layer:
|
||||
try:
|
||||
# If we have an existing openembedded-core for example
|
||||
# from the toasterconf.json augment the info using the
|
||||
# layerindex rather than duplicate it
|
||||
oe_core_l = Layer.objects.get(name=oe_core_layer)
|
||||
# Take ownership of the layer as now coming from the
|
||||
# layerindex
|
||||
oe_core_l.summary = li['summary']
|
||||
oe_core_l.description = li['description']
|
||||
oe_core_l.save()
|
||||
li_layer_id_to_toaster_layer_id[li['id']] = oe_core_l.pk
|
||||
self.mini_progress("layers", i, total)
|
||||
continue
|
||||
|
||||
except Layer.DoesNotExist:
|
||||
pass
|
||||
|
||||
try:
|
||||
l, created = Layer.objects.get_or_create(name=li['name'],
|
||||
vcs_url=li['vcs_url'])
|
||||
l.up_date = li['updated']
|
||||
l.vcs_url = li['vcs_url']
|
||||
l.vcs_web_url = li['vcs_web_url']
|
||||
l.vcs_web_tree_base_url = li['vcs_web_tree_base_url']
|
||||
l.vcs_web_file_base_url = li['vcs_web_file_base_url']
|
||||
l.summary = li['summary']
|
||||
l.description = li['description']
|
||||
l.save()
|
||||
except Layer.MultipleObjectsReturned:
|
||||
logger.info("Skipped %s as we found multiple layers and "
|
||||
"don't know which to update" %
|
||||
li['name'])
|
||||
|
||||
li_layer_id_to_toaster_layer_id[li['id']] = l.pk
|
||||
|
||||
self.mini_progress("layers", i, total)
|
||||
|
||||
# update layer_versions
|
||||
logger.info("Fetching layer versions")
|
||||
layerbranches_info = _get_json_response(
|
||||
apilinks['layerBranches'] + "?filter=branch__name:%s" %
|
||||
"OR".join(whitelist_branch_names))
|
||||
|
||||
# Map Layer index layer_branch object id to
|
||||
# layer_version toaster object id
|
||||
li_layer_branch_id_to_toaster_lv_id = {}
|
||||
|
||||
total = len(layerbranches_info)
|
||||
for i, lbi in enumerate(layerbranches_info):
|
||||
|
||||
try:
|
||||
lv, created = Layer_Version.objects.get_or_create(
|
||||
layer_source=LayerSource.TYPE_LAYERINDEX,
|
||||
layer=Layer.objects.get(
|
||||
pk=li_layer_id_to_toaster_layer_id[lbi['layer']])
|
||||
)
|
||||
except KeyError:
|
||||
logger.warning(
|
||||
"No such layerindex layer referenced by layerbranch %d" %
|
||||
lbi['layer'])
|
||||
continue
|
||||
|
||||
lv.release = li_branch_id_to_toaster_release[lbi['branch']]
|
||||
lv.up_date = lbi['updated']
|
||||
lv.commit = lbi['actual_branch']
|
||||
lv.dirpath = lbi['vcs_subdir']
|
||||
lv.save()
|
||||
|
||||
li_layer_branch_id_to_toaster_lv_id[lbi['id']] =\
|
||||
lv.pk
|
||||
self.mini_progress("layer versions", i, total)
|
||||
|
||||
logger.info("Fetching layer version dependencies")
|
||||
# update layer dependencies
|
||||
layerdependencies_info = _get_json_response(
|
||||
apilinks['layerDependencies'] +
|
||||
"?filter=layerbranch__branch__name:%s" %
|
||||
"OR".join(whitelist_branch_names))
|
||||
|
||||
dependlist = {}
|
||||
for ldi in layerdependencies_info:
|
||||
try:
|
||||
lv = Layer_Version.objects.get(
|
||||
pk=li_layer_branch_id_to_toaster_lv_id[ldi['layerbranch']])
|
||||
except Layer_Version.DoesNotExist as e:
|
||||
continue
|
||||
|
||||
if lv not in dependlist:
|
||||
dependlist[lv] = []
|
||||
try:
|
||||
layer_id = li_layer_id_to_toaster_layer_id[ldi['dependency']]
|
||||
|
||||
dependlist[lv].append(
|
||||
Layer_Version.objects.get(
|
||||
layer_source=LayerSource.TYPE_LAYERINDEX,
|
||||
layer__pk=layer_id))
|
||||
|
||||
except Layer_Version.DoesNotExist:
|
||||
logger.warning("Cannot find layer version (ls:%s),"
|
||||
"up_id:%s lv:%s" %
|
||||
(self, ldi['dependency'], lv))
|
||||
|
||||
total = len(dependlist)
|
||||
for i, lv in enumerate(dependlist):
|
||||
LayerVersionDependency.objects.filter(layer_version=lv).delete()
|
||||
for lvd in dependlist[lv]:
|
||||
LayerVersionDependency.objects.get_or_create(layer_version=lv,
|
||||
depends_on=lvd)
|
||||
self.mini_progress("Layer version dependencies", i, total)
|
||||
|
||||
# update machines
|
||||
logger.info("Fetching machine information")
|
||||
machines_info = _get_json_response(
|
||||
apilinks['machines'] + "?filter=layerbranch__branch__name:%s" %
|
||||
"OR".join(whitelist_branch_names))
|
||||
|
||||
total = len(machines_info)
|
||||
for i, mi in enumerate(machines_info):
|
||||
mo, created = Machine.objects.get_or_create(
|
||||
name=mi['name'],
|
||||
layer_version=Layer_Version.objects.get(
|
||||
pk=li_layer_branch_id_to_toaster_lv_id[mi['layerbranch']]))
|
||||
mo.up_date = mi['updated']
|
||||
mo.name = mi['name']
|
||||
mo.description = mi['description']
|
||||
mo.save()
|
||||
self.mini_progress("machines", i, total)
|
||||
|
||||
# update recipes; paginate by layer version / layer branch
|
||||
logger.info("Fetching recipe information")
|
||||
recipes_info = _get_json_response(
|
||||
apilinks['recipes'] + "?filter=layerbranch__branch__name:%s" %
|
||||
"OR".join(whitelist_branch_names))
|
||||
|
||||
total = len(recipes_info)
|
||||
for i, ri in enumerate(recipes_info):
|
||||
try:
|
||||
lv_id = li_layer_branch_id_to_toaster_lv_id[ri['layerbranch']]
|
||||
lv = Layer_Version.objects.get(pk=lv_id)
|
||||
|
||||
ro, created = Recipe.objects.get_or_create(
|
||||
layer_version=lv,
|
||||
name=ri['pn']
|
||||
)
|
||||
|
||||
ro.layer_version = lv
|
||||
ro.up_date = ri['updated']
|
||||
ro.name = ri['pn']
|
||||
ro.version = ri['pv']
|
||||
ro.summary = ri['summary']
|
||||
ro.description = ri['description']
|
||||
ro.section = ri['section']
|
||||
ro.license = ri['license']
|
||||
ro.homepage = ri['homepage']
|
||||
ro.bugtracker = ri['bugtracker']
|
||||
ro.file_path = ri['filepath'] + "/" + ri['filename']
|
||||
if 'inherits' in ri:
|
||||
ro.is_image = 'image' in ri['inherits'].split()
|
||||
else: # workaround for old style layer index
|
||||
ro.is_image = "-image-" in ri['pn']
|
||||
ro.save()
|
||||
except Exception as e:
|
||||
logger.warning("Failed saving recipe %s", e)
|
||||
|
||||
self.mini_progress("recipes", i, total)
|
||||
|
||||
os.system('setterm -cursor on')
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
for ls in LayerSource.objects.all():
|
||||
ls.update()
|
||||
self.update()
|
||||
|
||||
@@ -0,0 +1,118 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.utils.timezone
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('orm', '0009_target_package_manifest_path'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='releaselayersourcepriority',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='releaselayersourcepriority',
|
||||
name='layer_source',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='releaselayersourcepriority',
|
||||
name='release',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='ImportedLayerSource',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='LayerIndexLayerSource',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='LocalLayerSource',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='recipe',
|
||||
name='layer_source',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='recipe',
|
||||
name='up_id',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='layer',
|
||||
name='up_date',
|
||||
field=models.DateTimeField(default=django.utils.timezone.now, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='layer_version',
|
||||
name='layer_source',
|
||||
field=models.IntegerField(default=0, choices=[(0, 'local'), (1, 'layerindex'), (2, 'imported'), (3, 'build')]),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='layer_version',
|
||||
name='up_date',
|
||||
field=models.DateTimeField(default=django.utils.timezone.now, null=True),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='branch',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='layer',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='layer_version',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='layerversiondependency',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='machine',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='ReleaseLayerSourcePriority',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='branch',
|
||||
name='layer_source',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='branch',
|
||||
name='up_id',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='layer',
|
||||
name='layer_source',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='layer',
|
||||
name='up_id',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='layer_version',
|
||||
name='up_id',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='layerversiondependency',
|
||||
name='layer_source',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='layerversiondependency',
|
||||
name='up_id',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='machine',
|
||||
name='layer_source',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='machine',
|
||||
name='up_id',
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('orm', '0010_delete_layer_source_references'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.DeleteModel(
|
||||
name='LayerSource',
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,62 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
from django.db.models import Q
|
||||
|
||||
|
||||
def branch_to_release(apps, schema_editor):
|
||||
Layer_Version = apps.get_model('orm', 'Layer_Version')
|
||||
Release = apps.get_model('orm', 'Release')
|
||||
|
||||
print("Converting all layer version up_branches to releases")
|
||||
# Find all the layer versions which have an upbranch and convert them to
|
||||
# the release that they're for.
|
||||
for layer_version in Layer_Version.objects.filter(
|
||||
Q(release=None) & ~Q(up_branch=None)):
|
||||
try:
|
||||
# HEAD and local are equivalent
|
||||
if "HEAD" in layer_version.up_branch.name:
|
||||
release = Release.objects.get(name="local")
|
||||
layer_version.commit = "HEAD"
|
||||
layer_version.branch = "HEAD"
|
||||
else:
|
||||
release = Release.objects.get(
|
||||
name=layer_version.up_branch.name)
|
||||
|
||||
layer_version.release = release
|
||||
layer_version.save()
|
||||
except Exception as e:
|
||||
print("Couldn't work out an appropriate release for %s "
|
||||
"the up_branch was %s "
|
||||
"user the django admin interface to correct it" %
|
||||
(layer_version.layer.name, layer_version.up_branch.name))
|
||||
print(e)
|
||||
|
||||
continue
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('orm', '0011_delete_layersource'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='layer_version',
|
||||
name='release',
|
||||
field=models.ForeignKey(to='orm.Release', default=None, null=True),
|
||||
),
|
||||
migrations.RunPython(branch_to_release,
|
||||
reverse_code=migrations.RunPython.noop),
|
||||
|
||||
migrations.RemoveField(
|
||||
model_name='layer_version',
|
||||
name='up_branch',
|
||||
),
|
||||
|
||||
migrations.DeleteModel(
|
||||
name='Branch',
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('orm', '0012_use_release_instead_of_up_branch'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='build',
|
||||
name='recipes_parsed',
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='build',
|
||||
name='recipes_to_parse',
|
||||
field=models.IntegerField(default=1),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('orm', '0013_recipe_parse_progress_fields'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='build',
|
||||
name='build_name',
|
||||
field=models.CharField(default='', max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('orm', '0014_allow_empty_buildname'),
    ]

    operations = [
        migrations.AddField(
            model_name='layer',
            name='local_source_dir',
            field=models.TextField(null=True, default=None),
        ),
    ]
@@ -21,8 +21,8 @@
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import models, IntegrityError
|
||||
from django.db.models import F, Q, Avg, Max, Sum, Count
|
||||
from django.db import models, IntegrityError, DataError
|
||||
from django.db.models import F, Q, Sum, Count
|
||||
from django.utils import timezone
|
||||
from django.utils.encoding import force_bytes
|
||||
|
||||
@@ -78,7 +78,7 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
|
||||
try:
|
||||
obj = self.create(**params)
|
||||
return obj, True
|
||||
except IntegrityError:
|
||||
except (IntegrityError, DataError):
|
||||
exc_info = sys.exc_info()
|
||||
try:
|
||||
return self.get(**lookup), False
|
||||
@@ -117,39 +117,48 @@ class ToasterSetting(models.Model):
|
||||
def __unicode__(self):
|
||||
return "Setting %s = %s" % (self.name, self.value)
|
||||
|
||||
|
||||
class ProjectManager(models.Manager):
|
||||
def create_project(self, name, release):
|
||||
if release is not None:
|
||||
prj = self.model(name = name, bitbake_version = release.bitbake_version, release = release)
|
||||
prj = self.model(name=name,
|
||||
bitbake_version=release.bitbake_version,
|
||||
release=release)
|
||||
else:
|
||||
prj = self.model(name = name, bitbake_version = None, release = None)
|
||||
prj = self.model(name=name,
|
||||
bitbake_version=None,
|
||||
release=None)
|
||||
|
||||
prj.save()
|
||||
|
||||
for defaultconf in ToasterSetting.objects.filter(name__startswith="DEFCONF_"):
|
||||
for defaultconf in ToasterSetting.objects.filter(
|
||||
name__startswith="DEFCONF_"):
|
||||
name = defaultconf.name[8:]
|
||||
ProjectVariable.objects.create( project = prj,
|
||||
name = name,
|
||||
value = defaultconf.value)
|
||||
ProjectVariable.objects.create(project=prj,
|
||||
name=name,
|
||||
value=defaultconf.value)
|
||||
|
||||
if release is None:
|
||||
return prj
|
||||
|
||||
for rdl in release.releasedefaultlayer_set.all():
|
||||
try:
|
||||
lv = Layer_Version.objects.filter(layer__name = rdl.layer_name, up_branch__name = release.branch_name)[0].get_equivalents_wpriority(prj)[0]
|
||||
ProjectLayer.objects.create( project = prj,
|
||||
layercommit = lv,
|
||||
optional = False )
|
||||
except IndexError:
|
||||
# we may have no valid layer version objects, and that's ok
|
||||
pass
|
||||
lv = Layer_Version.objects.filter(
|
||||
layer__name=rdl.layer_name,
|
||||
release=release).first()
|
||||
|
||||
if lv:
|
||||
ProjectLayer.objects.create(project=prj,
|
||||
layercommit=lv,
|
||||
optional=False)
|
||||
else:
|
||||
logger.warning("Default project layer %s not found" %
|
||||
rdl.layer_name)
|
||||
|
||||
return prj
|
||||
|
||||
# return single object with is_default = True
|
||||
def get_or_create_default_project(self):
|
||||
projects = super(ProjectManager, self).filter(is_default = True)
|
||||
projects = super(ProjectManager, self).filter(is_default=True)
|
||||
|
||||
if len(projects) > 1:
|
||||
raise Exception('Inconsistent project data: multiple ' +
|
||||
@@ -157,7 +166,8 @@ class ProjectManager(models.Manager):
|
||||
elif len(projects) < 1:
|
||||
options = {
|
||||
'name': 'Command line builds',
|
||||
'short_description': 'Project for builds started outside Toaster',
|
||||
'short_description':
|
||||
'Project for builds started outside Toaster',
|
||||
'is_default': True
|
||||
}
|
||||
project = Project.objects.create(**options)
|
||||
@@ -270,7 +280,7 @@ class Project(models.Model):
|
||||
# guard on release, as it can be null
|
||||
if self.release:
|
||||
queryset = Layer_Version.objects.filter(
|
||||
(Q(up_branch__name=self.release.branch_name) &
|
||||
(Q(release=self.release) &
|
||||
Q(build=None) &
|
||||
Q(project=None)) |
|
||||
Q(project=self))
|
||||
@@ -336,7 +346,15 @@ class Project(models.Model):
|
||||
for l in self.projectlayer_set.all().order_by("pk"):
|
||||
commit = l.layercommit.get_vcs_reference()
|
||||
print("ii Building layer ", l.layercommit.layer.name, " at vcs point ", commit)
|
||||
BRLayer.objects.create(req = br, name = l.layercommit.layer.name, giturl = l.layercommit.layer.vcs_url, commit = commit, dirpath = l.layercommit.dirpath, layer_version=l.layercommit)
|
||||
BRLayer.objects.create(
|
||||
req=br,
|
||||
name=l.layercommit.layer.name,
|
||||
giturl=l.layercommit.layer.vcs_url,
|
||||
commit=commit,
|
||||
dirpath=l.layercommit.dirpath,
|
||||
layer_version=l.layercommit,
|
||||
local_source_dir=l.layercommit.layer.local_source_dir
|
||||
)
|
||||
|
||||
br.state = BuildRequest.REQ_QUEUED
|
||||
now = timezone.now()
|
||||
@@ -387,9 +405,15 @@ class Build(models.Model):
|
||||
completed_on = models.DateTimeField()
|
||||
outcome = models.IntegerField(choices=BUILD_OUTCOME, default=IN_PROGRESS)
|
||||
cooker_log_path = models.CharField(max_length=500)
|
||||
build_name = models.CharField(max_length=100)
|
||||
build_name = models.CharField(max_length=100, default='')
|
||||
bitbake_version = models.CharField(max_length=50)
|
||||
|
||||
# number of recipes to parse for this build
|
||||
recipes_to_parse = models.IntegerField(default=1)
|
||||
|
||||
# number of recipes parsed so far for this build
|
||||
recipes_parsed = models.IntegerField(default=0)
|
||||
|
||||
@staticmethod
|
||||
def get_recent(project=None):
|
||||
"""
|
||||
@@ -420,6 +444,21 @@ class Build(models.Model):
|
||||
|
||||
return recent_builds
|
||||
|
||||
def started(self):
|
||||
"""
|
||||
As build variables are only added for a build when its BuildStarted event
|
||||
is received, a build with no build variables is counted as
|
||||
"in preparation" and not properly started yet. This method
|
||||
will return False if a build has no build variables (it never properly
|
||||
started), or True otherwise.
|
||||
|
||||
Note that this is a temporary workaround for the fact that we don't
|
||||
have a fine-grained state variable on a build which would allow us
|
||||
to record "in progress" (BuildStarted received) vs. "in preparation".
|
||||
"""
|
||||
variables = Variable.objects.filter(build=self)
|
||||
return len(variables) > 0
|
||||
|
||||
def completeper(self):
|
||||
tf = Task.objects.filter(build = self)
|
||||
tfc = tf.count()
|
||||
@@ -582,22 +621,64 @@ class Build(models.Model):
|
||||
|
||||
return target_labels
|
||||
|
||||
def get_current_status(self):
|
||||
"""
|
||||
get the status string from the build request if the build
|
||||
has one, or the text for the build outcome if it doesn't
|
||||
"""
|
||||
|
||||
from bldcontrol.models import BuildRequest
|
||||
|
||||
build_request = None
|
||||
def get_buildrequest(self):
|
||||
buildrequest = None
|
||||
if hasattr(self, 'buildrequest'):
|
||||
build_request = self.buildrequest
|
||||
buildrequest = self.buildrequest
|
||||
return buildrequest
|
||||
|
||||
if (build_request
|
||||
and build_request.state != BuildRequest.REQ_INPROGRESS
|
||||
and self.outcome == Build.IN_PROGRESS):
|
||||
return self.buildrequest.get_state_display()
|
||||
def is_queued(self):
|
||||
from bldcontrol.models import BuildRequest
|
||||
buildrequest = self.get_buildrequest()
|
||||
if buildrequest:
|
||||
return buildrequest.state == BuildRequest.REQ_QUEUED
|
||||
else:
|
||||
return False
|
||||
|
||||
def is_cancelling(self):
|
||||
from bldcontrol.models import BuildRequest
|
||||
buildrequest = self.get_buildrequest()
|
||||
if buildrequest:
|
||||
return self.outcome == Build.IN_PROGRESS and \
|
||||
buildrequest.state == BuildRequest.REQ_CANCELLING
|
||||
else:
|
||||
return False
|
||||
|
||||
def is_parsing(self):
|
||||
"""
|
||||
True if the build is still parsing recipes
|
||||
"""
|
||||
return self.outcome == Build.IN_PROGRESS and \
|
||||
self.recipes_parsed < self.recipes_to_parse
|
||||
|
||||
def is_starting(self):
|
||||
"""
|
||||
True if the build has no completed tasks yet and is still just starting
|
||||
tasks.
|
||||
|
||||
Note that the mechanism for testing whether a Task is "done" is whether
|
||||
its order field is set, as per the completeper() method.
|
||||
"""
|
||||
return self.outcome == Build.IN_PROGRESS and \
|
||||
self.task_build.filter(order__isnull=False).count() == 0
|
||||
|
||||
def get_state(self):
|
||||
"""
|
||||
Get the state of the build; one of 'Succeeded', 'Failed', 'In Progress',
|
||||
'Cancelled' (Build outcomes); or 'Queued', 'Cancelling' (states
|
||||
dependent on the BuildRequest state).
|
||||
|
||||
This works around the fact that we have BuildRequest states as well
|
||||
as Build states, but really we just want to know the state of the build.
|
||||
"""
|
||||
if self.is_cancelling():
|
||||
            return 'Cancelling'
|
||||
elif self.is_queued():
|
||||
return 'Queued'
|
||||
elif self.is_parsing():
|
||||
return 'Parsing'
|
||||
elif self.is_starting():
|
||||
return 'Starting'
|
||||
else:
|
||||
return self.get_outcome_text()
|
||||
|
||||
@@ -1126,21 +1207,27 @@ class Target_Installed_Package(models.Model):
|
||||
target = models.ForeignKey(Target)
|
||||
package = models.ForeignKey(Package, related_name='buildtargetlist_package')
|
||||
|
||||
|
||||
class Package_File(models.Model):
|
||||
package = models.ForeignKey(Package, related_name='buildfilelist_package')
|
||||
path = models.FilePathField(max_length=255, blank=True)
|
||||
size = models.IntegerField()
|
||||
|
||||
|
||||
class Recipe(models.Model):
|
||||
search_allowed_fields = ['name', 'version', 'file_path', 'section', 'summary', 'description', 'license', 'layer_version__layer__name', 'layer_version__branch', 'layer_version__commit', 'layer_version__local_path', 'layer_version__layer_source__name']
|
||||
search_allowed_fields = ['name', 'version', 'file_path', 'section',
|
||||
'summary', 'description', 'license',
|
||||
'layer_version__layer__name',
|
||||
'layer_version__branch', 'layer_version__commit',
|
||||
'layer_version__local_path',
|
||||
'layer_version__layer_source']
|
||||
|
||||
layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this recipe
|
||||
up_id = models.IntegerField(null = True, default = None) # id of entry in the source
|
||||
up_date = models.DateTimeField(null = True, default = None)
|
||||
up_date = models.DateTimeField(null=True, default=None)
|
||||
|
||||
name = models.CharField(max_length=100, blank=True) # pn
|
||||
version = models.CharField(max_length=100, blank=True) # pv
|
||||
layer_version = models.ForeignKey('Layer_Version', related_name='recipe_layer_version')
|
||||
name = models.CharField(max_length=100, blank=True)
|
||||
version = models.CharField(max_length=100, blank=True)
|
||||
layer_version = models.ForeignKey('Layer_Version',
|
||||
related_name='recipe_layer_version')
|
||||
summary = models.TextField(blank=True)
|
||||
description = models.TextField(blank=True)
|
||||
section = models.CharField(max_length=100, blank=True)
|
||||
@@ -1151,13 +1238,6 @@ class Recipe(models.Model):
|
||||
pathflags = models.CharField(max_length=200, blank=True)
|
||||
is_image = models.BooleanField(default=False)
|
||||
|
||||
def get_layersource_view_url(self):
|
||||
if self.layer_source is None:
|
||||
return ""
|
||||
|
||||
url = self.layer_source.get_object_view(self.layer_version.up_branch, "recipes", self.name)
|
||||
return url
|
||||
|
||||
def __unicode__(self):
|
||||
return "Recipe " + self.name + ":" + self.version
|
||||
|
||||
@@ -1203,8 +1283,6 @@ class Recipe_Dependency(models.Model):
|
||||
|
||||
class Machine(models.Model):
|
||||
search_allowed_fields = ["name", "description", "layer_version__layer__name"]
|
||||
layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this machine
|
||||
up_id = models.IntegerField(null = True, default = None) # id of entry in the source
|
||||
up_date = models.DateTimeField(null = True, default = None)
|
||||
|
||||
layer_version = models.ForeignKey('Layer_Version')
|
||||
@@ -1219,293 +1297,9 @@ class Machine(models.Model):
|
||||
def __unicode__(self):
|
||||
return "Machine " + self.name + "(" + self.description + ")"
|
||||
|
||||
class Meta:
|
||||
unique_together = ("layer_source", "up_id")
|
||||
|
||||
|
||||
from django.db.models.base import ModelBase
|
||||
|
||||
class InheritanceMetaclass(ModelBase):
|
||||
def __call__(cls, *args, **kwargs):
|
||||
obj = super(InheritanceMetaclass, cls).__call__(*args, **kwargs)
|
||||
return obj.get_object()
|
||||
|
||||
|
||||
class LayerSource(models.Model):
|
||||
__metaclass__ = InheritanceMetaclass
|
||||
|
||||
class Meta:
|
||||
unique_together = (('sourcetype', 'apiurl'), )
|
||||
|
||||
TYPE_LOCAL = 0
|
||||
TYPE_LAYERINDEX = 1
|
||||
TYPE_IMPORTED = 2
|
||||
SOURCE_TYPE = (
|
||||
(TYPE_LOCAL, "local"),
|
||||
(TYPE_LAYERINDEX, "layerindex"),
|
||||
(TYPE_IMPORTED, "imported"),
|
||||
)
|
||||
|
||||
name = models.CharField(max_length=63, unique = True)
|
||||
sourcetype = models.IntegerField(choices=SOURCE_TYPE)
|
||||
apiurl = models.CharField(max_length=255, null=True, default=None)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(LayerSource, self).__init__(*args, **kwargs)
|
||||
if self.sourcetype == LayerSource.TYPE_LOCAL:
|
||||
self.__class__ = LocalLayerSource
|
||||
elif self.sourcetype == LayerSource.TYPE_LAYERINDEX:
|
||||
self.__class__ = LayerIndexLayerSource
|
||||
elif self.sourcetype == LayerSource.TYPE_IMPORTED:
|
||||
self.__class__ = ImportedLayerSource
|
||||
elif self.sourcetype == None:
|
||||
raise Exception("Unknown LayerSource-derived class. If you added a new layer source type, fill out all code stubs.")
|
||||
|
||||
|
||||
def update(self):
|
||||
"""
|
||||
Updates the local database information from the upstream layer source
|
||||
"""
|
||||
raise Exception("Abstract, update() must be implemented by all LayerSource-derived classes (object is %s)" % str(vars(self)))
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
return super(LayerSource, self).save(*args, **kwargs)
|
||||
|
||||
def get_object(self):
|
||||
        # preset an uninitialized object
|
||||
if None == self.name:
|
||||
self.name=""
|
||||
if None == self.apiurl:
|
||||
self.apiurl=""
|
||||
if None == self.sourcetype:
|
||||
self.sourcetype=LayerSource.TYPE_LOCAL
|
||||
|
||||
if self.sourcetype == LayerSource.TYPE_LOCAL:
|
||||
self.__class__ = LocalLayerSource
|
||||
elif self.sourcetype == LayerSource.TYPE_LAYERINDEX:
|
||||
self.__class__ = LayerIndexLayerSource
|
||||
elif self.sourcetype == LayerSource.TYPE_IMPORTED:
|
||||
self.__class__ = ImportedLayerSource
|
||||
else:
|
||||
raise Exception("Unknown LayerSource type. If you added a new layer source type, fill out all code stubs.")
|
||||
return self
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s (%s)" % (self.name, self.sourcetype)
|
||||
|
||||
|
||||
class LocalLayerSource(LayerSource):
|
||||
class Meta(LayerSource._meta.__class__):
|
||||
proxy = True
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(LocalLayerSource, self).__init__(args, kwargs)
|
||||
self.sourcetype = LayerSource.TYPE_LOCAL
|
||||
|
||||
def update(self):
|
||||
"""
|
||||
Fetches layer, recipe and machine information from local repository
|
||||
"""
|
||||
pass
|
||||
|
||||
class ImportedLayerSource(LayerSource):
|
||||
class Meta(LayerSource._meta.__class__):
|
||||
proxy = True
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ImportedLayerSource, self).__init__(args, kwargs)
|
||||
self.sourcetype = LayerSource.TYPE_IMPORTED
|
||||
|
||||
def update(self):
|
||||
"""
|
||||
Fetches layer, recipe and machine information from local repository
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class LayerIndexLayerSource(LayerSource):
|
||||
class Meta(LayerSource._meta.__class__):
|
||||
proxy = True
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(LayerIndexLayerSource, self).__init__(args, kwargs)
|
||||
self.sourcetype = LayerSource.TYPE_LAYERINDEX
|
||||
|
||||
def get_object_view(self, branch, objectype, upid):
|
||||
return self.apiurl + "../branch/" + branch.name + "/" + objectype + "/?q=" + str(upid)
|
||||
|
||||
def update(self):
|
||||
"""
|
||||
Fetches layer, recipe and machine information from remote repository
|
||||
"""
|
||||
assert self.apiurl is not None
|
||||
from django.db import transaction, connection
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
try:
|
||||
from urllib.request import urlopen, URLError
|
||||
from urllib.parse import urlparse
|
||||
except ImportError:
|
||||
from urllib2 import urlopen, URLError
|
||||
from urlparse import urlparse
|
||||
|
||||
proxy_settings = os.environ.get("http_proxy", None)
|
||||
oe_core_layer = 'openembedded-core'
|
||||
|
||||
def _get_json_response(apiurl = self.apiurl):
|
||||
_parsedurl = urlparse(apiurl)
|
||||
path = _parsedurl.path
|
||||
|
||||
try:
|
||||
res = urlopen(apiurl)
|
||||
except URLError as e:
|
||||
raise Exception("Failed to read %s: %s" % (path, e.reason))
|
||||
|
||||
return json.loads(res.read().decode('utf-8'))
|
||||
|
||||
# verify we can get the basic api
|
||||
try:
|
||||
apilinks = _get_json_response()
|
||||
except Exception as e:
|
||||
import traceback
|
||||
if proxy_settings is not None:
|
||||
logger.info("EE: Using proxy %s" % proxy_settings)
|
||||
logger.warning("EE: could not connect to %s, skipping update: %s\n%s" % (self.apiurl, e, traceback.format_exc()))
|
||||
return
|
||||
|
||||
        # update branches; only those whose names are already listed in the
|
||||
# Releases table
|
||||
whitelist_branch_names = [rel.branch_name for rel in Release.objects.all()]
|
||||
if len(whitelist_branch_names) == 0:
|
||||
raise Exception("Failed to make list of branches to fetch")
|
||||
|
||||
logger.debug("Fetching branches")
|
||||
branches_info = _get_json_response(apilinks['branches']
|
||||
+ "?filter=name:%s" % "OR".join(whitelist_branch_names))
|
||||
for bi in branches_info:
|
||||
b, created = Branch.objects.get_or_create(layer_source = self, name = bi['name'])
|
||||
b.up_id = bi['id']
|
||||
b.up_date = bi['updated']
|
||||
b.name = bi['name']
|
||||
b.short_description = bi['short_description']
|
||||
b.save()
|
||||
|
||||
# update layers
|
||||
layers_info = _get_json_response(apilinks['layerItems'])
|
||||
|
||||
for li in layers_info:
|
||||
# Special case for the openembedded-core layer
|
||||
if li['name'] == oe_core_layer:
|
||||
try:
|
||||
# If we have an existing openembedded-core for example
|
||||
# from the toasterconf.json augment the info using the
|
||||
# layerindex rather than duplicate it
|
||||
oe_core_l = Layer.objects.get(name=oe_core_layer)
|
||||
# Take ownership of the layer as now coming from the
|
||||
# layerindex
|
||||
oe_core_l.layer_source = self
|
||||
oe_core_l.up_id = li['id']
|
||||
oe_core_l.summary = li['summary']
|
||||
oe_core_l.description = li['description']
|
||||
oe_core_l.save()
|
||||
continue
|
||||
|
||||
except Layer.DoesNotExist:
|
||||
pass
|
||||
|
||||
l, created = Layer.objects.get_or_create(layer_source = self, name = li['name'])
|
||||
l.up_id = li['id']
|
||||
l.up_date = li['updated']
|
||||
l.vcs_url = li['vcs_url']
|
||||
l.vcs_web_url = li['vcs_web_url']
|
||||
l.vcs_web_tree_base_url = li['vcs_web_tree_base_url']
|
||||
l.vcs_web_file_base_url = li['vcs_web_file_base_url']
|
||||
l.summary = li['summary']
|
||||
l.description = li['description']
|
||||
l.save()
|
||||
|
||||
# update layerbranches/layer_versions
|
||||
logger.debug("Fetching layer information")
|
||||
layerbranches_info = _get_json_response(apilinks['layerBranches']
|
||||
+ "?filter=branch:%s" % "OR".join(map(lambda x: str(x.up_id), [i for i in Branch.objects.filter(layer_source = self) if i.up_id is not None] ))
|
||||
)
|
||||
|
||||
for lbi in layerbranches_info:
|
||||
lv, created = Layer_Version.objects.get_or_create(layer_source = self,
|
||||
up_id = lbi['id'],
|
||||
layer=Layer.objects.get(layer_source = self, up_id = lbi['layer'])
|
||||
)
|
||||
|
||||
lv.up_date = lbi['updated']
|
||||
lv.up_branch = Branch.objects.get(layer_source = self, up_id = lbi['branch'])
|
||||
lv.branch = lbi['actual_branch']
|
||||
lv.commit = lbi['actual_branch']
|
||||
lv.dirpath = lbi['vcs_subdir']
|
||||
lv.save()
|
||||
|
||||
# update layer dependencies
|
||||
layerdependencies_info = _get_json_response(apilinks['layerDependencies'])
|
||||
dependlist = {}
|
||||
for ldi in layerdependencies_info:
|
||||
try:
|
||||
lv = Layer_Version.objects.get(layer_source = self, up_id = ldi['layerbranch'])
|
||||
except Layer_Version.DoesNotExist as e:
|
||||
continue
|
||||
|
||||
if lv not in dependlist:
|
||||
dependlist[lv] = []
|
||||
try:
|
||||
dependlist[lv].append(Layer_Version.objects.get(layer_source = self, layer__up_id = ldi['dependency'], up_branch = lv.up_branch))
|
||||
except Layer_Version.DoesNotExist:
|
||||
logger.warning("Cannot find layer version (ls:%s), up_id:%s lv:%s" % (self, ldi['dependency'], lv))
|
||||
|
||||
for lv in dependlist:
|
||||
LayerVersionDependency.objects.filter(layer_version = lv).delete()
|
||||
for lvd in dependlist[lv]:
|
||||
LayerVersionDependency.objects.get_or_create(layer_version = lv, depends_on = lvd)
|
||||
|
||||
|
||||
# update machines
|
||||
logger.debug("Fetching machine information")
|
||||
machines_info = _get_json_response(apilinks['machines']
|
||||
+ "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
|
||||
)
|
||||
|
||||
for mi in machines_info:
|
||||
mo, created = Machine.objects.get_or_create(layer_source = self, up_id = mi['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = mi['layerbranch']))
|
||||
mo.up_date = mi['updated']
|
||||
mo.name = mi['name']
|
||||
mo.description = mi['description']
|
||||
mo.save()
|
||||
|
||||
# update recipes; paginate by layer version / layer branch
|
||||
logger.debug("Fetching target information")
|
||||
recipes_info = _get_json_response(apilinks['recipes']
|
||||
+ "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
|
||||
)
|
||||
for ri in recipes_info:
|
||||
try:
|
||||
ro, created = Recipe.objects.get_or_create(layer_source = self, up_id = ri['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = ri['layerbranch']))
|
||||
ro.up_date = ri['updated']
|
||||
ro.name = ri['pn']
|
||||
ro.version = ri['pv']
|
||||
ro.summary = ri['summary']
|
||||
ro.description = ri['description']
|
||||
ro.section = ri['section']
|
||||
ro.license = ri['license']
|
||||
ro.homepage = ri['homepage']
|
||||
ro.bugtracker = ri['bugtracker']
|
||||
ro.file_path = ri['filepath'] + "/" + ri['filename']
|
||||
if 'inherits' in ri:
|
||||
ro.is_image = 'image' in ri['inherits'].split()
|
||||
else: # workaround for old style layer index
|
||||
ro.is_image = "-image-" in ri['pn']
|
||||
ro.save()
|
||||
except IntegrityError as e:
|
||||
logger.debug("Failed saving recipe, ignoring: %s (%s:%s)" % (e, ro.layer_version, ri['filepath']+"/"+ri['filename']))
|
||||
ro.delete()
|
||||
|
||||
class BitbakeVersion(models.Model):
|
||||
|
||||
@@ -1529,87 +1323,94 @@ class Release(models.Model):
|
||||
def __unicode__(self):
|
||||
return "%s (%s)" % (self.name, self.branch_name)
|
||||
|
||||
class ReleaseLayerSourcePriority(models.Model):
|
||||
""" Each release selects layers from the set up layer sources, ordered by priority """
|
||||
release = models.ForeignKey("Release")
|
||||
layer_source = models.ForeignKey("LayerSource")
|
||||
priority = models.IntegerField(default = 0)
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s-%s:%d" % (self.release.name, self.layer_source.name, self.priority)
|
||||
class Meta:
|
||||
unique_together = (('release', 'layer_source'),)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class ReleaseDefaultLayer(models.Model):
|
||||
release = models.ForeignKey(Release)
|
||||
layer_name = models.CharField(max_length=100, default="")
|
||||
|
||||
|
||||
# Branch class is synced with layerindex.Branch, branches can only come from remote layer indexes
|
||||
class Branch(models.Model):
|
||||
layer_source = models.ForeignKey('LayerSource', null = True, default = True)
|
||||
up_id = models.IntegerField(null = True, default = None) # id of branch in the source
|
||||
up_date = models.DateTimeField(null = True, default = None)
|
||||
class LayerSource(object):
|
||||
""" Where the layer metadata came from """
|
||||
TYPE_LOCAL = 0
|
||||
TYPE_LAYERINDEX = 1
|
||||
TYPE_IMPORTED = 2
|
||||
TYPE_BUILD = 3
|
||||
|
||||
name = models.CharField(max_length=50)
|
||||
short_description = models.CharField(max_length=50, blank=True)
|
||||
SOURCE_TYPE = (
|
||||
(TYPE_LOCAL, "local"),
|
||||
(TYPE_LAYERINDEX, "layerindex"),
|
||||
(TYPE_IMPORTED, "imported"),
|
||||
(TYPE_BUILD, "build"),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Branches"
|
||||
unique_together = (('layer_source', 'name'),('layer_source', 'up_id'))
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
def types_dict():
|
||||
""" Turn the TYPES enums into a simple dictionary """
|
||||
dictionary = {}
|
||||
for key in LayerSource.__dict__:
|
||||
if "TYPE" in key:
|
||||
dictionary[key] = getattr(LayerSource, key)
|
||||
return dictionary
|
||||
|
||||
|
||||
# Layer class synced with layerindex.LayerItem
|
||||
class Layer(models.Model):
|
||||
layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we got this layer
|
||||
up_id = models.IntegerField(null = True, default = None) # id of layer in the remote source
|
||||
up_date = models.DateTimeField(null = True, default = None)
|
||||
|
||||
up_date = models.DateTimeField(null=True, default=timezone.now)
|
||||
|
||||
name = models.CharField(max_length=100)
|
||||
layer_index_url = models.URLField()
|
||||
vcs_url = GitURLField(default = None, null = True)
|
||||
vcs_web_url = models.URLField(null = True, default = None)
|
||||
vcs_web_tree_base_url = models.URLField(null = True, default = None)
|
||||
vcs_web_file_base_url = models.URLField(null = True, default = None)
|
||||
vcs_url = GitURLField(default=None, null=True)
|
||||
local_source_dir = models.TextField(null = True, default = None)
|
||||
vcs_web_url = models.URLField(null=True, default=None)
|
||||
vcs_web_tree_base_url = models.URLField(null=True, default=None)
|
||||
vcs_web_file_base_url = models.URLField(null=True, default=None)
|
||||
|
||||
summary = models.TextField(help_text='One-line description of the layer', null = True, default = None)
|
||||
description = models.TextField(null = True, default = None)
|
||||
summary = models.TextField(help_text='One-line description of the layer',
|
||||
null=True, default=None)
|
||||
description = models.TextField(null=True, default=None)
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s / %s " % (self.name, self.layer_source)
|
||||
|
||||
class Meta:
|
||||
unique_together = (("layer_source", "up_id"), ("layer_source", "name"))
|
||||
return "%s / %s " % (self.name, self.summary)
|
||||
|
||||
|
||||
# LayerCommit class is synced with layerindex.LayerBranch
|
||||
class Layer_Version(models.Model):
|
||||
"""
|
||||
A Layer_Version either belongs to a single project or no project
|
||||
"""
|
||||
search_allowed_fields = ["layer__name", "layer__summary", "layer__description", "layer__vcs_url", "dirpath", "up_branch__name", "commit", "branch"]
|
||||
build = models.ForeignKey(Build, related_name='layer_version_build', default = None, null = True)
|
||||
search_allowed_fields = ["layer__name", "layer__summary",
|
||||
"layer__description", "layer__vcs_url",
|
||||
"dirpath", "release__name", "commit", "branch"]
|
||||
|
||||
build = models.ForeignKey(Build, related_name='layer_version_build',
|
||||
default=None, null=True)
|
||||
|
||||
layer = models.ForeignKey(Layer, related_name='layer_version_layer')
|
||||
|
||||
layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we get this Layer Version
|
||||
up_id = models.IntegerField(null = True, default = None) # id of layerbranch in the remote source
|
||||
up_date = models.DateTimeField(null = True, default = None)
|
||||
up_branch = models.ForeignKey(Branch, null = True, default = None)
|
||||
layer_source = models.IntegerField(choices=LayerSource.SOURCE_TYPE,
|
||||
default=0)
|
||||
|
||||
branch = models.CharField(max_length=80) # LayerBranch.actual_branch
|
||||
commit = models.CharField(max_length=100) # LayerBranch.vcs_last_rev
|
||||
dirpath = models.CharField(max_length=255, null = True, default = None) # LayerBranch.vcs_subdir
|
||||
priority = models.IntegerField(default = 0) # if -1, this is a default layer
|
||||
up_date = models.DateTimeField(null=True, default=timezone.now)
|
||||
|
||||
local_path = models.FilePathField(max_length=1024, default = "/") # where this layer was checked-out
|
||||
# To which metadata release does this layer version belong to
|
||||
release = models.ForeignKey(Release, null=True, default=None)
|
||||
|
||||
project = models.ForeignKey('Project', null = True, default = None) # Set if this layer is project-specific; always set for imported layers, and project-set branches
|
||||
branch = models.CharField(max_length=80)
|
||||
commit = models.CharField(max_length=100)
|
||||
# If the layer is in a subdir
|
||||
dirpath = models.CharField(max_length=255, null=True, default=None)
|
||||
|
||||
# code lifted, with adaptations, from the layerindex-web application https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/
|
||||
# if -1, this is a default layer
|
||||
priority = models.IntegerField(default=0)
|
||||
|
||||
# where this layer exists on the filesystem
|
||||
local_path = models.FilePathField(max_length=1024, default="/")
|
||||
|
||||
# Set if this layer is restricted to a particular project
|
||||
project = models.ForeignKey('Project', null=True, default=None)
|
||||
|
||||
# code lifted, with adaptations, from the layerindex-web application
|
||||
# https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/
|
||||
def _handle_url_path(self, base_url, path):
|
||||
import re, posixpath
|
||||
if base_url:
|
||||
@@ -1626,7 +1427,7 @@ class Layer_Version(models.Model):
|
||||
extra_path = self.dirpath
|
||||
else:
|
||||
extra_path = path
|
||||
branchname = self.up_branch.name
|
||||
branchname = self.release.name
|
||||
url = base_url.replace('%branch%', branchname)
|
||||
|
||||
# If there's a % in the path (e.g. a wildcard bbappend) we need to encode it
|
||||
@@ -1651,23 +1452,19 @@ class Layer_Version(models.Model):
|
||||
def get_vcs_file_link_url(self, file_path=""):
|
||||
if self.layer.vcs_web_file_base_url is None:
|
||||
return None
|
||||
return self._handle_url_path(self.layer.vcs_web_file_base_url, file_path)
|
||||
return self._handle_url_path(self.layer.vcs_web_file_base_url,
|
||||
file_path)
|
||||
|
||||
def get_vcs_dirpath_link_url(self):
|
||||
if self.layer.vcs_web_tree_base_url is None:
|
||||
return None
|
||||
return self._handle_url_path(self.layer.vcs_web_tree_base_url, '')
|
||||
|
||||
def get_equivalents_wpriority(self, project):
|
||||
layer_versions = project.get_all_compatible_layer_versions()
|
||||
filtered = layer_versions.filter(layer__name = self.layer.name)
|
||||
return filtered.order_by("-layer_source__releaselayersourcepriority__priority")
|
||||
|
||||
def get_vcs_reference(self):
|
||||
if self.branch is not None and len(self.branch) > 0:
|
||||
return self.branch
|
||||
if self.up_branch is not None:
|
||||
return self.up_branch.name
|
||||
if self.release is not None:
|
||||
return self.release.name
|
||||
if self.commit is not None and len(self.commit) > 0:
|
||||
return self.commit
|
||||
return 'N/A'
|
||||
@@ -1695,20 +1492,23 @@ class Layer_Version(models.Model):
|
||||
return sorted(result, key=lambda x: x.layer.name)
|
||||
|
||||
def __unicode__(self):
|
||||
return "%d %s (VCS %s, Project %s)" % (self.pk, str(self.layer), self.get_vcs_reference(), self.build.project if self.build is not None else "No project")
|
||||
return ("id %d belongs to layer: %s" % (self.pk, self.layer.name))
|
||||
|
||||
def __str__(self):
|
||||
if self.release:
|
||||
release = self.release.name
|
||||
else:
|
||||
release = "No release set"
|
||||
|
||||
return "%d %s (%s)" % (self.pk, self.layer.name, release)
|
||||
|
||||
class Meta:
|
||||
unique_together = ("layer_source", "up_id")
|
||||
|
||||
class LayerVersionDependency(models.Model):
|
||||
layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we got this layer
|
||||
up_id = models.IntegerField(null = True, default = None) # id of layerbranch in the remote source
|
||||
|
||||
layer_version = models.ForeignKey(Layer_Version, related_name="dependencies")
|
||||
depends_on = models.ForeignKey(Layer_Version, related_name="dependees")
|
||||
|
||||
class Meta:
|
||||
unique_together = ("layer_source", "up_id")
|
||||
layer_version = models.ForeignKey(Layer_Version,
|
||||
related_name="dependencies")
|
||||
depends_on = models.ForeignKey(Layer_Version,
|
||||
related_name="dependees")
|
||||
|
||||
class ProjectLayer(models.Model):
|
||||
project = models.ForeignKey(Project)
|
||||
|
||||
@@ -1,180 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013-2015 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Test cases for Toaster ORM."""
|
||||
|
||||
from django.test import TestCase, TransactionTestCase
|
||||
from orm.models import LocalLayerSource, LayerIndexLayerSource, ImportedLayerSource, LayerSource
|
||||
from orm.models import Branch, LayerVersionDependency
|
||||
|
||||
from orm.models import Project, Layer, Layer_Version, Branch, ProjectLayer
|
||||
from orm.models import Release, ReleaseLayerSourcePriority, BitbakeVersion
|
||||
|
||||
from django.db import IntegrityError
|
||||
|
||||
import os
|
||||
|
||||
# set TTS_LAYER_INDEX to the base url to use a different instance of the layer index
|
||||
|
||||
class LayerSourceVerifyInheritanceSaveLoad(TestCase):
|
||||
"""
|
||||
Tests to verify inheritance for the LayerSource proxy-inheritance classes.
|
||||
"""
|
||||
def test_object_creation(self):
|
||||
"""Test LayerSource object creation."""
|
||||
for name, sourcetype in [("a1", LayerSource.TYPE_LOCAL),
|
||||
("a2", LayerSource.TYPE_LAYERINDEX),
|
||||
("a3", LayerSource.TYPE_IMPORTED)]:
|
||||
LayerSource.objects.create(name=name, sourcetype=sourcetype)
|
||||
|
||||
objects = LayerSource.objects.all()
|
||||
self.assertTrue(isinstance(objects[0], LocalLayerSource))
|
||||
self.assertTrue(isinstance(objects[1], LayerIndexLayerSource))
|
||||
self.assertTrue(isinstance(objects[2], ImportedLayerSource))
|
||||
|
||||
def test_duplicate_error(self):
|
||||
"""Test creation of duplicate LayerSource objects."""
|
||||
stype = LayerSource.TYPE_LOCAL
|
||||
LayerSource.objects.create(name="a1", sourcetype=stype)
|
||||
with self.assertRaises(IntegrityError):
|
||||
LayerSource.objects.create(name="a1", sourcetype=stype)
|
||||
|
||||
|
||||
class LILSUpdateTestCase(TransactionTestCase):
|
||||
"""Test Layer Source update."""
|
||||
|
||||
def setUp(self):
|
||||
"""Create release."""
|
||||
bbv = BitbakeVersion.objects.create(\
|
||||
name="master", giturl="git://git.openembedded.org/bitbake")
|
||||
Release.objects.create(name="default-release", bitbake_version=bbv,
|
||||
branch_name="master")
|
||||
|
||||
def test_update(self):
|
||||
"""Check if LayerSource.update can fetch branches."""
|
||||
url = os.getenv("TTS_LAYER_INDEX",
|
||||
default="http://layers.openembedded.org/")
|
||||
|
||||
lsobj = LayerSource.objects.create(\
|
||||
name="b1", sourcetype=LayerSource.TYPE_LAYERINDEX,
|
||||
apiurl=url + "layerindex/api/")
|
||||
lsobj.update()
|
||||
self.assertTrue(lsobj.branch_set.all().count() > 0,
|
||||
"no branches fetched")
|
||||
|
||||
class LayerVersionEquivalenceTestCase(TestCase):
|
||||
"""Verify Layer_Version priority selection."""
|
||||
|
||||
def setUp(self):
|
||||
"""Create required objects."""
|
||||
# create layer source
|
||||
self.lsrc = LayerSource.objects.create(name="dummy-layersource",
|
||||
sourcetype=LayerSource.TYPE_LOCAL)
|
||||
# create release
|
||||
bbv = BitbakeVersion.objects.create(\
|
||||
name="master", giturl="git://git.openembedded.org/bitbake")
|
||||
self.release = Release.objects.create(name="default-release",
|
||||
bitbake_version=bbv,
|
||||
branch_name="master")
|
||||
# attach layer source to release
|
||||
ReleaseLayerSourcePriority.objects.create(\
|
||||
release=self.release, layer_source=self.lsrc, priority=1)
|
||||
|
||||
# create a layer version for the layer on the specified branch
|
||||
self.layer = Layer.objects.create(name="meta-testlayer",
|
||||
layer_source=self.lsrc)
|
||||
self.branch = Branch.objects.create(name="master", layer_source=self.lsrc)
|
||||
self.lver = Layer_Version.objects.create(\
|
||||
layer=self.layer, layer_source=self.lsrc, up_branch=self.branch)
|
||||
|
||||
# create project and project layer
|
||||
self.project = Project.objects.create_project(name="test-project",
|
||||
release=self.release)
|
||||
ProjectLayer.objects.create(project=self.project,
|
||||
layercommit=self.lver)
|
||||
|
||||
# create spoof layer that should not appear in the search results
|
||||
layer = Layer.objects.create(name="meta-notvalid",
|
||||
layer_source=self.lsrc)
|
||||
self.lver2 = Layer_Version.objects.create(layer=layer,
|
||||
layer_source=self.lsrc,
|
||||
up_branch=self.branch)
|
||||
|
||||
def test_single_layersource(self):
|
||||
"""
|
||||
When we have a single layer version,
|
||||
get_equivalents_wpriority() should return a list with
|
||||
just this layer_version.
|
||||
"""
|
||||
equivqs = self.lver.get_equivalents_wpriority(self.project)
|
||||
self.assertEqual(list(equivqs), [self.lver])
|
||||
|
||||
def test_dual_layersource(self):
|
||||
"""
|
||||
If we have two layers with the same name, from different layer sources,
|
||||
        we expect both to be returned, ordered by the priority of their layer source.
|
||||
"""
|
||||
lsrc2 = LayerSource.objects.create(\
|
||||
name="dummy-layersource2",
|
||||
sourcetype=LayerSource.TYPE_LOCAL,
|
||||
apiurl="test")
|
||||
|
||||
# assign a lower priority for the second layer source
|
||||
self.release.releaselayersourcepriority_set.create(layer_source=lsrc2,
|
||||
priority=2)
|
||||
|
||||
# create a new layer_version for a layer with the same name
|
||||
# coming from the second layer source
|
||||
layer2 = Layer.objects.create(name="meta-testlayer",
|
||||
layer_source=lsrc2)
|
||||
lver2 = Layer_Version.objects.create(layer=layer2, layer_source=lsrc2,
|
||||
up_branch=self.branch)
|
||||
|
||||
# expect two layer versions, in the priority order
|
||||
equivqs = self.lver.get_equivalents_wpriority(self.project)
|
||||
self.assertEqual(list(equivqs), [lver2, self.lver])
|
||||
|
||||
def test_compatible_layer_versions(self):
|
||||
"""
|
||||
        When we have 2 layer versions, get_all_compatible_layer_versions()
|
||||
should return a queryset with both.
|
||||
"""
|
||||
compat_lv = self.project.get_all_compatible_layer_versions()
|
||||
self.assertEqual(list(compat_lv), [self.lver, self.lver2])
|
||||
|
||||
def test_layerversion_get_alldeps(self):
|
||||
"""Test Layer_Version.get_alldeps API."""
|
||||
lvers = {}
|
||||
for i in range(10):
|
||||
name = "layer%d" % i
|
||||
lvers[name] = Layer_Version.objects.create(layer=Layer.objects.create(name=name),
|
||||
project=self.project)
|
||||
if i:
|
||||
LayerVersionDependency.objects.create(layer_version=lvers["layer%d" % (i - 1)],
|
||||
depends_on=lvers[name])
|
||||
            # Check dynamically added deps
|
||||
self.assertEqual(lvers['layer0'].get_alldeps(self.project.id),
|
||||
[lvers['layer%d' % n] for n in range(1, i+1)])
|
||||
|
||||
# Check chain of deps created in previous loop
|
||||
for i in range(10):
|
||||
self.assertEqual(lvers['layer%d' % i].get_alldeps(self.project.id),
|
||||
[lvers['layer%d' % n] for n in range(i+1, 10)])
|
||||
@@ -13,7 +13,7 @@ a Selenium test report with a version-specific format.
|
||||
To run tests against Chrome:
|
||||
|
||||
* Download chromedriver for your host OS from
|
||||
https://code.google.com/p/chromedriver/downloads/list
|
||||
https://sites.google.com/a/chromium.org/chromedriver/downloads
|
||||
* On *nix systems, put chromedriver on PATH
|
||||
* On Windows, put chromedriver.exe in the same directory as chrome.exe
|
||||
|
||||
@@ -24,15 +24,30 @@ To run tests against PhantomJS (headless):
|
||||
* On *nix systems, put phantomjs on PATH
|
||||
* Not tested on Windows
|
||||
|
||||
Firefox should work without requiring additional software to be installed.
|
||||
To run tests against Firefox, you may need to install the Marionette driver,
|
||||
depending on how new your version of Firefox is. One clue that you need to do
|
||||
this is if you see an exception like:
|
||||
|
||||
The test case will instantiate a Selenium driver set by the
|
||||
selenium.common.exceptions.WebDriverException: Message: The browser
|
||||
appears to have exited before we could connect. If you specified
|
||||
a log_file in the FirefoxBinary constructor, check it for details.
|
||||
|
||||
See https://developer.mozilla.org/en-US/docs/Mozilla/QA/Marionette/WebDriver
|
||||
for installation instructions. Ensure that the Marionette executable (renamed
|
||||
as wires on Linux or wires.exe on Windows) is on your PATH; and use "marionette"
|
||||
as the browser string passed via TOASTER_TESTS_BROWSER (see below).
|
||||
|
||||
(Note: The Toaster tests have been checked against Firefox 47 with the
|
||||
Marionette driver.)
|
||||
|
||||
The test cases will instantiate a Selenium driver set by the
|
||||
TOASTER_TESTS_BROWSER environment variable, or Chrome if this is not specified.
|
||||
|
||||
Available drivers:
|
||||
|
||||
* chrome (default)
|
||||
* firefox
|
||||
* marionette (for newer Firefoxes)
|
||||
* ie
|
||||
* phantomjs
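The TOASTER_TESTS_BROWSER selection described above is implemented by create_selenium_driver() in the new selenium_helpers_base.py file that follows. A short, hedged sketch of driving it directly (the module path matches the test imports used later in this series):

# Illustrative only: pick the browser the same way the test harness does.
import os
from tests.browser.selenium_helpers_base import create_selenium_driver

os.environ['TOASTER_TESTS_BROWSER'] = 'marionette'  # or chrome/firefox/ie/phantomjs
driver = create_selenium_driver()  # falls back to 'chrome' if the variable is unset
driver.quit()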
|
||||
|
||||
|
||||
218
bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
Normal file
@@ -0,0 +1,218 @@
|
||||
#! /usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# The Wait class and some of SeleniumDriverHelper and SeleniumTestCase are
|
||||
# modified from Patchwork, released under the same licence terms as Toaster:
|
||||
# https://github.com/dlespiau/patchwork/blob/master/patchwork/tests.browser.py
|
||||
|
||||
"""
|
||||
Helper methods for creating Toaster Selenium tests which run within
|
||||
the context of Django unit tests.
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
|
||||
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.support.ui import WebDriverWait
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.common.exceptions import NoSuchElementException, \
|
||||
StaleElementReferenceException, TimeoutException
|
||||
|
||||
def create_selenium_driver(browser='chrome'):
|
||||
# set default browser string based on env (if available)
|
||||
env_browser = os.environ.get('TOASTER_TESTS_BROWSER')
|
||||
if env_browser:
|
||||
browser = env_browser
|
||||
|
||||
if browser == 'chrome':
|
||||
return webdriver.Chrome(
|
||||
service_args=["--verbose", "--log-path=selenium.log"]
|
||||
)
|
||||
elif browser == 'firefox':
|
||||
return webdriver.Firefox()
|
||||
elif browser == 'marionette':
|
||||
capabilities = DesiredCapabilities.FIREFOX
|
||||
capabilities['marionette'] = True
|
||||
return webdriver.Firefox(capabilities=capabilities)
|
||||
elif browser == 'ie':
|
||||
return webdriver.Ie()
|
||||
elif browser == 'phantomjs':
|
||||
return webdriver.PhantomJS()
|
||||
else:
|
||||
msg = 'Selenium driver for browser %s is not available' % browser
|
||||
raise RuntimeError(msg)
|
||||
|
||||
class Wait(WebDriverWait):
|
||||
"""
|
||||
Subclass of WebDriverWait with predetermined timeout and poll
|
||||
frequency. Also deals with a wider variety of exceptions.
|
||||
"""
|
||||
_TIMEOUT = 10
|
||||
_POLL_FREQUENCY = 0.5
|
||||
|
||||
def __init__(self, driver):
|
||||
super(Wait, self).__init__(driver, self._TIMEOUT, self._POLL_FREQUENCY)
|
||||
|
||||
def until(self, method, message=''):
|
||||
"""
|
||||
Calls the method provided with the driver as an argument until the
|
||||
return value is not False.
|
||||
"""
|
||||
|
||||
end_time = time.time() + self._timeout
|
||||
while True:
|
||||
try:
|
||||
value = method(self._driver)
|
||||
if value:
|
||||
return value
|
||||
except NoSuchElementException:
|
||||
pass
|
||||
except StaleElementReferenceException:
|
||||
pass
|
||||
|
||||
time.sleep(self._poll)
|
||||
if time.time() > end_time:
|
||||
break
|
||||
|
||||
raise TimeoutException(message)
|
||||
|
||||
def until_not(self, method, message=''):
|
||||
"""
|
||||
Calls the method provided with the driver as an argument until the
|
||||
return value is False.
|
||||
"""
|
||||
|
||||
end_time = time.time() + self._timeout
|
||||
while True:
|
||||
try:
|
||||
value = method(self._driver)
|
||||
if not value:
|
||||
return value
|
||||
except NoSuchElementException:
|
||||
return True
|
||||
except StaleElementReferenceException:
|
||||
pass
|
||||
|
||||
time.sleep(self._poll)
|
||||
if time.time() > end_time:
|
||||
break
|
||||
|
||||
raise TimeoutException(message)
|
||||
|
||||
class SeleniumTestCaseBase(unittest.TestCase):
|
||||
"""
|
||||
NB StaticLiveServerTestCase is used as the base test case so that
|
||||
static files are served correctly in a Selenium test run context; see
|
||||
https://docs.djangoproject.com/en/1.9/ref/contrib/staticfiles/#specialized-test-case-to-support-live-testing
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
""" Create a webdriver driver at the class level """
|
||||
|
||||
super(SeleniumTestCaseBase, cls).setUpClass()
|
||||
|
||||
# instantiate the Selenium webdriver once for all the test methods
|
||||
# in this test case
|
||||
cls.driver = create_selenium_driver()
|
||||
cls.driver.maximize_window()
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
""" Clean up webdriver driver """
|
||||
|
||||
cls.driver.quit()
|
||||
super(SeleniumTestCaseBase, cls).tearDownClass()
|
||||
|
||||
def get(self, url):
|
||||
"""
|
||||
Selenium requires absolute URLs, so convert Django URLs returned
|
||||
by resolve() or similar to absolute ones and get using the
|
||||
webdriver instance.
|
||||
|
||||
url: a relative URL
|
||||
"""
|
||||
abs_url = '%s%s' % (self.live_server_url, url)
|
||||
self.driver.get(abs_url)
|
||||
|
||||
def find(self, selector):
|
||||
""" Find single element by CSS selector """
|
||||
return self.driver.find_element_by_css_selector(selector)
|
||||
|
||||
def find_all(self, selector):
|
||||
""" Find all elements matching CSS selector """
|
||||
return self.driver.find_elements_by_css_selector(selector)
|
||||
|
||||
def element_exists(self, selector):
|
||||
"""
|
||||
Return True if one element matching selector exists,
|
||||
False otherwise
|
||||
"""
|
||||
return len(self.find_all(selector)) == 1
|
||||
|
||||
def focused_element(self):
|
||||
""" Return the element which currently has focus on the page """
|
||||
return self.driver.switch_to.active_element
|
||||
|
||||
def wait_until_present(self, selector):
|
||||
""" Wait until element matching CSS selector is on the page """
|
||||
is_present = lambda driver: self.find(selector)
|
||||
msg = 'An element matching "%s" should be on the page' % selector
|
||||
element = Wait(self.driver).until(is_present, msg)
|
||||
return element
|
||||
|
||||
def wait_until_visible(self, selector):
|
||||
""" Wait until element matching CSS selector is visible on the page """
|
||||
is_visible = lambda driver: self.find(selector).is_displayed()
|
||||
msg = 'An element matching "%s" should be visible' % selector
|
||||
Wait(self.driver).until(is_visible, msg)
|
||||
return self.find(selector)
|
||||
|
||||
def wait_until_focused(self, selector):
|
||||
""" Wait until element matching CSS selector has focus """
|
||||
is_focused = \
|
||||
lambda driver: self.find(selector) == self.focused_element()
|
||||
msg = 'An element matching "%s" should be focused' % selector
|
||||
Wait(self.driver).until(is_focused, msg)
|
||||
return self.find(selector)
|
||||
|
||||
def enter_text(self, selector, value):
|
||||
""" Insert text into element matching selector """
|
||||
# note that keyup events don't occur until the element is clicked
|
||||
# (in the case of <input type="text"...>, for example), so simulate
|
||||
# user clicking the element before inserting text into it
|
||||
field = self.click(selector)
|
||||
|
||||
field.send_keys(value)
|
||||
return field
|
||||
|
||||
def click(self, selector):
|
||||
""" Click on element which matches CSS selector """
|
||||
element = self.wait_until_visible(selector)
|
||||
element.click()
|
||||
return element
|
||||
|
||||
def get_page_source(self):
|
||||
""" Get raw HTML for the current page """
|
||||
return self.driver.page_source
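For reference, a minimal, hedged sketch of using the Wait helper and driver factory above outside of a test case; the URL and element id are placeholders rather than values from the Toaster code:

# Stand-alone usage sketch of the helpers defined in this file.
from tests.browser.selenium_helpers_base import Wait, create_selenium_driver

driver = create_selenium_driver()
driver.get('http://localhost:8000/')  # placeholder URL
# Wait.until() retries the callable, ignoring missing/stale elements until
# the class-level timeout expires.
element = Wait(driver).until(
    lambda d: d.find_element_by_id('content'),  # 'content' is a placeholder id
    'timed out waiting for #content')
print(element.text)
driver.quit()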
|
||||
@@ -58,6 +58,13 @@ class TestAllBuildsPage(SeleniumTestCase):
|
||||
'outcome': Build.SUCCEEDED
|
||||
}
|
||||
|
||||
self.project1_build_failure = {
|
||||
'project': self.project1,
|
||||
'started_on': now,
|
||||
'completed_on': now,
|
||||
'outcome': Build.FAILED
|
||||
}
|
||||
|
||||
self.default_project_build_success = {
|
||||
'project': self.default_project,
|
||||
'started_on': now,
|
||||
@@ -65,6 +72,46 @@ class TestAllBuildsPage(SeleniumTestCase):
|
||||
'outcome': Build.SUCCEEDED
|
||||
}
|
||||
|
||||
def _get_build_time_element(self, build):
|
||||
"""
|
||||
Return the HTML element containing the build time for a build
|
||||
in the recent builds area
|
||||
"""
|
||||
selector = 'div[data-latest-build-result="%s"] ' \
|
||||
'[data-role="data-recent-build-buildtime-field"]' % build.id
|
||||
|
||||
# because this loads via Ajax, wait for it to be visible
|
||||
self.wait_until_present(selector)
|
||||
|
||||
build_time_spans = self.find_all(selector)
|
||||
|
||||
self.assertEqual(len(build_time_spans), 1)
|
||||
|
||||
return build_time_spans[0]
|
||||
|
||||
def _get_row_for_build(self, build):
|
||||
""" Get the table row for the build from the all builds table """
|
||||
self.wait_until_present('#allbuildstable')
|
||||
|
||||
rows = self.find_all('#allbuildstable tr')
|
||||
|
||||
# look for the row with a download link on the recipe which matches the
|
||||
# build ID
|
||||
url = reverse('builddashboard', args=(build.id,))
|
||||
selector = 'td.target a[href="%s"]' % url
|
||||
|
||||
found_row = None
|
||||
for row in rows:
|
||||
|
||||
outcome_links = row.find_elements_by_css_selector(selector)
|
||||
if len(outcome_links) == 1:
|
||||
found_row = row
|
||||
break
|
||||
|
||||
self.assertNotEqual(found_row, None)
|
||||
|
||||
return found_row
|
||||
|
||||
def test_show_tasks_with_suffix(self):
|
||||
""" Task should be shown as suffix on build name """
|
||||
build = Build.objects.create(**self.project1_build_success)
|
||||
@@ -97,13 +144,13 @@ class TestAllBuildsPage(SeleniumTestCase):
|
||||
self.get(url)
|
||||
|
||||
# shouldn't see a rebuild button for command-line builds
|
||||
selector = 'div[data-latest-build-result="%s"] a.run-again-btn' % default_build.id
|
||||
selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id
|
||||
run_again_button = self.find_all(selector)
|
||||
self.assertEqual(len(run_again_button), 0,
|
||||
'should not see a rebuild button for cli builds')
|
||||
|
||||
# should see a rebuild button for non-command-line builds
|
||||
selector = 'div[data-latest-build-result="%s"] a.run-again-btn' % build1.id
|
||||
selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id
|
||||
run_again_button = self.find_all(selector)
|
||||
self.assertEqual(len(run_again_button), 1,
|
||||
'should see a rebuild button for non-cli builds')
|
||||
@@ -142,3 +189,45 @@ class TestAllBuildsPage(SeleniumTestCase):
|
||||
else:
|
||||
msg = 'found unexpected project name cell in all builds table'
|
||||
self.fail(msg)
|
||||
|
||||
def test_builds_time_links(self):
|
||||
"""
|
||||
Successful builds should have links on the time column and in the
|
||||
recent builds area; failed builds should not have links on the time column,
|
||||
or in the recent builds area
|
||||
"""
|
||||
build1 = Build.objects.create(**self.project1_build_success)
|
||||
build2 = Build.objects.create(**self.project1_build_failure)
|
||||
|
||||
# add some targets to these builds so they have recipe links
|
||||
# (and so we can find the row in the ToasterTable corresponding to
|
||||
# a particular build)
|
||||
Target.objects.create(build=build1, target='foo')
|
||||
Target.objects.create(build=build2, target='bar')
|
||||
|
||||
url = reverse('all-builds')
|
||||
self.get(url)
|
||||
|
||||
# test recent builds area for successful build
|
||||
element = self._get_build_time_element(build1)
|
||||
links = element.find_elements_by_css_selector('a')
|
||||
msg = 'should be a link on the build time for a successful recent build'
|
||||
self.assertEquals(len(links), 1, msg)
|
||||
|
||||
# test recent builds area for failed build
|
||||
element = self._get_build_time_element(build2)
|
||||
links = element.find_elements_by_css_selector('a')
|
||||
msg = 'should not be a link on the build time for a failed recent build'
|
||||
self.assertEquals(len(links), 0, msg)
|
||||
|
||||
# test the time column for successful build
|
||||
build1_row = self._get_row_for_build(build1)
|
||||
links = build1_row.find_elements_by_css_selector('td.time a')
|
||||
msg = 'should be a link on the build time for a successful build'
|
||||
self.assertEquals(len(links), 1, msg)
|
||||
|
||||
# test the time column for failed build
|
||||
build2_row = self._get_row_for_build(build2)
|
||||
links = build2_row.find_elements_by_css_selector('td.time a')
|
||||
msg = 'should not be a link on the build time for a failed build'
|
||||
self.assertEquals(len(links), 0, msg)
|
||||
|
||||
@@ -25,7 +25,7 @@ from django.utils import timezone
|
||||
from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
|
||||
from orm.models import Project, Release, BitbakeVersion, Build, LogMessage
|
||||
from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe
|
||||
from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe, Variable
|
||||
|
||||
class TestBuildDashboardPage(SeleniumTestCase):
|
||||
""" Tests for the build dashboard /build/X """
|
||||
@@ -42,15 +42,27 @@ class TestBuildDashboardPage(SeleniumTestCase):
|
||||
|
||||
self.build1 = Build.objects.create(project=project,
|
||||
started_on=now,
|
||||
completed_on=now)
|
||||
completed_on=now,
|
||||
outcome=Build.SUCCEEDED)
|
||||
|
||||
self.build2 = Build.objects.create(project=project,
|
||||
started_on=now,
|
||||
completed_on=now)
|
||||
completed_on=now,
|
||||
outcome=Build.SUCCEEDED)
|
||||
|
||||
self.build3 = Build.objects.create(project=project,
|
||||
started_on=now,
|
||||
completed_on=now)
|
||||
completed_on=now,
|
||||
outcome=Build.FAILED)
|
||||
|
||||
# add Variable objects to the successful builds, as this is the criterion
|
||||
# used to determine whether the left-hand panel should be displayed
|
||||
Variable.objects.create(build=self.build1,
|
||||
variable_name='Foo',
|
||||
variable_value='Bar')
|
||||
Variable.objects.create(build=self.build2,
|
||||
variable_name='Foo',
|
||||
variable_value='Bar')
|
||||
|
||||
# exception
|
||||
msg1 = 'an exception was thrown'
|
||||
@@ -68,6 +80,22 @@ class TestBuildDashboardPage(SeleniumTestCase):
|
||||
message=msg2
|
||||
)
|
||||
|
||||
# error on the failed build
|
||||
msg3 = 'an error occurred'
|
||||
self.error_message = LogMessage.objects.create(
|
||||
build=self.build3,
|
||||
level=LogMessage.ERROR,
|
||||
message=msg3
|
||||
)
|
||||
|
||||
# warning on the failed build
|
||||
msg4 = 'DANGER WILL ROBINSON'
|
||||
self.warning_message = LogMessage.objects.create(
|
||||
build=self.build3,
|
||||
level=LogMessage.WARNING,
|
||||
message=msg4
|
||||
)
|
||||
|
||||
# recipes related to the build, for testing the edit custom image/new
|
||||
# custom image buttons
|
||||
layer = Layer.objects.create(name='alayer')
|
||||
@@ -151,36 +179,45 @@ class TestBuildDashboardPage(SeleniumTestCase):
|
||||
self._get_build_dashboard(build)
|
||||
return self.find_all('#errors div.alert-danger')
|
||||
|
||||
def _check_for_log_message(self, build, log_message):
|
||||
def _check_for_log_message(self, message_elements, log_message):
|
||||
"""
|
||||
Check that the LogMessage <log_message> has a representation in
|
||||
the HTML elements <message_elements>.
|
||||
|
||||
message_elements: WebElements representing the log messages shown
|
||||
in the build dashboard; each should have a <pre> element inside
|
||||
it with a data-log-message-id attribute
|
||||
|
||||
log_message: orm.models.LogMessage instance
|
||||
"""
|
||||
expected_text = log_message.message
|
||||
expected_pk = str(log_message.pk)
|
||||
|
||||
found = False
|
||||
for element in message_elements:
|
||||
log_message_text = element.find_element_by_tag_name('pre').text.strip()
|
||||
text_matches = (log_message_text == expected_text)
|
||||
|
||||
log_message_pk = element.get_attribute('data-log-message-id')
|
||||
id_matches = (log_message_pk == expected_pk)
|
||||
|
||||
if text_matches and id_matches:
|
||||
found = True
|
||||
break
|
||||
|
||||
template_vars = (expected_text, expected_pk)
|
||||
assertion_failed_msg = 'message not found: ' \
|
||||
'expected text "%s" and ID %s' % template_vars
|
||||
self.assertTrue(found, assertion_failed_msg)
|
||||
|
||||
def _check_for_error_message(self, build, log_message):
|
||||
"""
|
||||
Check whether the LogMessage instance <log_message> is
|
||||
represented as an HTML error in the dashboard page for the Build object
|
||||
build
|
||||
"""
|
||||
errors = self._get_build_dashboard_errors(build)
|
||||
self.assertEqual(len(errors), 2)
|
||||
|
||||
expected_text = log_message.message
|
||||
expected_id = str(log_message.id)
|
||||
|
||||
found = False
|
||||
for error in errors:
|
||||
error_text = error.find_element_by_tag_name('pre').text
|
||||
text_matches = (error_text == expected_text)
|
||||
|
||||
error_id = error.get_attribute('data-error')
|
||||
id_matches = (error_id == expected_id)
|
||||
|
||||
if text_matches and id_matches:
|
||||
found = True
|
||||
break
|
||||
|
||||
template_vars = (expected_text, error_text,
|
||||
expected_id, error_id)
|
||||
assertion_error_msg = 'exception not found as error: ' \
|
||||
'expected text "%s" and got "%s"; ' \
|
||||
'expected ID %s and got %s' % template_vars
|
||||
self.assertTrue(found, assertion_error_msg)
|
||||
self._check_for_log_message(errors, log_message)
|
||||
|
||||
def _check_labels_in_modal(self, modal, expected):
|
||||
"""
|
||||
@@ -203,14 +240,14 @@ class TestBuildDashboardPage(SeleniumTestCase):
|
||||
LogMessages with level EXCEPTION should display in the errors
|
||||
section of the page
|
||||
"""
|
||||
self._check_for_log_message(self.build1, self.exception_message)
|
||||
self._check_for_error_message(self.build1, self.exception_message)
|
||||
|
||||
def test_criticals_show_as_errors(self):
|
||||
"""
|
||||
LogMessages with level CRITICAL should display in the errors
|
||||
section of the page
|
||||
"""
|
||||
self._check_for_log_message(self.build1, self.critical_message)
|
||||
self._check_for_error_message(self.build1, self.critical_message)
|
||||
|
||||
def test_edit_custom_image_button(self):
|
||||
"""
|
||||
@@ -268,3 +305,43 @@ class TestBuildDashboardPage(SeleniumTestCase):
|
||||
self.assertFalse(self.element_exists(selector),
|
||||
'new custom image button should not show for builds which ' \
|
||||
'don\'t have any image recipes')
|
||||
|
||||
def test_left_panel(self):
|
||||
""""
|
||||
Builds which succeed should have a left panel and a build summary
|
||||
"""
|
||||
self._get_build_dashboard(self.build1)
|
||||
|
||||
left_panel = self.find_all('#nav')
|
||||
self.assertEqual(len(left_panel), 1)
|
||||
|
||||
build_summary = self.find_all('[data-role="build-summary-heading"]')
|
||||
self.assertEqual(len(build_summary), 1)
|
||||
|
||||
def test_failed_no_left_panel(self):
|
||||
"""
|
||||
Builds which fail should have no left panel and no build summary
|
||||
"""
|
||||
self._get_build_dashboard(self.build3)
|
||||
|
||||
left_panel = self.find_all('#nav')
|
||||
self.assertEqual(len(left_panel), 0)
|
||||
|
||||
build_summary = self.find_all('[data-role="build-summary-heading"]')
|
||||
self.assertEqual(len(build_summary), 0)
|
||||
|
||||
def test_failed_shows_errors_and_warnings(self):
|
||||
"""
|
||||
Failed builds should still show error and warning messages
|
||||
"""
|
||||
self._get_build_dashboard(self.build3)
|
||||
|
||||
errors = self.find_all('#errors div.alert-danger')
|
||||
self._check_for_log_message(errors, self.error_message)
|
||||
|
||||
# expand the warnings area
|
||||
self.click('#warning-toggle')
|
||||
self.wait_until_visible('#warnings div.alert-warning')
|
||||
|
||||
warnings = self.find_all('#warnings div.alert-warning')
|
||||
self._check_for_log_message(warnings, self.warning_message)
|
||||
|
||||
@@ -26,7 +26,7 @@ from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
|
||||
from orm.models import Project, Release, BitbakeVersion, Build, Target, Package
|
||||
from orm.models import Target_Image_File, TargetSDKFile, TargetKernelFile
|
||||
from orm.models import Target_Installed_Package
|
||||
from orm.models import Target_Installed_Package, Variable
|
||||
|
||||
class TestBuildDashboardPageArtifacts(SeleniumTestCase):
|
||||
""" Tests for artifacts on the build dashboard /build/X """
|
||||
@@ -151,6 +151,11 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase):
|
||||
started_on=now, completed_on=timezone.now(),
|
||||
outcome=Build.SUCCEEDED)
|
||||
|
||||
# add a variable to the build so that it counts as "started"
|
||||
Variable.objects.create(build=build,
|
||||
variable_name='Christopher',
|
||||
variable_value='Lee')
|
||||
|
||||
target = Target.objects.create(is_image=True, build=build,
|
||||
task='', target='core-image-minimal',
|
||||
license_manifest_path='/home/foo/license.manifest',
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
#! /usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils import timezone
|
||||
from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
from orm.models import Project, Build, Recipe, Task, Layer, Layer_Version
|
||||
from orm.models import Target
|
||||
|
||||
class TestBuilddashboardPageRecipes(SeleniumTestCase):
|
||||
""" Test build dashboard recipes sub-page """
|
||||
|
||||
def setUp(self):
|
||||
project = Project.objects.get_or_create_default_project()
|
||||
|
||||
now = timezone.now()
|
||||
|
||||
self.build = Build.objects.create(project=project,
|
||||
started_on=now,
|
||||
completed_on=now)
|
||||
|
||||
layer = Layer.objects.create()
|
||||
|
||||
layer_version = Layer_Version.objects.create(layer=layer,
|
||||
build=self.build)
|
||||
|
||||
recipe = Recipe.objects.create(layer_version=layer_version)
|
||||
|
||||
task = Task.objects.create(build=self.build, recipe=recipe, order=1)
|
||||
|
||||
Target.objects.create(build=self.build, task=task, target='do_build')
|
||||
|
||||
def test_build_recipes_columns(self):
|
||||
"""
|
||||
Check that non-hideable columns of the table on the recipes sub-page
|
||||
are disabled on the edit columns dropdown.
|
||||
"""
|
||||
url = reverse('recipes', args=(self.build.id,))
|
||||
self.get(url)
|
||||
|
||||
self.wait_until_visible('#edit-columns-button')
|
||||
|
||||
# check that options for the non-hideable columns are disabled
|
||||
non_hideable = ['name', 'version']
|
||||
|
||||
for column in non_hideable:
|
||||
selector = 'input#checkbox-%s[disabled="disabled"]' % column
|
||||
self.wait_until_present(selector)
|
||||
@@ -0,0 +1,65 @@
|
||||
#! /usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils import timezone
|
||||
from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
from orm.models import Project, Build, Recipe, Task, Layer, Layer_Version
|
||||
from orm.models import Target
|
||||
|
||||
class TestBuilddashboardPageTasks(SeleniumTestCase):
|
||||
""" Test build dashboard tasks sub-page """
|
||||
|
||||
def setUp(self):
|
||||
project = Project.objects.get_or_create_default_project()
|
||||
|
||||
now = timezone.now()
|
||||
|
||||
self.build = Build.objects.create(project=project,
|
||||
started_on=now,
|
||||
completed_on=now)
|
||||
|
||||
layer = Layer.objects.create()
|
||||
|
||||
layer_version = Layer_Version.objects.create(layer=layer)
|
||||
|
||||
recipe = Recipe.objects.create(layer_version=layer_version)
|
||||
|
||||
task = Task.objects.create(build=self.build, recipe=recipe, order=1)
|
||||
|
||||
Target.objects.create(build=self.build, task=task, target='do_build')
|
||||
|
||||
def test_build_tasks_columns(self):
|
||||
"""
|
||||
Check that non-hideable columns of the table on the tasks sub-page
|
||||
are disabled on the edit columns dropdown.
|
||||
"""
|
||||
url = reverse('tasks', args=(self.build.id,))
|
||||
self.get(url)
|
||||
|
||||
self.wait_until_visible('#edit-columns-button')
|
||||
|
||||
# check that options for the non-hideable columns are disabled
|
||||
non_hideable = ['order', 'task_name', 'recipe__name']
|
||||
|
||||
for column in non_hideable:
|
||||
selector = 'input#checkbox-%s[disabled="disabled"]' % column
|
||||
self.wait_until_present(selector)
|
||||
@@ -49,9 +49,6 @@ class TestLayerDetailsPage(SeleniumTestCase):
|
||||
# project to add new custom images to
|
||||
self.project = Project.objects.create(name='foo', release=release)
|
||||
|
||||
layer_source = LayerSource.objects.create(
|
||||
sourcetype=LayerSource.TYPE_IMPORTED)
|
||||
|
||||
name = "meta-imported"
|
||||
vcs_url = "git://example.com/meta-imported"
|
||||
subdir = "/layer"
|
||||
@@ -66,7 +63,7 @@ class TestLayerDetailsPage(SeleniumTestCase):
|
||||
|
||||
self.imported_layer_version = Layer_Version.objects.create(
|
||||
layer=imported_layer,
|
||||
layer_source=layer_source,
|
||||
layer_source=LayerSource.TYPE_IMPORTED,
|
||||
branch=gitrev,
|
||||
commit=gitrev,
|
||||
dirpath=subdir,
|
||||
@@ -116,8 +113,8 @@ class TestLayerDetailsPage(SeleniumTestCase):
|
||||
new_values = ["%s-edited" % old_val
|
||||
for old_val in self.initial_values]
|
||||
|
||||
for inputs in self.find_all("dd input[type=text]") + \
|
||||
self.find_all("dd textarea"):
|
||||
for inputs in self.find_all('dd input[type="text"]') + \
|
||||
self.find_all('dd textarea'):
|
||||
# ignore the tt inputs (twitter typeahead input)
|
||||
if "tt-" in inputs.get_attribute("class"):
|
||||
continue
|
||||
@@ -125,8 +122,8 @@ class TestLayerDetailsPage(SeleniumTestCase):
|
||||
value = inputs.get_attribute("value")
|
||||
|
||||
self.assertTrue(value in new_values,
|
||||
"Expecting any of \"%s\"but got \"%s\"" %
|
||||
(self.initial_values, value))
|
||||
"Expecting any of \"%s\" but got \"%s\"" %
|
||||
(new_values, value))
|
||||
|
||||
def test_delete_layer(self):
|
||||
""" Delete the layer """
|
||||
|
||||
@@ -0,0 +1,211 @@
|
||||
#! /usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils import timezone
|
||||
from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
from tests.browser.selenium_helpers_base import Wait
|
||||
from orm.models import Project, Build, Task, Recipe, Layer, Layer_Version
|
||||
from bldcontrol.models import BuildRequest
|
||||
|
||||
class TestMostRecentBuildsStates(SeleniumTestCase):
|
||||
""" Test states update correctly in most recent builds area """
|
||||
|
||||
def _create_build_request(self):
|
||||
project = Project.objects.get_or_create_default_project()
|
||||
|
||||
now = timezone.now()
|
||||
|
||||
build = Build.objects.create(project=project, build_name='fakebuild',
|
||||
started_on=now, completed_on=now)
|
||||
|
||||
return BuildRequest.objects.create(build=build, project=project,
|
||||
state=BuildRequest.REQ_QUEUED)
|
||||
|
||||
def _create_recipe(self):
|
||||
""" Add a recipe to the database and return it """
|
||||
layer = Layer.objects.create()
|
||||
layer_version = Layer_Version.objects.create(layer=layer)
|
||||
return Recipe.objects.create(name='foo', layer_version=layer_version)
|
||||
|
||||
def _check_build_states(self, build_request):
|
||||
recipes_to_parse = 10
|
||||
url = reverse('all-builds')
|
||||
self.get(url)
|
||||
|
||||
build = build_request.build
|
||||
base_selector = '[data-latest-build-result="%s"] ' % build.id
|
||||
|
||||
# build queued; check shown as queued
|
||||
selector = base_selector + '[data-build-state="Queued"]'
|
||||
element = self.wait_until_visible(selector)
|
||||
self.assertRegexpMatches(element.get_attribute('innerHTML'),
|
||||
'Build queued', 'build should show queued status')
|
||||
|
||||
# waiting for recipes to be parsed
|
||||
build.outcome = Build.IN_PROGRESS
|
||||
build.recipes_to_parse = recipes_to_parse
|
||||
build.recipes_parsed = 0
|
||||
|
||||
build_request.state = BuildRequest.REQ_INPROGRESS
|
||||
build_request.save()
|
||||
|
||||
self.get(url)
|
||||
|
||||
selector = base_selector + '[data-build-state="Parsing"]'
|
||||
element = self.wait_until_visible(selector)
|
||||
|
||||
bar_selector = '#recipes-parsed-percentage-bar-%s' % build.id
|
||||
bar_element = element.find_element_by_css_selector(bar_selector)
|
||||
self.assertEqual(bar_element.value_of_css_property('width'), '0px',
|
||||
'recipe parse progress should be at 0')
|
||||
|
||||
# recipes being parsed; check parse progress
|
||||
build.recipes_parsed = 5
|
||||
build.save()
|
||||
|
||||
self.get(url)
|
||||
|
||||
element = self.wait_until_visible(selector)
|
||||
bar_element = element.find_element_by_css_selector(bar_selector)
|
||||
recipe_bar_updated = lambda driver: \
|
||||
bar_element.get_attribute('style') == 'width: 50%;'
|
||||
msg = 'recipe parse progress bar should update to 50%'
|
||||
element = Wait(self.driver).until(recipe_bar_updated, msg)
|
||||
|
||||
# all recipes parsed, task started, waiting for first task to finish;
|
||||
# check status is shown as "Tasks starting..."
|
||||
build.recipes_parsed = recipes_to_parse
|
||||
build.save()
|
||||
|
||||
recipe = self._create_recipe()
|
||||
task1 = Task.objects.create(build=build, recipe=recipe,
|
||||
task_name='Lionel')
|
||||
task2 = Task.objects.create(build=build, recipe=recipe,
|
||||
task_name='Jeffries')
|
||||
|
||||
self.get(url)
|
||||
|
||||
selector = base_selector + '[data-build-state="Starting"]'
|
||||
element = self.wait_until_visible(selector)
|
||||
self.assertRegexpMatches(element.get_attribute('innerHTML'),
|
||||
'Tasks starting', 'build should show "tasks starting" status')
|
||||
|
||||
# first task finished; check tasks progress bar
|
||||
task1.order = 1
|
||||
task1.save()
|
||||
|
||||
self.get(url)
|
||||
|
||||
selector = base_selector + '[data-build-state="In Progress"]'
|
||||
element = self.wait_until_visible(selector)
|
||||
|
||||
bar_selector = '#build-pc-done-bar-%s' % build.id
|
||||
bar_element = element.find_element_by_css_selector(bar_selector)
|
||||
|
||||
task_bar_updated = lambda driver: \
|
||||
bar_element.get_attribute('style') == 'width: 50%;'
|
||||
msg = 'tasks progress bar should update to 50%'
|
||||
element = Wait(self.driver).until(task_bar_updated, msg)
|
||||
|
||||
# last task finished; check tasks progress bar updates
|
||||
task2.order = 2
|
||||
task2.save()
|
||||
|
||||
self.get(url)
|
||||
|
||||
element = self.wait_until_visible(selector)
|
||||
bar_element = element.find_element_by_css_selector(bar_selector)
|
||||
task_bar_updated = lambda driver: \
|
||||
bar_element.get_attribute('style') == 'width: 100%;'
|
||||
msg = 'tasks progress bar should update to 100%'
|
||||
element = Wait(self.driver).until(task_bar_updated, msg)
|
||||
|
||||
def test_states_to_success(self):
|
||||
"""
|
||||
Test state transitions in the recent builds area for a build which
|
||||
completes successfully.
|
||||
"""
|
||||
build_request = self._create_build_request()
|
||||
|
||||
self._check_build_states(build_request)
|
||||
|
||||
# all tasks complete and build succeeded; check success state shown
|
||||
build = build_request.build
|
||||
build.outcome = Build.SUCCEEDED
|
||||
build.save()
|
||||
|
||||
selector = '[data-latest-build-result="%s"] ' \
|
||||
'[data-build-state="Succeeded"]' % build.id
|
||||
element = self.wait_until_visible(selector)
|
||||
|
||||
def test_states_to_failure(self):
|
||||
"""
|
||||
Test state transitions in the recent builds area for a build which
|
||||
completes in a failure.
|
||||
"""
|
||||
build_request = self._create_build_request()
|
||||
|
||||
self._check_build_states(build_request)
|
||||
|
||||
# all tasks complete and build succeeded; check fail state shown
|
||||
build = build_request.build
|
||||
build.outcome = Build.FAILED
|
||||
build.save()
|
||||
|
||||
selector = '[data-latest-build-result="%s"] ' \
|
||||
'[data-build-state="Failed"]' % build.id
|
||||
element = self.wait_until_visible(selector)
|
||||
|
||||
def test_states_cancelling(self):
|
||||
"""
|
||||
Test that most recent build area updates correctly for a build
|
||||
which is cancelled.
|
||||
"""
|
||||
url = reverse('all-builds')
|
||||
|
||||
build_request = self._create_build_request()
|
||||
build = build_request.build
|
||||
|
||||
# cancel the build
|
||||
build_request.state = BuildRequest.REQ_CANCELLING
|
||||
build_request.save()
|
||||
|
||||
self.get(url)
|
||||
|
||||
# check cancelling state
|
||||
selector = '[data-latest-build-result="%s"] ' \
|
||||
'[data-build-state="Cancelling"]' % build.id
|
||||
element = self.wait_until_visible(selector)
|
||||
self.assertRegexpMatches(element.get_attribute('innerHTML'),
|
||||
'Cancelling the build', 'build should show "cancelling" status')
|
||||
|
||||
# check cancelled state
|
||||
build.outcome = Build.CANCELLED
|
||||
build.save()
|
||||
|
||||
self.get(url)
|
||||
|
||||
selector = '[data-latest-build-result="%s"] ' \
|
||||
'[data-build-state="Cancelled"]' % build.id
|
||||
element = self.wait_until_visible(selector)
|
||||
self.assertRegexpMatches(element.get_attribute('innerHTML'),
|
||||
'Build cancelled', 'build should show "cancelled" status')
|
||||
@@ -22,6 +22,7 @@
|
||||
from django.core.urlresolvers import reverse
|
||||
from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
from selenium.webdriver.support.ui import Select
|
||||
from selenium.common.exceptions import InvalidElementStateException
|
||||
|
||||
from orm.models import Project, Release, BitbakeVersion
|
||||
|
||||
@@ -102,7 +103,10 @@ class TestNewProjectPage(SeleniumTestCase):
|
||||
|
||||
# Try and click it anyway, if it submits we'll have a new project in
|
||||
# the db and assert then
|
||||
self.click("#create-project-button")
|
||||
try:
|
||||
self.click("#create-project-button")
|
||||
except InvalidElementStateException:
|
||||
pass
|
||||
|
||||
self.assertTrue(
|
||||
(Project.objects.filter(name=project_name).count() == 1),
|
||||
|
||||
76
bitbake/lib/toaster/tests/browser/test_task_page.py
Normal file
76
bitbake/lib/toaster/tests/browser/test_task_page.py
Normal file
@@ -0,0 +1,76 @@
|
||||
#! /usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils import timezone
|
||||
from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
from orm.models import Project, Build, Layer, Layer_Version, Recipe, Target
|
||||
from orm.models import Task, Task_Dependency
|
||||
|
||||
class TestTaskPage(SeleniumTestCase):
|
||||
""" Test page which shows an individual task """
|
||||
RECIPE_NAME = 'bar'
|
||||
RECIPE_VERSION = '0.1'
|
||||
TASK_NAME = 'do_da_doo_ron_ron'
|
||||
|
||||
def setUp(self):
|
||||
now = timezone.now()
|
||||
|
||||
project = Project.objects.get_or_create_default_project()
|
||||
|
||||
self.build = Build.objects.create(project=project, started_on=now,
|
||||
completed_on=now)
|
||||
|
||||
Target.objects.create(target='foo', build=self.build)
|
||||
|
||||
layer = Layer.objects.create()
|
||||
|
||||
layer_version = Layer_Version.objects.create(layer=layer)
|
||||
|
||||
recipe = Recipe.objects.create(name=TestTaskPage.RECIPE_NAME,
|
||||
layer_version=layer_version, version=TestTaskPage.RECIPE_VERSION)
|
||||
|
||||
self.task = Task.objects.create(build=self.build, recipe=recipe,
|
||||
order=1, outcome=Task.OUTCOME_COVERED, task_executed=False,
|
||||
task_name=TestTaskPage.TASK_NAME)
|
||||
|
||||
def test_covered_task(self):
|
||||
"""
|
||||
Check that covered tasks are displayed for tasks which have
|
||||
dependencies on themselves
|
||||
"""
|
||||
|
||||
# the infinite loop which of bug 9952 was down to tasks which
|
||||
# depend on themselves, so add self-dependent tasks to replicate the
|
||||
# situation which caused the infinite loop (now fixed)
|
||||
Task_Dependency.objects.create(task=self.task, depends_on=self.task)
|
||||
|
||||
url = reverse('task', args=(self.build.id, self.task.id,))
|
||||
self.get(url)
|
||||
|
||||
# check that we see the task name
|
||||
self.wait_until_visible('.page-header h1')
|
||||
|
||||
heading = self.find('.page-header h1')
|
||||
expected_heading = '%s_%s %s' % (TestTaskPage.RECIPE_NAME,
|
||||
TestTaskPage.RECIPE_VERSION, TestTaskPage.TASK_NAME)
|
||||
self.assertEqual(heading.text, expected_heading,
|
||||
'Heading should show recipe name, version and task')
|
||||
160
bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
Normal file
160
bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
Normal file
@@ -0,0 +1,160 @@
|
||||
#! /usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Toaster Implementation
|
||||
#
|
||||
# Copyright (C) 2013-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils import timezone
|
||||
from tests.browser.selenium_helpers import SeleniumTestCase
|
||||
from orm.models import BitbakeVersion, Release, Project, Build
|
||||
|
||||
class TestToasterTableUI(SeleniumTestCase):
|
||||
"""
|
||||
Tests for the UI elements of ToasterTable (sorting etc.);
|
||||
note that the tests cover generic functionality of ToasterTable which
|
||||
manifests as UI elements in the browser, and can only be tested via
|
||||
Selenium.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def _get_orderby_heading(self, table):
|
||||
"""
|
||||
Get the current order by finding the column heading in <table> with
|
||||
the sorted class on it.
|
||||
|
||||
table: WebElement for a ToasterTable
|
||||
"""
|
||||
selector = 'thead a.sorted'
|
||||
heading = table.find_element_by_css_selector(selector)
|
||||
return heading.get_attribute('innerHTML').strip()
|
||||
|
||||
def _get_datetime_from_cell(self, row, selector):
|
||||
"""
|
||||
Return the value in the cell selected by <selector> on <row> as a
|
||||
datetime.
|
||||
|
||||
row: <tr> WebElement for a row in the ToasterTable
|
||||
selector: CSS selector to use to find the cell containing the date time
|
||||
string
|
||||
"""
|
||||
cell = row.find_element_by_css_selector(selector)
|
||||
cell_text = cell.get_attribute('innerHTML').strip()
|
||||
return datetime.strptime(cell_text, '%d/%m/%y %H:%M')
|
||||
|
||||
def test_revert_orderby(self):
|
||||
"""
|
||||
Test that sort order for a table reverts to the default sort order
|
||||
if the current sort column is hidden.
|
||||
"""
|
||||
now = timezone.now()
|
||||
later = now + timezone.timedelta(hours=1)
|
||||
even_later = later + timezone.timedelta(hours=1)
|
||||
|
||||
bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/',
|
||||
branch='master', dirpath='')
|
||||
release = Release.objects.create(name='test release',
|
||||
branch_name='master',
|
||||
bitbake_version=bbv)
|
||||
|
||||
project = Project.objects.create_project('project', release)
|
||||
|
||||
# set up two builds which will order differently when sorted by
|
||||
# started_on or completed_on
|
||||
|
||||
# started first, finished last
|
||||
build1 = Build.objects.create(project=project,
|
||||
started_on=now,
|
||||
completed_on=even_later,
|
||||
outcome=Build.SUCCEEDED)
|
||||
|
||||
# started second, finished first
|
||||
build2 = Build.objects.create(project=project,
|
||||
started_on=later,
|
||||
completed_on=later,
|
||||
outcome=Build.SUCCEEDED)
|
||||
|
||||
url = reverse('all-builds')
|
||||
self.get(url)
|
||||
table = self.wait_until_visible('#allbuildstable')
|
||||
|
||||
# check ordering (default is by -completed_on); so build1 should be
|
||||
# first as it finished last
|
||||
active_heading = self._get_orderby_heading(table)
|
||||
self.assertEqual(active_heading, 'Completed on',
|
||||
'table should be sorted by "Completed on" by default')
|
||||
|
||||
row_selector = '#allbuildstable tbody tr'
|
||||
cell_selector = 'td.completed_on'
|
||||
|
||||
rows = self.find_all(row_selector)
|
||||
row1_completed_on = self._get_datetime_from_cell(rows[0], cell_selector)
|
||||
row2_completed_on = self._get_datetime_from_cell(rows[1], cell_selector)
|
||||
self.assertTrue(row1_completed_on > row2_completed_on,
|
||||
'table should be sorted by -completed_on')
|
||||
|
||||
# turn on started_on column
|
||||
self.click('#edit-columns-button')
|
||||
self.click('#checkbox-started_on')
|
||||
|
||||
# sort by started_on column
|
||||
links = table.find_elements_by_css_selector('th.started_on a')
|
||||
for link in links:
|
||||
if link.get_attribute('innerHTML').strip() == 'Started on':
|
||||
link.click()
|
||||
break
|
||||
|
||||
# wait for table data to reload in response to new sort
|
||||
self.wait_until_visible('#allbuildstable')
|
||||
|
||||
# check ordering; build1 should be first
|
||||
active_heading = self._get_orderby_heading(table)
|
||||
self.assertEqual(active_heading, 'Started on',
|
||||
'table should be sorted by "Started on"')
|
||||
|
||||
cell_selector = 'td.started_on'
|
||||
|
||||
rows = self.find_all(row_selector)
|
||||
row1_started_on = self._get_datetime_from_cell(rows[0], cell_selector)
|
||||
row2_started_on = self._get_datetime_from_cell(rows[1], cell_selector)
|
||||
self.assertTrue(row1_started_on < row2_started_on,
|
||||
'table should be sorted by started_on')
|
||||
|
||||
# turn off started_on column
|
||||
self.click('#edit-columns-button')
|
||||
self.click('#checkbox-started_on')
|
||||
|
||||
# wait for table data to reload in response to new sort
|
||||
self.wait_until_visible('#allbuildstable')
|
||||
|
||||
# check ordering (should revert to completed_on); build2 should be first
|
||||
active_heading = self._get_orderby_heading(table)
|
||||
self.assertEqual(active_heading, 'Completed on',
|
||||
'table should be sorted by "Completed on" after hiding sort column')
|
||||
|
||||
cell_selector = 'td.completed_on'
|
||||
|
||||
rows = self.find_all(row_selector)
|
||||
row1_completed_on = self._get_datetime_from_cell(rows[0], cell_selector)
|
||||
row2_completed_on = self._get_datetime_from_cell(rows[1], cell_selector)
|
||||
self.assertTrue(row1_completed_on > row2_completed_on,
|
||||
'table should be sorted by -completed_on')
|
||||
@@ -27,7 +27,10 @@ from bldcontrol import bbcontroller
|
||||
from django.http import HttpResponse, JsonResponse
|
||||
from django.views.generic import View
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
from django.core import serializers
|
||||
from django.utils import timezone
|
||||
from django.template.defaultfilters import date
|
||||
from toastergui.templatetags.projecttags import json, sectohms, get_tasks
|
||||
|
||||
def error_response(error):
|
||||
return JsonResponse({"error": error})
|
||||
@@ -84,7 +87,7 @@ class XhrBuildRequest(View):
|
||||
br.save()
|
||||
|
||||
except BuildRequest.DoesNotExist:
|
||||
return error_response('No such build id %s' % i)
|
||||
return error_response('No such build request id %s' % i)
|
||||
|
||||
return error_response('ok')
|
||||
|
||||
@@ -147,7 +150,7 @@ class XhrLayer(View):
|
||||
layer_version = Layer_Version.objects.get(
|
||||
id=kwargs['layerversion_id'],
|
||||
project=kwargs['pid'],
|
||||
layer_source__sourcetype=LayerSource.TYPE_IMPORTED)
|
||||
layer_source=LayerSource.TYPE_IMPORTED)
|
||||
|
||||
except Layer_Version.DoesNotExist:
|
||||
return error_response("Cannot find imported layer to update")
|
||||
@@ -159,8 +162,6 @@ class XhrLayer(View):
|
||||
if "commit" in request.POST:
|
||||
layer_version.commit = request.POST["commit"]
|
||||
layer_version.branch = request.POST["commit"]
|
||||
if "up_branch" in request.POST:
|
||||
layer_version.up_branch_id = int(request.POST["up_branch"])
|
||||
if "summary" in request.POST:
|
||||
layer_version.layer.summary = request.POST["summary"]
|
||||
if "description" in request.POST:
|
||||
@@ -193,7 +194,7 @@ class XhrLayer(View):
|
||||
layer_version = Layer_Version.objects.get(
|
||||
id=kwargs['layerversion_id'],
|
||||
project=kwargs['pid'],
|
||||
layer_source__sourcetype=LayerSource.TYPE_IMPORTED)
|
||||
layer_source=LayerSource.TYPE_IMPORTED)
|
||||
except Layer_Version.DoesNotExist:
|
||||
return error_response("Cannot find imported layer to delete")
|
||||
|
||||
@@ -210,3 +211,111 @@ class XhrLayer(View):
|
||||
"error": "ok",
|
||||
"redirect": reverse('project', args=(kwargs['pid'],))
|
||||
})
|
||||
|
||||
class MostRecentBuildsView(View):
|
||||
def _was_yesterday_or_earlier(self, completed_on):
|
||||
now = timezone.now()
|
||||
delta = now - completed_on
|
||||
|
||||
if delta.days >= 1:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""
|
||||
Returns a list of builds in JSON format.
|
||||
"""
|
||||
mrb_type = 'all'
|
||||
project = None
|
||||
|
||||
project_id = request.GET.get('project_id', None)
|
||||
if project_id:
|
||||
try:
|
||||
mrb_type = 'project'
|
||||
project = Project.objects.get(pk=project_id)
|
||||
except:
|
||||
# if project lookup fails, assume no project
|
||||
pass
|
||||
|
||||
recent_build_objs = Build.get_recent(project)
|
||||
recent_builds = []
|
||||
|
||||
# for timezone conversion
|
||||
tz = timezone.get_current_timezone()
|
||||
|
||||
for build_obj in recent_build_objs:
|
||||
dashboard_url = reverse('builddashboard', args=(build_obj.pk,))
|
||||
buildtime_url = reverse('buildtime', args=(build_obj.pk,))
|
||||
rebuild_url = \
|
||||
reverse('xhr_buildrequest', args=(build_obj.project.pk,))
|
||||
cancel_url = \
|
||||
reverse('xhr_buildrequest', args=(build_obj.project.pk,))
|
||||
|
||||
build = {}
|
||||
build['id'] = build_obj.pk
|
||||
build['dashboard_url'] = dashboard_url
|
||||
|
||||
buildrequest_id = None
|
||||
if hasattr(build_obj, 'buildrequest'):
|
||||
buildrequest_id = build_obj.buildrequest.pk
|
||||
build['buildrequest_id'] = buildrequest_id
|
||||
|
||||
build['recipes_parsed_percentage'] = \
|
||||
int((build_obj.recipes_parsed / build_obj.recipes_to_parse) * 100)
|
||||
|
||||
tasks_complete_percentage = 0
|
||||
if build_obj.outcome in (Build.SUCCEEDED, Build.FAILED):
|
||||
tasks_complete_percentage = 100
|
||||
elif build_obj.outcome == Build.IN_PROGRESS:
|
||||
tasks_complete_percentage = build_obj.completeper()
|
||||
build['tasks_complete_percentage'] = tasks_complete_percentage
|
||||
|
||||
build['state'] = build_obj.get_state()
|
||||
|
||||
build['errors'] = build_obj.errors.count()
|
||||
build['dashboard_errors_url'] = dashboard_url + '#errors'
|
||||
|
||||
build['warnings'] = build_obj.warnings.count()
|
||||
build['dashboard_warnings_url'] = dashboard_url + '#warnings'
|
||||
|
||||
build['buildtime'] = sectohms(build_obj.timespent_seconds)
|
||||
build['buildtime_url'] = buildtime_url
|
||||
|
||||
build['rebuild_url'] = rebuild_url
|
||||
build['cancel_url'] = cancel_url
|
||||
|
||||
build['is_default_project_build'] = build_obj.project.is_default
|
||||
|
||||
build['build_targets_json'] = \
|
||||
json(get_tasks(build_obj.target_set.all()))
|
||||
|
||||
# convert completed_on time to user's timezone
|
||||
completed_on = timezone.localtime(build_obj.completed_on)
|
||||
|
||||
completed_on_template = '%H:%M'
|
||||
if self._was_yesterday_or_earlier(completed_on):
|
||||
completed_on_template = '%d/%m/%Y ' + completed_on_template
|
||||
build['completed_on'] = completed_on.strftime(completed_on_template)
|
||||
|
||||
targets = []
|
||||
target_objs = build_obj.get_sorted_target_list()
|
||||
for target_obj in target_objs:
|
||||
if target_obj.task:
|
||||
targets.append(target_obj.target + ':' + target_obj.task)
|
||||
else:
|
||||
targets.append(target_obj.target)
|
||||
build['targets'] = ' '.join(targets)
|
||||
|
||||
# abbreviated form of the full target list
|
||||
abbreviated_targets = ''
|
||||
num_targets = len(targets)
|
||||
if num_targets > 0:
|
||||
abbreviated_targets = targets[0]
|
||||
if num_targets > 1:
|
||||
abbreviated_targets += (' +%s' % (num_targets - 1))
|
||||
build['targets_abbreviated'] = abbreviated_targets
|
||||
|
||||
recent_builds.append(build)
|
||||
|
||||
return JsonResponse(recent_builds, safe=False)
|
||||
|
||||
@@ -109,15 +109,31 @@ class BuiltPackagesTableBase(tables.PackagesTable):
|
||||
hidden=True,
|
||||
orderable=True)
|
||||
|
||||
layer_branch_template = '''
|
||||
{%if not data.recipe.layer_version.layer.local_source_dir %}
|
||||
<span class="text-muted">{{data.recipe.layer_version.branch}}</span>
|
||||
{% else %}
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.recipe.layer_version.layer.name}} is not in a Git repository, so there is no branch associated with it"> </span>
|
||||
{% endif %}
|
||||
'''
|
||||
|
||||
self.add_column(title="Layer branch",
|
||||
field_name="recipe__layer_version__branch",
|
||||
hidden=True,
|
||||
static_data_name="recipe__layer_version__branch",
|
||||
static_data_template=layer_branch_template,
|
||||
orderable=True)
|
||||
|
||||
git_rev_template = '''
|
||||
{% if not data.recipe.layer_version.layer.local_source_dir %}
|
||||
{% with vcs_ref=data.recipe.layer_version.commit %}
|
||||
{% include 'snippets/gitrev_popover.html' %}
|
||||
{% endwith %}
|
||||
{% else %}
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.recipe.layer_version.layer.name}} is not in a Git repository, so there is no revision associated with it"> </span>
|
||||
{% endif %}
|
||||
'''
|
||||
|
||||
self.add_column(title="Layer commit",
|
||||
@@ -250,10 +266,24 @@ class BuiltRecipesTable(BuildTablesMixin):
|
||||
'{% if data.pathflags %}<i>({{data.pathflags}})</i>'\
|
||||
'{% endif %}'
|
||||
|
||||
git_branch_template = '''
|
||||
{% if data.layer_version.layer.local_source_dir %}
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.layer_version.layer.name}} is not in a Git repository, so there is no branch associated with it"> </span>
|
||||
{% else %}
|
||||
<span>{{data.layer_version.branch}}</span>
|
||||
{% endif %}
|
||||
'''
|
||||
|
||||
git_rev_template = '''
|
||||
{% if data.layer_version.layer.local_source_dir %}
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.layer_version.layer.name}} is not in a Git repository, so there is no commit associated with it"> </span>
|
||||
{% else %}
|
||||
{% with vcs_ref=data.layer_version.commit %}
|
||||
{% include 'snippets/gitrev_popover.html' %}
|
||||
{% endwith %}
|
||||
{% endif %}
|
||||
'''
|
||||
|
||||
depends_on_tmpl = '''
|
||||
@@ -295,13 +325,15 @@ class BuiltRecipesTable(BuildTablesMixin):
|
||||
{% endif %}{% endwith %}{% endwith %}
|
||||
'''
|
||||
|
||||
self.add_column(title="Name",
|
||||
self.add_column(title="Recipe",
|
||||
field_name="name",
|
||||
static_data_name='name',
|
||||
orderable=True,
|
||||
hideable=False,
|
||||
static_data_template=recipe_name_tmpl)
|
||||
|
||||
self.add_column(title="Version",
|
||||
hideable=False,
|
||||
field_name="version")
|
||||
|
||||
self.add_column(title="Dependencies",
|
||||
@@ -340,6 +372,8 @@ class BuiltRecipesTable(BuildTablesMixin):
|
||||
|
||||
self.add_column(title="Layer branch",
|
||||
field_name="layer_version__branch",
|
||||
static_data_name="layer_version__branch",
|
||||
static_data_template=git_branch_template,
|
||||
orderable=True,
|
||||
hidden=True)
|
||||
|
||||
@@ -431,17 +465,20 @@ class BuildTasksTable(BuildTablesMixin):
|
||||
self.add_column(title="Order",
|
||||
static_data_name="order",
|
||||
static_data_template='{{data.order}}',
|
||||
hideable=False,
|
||||
orderable=True)
|
||||
|
||||
self.add_column(title="Task",
|
||||
static_data_name="task_name",
|
||||
static_data_template=task_link_tmpl(
|
||||
"{{data.task_name}}"),
|
||||
hideable=False,
|
||||
orderable=True)
|
||||
|
||||
self.add_column(title="Recipe",
|
||||
static_data_name='recipe__name',
|
||||
static_data_template=recipe_name_tmpl,
|
||||
hideable=False,
|
||||
orderable=True)
|
||||
|
||||
self.add_column(title="Recipe version",
|
||||
|
||||
@@ -1,5 +1,16 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<django-objects version="1.0">
|
||||
<object pk="1" model="orm.bitbakeversion">
|
||||
<field type="CharField" name="name">v2.3</field>
|
||||
<field type="GitURLField" name="giturl">git://git.openembedded.org/bitbake</field>
|
||||
<field type="CharField" name="dirpath">b</field>
|
||||
<field type="CharField" name="branch">a</field>
|
||||
</object>
|
||||
<object pk="1" model="orm.release">
|
||||
<field type="CharField" name="name">master</field>
|
||||
<field type="CharField" name="description">master project</field>
|
||||
<field to="orm.bitbake_version" name="bitbake_version">1</field>
|
||||
</object>
|
||||
<object pk="1" model="orm.project">
|
||||
<field type="CharField" name="name">a test project</field>
|
||||
<field type="CharField" name="short_description"></field>
|
||||
@@ -195,8 +206,6 @@
|
||||
<field to="orm.customimagerecipe" name="recipe_appends" rel="ManyToManyRel"><object pk="3"></object></field>
|
||||
</object>
|
||||
<object pk="1" model="orm.recipe">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">z recipe</field>
|
||||
<field type="CharField" name="version">5.2</field>
|
||||
@@ -212,8 +221,6 @@
|
||||
<field type="BooleanField" name="is_image">False</field>
|
||||
</object>
|
||||
<object pk="2" model="orm.recipe">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">a recipe</field>
|
||||
<field type="CharField" name="version">1.2</field>
|
||||
@@ -229,8 +236,6 @@
|
||||
<field type="BooleanField" name="is_image">False</field>
|
||||
</object>
|
||||
<object pk="3" model="orm.recipe">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel"><None></None></field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">a custom recipe</field>
|
||||
<field type="CharField" name="version"></field>
|
||||
@@ -246,8 +251,6 @@
|
||||
<field type="BooleanField" name="is_image">False</field>
|
||||
</object>
|
||||
<object pk="4" model="orm.recipe">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">a image recipe</field>
|
||||
<field type="CharField" name="version">1.2</field>
|
||||
@@ -263,8 +266,6 @@
|
||||
<field type="BooleanField" name="is_image">True</field>
|
||||
</object>
|
||||
<object pk="5" model="orm.recipe">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">z image recipe</field>
|
||||
<field type="CharField" name="version">1.3</field>
|
||||
@@ -280,8 +281,6 @@
|
||||
<field type="BooleanField" name="is_image">True</field>
|
||||
</object>
|
||||
<object pk="6" model="orm.recipe">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel"><None></None></field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">z custom recipe</field>
|
||||
<field type="CharField" name="version"></field>
|
||||
@@ -307,34 +306,23 @@
|
||||
</object>
|
||||
|
||||
<object pk="1" model="orm.machine">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel"><None></None></field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">1</field>
|
||||
<field type="CharField" name="name">a machine</field>
|
||||
<field type="CharField" name="description">a machine</field>
|
||||
</object>
|
||||
<object pk="2" model="orm.machine">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel"><None></None></field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">2</field>
|
||||
<field type="CharField" name="name">z machine</field>
|
||||
<field type="CharField" name="description">z machine</field>
|
||||
</object>
|
||||
<object pk="3" model="orm.machine">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel"><None></None></field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">1</field>
|
||||
<field type="CharField" name="name">g machine</field>
|
||||
<field type="CharField" name="description">g machine</field>
|
||||
</object>
|
||||
<object pk="1" model="orm.layersource">
|
||||
<field type="CharField" name="name">local</field>
|
||||
<field type="IntegerField" name="sourcetype">1</field>
|
||||
<field type="CharField" name="apiurl"></field>
|
||||
</object>
|
||||
<object pk="1" model="orm.bitbakeversion">
|
||||
<field type="CharField" name="name">test bbv</field>
|
||||
<field type="CharField" name="giturl">/tmp/</field>
|
||||
@@ -355,26 +343,12 @@
|
||||
<field type="CharField" name="branch_name">master</field>
|
||||
<field type="TextField" name="helptext"><None></None></field>
|
||||
</object>
|
||||
<object pk="1" model="orm.releaselayersourcepriority">
|
||||
<field to="orm.release" name="release" rel="ManyToOneRel">1</field>
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="priority">0</field>
|
||||
</object>
|
||||
<object pk="1" model="orm.branch">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="CharField" name="name">master</field>
|
||||
<field type="CharField" name="short_description"></field>
|
||||
</object>
|
||||
<object pk="1" model="orm.layer">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel"><None></None></field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">a layer</field>
|
||||
<field type="CharField" name="vcs_url">/tmp/</field>
|
||||
</object>
|
||||
<object pk="2" model="orm.layer">
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field type="CharField" name="name">z layer</field>
|
||||
<field type="CharField" name="layer_index_url"></field>
|
||||
@@ -383,10 +357,8 @@
|
||||
<object pk="1" model="orm.layer_version">
|
||||
<field to="orm.build" name="build" rel="ManyToOneRel">1</field>
|
||||
<field to="orm.layer" name="layer" rel="ManyToOneRel">1</field>
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field to="orm.branch" name="up_branch" rel="ManyToOneRel">1</field>
|
||||
<field to="orm.release" name="release" rel="ManyToOneRel">1</field>
|
||||
<field type="CharField" name="branch">master</field>
|
||||
<field type="CharField" name="commit">abcdef123</field>
|
||||
<field type="CharField" name="dirpath">/tmp/</field>
|
||||
@@ -397,10 +369,8 @@
|
||||
<object pk="2" model="orm.layer_version">
|
||||
<field to="orm.build" name="build" rel="ManyToOneRel"><None></None></field>
|
||||
<field to="orm.layer" name="layer" rel="ManyToOneRel">2</field>
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field to="orm.branch" name="up_branch" rel="ManyToOneRel">1</field>
|
||||
<field to="orm.release" name="release" rel="ManyToOneRel">1</field>
|
||||
<field type="CharField" name="branch">testing-branch</field>
|
||||
<field type="CharField" name="commit">9876fedcba</field>
|
||||
<field type="CharField" name="dirpath"><None></None></field>
|
||||
@@ -411,10 +381,8 @@
|
||||
<object pk="3" model="orm.layer_version">
|
||||
<field to="orm.build" name="build" rel="ManyToOneRel">1</field>
|
||||
<field to="orm.layer" name="layer" rel="ManyToOneRel">2</field>
|
||||
<field to="orm.layersource" name="layer_source" rel="ManyToOneRel">1</field>
|
||||
<field type="IntegerField" name="up_id"><None></None></field>
|
||||
<field type="DateTimeField" name="up_date"><None></None></field>
|
||||
<field to="orm.branch" name="up_branch" rel="ManyToOneRel">1</field>
|
||||
<field to="orm.release" name="release" rel="ManyToOneRel">1</field>
|
||||
<field type="CharField" name="branch">testing-branch</field>
|
||||
<field type="CharField" name="commit">9876fedcba</field>
|
||||
<field type="CharField" name="dirpath"><None></None></field>
|
||||
|
||||
@@ -45,6 +45,7 @@ img.logo { height: 30px; vertical-align: bottom; }
|
||||
.alert-link.build-warnings,
|
||||
.glyphicon-warning-sign.build-warnings { color: #8a6d3b; }
|
||||
.build-result .project-name { margin-top: -10px; margin-bottom: 5px; }
|
||||
.rebuild-btn, .cancel-build-btn { cursor: pointer; }
|
||||
|
||||
/* Styles for the help information */
|
||||
.get-help { color: #CCCCCC; }
|
||||
@@ -195,17 +196,22 @@ h2 { margin-bottom: 25px; }
|
||||
.tt-suggestion:active { background-color: #f5f5f5; cursor: pointer; }
|
||||
|
||||
/* Style the import layer form controls*/
|
||||
legend { border: none; }
|
||||
legend { border: none; margin-top: 20px; }
|
||||
.radioLegend { margin-bottom: 0; }
|
||||
#layer-name-ctrl { margin-top: 20px; }
|
||||
#import-layer-name,
|
||||
#layer-subdir { width: 20%; }
|
||||
#layer-git-repo-url { width: 40%; }
|
||||
#layer-git-ref { width: 32%; }
|
||||
#local-dir-path { width: 45%; }
|
||||
#layer-dependency { width: 16em; }
|
||||
#layer-deps-list { margin-top: 0; }
|
||||
#form-actions { margin-bottom: 30px; }
|
||||
#duplicate-layer-info dl { margin-top: 10px; }
|
||||
#duplicate-layer-info dd { margin-bottom: 10px; }
|
||||
.help-inline { color: #737373; margin-left: 10px; }
|
||||
.radio-help { width: 50%; margin-left: 20px; }
|
||||
#repo-select div:nth-of-type(2) { margin-top: 15px; }
|
||||
|
||||
/* Give some padding to the in-cell tooltips we use for notifications in tables */
|
||||
td > .tooltip-inner,
|
||||
@@ -249,6 +255,18 @@ code { color: #333; background-color: transparent; }
|
||||
/* Style the special no results message in the custom image details page */
|
||||
[id^="no-results-special-"] > .alert-warning > ol { margin-top: 10px; }
|
||||
|
||||
/* style the loading spinner in the new custom image dialog */
|
||||
#create-new-custom-image-btn [data-role="loading-state"] {
|
||||
padding-left: 16px;
|
||||
}
|
||||
|
||||
/* icon has to be absolutely positioned, otherwise the spin animation doesn't work */
|
||||
#create-new-custom-image-btn [data-role="loading-state"] .icon-spinner {
|
||||
position: absolute;
|
||||
left: 26px;
|
||||
bottom: 26px;
|
||||
}
|
||||
|
||||
/* Style the content of modal dialogs */
|
||||
.modal-footer { text-align: left; }
|
||||
.date-filter-controls { margin-top: 10px; }
|
||||
|
||||
@@ -10,6 +10,7 @@ function importLayerPageInit (ctx) {
var layerDepInput = $("#layer-dependency");
var layerNameCtrl = $("#layer-name-ctrl");
var duplicatedLayerName = $("#duplicated-layer-name-hint");
var localDirPath = $("#local-dir-path");

var layerDeps = {};
var layerDepsDeps = {};
@@ -35,6 +36,9 @@ function importLayerPageInit (ctx) {
}
});

// Disable local dir repo when page is loaded.
$('#local-dir').hide();

// disable the "Add layer" button when the layer input typeahead is empty
// or not in the typeahead choices
layerDepInput.on("input change", function () {
@@ -168,8 +172,16 @@ function importLayerPageInit (ctx) {
dir_path: $("#layer-subdir").val(),
project_id: libtoaster.ctx.projectId,
layer_deps: layerDepsCsv,
local_source_dir: $('#local-dir-path').val(),
};

if ($('input[name=repo]:checked').val() == "git") {
layerData.local_source_dir = "";
} else {
layerData.vcs_url = "";
layerData.git_ref = "";
}

$.ajax({
type: "POST",
url: ctx.xhrImportLayerUrl,
@@ -208,25 +220,45 @@ function importLayerPageInit (ctx) {
function check_form() {
var valid = false;
var inputs = $("input:required");
var inputStr = inputs.val().split("");

for (var i=0; i<inputs.length; i++){
if (!(valid = inputs[i].value)){
for (var i=0; i<inputs.val().length; i++){
if (!(valid = inputStr[i])){
enable_import_btn(false);
break;
}
}

if (valid)
enable_import_btn(true);
if (valid) {
if ($("#local-dir-radio").prop("checked") && localDirPath.val().length > 0) {
enable_import_btn(true);
}
if ($("#git-repo-radio").prop("checked") && vcsURLInput.val().length > 0 && gitRefInput.val().length > 0) {
enable_import_btn(true);
}
}

if (inputs.val().length == 0)
enable_import_btn(false);
}

function layerExistsError(layer){
var dupLayerInfo = $("#duplicate-layer-info");
dupLayerInfo.find(".dup-layer-name").text(layer.name);
dupLayerInfo.find(".dup-layer-link").attr("href", layer.layerdetailurl);
dupLayerInfo.find("#dup-layer-vcs-url").text(layer.vcs_url);
dupLayerInfo.find("#dup-layer-revision").text(layer.vcs_reference);

if (layer.local_source_dir) {
$("#git-layer-dup").hide();
$("#local-layer-dup").fadeIn();
dupLayerInfo.find(".dup-layer-name").text(layer.name);
dupLayerInfo.find(".dup-layer-link").attr("href", layer.layerdetailurl);
dupLayerInfo.find("#dup-local-source-dir-name").text(layer.local_source_dir);
} else {
$("#git-layer-dup").fadeIn();
$("#local-layer-dup").hide();
dupLayerInfo.find(".dup-layer-name").text(layer.name);
dupLayerInfo.find(".dup-layer-link").attr("href", layer.layerdetailurl);
dupLayerInfo.find("#dup-layer-vcs-url").text(layer.vcs_url);
dupLayerInfo.find("#dup-layer-revision").text(layer.vcs_reference);
}
$(".fields-apart-from-layer-name").fadeOut(function(){

dupLayerInfo.fadeIn();
@@ -271,10 +303,13 @@ function importLayerPageInit (ctx) {

if ($("#duplicate-layer-info").css("display") != "None"){
$("#duplicate-layer-info").fadeOut(function(){
$(".fields-apart-from-layer-name").show();
});
$(".fields-apart-from-layer-name").show();
radioDisplay();
});

}
}

radioDisplay();

/* Don't remove the error class if we're displaying the error for another
* reason.
@@ -301,4 +336,72 @@ function importLayerPageInit (ctx) {
}
});

function radioDisplay() {
if ($('input[name=repo]:checked').val() == "local") {
$('#git-repo').hide();
$('#import-git-layer-and-add-hint').hide();
$('#local-dir').fadeIn();
$('#import-local-dir-and-add-hint').fadeIn();
} else {
$('#local-dir').hide();
$('#import-local-dir-and-add-hint').hide();
$('#git-repo').fadeIn();
$('#import-git-layer-and-add-hint').fadeIn();
}
}

$('input:radio[name="repo"]').change(function() {
radioDisplay();
if ($("#local-dir-radio").prop("checked")) {
if (localDirPath.val().length > 0) {
enable_import_btn(true);
} else {
enable_import_btn(false);
}
}
if ($("#git-repo-radio").prop("checked")) {
if (vcsURLInput.val().length > 0 && gitRefInput.val().length > 0) {
enable_import_btn(true);
} else {
enable_import_btn(false);
}
}
});

localDirPath.on('input', function(){
if ($(this).val().trim().length == 0) {
$('#import-and-add-btn').attr("disabled","disabled");
$('#local-dir').addClass('has-error');
$('#hintError-dir-abs-path').show();
$('#hintError-dir-path-starts-with-slash').show();
} else {
var input = $(this);
var reBeginWithSlash = /^\//;
var reCheckVariable = /^\$/;
var re = /([ <>\\|":\.%\?\*]+)/;

var invalidDir = re.test(input.val());
var invalidSlash = reBeginWithSlash.test(input.val());
var invalidVar = reCheckVariable.test(input.val());

if (!invalidSlash && !invalidVar) {
$('#local-dir').addClass('has-error');
$('#import-and-add-btn').attr("disabled","disabled");
$('#hintError-dir-abs-path').show();
$('#hintError-dir-path-starts-with-slash').show();
} else if (invalidDir) {
$('#local-dir').addClass('has-error');
$('#import-and-add-btn').attr("disabled","disabled");
$('#hintError-dir-path').show();
} else {
$('#local-dir').removeClass('has-error');
if (layerNameInput.val().length > 0) {
$('#import-and-add-btn').removeAttr("disabled");
}
$('#hintError-dir-abs-path').hide();
$('#hintError-dir-path-starts-with-slash').hide();
$('#hintError-dir-path').hide();
}
}
});
}

bitbake/lib/toaster/toastergui/static/js/jsrender.min.js (vendored): file diff suppressed because one or more lines are too long

@@ -366,7 +366,7 @@ function layerDetailsPageInit (ctx) {
if ($(this).is("dt")) {
var dd = $(this).next("dd");
if (!dd.children("form:visible")|| !dd.find(".current-value").html()){
if (ctx.layerVersion.sourceId == 3){
if (ctx.layerVersion.layer_source == ctx.layerSourceTypes.TYPE_IMPORTED){
/* There's no current value and the layer is editable
* so show the "Not set" and hide the delete icon
*/

@@ -148,6 +148,21 @@ var libtoaster = (function () {
});
}

function _getMostRecentBuilds(url, onsuccess, onfail) {
$.ajax({
url: url,
type: 'GET',
data : {format: 'json'},
headers: {'X-CSRFToken': $.cookie('csrftoken')},
success: function (data) {
onsuccess ? onsuccess(data) : console.log(data);
},
error: function (data) {
onfail ? onfail(data) : console.error(data);
}
});
}

/* Get a project's configuration info */
function _getProjectInfo(url, onsuccess, onfail){
$.ajax({
@@ -421,11 +436,27 @@ var libtoaster = (function () {
});
}

// if true, the loading spinner for Ajax requests will be displayed
// if requests take more than 1200ms
var ajaxLoadingTimerEnabled = true;

// turn on the page-level loading spinner for Ajax requests
function _enableAjaxLoadingTimer() {
ajaxLoadingTimerEnabled = true;
}

// turn off the page-level loading spinner for Ajax requests
function _disableAjaxLoadingTimer() {
ajaxLoadingTimerEnabled = false;
}

return {
enableAjaxLoadingTimer: _enableAjaxLoadingTimer,
disableAjaxLoadingTimer: _disableAjaxLoadingTimer,
reload_params : reload_params,
startABuild : _startABuild,
cancelABuild : _cancelABuild,
getMostRecentBuilds: _getMostRecentBuilds,
makeTypeahead : _makeTypeahead,
getProjectInfo: _getProjectInfo,
getLayerDepsForProject : _getLayerDepsForProject,
@@ -469,7 +500,6 @@ function reload_params(params) {
window.location.href = url+"?"+callparams.join('&');
}

/* Things that happen for all pages */
$(document).ready(function() {

@@ -628,7 +658,9 @@ $(document).ready(function() {
window.clearTimeout(ajaxLoadingTimer);

ajaxLoadingTimer = window.setTimeout(function() {
$("#loading-notification").fadeIn();
if (libtoaster.ajaxLoadingTimerEnabled) {
$("#loading-notification").fadeIn();
}
}, 1200);
});

@@ -1,33 +1,19 @@

function mrbSectionInit(ctx){

var projectBuilds;

if (ctx.mrbType === 'project')
projectBuilds = true;

$(".cancel-build-btn").click(function(e){
$('#latest-builds').on('click', '.cancel-build-btn', function(e){
e.stopImmediatePropagation();
e.preventDefault();

var url = $(this).data('request-url');
var buildReqIds = $(this).data('buildrequest-id');
var banner = $(this).parents(".alert");

banner.find(".progress-info").fadeOut().promise().done(function(){
$("#cancelling-msg-" + buildReqIds).show();
console.log("cancel build");
libtoaster.cancelABuild(url, buildReqIds, function(){
if (projectBuilds == false){
/* the all builds page is not 'self updating' like thei
* project Builds
*/
window.location.reload();
}
}, null);
});
libtoaster.cancelABuild(url, buildReqIds, function () {
window.location.reload();
}, null);
});

$(".run-again-btn").click(function(e){
$('#latest-builds').on('click', '.rebuild-btn', function(e){
e.stopImmediatePropagation();
e.preventDefault();

var url = $(this).data('request-url');
@@ -38,58 +24,112 @@ function mrbSectionInit(ctx){
}, null);
});

// cached version of buildData, so we can determine whether a build has
// changed since it was last fetched, and update the DOM appropriately
var buildData = {};

var progressTimer;
// returns the cached version of this build, or {} is there isn't a cached one
function getCached(build) {
return buildData[build.id] || {};
}

if (projectBuilds === true){
progressTimer = window.setInterval(function() {
libtoaster.getProjectInfo(libtoaster.ctx.projectPageUrl,
function(prjInfo){
/* These two are needed because a build can be 100% and still
* in progress due to the fact that the % done is updated at the
* start of a task so it can be doing the last task at 100%
*/
var inProgress = 0;
var allPercentDone = 0;
if (prjInfo.builds.length === 0)
return
// returns true if a build's state changed to "Succeeded", "Failed"
// or "Cancelled" from some other value
function buildFinished(build) {
var cached = getCached(build);
return cached.state &&
cached.state !== build.state &&
(build.state == 'Succeeded' || build.state == 'Failed' ||
build.state == 'Cancelled');
}

for (var i in prjInfo.builds){
var build = prjInfo.builds[i];
// returns true if the state changed
function stateChanged(build) {
var cached = getCached(build);
return (cached.state !== build.state);
}

if (build.outcomeText === "In Progress" ||
$(".progress .bar").length > 0){
/* Update the build progress */
var percentDone;
// returns true if the tasks_complete_percentage changed
function tasksProgressChanged(build) {
var cached = getCached(build);
return (cached.tasks_complete_percentage !== build.tasks_complete_percentage);
}

if (build.outcomeText !== "In Progress"){
/* We have to ignore the value when it's Succeeded because it
* goes back to 0
*/
percentDone = 100;
} else {
percentDone = build.percentDone;
inProgress++;
}
// returns true if the number of recipes parsed/to parse changed
function recipeProgressChanged(build) {
var cached = getCached(build);
return (cached.recipes_parsed_percentage !== build.recipes_parsed_percentage);
}

$("#build-pc-done-" + build.id).text(percentDone);
$("#build-pc-done-title-" + build.id).attr("title", percentDone);
$("#build-pc-done-bar-" + build.id).css("width",
String(percentDone) + "%");
function refreshMostRecentBuilds(){
libtoaster.getMostRecentBuilds(
libtoaster.ctx.mostRecentBuildsUrl,

allPercentDone += percentDone;
}
// success callback
function (data) {
var build;
var tmpl;
var container;
var selector;
var colourClass;
var elements;

for (var i = 0; i < data.length; i++) {
build = data[i];

if (buildFinished(build)) {
// a build finished: reload the whole page so that the build
// shows up in the builds table
window.location.reload();
}
else if (stateChanged(build)) {
// update the whole template
build.warnings_pluralise = (build.warnings !== 1 ? 's' : '');
build.errors_pluralise = (build.errors !== 1 ? 's' : '');

tmpl = $.templates("#build-template");

html = $(tmpl.render(build));

selector = '[data-latest-build-result="' + build.id + '"] ' +
'[data-role="build-status-container"]';
container = $(selector);

// initialize bootstrap tooltips in the new HTML
html.find('span.glyphicon-question-sign').tooltip();

container.html(html);
}
else if (tasksProgressChanged(build)) {
// update the task progress text
selector = '#build-pc-done-' + build.id;
$(selector).html(build.tasks_complete_percentage);

// update the task progress bar
selector = '#build-pc-done-bar-' + build.id;
$(selector).width(build.tasks_complete_percentage + '%');
}
else if (recipeProgressChanged(build)) {
// update the recipe progress text
selector = '#recipes-parsed-percentage-' + build.id;
$(selector).html(build.recipes_parsed_percentage);

// update the recipe progress bar
selector = '#recipes-parsed-percentage-bar-' + build.id;
$(selector).width(build.recipes_parsed_percentage + '%');
}

if (allPercentDone === (100 * prjInfo.builds.length) && !inProgress)
window.location.reload();
buildData[build.id] = build;
}
},

/* Our progress bar is not still showing so shutdown the polling. */
if ($(".progress .bar").length === 0)
window.clearInterval(progressTimer);

});
}, 1500);
// fail callback
function (data) {
console.error(data);
}
);
}
}

window.setInterval(refreshMostRecentBuilds, 1500);
refreshMostRecentBuilds();
}

@@ -25,7 +25,11 @@ function newCustomImageModalInit(){
var duplicateNameMsg = "An image with this name already exists. Image names must be unique.";
var duplicateImageInProjectMsg = "An image with this name already exists in this project."
var invalidBaseRecipeIdMsg = "Please select an image to customise.";

// set button to "submit" state and enable text entry so user can
// enter the custom recipe name
showSubmitState();

/* capture clicks on radio buttons inside the modal; when one is selected,
* set the recipe on the modal
*/
@@ -40,6 +44,9 @@ function newCustomImageModalInit(){
});

newCustomImgBtn.click(function(e){
// disable the button and text entry
showLoadingState();

e.preventDefault();

var baseRecipeId = imgCustomModal.data('recipe');
@@ -69,12 +76,33 @@ function newCustomImageModalInit(){
}
} else {
imgCustomModal.modal('hide');
imgCustomModal.one('hidden.bs.modal', showSubmitState);
window.location.replace(ret.url + '?notify=new');
}
});
}
});

// enable text entry, show "Create image" button text
function showSubmitState() {
libtoaster.enableAjaxLoadingTimer();
newCustomImgBtn.find('[data-role="loading-state"]').hide();
newCustomImgBtn.find('[data-role="submit-state"]').show();
newCustomImgBtn.removeAttr('disabled');
nameInput.removeAttr('disabled');
}

// disable text entry, show "Creating image..." button text;
// we also disabled the page-level ajax loading spinner while this spinner
// is active
function showLoadingState() {
libtoaster.disableAjaxLoadingTimer();
newCustomImgBtn.find('[data-role="submit-state"]').hide();
newCustomImgBtn.find('[data-role="loading-state"]').show();
newCustomImgBtn.attr('disabled', 'disabled');
nameInput.attr('disabled', 'disabled');
}

function showNameError(text){
invalidNameHelp.text(text);
invalidNameHelp.show();
@@ -167,6 +195,5 @@ function newCustomImageModalSetRecipes(baseRecipes) {

// show the radio button container
imageSelector.show();

}
}
}

@@ -172,7 +172,12 @@ function projectPageInit(ctx) {

link.attr("href", layerObj.layerdetailurl);
link.text(layerObj.name);
link.tooltip({title: layerObj.vcs_url + " | "+ layerObj.vcs_reference, placement: "right"});

if (layerObj.local_source_dir) {
link.tooltip({title: layerObj.local_source_dir, placement: "right"});
} else {
link.tooltip({title: layerObj.vcs_url + " | "+ layerObj.vcs_reference, placement: "right"});
}

var trashItem = projectLayer.children("span");
trashItem.click(function (e) {

@@ -15,6 +15,7 @@ function tableInit(ctx){
orderby : null,
filter : null,
search : null,
default_orderby: null,
};

var defaultHiddenCols = [];
@@ -192,6 +193,8 @@ function tableInit(ctx){
tableHeadRow.html("");
editColMenu.html("");

tableParams.default_orderby = tableData.default_orderby;

if (!tableParams.orderby && tableData.default_orderby){
tableParams.orderby = tableData.default_orderby;
}
@@ -217,6 +220,7 @@ function tableInit(ctx){
var title = $('<a href=\"#\" ></a>');

title.data('field-name', col.field_name);
title.attr('data-sort-field', col.field_name);
title.text(col.title);
title.click(sortColumnClicked);

@@ -344,29 +348,65 @@ function tableInit(ctx){
}
}

/* Apply an ordering to the current table.
*
* 1. Find the column heading matching the sortSpecifier
* 2. Set its up/down arrow and add .sorted
*
* orderby: e.g. "-started_on", "completed_on"
* colHeading: column heading element to activate (by showing the caret
* up/down, depending on sort order); if not set, the correct column
* heading is selected from the DOM using orderby as a key
*/
function applyOrderby(orderby, colHeading) {
if (!orderby) {
return;
}

// We only have one sort at a time so remove existing sort indicators
$("#" + ctx.tableName + " th .icon-caret-down").hide();
$("#" + ctx.tableName + " th .icon-caret-up").hide();
$("#" + ctx.tableName + " th a").removeClass("sorted");

// normalise the orderby so we can use it to find the link we want
// to style
var fieldName = orderby;
if (fieldName.indexOf('-') === 0) {
fieldName = fieldName.slice(1);
}

// find the table header element which corresponds to the sort field
// (if we don't already have it)
if (!colHeading) {
colHeading = $('[data-sort-field="' + fieldName + '"]');
}

colHeading.addClass("sorted");

var parent = colHeading.parent();

if (orderby.indexOf('-') === 0) {
parent.children('.icon-caret-up').show();
}
else {
parent.children('.icon-caret-down').show();
}

tableParams.orderby = orderby;
loadData(tableParams);
}

function sortColumnClicked(e){
e.preventDefault();

/* We only have one sort at a time so remove any existing sort indicators */
$("#"+ctx.tableName+" th .icon-caret-down").hide();
$("#"+ctx.tableName+" th .icon-caret-up").hide();
$("#"+ctx.tableName+" th a").removeClass("sorted");

var fieldName = $(this).data('field-name');

/* if we're already sorted sort the other way */
if (tableParams.orderby === fieldName &&
var orderby = $(this).data('field-name');
if (tableParams.orderby === orderby &&
tableParams.orderby.indexOf('-') === -1) {
tableParams.orderby = '-' + $(this).data('field-name');
$(this).parent().children('.icon-caret-up').show();
} else {
tableParams.orderby = $(this).data('field-name');
$(this).parent().children('.icon-caret-down').show();
orderby = '-' + orderby;
}

$(this).addClass("sorted");

loadData(tableParams);
applyOrderby(orderby, $(this));
}

function pageButtonClicked(e) {
@@ -385,11 +425,13 @@ function tableInit(ctx){
table.find("."+col).show();
} else {
table.find("."+col).hide();
/* If we're ordered by the column we're hiding remove the order by */
// If we're ordered by the column we're hiding remove the order by
// and apply the default one instead
if (col === tableParams.orderby ||
'-' + col === tableParams.orderby){
tableParams.orderby = null;
$("#"+ctx.tableName +" .default-orderby").click();

applyOrderby(tableParams.default_orderby);
}
}

@@ -114,8 +114,12 @@ class LayersTable(ToasterTable):

git_url_template = '''
<a href="{% url 'layerdetails' extra.pid data.id %}">
{% if data.layer.local_source_dir %}
<code>{{data.layer.local_source_dir}}</code>
{% else %}
<code>{{data.layer.vcs_url}}</code>
</a>
{% endif %}
{% if data.get_vcs_link_url %}
<a target="_blank" href="{{ data.get_vcs_link_url }}">
<span class="glyphicon glyphicon-new-window"></span>
@@ -123,16 +127,21 @@ class LayersTable(ToasterTable):
{% endif %}
'''

self.add_column(title="Git repository URL",
help_text="The Git repository for the layer source code",
self.add_column(title="Layer source code location",
help_text="A Git repository or an absolute path to a directory",
hidden=True,
static_data_name="layer__vcs_url",
static_data_template=git_url_template)

git_dir_template = '''
{% if data.layer.local_source_dir %}
<span class="text-muted">Not applicable</span>
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.layer.name}} is not in a Git repository, so there is no subdirectory associated with it"> </span>
{% else %}
<a href="{% url 'layerdetails' extra.pid data.id %}">
<code>{{data.dirpath}}</code>
</a>
{% endif %}
{% if data.dirpath and data.get_vcs_dirpath_link_url %}
<a target="_blank" href="{{ data.get_vcs_dirpath_link_url }}">
<span class="glyphicon glyphicon-new-window"></span>
@@ -146,9 +155,14 @@ class LayersTable(ToasterTable):
static_data_template=git_dir_template)

revision_template = '''
{% if data.layer.local_source_dir %}
<span class="text-muted">Not applicable</span>
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.layer.name}} is not in a Git repository, so there is no revision associated with it"> </span>
{% else %}
{% with vcs_ref=data.get_vcs_reference %}
{% include 'snippets/gitrev_popover.html' %}
{% endwith %}
{% endif %}
'''

self.add_column(title="Git revision",
@@ -413,9 +427,19 @@ class RecipesTable(ToasterTable):
orderable=True,
field_name="license")

revision_link_template = '''
{% if data.layer_version.layer.local_source_dir %}
<span class="text-muted">Not applicable</span>
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.layer_version.layer.name}} is not in a Git repository, so there is no revision associated with it"> </span>
{% else %}
{{data.layer_version.get_vcs_reference}}
{% endif %}
'''

self.add_column(title="Git revision",
hidden=True,
field_name="layer_version__get_vcs_reference")
static_data_name="layer_version__get_vcs_reference",
static_data_template=revision_link_template)

class LayerRecipesTable(RecipesTable):
@@ -1197,9 +1221,13 @@ class BuildsTable(ToasterTable):

time_template = '''
{% load projecttags %}
<a href="{% url "buildtime" data.id %}">
{% if data.outcome == extra.Build.SUCCEEDED %}
<a href="{% url "buildtime" data.id %}">
{{data.timespent_seconds | sectohms}}
</a>
{% else %}
{{data.timespent_seconds | sectohms}}
</a>
{% endif %}
'''

image_files_template = '''

@@ -22,6 +22,8 @@
|
||||
</script>
|
||||
<script src="{% static 'js/typeahead.jquery.js' %}">
|
||||
</script>
|
||||
<script src="{% static 'js/jsrender.min.js' %}">
|
||||
</script>
|
||||
<script src="{% static 'js/prettify.js' %}">
|
||||
</script>
|
||||
<script src="{% static 'js/libtoaster.js' %}">
|
||||
@@ -32,6 +34,8 @@
|
||||
</script>
|
||||
{% endif %}
|
||||
<script>
|
||||
$.views.settings.delimiters("<%", "%>");
|
||||
|
||||
libtoaster.ctx = {
|
||||
jsUrl : "{% static 'js/' %}",
|
||||
htmlUrl : "{% static 'html/' %}",
|
||||
@@ -48,7 +52,9 @@
|
||||
xhrCustomRecipeUrl : "{% url 'xhr_customrecipe' %}",
|
||||
projectId : {{project.id}},
|
||||
xhrBuildRequestUrl: "{% url 'xhr_buildrequest' project.id %}",
|
||||
mostRecentBuildsUrl: "{% url 'most_recent_builds' %}?project_id={{project.id}}",
|
||||
{% else %}
|
||||
mostRecentBuildsUrl: "{% url 'most_recent_builds' %}",
|
||||
projectId : undefined,
|
||||
projectPageUrl : undefined,
|
||||
projectName : undefined,
|
||||
|
||||
@@ -52,104 +52,106 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<!-- begin left sidebar container -->
|
||||
<div id="nav" class="col-md-2">
|
||||
<ul class="nav nav-pills nav-stacked" id="build-menu">
|
||||
<li
|
||||
{% if request.resolver_match.url_name == 'builddashboard' %}
|
||||
class="active"
|
||||
{% endif %} >
|
||||
<a href="{% url 'builddashboard' build.pk %}">Build summary</a>
|
||||
</li>
|
||||
{% if build.has_images and build.outcome == build.SUCCEEDED %}
|
||||
<li class="nav-header" data-menu-heading="images">Images</li>
|
||||
{% block nav-target %}
|
||||
{% for t in build.get_sorted_target_list %}
|
||||
{% if t.has_images %}
|
||||
<li id="menu-{{t.target}}"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endblock %}
|
||||
{% endif %}
|
||||
<li class="nav-header">Build</li>
|
||||
<li id="menu-configuration"><a href="{% url 'configuration' build.pk %}">Configuration</a></li>
|
||||
<li><a href="{% url 'tasks' build.pk %}">Tasks</a></li>
|
||||
<li><a href="{% url 'recipes' build.pk %}">Recipes</a></li>
|
||||
<li><a href="{% url 'packages' build.pk %}">Packages</a></li>
|
||||
<li class="nav-header">Performance</li>
|
||||
<li><a href="{% url 'buildtime' build.pk %}">Time</a></li>
|
||||
<li><a href="{% url 'cputime' build.pk %}">CPU usage</a></li>
|
||||
<li><a href="{% url 'diskio' build.pk %}">Disk I/O</a></li>
|
||||
<!-- begin left sidebar container for builds which started properly -->
|
||||
{% if build.started %}
|
||||
<div class="row">
|
||||
<div id="nav" class="col-md-2">
|
||||
<ul class="nav nav-pills nav-stacked" id="build-menu">
|
||||
<li
|
||||
{% if request.resolver_match.url_name == 'builddashboard' %}
|
||||
class="active"
|
||||
{% endif %} >
|
||||
<a href="{% url 'builddashboard' build.pk %}">Build summary</a>
|
||||
</li>
|
||||
{% if build.has_images and build.outcome == build.SUCCEEDED %}
|
||||
<li class="nav-header" data-menu-heading="images">Images</li>
|
||||
{% block nav-target %}
|
||||
{% for t in build.get_sorted_target_list %}
|
||||
{% if t.has_images %}
|
||||
<li id="menu-{{t.target}}"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endblock %}
|
||||
{% endif %}
|
||||
<li class="nav-header">Build</li>
|
||||
<li id="menu-configuration"><a href="{% url 'configuration' build.pk %}">Configuration</a></li>
|
||||
<li><a href="{% url 'tasks' build.pk %}">Tasks</a></li>
|
||||
<li><a href="{% url 'recipes' build.pk %}">Recipes</a></li>
|
||||
<li><a href="{% url 'packages' build.pk %}">Packages</a></li>
|
||||
<li class="nav-header">Performance</li>
|
||||
<li><a href="{% url 'buildtime' build.pk %}">Time</a></li>
|
||||
<li><a href="{% url 'cputime' build.pk %}">CPU usage</a></li>
|
||||
<li><a href="{% url 'diskio' build.pk %}">Disk I/O</a></li>
|
||||
|
||||
<li class="nav-header">Actions</li>
|
||||
<a class="btn btn-default btn-block navbar-btn" href="{% url 'build_artifact' build.id 'cookerlog' build.id %}">Download build log</a>
|
||||
<li class="nav-header">Actions</li>
|
||||
<a class="btn btn-default btn-block navbar-btn" href="{% url 'build_artifact' build.id 'cookerlog' build.id %}">Download build log</a>
|
||||
|
||||
{% with build.get_custom_image_recipes as custom_image_recipes %}
|
||||
{% if custom_image_recipes.count > 0 %}
|
||||
<!-- edit custom image built during this build -->
|
||||
<button class="btn btn-default btn-block navbar-btn" data-role="edit-custom-image-trigger">Edit custom image</button>
|
||||
{% include 'editcustomimage_modal.html' %}
|
||||
{% with build.get_custom_image_recipes as custom_image_recipes %}
|
||||
{% if custom_image_recipes.count > 0 %}
|
||||
<!-- edit custom image built during this build -->
|
||||
<button class="btn btn-default btn-block navbar-btn" data-role="edit-custom-image-trigger">Edit custom image</button>
|
||||
{% include 'editcustomimage_modal.html' %}
|
||||
<script>
|
||||
var editableCustomImageRecipes = {{ custom_image_recipes | objects_to_dictionaries:"id,name" | json }};
|
||||
|
||||
$(document).ready(function () {
|
||||
var editCustomImageTrigger = $('[data-role="edit-custom-image-trigger"]');
|
||||
var editCustomImageModal = $('#edit-custom-image-modal');
|
||||
|
||||
// edit custom image which was built during this build
|
||||
editCustomImageTrigger.click(function () {
|
||||
// single editable custom image: redirect to the edit page
|
||||
// for that image
|
||||
if (editableCustomImageRecipes.length === 1) {
|
||||
var url = '{% url "customrecipe" build.project.id custom_image_recipes.first.id %}';
|
||||
document.location.href = url;
|
||||
}
|
||||
// multiple editable custom images: show modal to select
|
||||
// one of them for editing
|
||||
else {
|
||||
editCustomImageModal.modal('show');
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
|
||||
<!-- new custom image from image recipe in this build -->
|
||||
{% if build.has_image_recipes %}
|
||||
<button class="btn btn-default btn-block navbar-btn" data-role="new-custom-image-trigger">New custom image</button>
|
||||
{% include 'newcustomimage_modal.html' %}
|
||||
<script>
|
||||
var editableCustomImageRecipes = {{ custom_image_recipes | objects_to_dictionaries:"id,name" | json }};
|
||||
// imageRecipes includes both custom image recipes and built-in
|
||||
// image recipes, any of which can be used as the basis for a
|
||||
// new custom image
|
||||
var imageRecipes = {{ build.get_image_recipes | objects_to_dictionaries:"id,name" | json }};
|
||||
|
||||
$(document).ready(function () {
|
||||
var editCustomImageTrigger = $('[data-role="edit-custom-image-trigger"]');
|
||||
var editCustomImageModal = $('#edit-custom-image-modal');
|
||||
var newCustomImageModal = $('#new-custom-image-modal');
|
||||
var newCustomImageTrigger = $('[data-role="new-custom-image-trigger"]');
|
||||
|
||||
// edit custom image which was built during this build
|
||||
editCustomImageTrigger.click(function () {
|
||||
// single editable custom image: redirect to the edit page
|
||||
// for that image
|
||||
if (editableCustomImageRecipes.length === 1) {
|
||||
var url = '{% url "customrecipe" build.project.id custom_image_recipes.first.id %}';
|
||||
document.location.href = url;
|
||||
}
|
||||
// multiple editable custom images: show modal to select
|
||||
// one of them for editing
|
||||
else {
|
||||
editCustomImageModal.modal('show');
|
||||
// show create new custom image modal to select an image built
|
||||
// during this build as the basis for the custom recipe
|
||||
newCustomImageTrigger.click(function () {
|
||||
if (!imageRecipes.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
newCustomImageModalSetRecipes(imageRecipes);
|
||||
newCustomImageModal.modal('show');
|
||||
});
|
||||
});
|
||||
</script>
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
|
||||
<!-- new custom image from image recipe in this build -->
|
||||
{% if build.has_image_recipes %}
|
||||
<button class="btn btn-default btn-block navbar-btn" data-role="new-custom-image-trigger">New custom image</button>
|
||||
{% include 'newcustomimage_modal.html' %}
|
||||
<script>
|
||||
// imageRecipes includes both custom image recipes and built-in
|
||||
// image recipes, any of which can be used as the basis for a
|
||||
// new custom image
|
||||
var imageRecipes = {{ build.get_image_recipes | objects_to_dictionaries:"id,name" | json }};
|
||||
|
||||
$(document).ready(function () {
|
||||
var newCustomImageModal = $('#new-custom-image-modal');
|
||||
var newCustomImageTrigger = $('[data-role="new-custom-image-trigger"]');
|
||||
|
||||
// show create new custom image modal to select an image built
|
||||
// during this build as the basis for the custom recipe
|
||||
newCustomImageTrigger.click(function () {
|
||||
if (!imageRecipes.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
newCustomImageModalSetRecipes(imageRecipes);
|
||||
newCustomImageModal.modal('show');
|
||||
});
|
||||
});
|
||||
</script>
|
||||
{% endif %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</ul>
|
||||
</div>
|
||||
<!-- end left sidebar container -->
|
||||
{% endif %}
|
||||
|
||||
<!-- right container; need class="row" for builds without left-hand menu -->
|
||||
<div{% if not build.started %} class="row"{% endif %}>
|
||||
{% block buildinfomain %}{% endblock %}
|
||||
</div>
|
||||
<!-- end left sidebar container -->
|
||||
|
||||
<!-- begin right container -->
|
||||
{% block buildinfomain %}{% endblock %}
|
||||
<!-- end right container -->
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
{% block buildinfomain %}
|
||||
<!-- page title -->
|
||||
<div class="col-md-10">
|
||||
<div class="{% if build.started %}col-md-10{% else %}col-md-12{% endif %}">
|
||||
<div class="page-header build-data">
|
||||
<h1>{{build.get_sorted_target_list|field_values:"target"|join:", "}} {{build.machine}}</h1>
|
||||
</div>
|
||||
@@ -38,8 +38,15 @@
|
||||
{% endif %}
|
||||
<span class="pull-right">
|
||||
Build time:
|
||||
<a class="alert-link" href="{% url 'buildtime' build.pk %}">{{ build.timespent_seconds|sectohms }}</a>
|
||||
</span>
|
||||
<span data-build-field="buildtime">
|
||||
{% if build.outcome == build.SUCCEEDED %}
|
||||
<a href="{% url 'buildtime' build.pk %}">{{ build.timespent_seconds|sectohms }}</a>
|
||||
{% else %}
|
||||
{{ build.timespent_seconds|sectohms }}
|
||||
{% endif %}
|
||||
</span>
|
||||
</span>
|
||||
|
||||
{%endif%}
|
||||
</div>
|
||||
|
||||
@@ -55,9 +62,9 @@
|
||||
</div>
|
||||
<div class="panel-collapse collapse in" id="error-info">
|
||||
<div class="panel-body">
|
||||
<div class="col-md-10">
|
||||
<div class="{% if build.started %}col-md-10{% else %}col-md-12{% endif %}">
|
||||
{% for error in build.errors %}
|
||||
<div class="alert alert-danger" data-error="{{ error.id }}">
|
||||
<div class="alert alert-danger" data-log-message-id="{{error.pk}}">
|
||||
<pre>{{error.message}}</pre>
|
||||
</div>
|
||||
{% endfor %}
|
||||
@@ -194,79 +201,82 @@
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
<!-- build summary -->
|
||||
<h2>Build summary</h2>
|
||||
<div class="row">
|
||||
<div class="col-md-4 dashboard-section">
|
||||
<div class="well well-transparent">
|
||||
<h3><a href="{%url 'configuration' build.pk%}">Configuration</a></h3>
|
||||
<dl>
|
||||
<dt>Machine</dt><dd>{{build.machine}}</dd>
|
||||
<dt>Distro</dt><dd>{{build.distro}}</dd>
|
||||
<dt>Layers</dt><dd><ul class="list-unstyled">{% for i in build.layer_version_build.all|dictsort:"layer.name" %}<li>{{i.layer.name}}</li>{%endfor%}</ul></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4 dashboard-section">
|
||||
<div class="well well-transparent">
|
||||
<h3><a href="{%url 'tasks' build.pk%}">Tasks</a></h3>
|
||||
<dl>
|
||||
{% query build.task_build outcome=4 order__gt=0 as exectask%}
|
||||
{% if exectask.count > 0 %}
|
||||
<dt>Failed tasks</dt>
|
||||
<dd>
|
||||
{% if exectask.count == 1 %}
|
||||
<a class="text-danger" href="{% url "task" build.id exectask.0.id %}">
|
||||
{{exectask.0.recipe.name}}
|
||||
<span class="task-name">{{exectask.0.task_name}}</span>
|
||||
</a>
|
||||
|
||||
<a href="{% url 'build_artifact' build.id "tasklogfile" exectask.0.id %}">
|
||||
<span class="glyphicon glyphicon-download-alt get-help" title="Download task log file"></i>
|
||||
</a>
|
||||
|
||||
{% elif exectask.count > 1%}
|
||||
<a class="text-danger" href="{% url "tasks" build.id %}?filter=outcome%3A4">{{exectask.count}}</a>
|
||||
{% endif %}
|
||||
</dd>
|
||||
{% endif %}
|
||||
<dt>Total number of tasks</dt><dd><a href="{% url 'tasks' build.pk %}">{% query build.task_build order__gt=0 as alltasks %}{{alltasks.count}}</a></dd>
|
||||
<dt>
|
||||
Tasks executed
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="'Executed' tasks are those that need to be run in order to generate the task output"></span>
|
||||
</dt>
|
||||
<dd><a href="{% url 'tasks' build.pk %}?filter=task_executed%3A1&count=25&search=&page=1&orderby=order%3A%2B">{% query build.task_build task_executed=1 order__gt=0 as exectask%}{{exectask.count}}</a></dd>
|
||||
<dt>
|
||||
Tasks not executed
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="'Not executed' tasks don't need to run because their outcome is provided by another task"></span>
|
||||
</dt>
|
||||
<dd><a href="{% url 'tasks' build.pk %}?filter=task_executed%3A0&count=25&search=&page=1&orderby=order%3A%2B">{% query build.task_build task_executed=0 order__gt=0 as noexectask%}{{noexectask.count}}</a></dd>
|
||||
<dt>
|
||||
Reuse
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The percentage of 'not executed' tasks over the total number of tasks, which is a measure of the efficiency of your build"></span>
|
||||
</dt>
|
||||
<dd>
|
||||
{% query build.task_build order__gt=0 as texec %}
|
||||
{% if noexectask.count|multiply:100|divide:texec.count < 0 %}
|
||||
0
|
||||
{% else %}
|
||||
{{noexectask.count|multiply:100|divide:texec.count}}
|
||||
{% endif %}
|
||||
%
|
||||
</dd>
|
||||
</dl>
|
||||
{% if build.started %}
|
||||
<!-- build summary -->
|
||||
<h2 data-role="build-summary-heading">Build summary</h2>
|
||||
<div class="row">
|
||||
<div class="col-md-4 dashboard-section">
|
||||
<div class="well well-transparent">
|
||||
<h3><a href="{%url 'configuration' build.pk%}">Configuration</a></h3>
|
||||
<dl>
|
||||
<dt>Machine</dt><dd>{{build.machine}}</dd>
|
||||
<dt>Distro</dt><dd>{{build.distro}}</dd>
|
||||
<dt>Layers</dt><dd><ul class="list-unstyled">{% for i in build.layer_version_build.all|dictsort:"layer.name" %}<li>{{i.layer.name}}</li>{%endfor%}</ul></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4 dashboard-section">
|
||||
<div class="well well-transparent">
|
||||
<h3><a href="{%url 'tasks' build.pk%}">Tasks</a></h3>
|
||||
<dl>
|
||||
{% query build.task_build outcome=4 order__gt=0 as exectask%}
|
||||
{% if exectask.count > 0 %}
|
||||
<dt>Failed tasks</dt>
|
||||
<dd>
|
||||
{% if exectask.count == 1 %}
|
||||
<a class="text-danger" href="{% url "task" build.id exectask.0.id %}">
|
||||
{{exectask.0.recipe.name}}
|
||||
<span class="task-name">{{exectask.0.task_name}}</span>
|
||||
</a>
|
||||
|
||||
<a href="{% url 'build_artifact' build.id "tasklogfile" exectask.0.id %}">
|
||||
<span class="glyphicon glyphicon-download-alt get-help" title="Download task log file"></i>
|
||||
</a>
|
||||
|
||||
{% elif exectask.count > 1%}
|
||||
<a class="text-danger" href="{% url "tasks" build.id %}?filter=outcome%3A4">{{exectask.count}}</a>
|
||||
{% endif %}
|
||||
</dd>
|
||||
{% endif %}
|
||||
<dt>Total number of tasks</dt><dd><a href="{% url 'tasks' build.pk %}">{% query build.task_build order__gt=0 as alltasks %}{{alltasks.count}}</a></dd>
|
||||
<dt>
|
||||
Tasks executed
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="'Executed' tasks are those that need to be run in order to generate the task output"></span>
|
||||
</dt>
|
||||
<dd><a href="{% url 'tasks' build.pk %}?filter=task_executed%3A1&count=25&search=&page=1&orderby=order%3A%2B">{% query build.task_build task_executed=1 order__gt=0 as exectask%}{{exectask.count}}</a></dd>
|
||||
<dt>
|
||||
Tasks not executed
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="'Not executed' tasks don't need to run because their outcome is provided by another task"></span>
|
||||
</dt>
|
||||
<dd><a href="{% url 'tasks' build.pk %}?filter=task_executed%3A0&count=25&search=&page=1&orderby=order%3A%2B">{% query build.task_build task_executed=0 order__gt=0 as noexectask%}{{noexectask.count}}</a></dd>
|
||||
<dt>
|
||||
Reuse
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The percentage of 'not executed' tasks over the total number of tasks, which is a measure of the efficiency of your build"></span>
|
||||
</dt>
|
||||
<dd>
|
||||
{% query build.task_build order__gt=0 as texec %}
|
||||
{% if noexectask.count|multiply:100|divide:texec.count < 0 %}
|
||||
0
|
||||
{% else %}
|
||||
{{noexectask.count|multiply:100|divide:texec.count}}
|
||||
{% endif %}
|
||||
%
|
||||
</dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4 dashboard-section">
|
||||
<div class="well well-transparent">
|
||||
<h3><a href="{% url 'recipes' build.pk %}">Recipes</a> & <a href="{% url 'packages' build.pk %}">Packages</a></h3>
|
||||
<dl>
|
||||
<dt>Recipes built</dt><dd><a href="{% url 'recipes' build.pk %}">{{recipecount}}</a></dd>
|
||||
<dt>Packages built</dt><dd><a href="{% url 'packages' build.pk %}">{{packagecount}}</a></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4 dashboard-section">
|
||||
<div class="well well-transparent">
|
||||
<h3><a href="{% url 'recipes' build.pk %}">Recipes</a> & <a href="{% url 'packages' build.pk %}">Packages</a></h3>
|
||||
<dl>
|
||||
<dt>Recipes built</dt><dd><a href="{% url 'recipes' build.pk %}">{{recipecount}}</a></dd>
|
||||
<dt>Packages built</dt><dd><a href="{% url 'packages' build.pk %}">{{packagecount}}</a></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %} <!-- end build summary -->
|
||||
|
||||
{% if build.warnings.count %}
|
||||
<div class="panel panel-default" id="warnings">
|
||||
@@ -278,9 +288,9 @@
|
||||
</div>
|
||||
<div class="panel-collapse collapse" id="warning-info">
|
||||
<div class="panel-body">
|
||||
<div class="col-md-10">
|
||||
<div class="{% if build.started %}col-md-10{% else %}col-md-12{% endif %}">
|
||||
{% for warning in logmessages %}{% if warning.level == 1 %}
|
||||
<div class="alert alert-warning">
|
||||
<div class="alert alert-warning" data-log-message-id="{{warning.pk}}">
|
||||
<pre>{{warning.message}}</pre>
|
||||
</div>
|
||||
{% endif %}{% endfor %}
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
{% extends "baseprojectpage.html" %}
|
||||
|
||||
{% load static %}
|
||||
{% load projecttags %}
|
||||
{% load humanize %}
|
||||
|
||||
|
||||
{% block projectinfomain %}
|
||||
<!-- begin content -->
|
||||
|
||||
<div class="row">
|
||||
|
||||
<!-- end left sidebar container -->
|
||||
<!-- Begin right container -->
|
||||
<div class="col-md-10">
|
||||
<div class="page-header">
|
||||
<h1>
|
||||
<span data-toggle="tooltip" {%if buildrequest.brtarget_set.all.count > 1%}title="Targets: {%for target in buildrequest.brtarget_set.all%}{{target.target}} {%endfor%}"{%endif%}>{{buildrequest.brtarget_set.all.0.target}} {%if buildrequest.brtarget_set.all.count > 1%}(+ {{buildrequest.brtarget_set.all.count|add:"-1"}}){%endif%} {{buildrequest.get_machine}} </span>
|
||||
|
||||
</h1>
|
||||
</div>
|
||||
<div class="alert alert-error">
|
||||
<p class="lead">
|
||||
<strong>Failed</strong>
|
||||
on {{ buildrequest.updated|date:'d/m/y H:i' }}
|
||||
with
|
||||
|
||||
<i class="icon-minus-sign error" style="margin-left:6px;"></i>
|
||||
<strong><a class="error accordion-toggle toggle-errors" href="#errors">
|
||||
{{buildrequest.brerror_set.all.count}} error{{buildrequest.brerror_set.all.count|pluralize}}
|
||||
</a></strong>
|
||||
<span class="pull-right">Build time: {{buildrequest.get_duration|sectohms}}</span>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="accordion" id="errors">
|
||||
<div class="accordion-group">
|
||||
<div class="accordion-heading">
|
||||
<a class="accordion-toggle error toggle-errors">
|
||||
<h2>
|
||||
<i class="icon-minus-sign"></i>
|
||||
{{buildrequest.brerror_set.all.count}} error{{buildrequest.brerror_set.all.count|pluralize}}
|
||||
</h2>
|
||||
</a>
|
||||
</div>
|
||||
<div class="accordion-body collapse in" id="collapse-errors">
|
||||
<div class="accordion-inner">
|
||||
<div class="col-md-10">
|
||||
{% for error in buildrequest.brerror_set.all %}
|
||||
<div class="alert alert-error">
|
||||
ERROR: <div class="air well"><pre>{{error.errmsg}}</pre></div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div> <!-- end of row -->
|
||||
|
||||
|
||||
{%endblock%}
|
||||
@@ -53,11 +53,25 @@
|
||||
<tbody>{% for lv in build.layer_version_build.all|dictsort:"layer.name" %}
|
||||
<tr>
|
||||
<td>{{lv.layer.name}}</td>
|
||||
{% if lv.layer.local_source_dir %}
|
||||
<td>
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{lv.layer.name}} is not in a Git repository, so there is no branch associated with it"> </span>
|
||||
</td>
|
||||
{% else %}
|
||||
<td>{{lv.branch}}</td>
|
||||
{% endif %}
|
||||
{% if lv.layer.local_source_dir %}
|
||||
<td>
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{lv.layer.name}} is not in a Git repository, so there is no commit associated with it"> </span>
|
||||
</td>
|
||||
{% else %}
|
||||
<td> <a class="btn btn-default" data-content="<ul class='list-unstyled'>
|
||||
<li>{{lv.commit}}</li> </ul>">
|
||||
{{lv.commit|truncatechars:13}}
|
||||
</a></td>
|
||||
{% endif %}
|
||||
</tr>{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
@@ -26,16 +26,14 @@
|
||||
</script>
|
||||
|
||||
<form class="col-md-11">
|
||||
<span class="help-block">The layer you are importing must be compatible with <strong>{{project.release.description}}</strong>, which is the release you are using in this project.</span>
|
||||
<div class="alert alert-error" id="import-error" style="display:none">
|
||||
<button type="button" class="close" data-dismiss="alert">×</button>
|
||||
<h3> </h3>
|
||||
<p></p>
|
||||
<ul></ul>
|
||||
</div>
|
||||
<fieldset>
|
||||
<h2>Layer repository information</h2>
|
||||
<span class="help-block">The layer you are importing must be compatible with <strong>{{project.release.description}}</strong>, which is the release you are using in this project.</span>
|
||||
<div class="alert alert-error" id="import-error" style="display:none">
|
||||
<button type="button" class="close" data-dismiss="alert">×</button>
|
||||
<h3> </h3>
|
||||
<p></p>
|
||||
<ul></ul>
|
||||
</div>
|
||||
|
||||
<div class="form-group" id="layer-name-ctrl">
|
||||
<label class="control-label" for="import-layer-name">
|
||||
Layer name
|
||||
@@ -45,82 +43,112 @@
|
||||
<span class="help-block" style="display: none;" id="invalid-layer-name-hint">A valid layer name can only include letters, numbers and dashes</span>
|
||||
<span class="help-inline" style="display: none;" id="duplicated-layer-name-hint"></span>
|
||||
</div>
|
||||
|
||||
<div id="duplicate-layer-info" style="display:none">
|
||||
<div class="alert alert-warning">
|
||||
<h3>A layer called <a href="" class="dup-layer-link"><span class="dup-layer-name"></span></a> already exists</h3>
|
||||
<p>Layer names must be unique. Please use a different layer name.</p>
|
||||
<dl>
|
||||
<dt>
|
||||
The <span class="dup-layer-name"></span> repository url is
|
||||
</dt>
|
||||
<dd>
|
||||
<span id="dup-layer-vcs-url"></span>
|
||||
</dd>
|
||||
<dt>
|
||||
The <span class="dup-layer-name"></span> revision is
|
||||
</dt>
|
||||
<dd>
|
||||
<span id="dup-layer-revision"></span>
|
||||
</dd>
|
||||
</dl>
|
||||
<p><a href="" class="dup-layer-link">View the <span class="dup-layer-name"></span> layer information</a></p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="fields-apart-from-layer-name">
|
||||
<div class="form-group">
|
||||
<label for="layer-git-repo-url">
|
||||
Git repository URL
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="Fetch/clone URL of the repository. Currently, Toaster only supports Git repositories." ></span>
|
||||
</label>
|
||||
|
||||
<input type="text" id="layer-git-repo-url" class="form-control" required>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="layer-subdir">
|
||||
Repository subdirectory
|
||||
<span class="text-muted">(optional)</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="Subdirectory within the repository where the layer is located, if not in the root (usually only used if the repository contains more than one layer)"></span>
|
||||
</label>
|
||||
<input type="text" class="form-control" id="layer-subdir">
|
||||
</div>
|
||||
<div class="form-group" id="layer-revision-ctrl">
|
||||
<label for="layer-git-ref">Git revision
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="You can provide a Git branch, a tag or a commit SHA as the revision"></span>
|
||||
</label>
|
||||
<input type="text" class="form-control" id="layer-git-ref" required>
|
||||
<span class="help-inline" style="diaply:none;" id="invalid-layer-revision-hint"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</fieldset>
|
||||
|
||||
<div class="fields-apart-from-layer-name">
|
||||
<fieldset>
|
||||
<h2>
|
||||
Layer dependencies
|
||||
<small class="text-muted">(optional)</small>
|
||||
<span class="glyphicon glyphicon-question-sign get-help heading-help" title="Other layers this layer depends upon"></span>
|
||||
</h2>
|
||||
<ul class="list-unstyled lead" id="layer-deps-list">
|
||||
</ul>
|
||||
<div class="form-inline">
|
||||
<div class="form-group">
|
||||
<input type="text" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead" placeholder="Type a layer name" id="layer-dependency" class="form-control">
|
||||
</div>
|
||||
<button class="btn btn-default" id="add-layer-dependency-btn">
|
||||
Add layer
|
||||
</button>
|
||||
<span class="help-inline">You can only add layers Toaster knows about</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
<div class="top-air" id="form-actions">
|
||||
<button class="btn btn-primary btn-lg" data-toggle="modal" id="import-and-add-btn" data-target="#dependencies-message" disabled>Import and add to project</button>
|
||||
<span class="help-inline" id="import-and-add-hint" style="vertical-align: middle;">To import a layer you need to enter a layer name, a Git repository URL and a Git revision (branch, tag or commit)</span>
|
||||
<div id="duplicate-layer-info" style="display:none">
|
||||
<div class="alert alert-warning">
|
||||
<h3>A layer called <a href="" class="dup-layer-link"><span class="dup-layer-name"></span></a> already exists</h3>
|
||||
<p>Layer names must be unique. Please use a different layer name.</p>
|
||||
<dl id="git-layer-dup" style="display:none;">
|
||||
<dt>
|
||||
The <span class="dup-layer-name"></span> repository url is
|
||||
</dt>
|
||||
<dd>
|
||||
<span id="dup-layer-vcs-url"></span>
|
||||
</dd>
|
||||
<dt>
|
||||
The <span class="dup-layer-name"></span> revision is
|
||||
</dt>
|
||||
<dd>
|
||||
<span id="dup-layer-revision"></span>
|
||||
</dd>
|
||||
</dl>
|
||||
<dl id="local-layer-dup" style="display:none;">
|
||||
<dt>
|
||||
The <span class="dup-layer-name"></span> directory is
|
||||
</dt>
|
||||
<dd>
|
||||
<span id="dup-local-source-dir-name"></span>
|
||||
</dd>
|
||||
</dl>
|
||||
<p><a href="" class="dup-layer-link">View the <span class="dup-layer-name"></span> layer information</a></p>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
<fieldset class="fields-apart-from-layer-name" id="repo-select">
|
||||
<legend class="radioLegend">Where is the layer source code?</legend>
|
||||
<div class="radio">
|
||||
<label>
|
||||
<input type="radio" id="git-repo-radio" name="repo" value="git" checked="checked">
|
||||
In a <strong>Git repository</strong>
|
||||
</label>
|
||||
<p class="help-block radio-help">To build the layer Toaster must be able to access the Git repository, otherwise builds will fail. Toaster will fetch and checkout your chosen Git revision every time you start a build.</p>
|
||||
</div>
|
||||
<div class="radio">
|
||||
<label>
|
||||
<input type="radio" id="local-dir-radio" name="repo" value="local">
|
||||
In a <strong>directory</strong>
|
||||
</label>
|
||||
<p class="help-block radio-help">Use this option for quick layer development, by simply providing the path to the layer source code.</p>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset class="fields-apart-from-layer-name" id="git-repo">
|
||||
<legend>Git repository information</legend>
|
||||
<div class="form-group">
|
||||
<label for="layer-git-repo-url">
|
||||
Git repository URL
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="Fetch/clone URL of the repository. Currently, Toaster only supports Git repositories." ></span>
|
||||
</label>
|
||||
<input type="text" id="layer-git-repo-url" class="form-control" required>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="layer-subdir">
|
||||
Repository subdirectory
|
||||
<span class="text-muted">(optional)</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="Subdirectory within the repository where the layer is located, if not in the root (usually only used if the repository contains more than one layer)"></span>
|
||||
</label>
|
||||
<input type="text" class="form-control" id="layer-subdir">
|
||||
</div>
|
||||
<div class="form-group" id="layer-revision-ctrl">
|
||||
<label for="layer-git-ref">
|
||||
Git revision
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="You can provide a Git branch, a tag or a commit SHA as the revision"></span>
|
||||
</label>
|
||||
<input type="text" class="form-control" id="layer-git-ref" required>
|
||||
<span class="help-inline" style="diaply:none;" id="invalid-layer-revision-hint"></span>
|
||||
</div>
|
||||
</fieldset>
|
||||
|
||||
<fieldset class="fields-apart-from-layer-name" id="local-dir" style="display:none;">
|
||||
<legend>Layer directory information</legend>
|
||||
<label for="local-dir-path" class="control-label">Enter the absolute path to the layer directory</label>
|
||||
<input type="text" class="form-control" id="local-dir-path" required/>
|
||||
<p class="help-block" id="hintError-dir-path-starts-with-slash" style="display:none;">The absolute path must start with "/".</p>
|
||||
<p class="help-block" id="hintError-dir-path" style="display:none;">The directory path cannot include spaces or any of these characters: . \ ? % * : | " " < ></p>
|
||||
</fieldset>
|
||||
|
||||
<fieldset class="fields-apart-from-layer-name">
|
||||
<legend>
|
||||
Layer dependencies
|
||||
<small class="text-muted">(optional)</small>
|
||||
<span class="glyphicon glyphicon-question-sign get-help heading-help" title="Other layers this layer depends upon"></span>
|
||||
</legend>
|
||||
<ul class="list-unstyled lead" id="layer-deps-list">
|
||||
</ul>
|
||||
<div class="form-inline">
|
||||
<div class="form-group">
|
||||
<input type="text" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead" placeholder="Type a layer name" id="layer-dependency" class="form-control">
|
||||
</div>
|
||||
<button class="btn btn-default" id="add-layer-dependency-btn">
|
||||
Add layer
|
||||
</button>
|
||||
<span class="help-inline">You can only add layers Toaster knows about</span>
|
||||
</fieldset>
|
||||
<div class="top-air fields-apart-from-layer-name" id="form-actions">
|
||||
<button class="btn btn-primary btn-lg" data-toggle="modal" id="import-and-add-btn" data-target="#dependencies-message" disabled>Import and add to project</button>
|
||||
<span class="help-inline" id="import-git-layer-and-add-hint" style="vertical-align: middle;">To import a layer you need to enter a layer name, a Git repository URL and a Git revision (branch, tag or commit)</span>
|
||||
<span class="help-inline" id="import-local-dir-and-add-hint" style="vertical-align: middle;display:none;">To import a layer you need to enter a layer name and the absolute path to the layer directory</span>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
{% else %} {#project and project release#}
|
||||
<div class="page-header">
|
||||
|
||||
@@ -36,7 +36,7 @@
|
||||
</ul>
|
||||
|
||||
{# If this is not an imported layer then hide the edit ui #}
|
||||
{% if not layerversion.layer_source_id or layerversion.layer_source.sourcetype != layerversion.layer_source.TYPE_IMPORTED %}
|
||||
{% if layerversion.layer_source != layer_source.TYPE_IMPORTED %}
|
||||
<style scoped>
|
||||
.glyphicon-edit {
|
||||
display:none;
|
||||
@@ -69,8 +69,9 @@
|
||||
inCurrentPrj : false,
|
||||
{% endif %}
|
||||
layerdetailurl : "{% url 'layerdetails' project.id layerversion.id %}",
|
||||
sourceId: {{layerversion.layer_source_id|json}},
|
||||
}
|
||||
layer_source: {{layerversion.layer_source|json}},
|
||||
},
|
||||
layerSourceTypes: {{layer_source|json}},
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -89,7 +90,6 @@
|
||||
{% endif %}>({{layerversion.get_vcs_reference|truncatechars:13}})</small>
|
||||
</h1>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<!-- container for tabs -->
|
||||
<div class="col-md-8 tabbable">
|
||||
@@ -277,15 +277,15 @@
|
||||
<span class="glyphicon glyphicon-edit"></span>
|
||||
<span class="glyphicon glyphicon-trash delete-current-value" data-toggle="tooltip" title="Delete"></span>
|
||||
</dd>
|
||||
{% if layerversion.layer.up_id %}
|
||||
{% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX %}
|
||||
<dt>Layer index</dt>
|
||||
<dd>
|
||||
<a href="http://layers.openembedded.org/layerindex/branch/{{layerversion.up_branch.name}}/layer/{{layerversion.layer.name}}">layer index link</a>
|
||||
|
||||
<a href="http://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
|
||||
</dd>
|
||||
{% endif %}
|
||||
</dl>
|
||||
{% if layerversion.layer_source_id and layerversion.layer_source.sourcetype == layerversion.layer_source.TYPE_IMPORTED %}
|
||||
{# Only show delete link for imported layers #}
|
||||
{% if layerversion.layer_source == layer_source.TYPE_IMPORTED %}
|
||||
<i class="icon-trash text-danger"></i>
|
||||
<a href="#delete-layer-modal" role="button" class="text-danger" data-toggle="modal" data-target="#delete-layer-modal">Delete {{layerversion.layer.name}}</a>
|
||||
{% endif %}
|
||||
|
||||
@@ -1,26 +1,9 @@
|
||||
{% load static %}
|
||||
{% load projecttags %}
|
||||
{% load project_url_tag %}
|
||||
{% load humanize %}
|
||||
{% load project_url_tag %}
|
||||
<script src="{% static 'js/mrbsection.js' %}"></script>
|
||||
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
var ctx = {
|
||||
mrbType : "{{mrb_type}}",
|
||||
}
|
||||
|
||||
try {
|
||||
mrbSectionInit(ctx);
|
||||
} catch (e) {
|
||||
document.write("Sorry, An error has occurred loading this page");
|
||||
console.warn(e);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
{% if mru %}
|
||||
|
||||
{% if mrb_type == 'project' %}
|
||||
<h2>
|
||||
Latest project builds
|
||||
@@ -38,144 +21,254 @@
|
||||
<div id="latest-builds">
|
||||
{% for build in mru %}
|
||||
<div data-latest-build-result="{{build.id}}" class="alert build-result {% if build.outcome == build.SUCCEEDED %}alert-success{% elif build.outcome == build.FAILED %}alert-danger{% else %}alert-info{% endif %}">
|
||||
<!-- project title -->
|
||||
{% if mrb_type != 'project' %}
|
||||
<div class="row project-name">
|
||||
<div class="col-md-12">
|
||||
<small>
|
||||
<a class="alert-link text-uppercase" href={% project_url build.project %}>{{build.project.name}}</a>
|
||||
<a class="alert-link text-uppercase" href="{% project_url build.project %}">
|
||||
{{build.project.name}}
|
||||
</a>
|
||||
</small>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="row">
|
||||
<div class="col-md-3">
|
||||
{% if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
|
||||
<a href="{% url 'builddashboard' build.pk %}" class="alert-link">
|
||||
{% endif %}
|
||||
|
||||
{% if build.target_set.all.count > 0 %}
|
||||
<span data-toggle="tooltip"
|
||||
{% if build.target_set.all.count > 1 %}
|
||||
{{build.get_sorted_target_list.0.target}}
|
||||
title="Recipes:
|
||||
{% for target in build.get_sorted_target_list %}
|
||||
{% if target.task %}
|
||||
{{target.target}}:{{target.task}}
|
||||
{% else %}
|
||||
{{target.target}}
|
||||
{% endif %}
|
||||
{% endfor %}"
|
||||
{% endif %}
|
||||
>
|
||||
{% if build.target_set.all.0.task %}
|
||||
{{build.get_sorted_target_list.0.target}}:{{build.target_set.all.0.task}}
|
||||
{% else %}
|
||||
{{build.get_sorted_target_list.0.target}}
|
||||
{% endif %}
|
||||
|
||||
{% if build.target_set.all.count > 1 %}
|
||||
(+{{build.target_set.all.count|add:"-1"}})
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
{% if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
|
||||
</a>
|
||||
{% endif %}
|
||||
<div class="row" data-role="build-status-container">
|
||||
<div class="col-md-12">
|
||||
Loading...
|
||||
</div>
|
||||
|
||||
{% if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
|
||||
<div class="col-md-2">
|
||||
{% if build.completed_on|format_build_date %}
|
||||
{{build.completed_on|date:'d/m/y H:i'}}
|
||||
{% else %}
|
||||
{{ build.completed_on|date:'H:i' }}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
|
||||
<div class="col-md-2">
|
||||
{% if build.errors.count %}
|
||||
<span class="glyphicon glyphicon-minus-sign"></span>
|
||||
<a href="{%url 'builddashboard' build.pk%}#errors" class="alert-link">
|
||||
{{build.errors.count}} error{{build.errors.count|pluralize}}
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="col-md-2">
|
||||
{% if build.warnings.count %}
|
||||
<span class="glyphicon glyphicon-warning-sign build-warnings"></span>
|
||||
<a href="{%url 'builddashboard' build.pk%}#warnings" class="alert-link build-warnings">
|
||||
{{build.warnings.count}} warning{{build.warnings.count|pluralize}}
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="col-md-3">
|
||||
Build time: <a class="alert-link" href="{% url 'buildtime' build.pk %}">{{ build.timespent_seconds|sectohms }}
|
||||
</a>
|
||||
|
||||
{% if build.project.is_default %}
|
||||
<span class="pull-right glyphicon glyphicon-question-sign get-help {% if build.outcome == build.SUCCEEDED %}get-help-green{% elif build.outcome == build.FAILED %}get-help-red{% else %}get-help-blue{% endif %}"
|
||||
title="Builds in this project cannot be started from Toaster: they are started from the command line">
|
||||
</span>
|
||||
{% else %}
|
||||
<a href="#" class="run-again-btn alert-link {% if build.outcome == build.SUCCEEDED %}success{% elif build.outcome == build.FAILED %}danger{% else %}info{% endif %} pull-right"
|
||||
data-request-url="{% url 'xhr_buildrequest' build.project.pk %}"
|
||||
data-target='{{build.target_set.all|get_tasks|json}}'>
|
||||
<span class="glyphicon glyphicon-repeat"></span>
|
||||
Rebuild
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if build.outcome == build.IN_PROGRESS %}
|
||||
<div class="col-md-4" style="display:none" id="cancelling-msg-{{build.buildrequest.pk}}">
|
||||
Cancelling the build...
|
||||
</div>
|
||||
|
||||
<div class="col-md-4 col-md-offset-1 progress-info">
|
||||
<div class="progress" id="build-pc-done-title-{{build.pk}}">
|
||||
<div id="build-pc-done-bar-{{build.pk}}" style="width: {{build.completeper}}%;" class="progress-bar">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="col-md-4 progress-info">
|
||||
<span id="build-pc-done-{{build.pk}}">{{build.completeper}}</span>% of tasks complete
|
||||
{# No build cancel for command line builds project #}
|
||||
{% if build.project.is_default %}
|
||||
<span class="glyphicon glyphicon-question-sign get-help get-help-blue pull-right" title="Builds in this project cannot be cancelled from Toaster: they can only be cancelled from the command line"></span>
|
||||
{% else %}
|
||||
<a href="#" class="cancel-build-btn pull-right alert-link"
|
||||
data-buildrequest-id={{build.buildrequest.pk}}
|
||||
data-request-url="{% url 'xhr_buildrequest' build.project.pk %}">
|
||||
<span class="glyphicon glyphicon-remove-circle"></span>
|
||||
Cancel
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %} {# end if in progress #}
|
||||
|
||||
{% if build.outcome == build.CANCELLED %}
|
||||
<div class="col-md-6">
|
||||
Build cancelled
|
||||
</div>
|
||||
|
||||
<div class="col-md-3">
|
||||
<a href="#" class="info pull-right run-again-btn alert-link"
|
||||
data-request-url="{% url 'xhr_buildrequest' build.project.pk %}"
|
||||
data-target='{{build.target_set.all|get_tasks|json}}'>
|
||||
<span class="glyphicon glyphicon-repeat"></span>
|
||||
Rebuild
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
<!-- build main template -->
|
||||
<script id="build-template" type="text/x-jsrender">
|
||||
<div class="col-md-3">
|
||||
<!-- only show link for completed builds -->
|
||||
<%if state == 'Succeeded' || state == 'Failed'%>
|
||||
<a class="alert-link" href="<%:dashboard_url%>">
|
||||
<span data-toggle="tooltip" data-role="targets-text" title="Recipes: <%:targets%>">
|
||||
<%:targets_abbreviated%>
|
||||
</span>
|
||||
</a>
|
||||
<%else targets_abbreviated !== ''%>
|
||||
<span data-toggle="tooltip" data-role="targets-text" title="Recipes: <%:targets%>">
|
||||
<%:targets_abbreviated%>
|
||||
</span>
|
||||
<%else%>
|
||||
Fetching recipe names...
|
||||
<%/if%>
|
||||
</div>
|
||||
|
||||
<div data-build-state="<%:state%>">
|
||||
<%if state == 'Parsing'%>
|
||||
<%include tmpl='#parsing-recipes-build-template'/%>
|
||||
<%else state == 'Queued'%>
|
||||
<%include tmpl='#queued-build-template'/%>
|
||||
<%else state == 'Succeeded' || state == 'Failed'%>
|
||||
<%include tmpl='#succeeded-or-failed-build-template'/%>
|
||||
<%else state == 'Cancelling'%>
|
||||
<%include tmpl='#cancelling-build-template'/%>
|
||||
<%else state == 'Starting'%>
|
||||
<%include tmpl='#starting-template'/%>
|
||||
<%else state == 'In Progress'%>
|
||||
<%include tmpl='#in-progress-build-template'/%>
|
||||
<%else state == 'Cancelled'%>
|
||||
<%include tmpl='#cancelled-build-template'/%>
|
||||
<%/if%>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- queued build -->
|
||||
<script id="queued-build-template" type="text/x-jsrender">
|
||||
<div class="col-md-5">
|
||||
<span class="glyphicon glyphicon-question-sign get-help get-help-blue"
|
||||
title="This build is waiting for the build directory to become available">
|
||||
</span>
|
||||
|
||||
Build queued
|
||||
</div>
|
||||
|
||||
<div class="col-md-4">
|
||||
<!-- cancel button -->
|
||||
<%include tmpl='#cancel-template'/%>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- parsing recipes build -->
|
||||
<script id="parsing-recipes-build-template" type="text/x-jsrender">
|
||||
<!-- progress bar and parse completion percentage -->
|
||||
<div data-role="build-status" class="col-md-4 col-md-offset-1 progress-info">
|
||||
<!-- progress bar -->
|
||||
<div class="progress">
|
||||
<div id="recipes-parsed-percentage-bar-<%:id%>"
|
||||
style="width: <%:recipes_parsed_percentage%>%;"
|
||||
class="progress-bar">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="col-md-4 progress-info">
|
||||
<!-- parse completion percentage -->
|
||||
<span class="glyphicon glyphicon-question-sign get-help get-help-blue"
|
||||
title="BitBake is parsing the layers required for your build">
|
||||
</span>
|
||||
|
||||
Parsing <span id="recipes-parsed-percentage-<%:id%>"><%:recipes_parsed_percentage%></span>% complete
|
||||
|
||||
<%include tmpl='#cancel-template'/%>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- in progress build; tasks still starting -->
|
||||
<script id="starting-template" type="text/x-jsrender">
|
||||
<div class="col-md-5">
|
||||
<span class="glyphicon glyphicon-question-sign get-help get-help-blue"
|
||||
title="This build is waiting for tasks to start">
|
||||
</span>
|
||||
|
||||
Tasks starting...
|
||||
</div>
|
||||
|
||||
<div class="col-md-4">
|
||||
<!-- cancel button -->
|
||||
<%include tmpl='#cancel-template'/%>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- in progress build; at least one task finished -->
|
||||
<script id="in-progress-build-template" type="text/x-jsrender">
|
||||
<!-- progress bar and task completion percentage -->
|
||||
<div data-role="build-status" class="col-md-4 col-md-offset-1 progress-info">
|
||||
<!-- progress bar -->
|
||||
<div class="progress" id="build-pc-done-title-<%:id%>">
|
||||
<div id="build-pc-done-bar-<%:id%>"
|
||||
style="width: <%:tasks_complete_percentage%>%;"
|
||||
class="progress-bar">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="col-md-4 progress-info">
|
||||
<!-- task completion percentage -->
|
||||
<span id="build-pc-done-<%:id%>"><%:tasks_complete_percentage%></span>% of
|
||||
tasks complete
|
||||
|
||||
<!-- cancel button -->
|
||||
<%include tmpl='#cancel-template'/%>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- cancelling build -->
|
||||
<script id="cancelling-build-template" type="text/x-jsrender">
|
||||
<div class="col-md-9">
|
||||
Cancelling the build...
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- succeeded or failed build -->
|
||||
<script id="succeeded-or-failed-build-template" type="text/x-jsrender">
|
||||
<!-- completed_on -->
|
||||
<div class="col-md-2">
|
||||
<%:completed_on%>
|
||||
</div>
|
||||
|
||||
<!-- errors -->
|
||||
<div class="col-md-2">
|
||||
<%if errors%>
|
||||
<span class="glyphicon glyphicon-minus-sign"></span>
|
||||
<a href="<%:dashboard_errors_url%>" class="alert-link">
|
||||
<%:errors%> error<%:errors_pluralise%>
|
||||
</a>
|
||||
<%/if%>
|
||||
</div>
|
||||
|
||||
<!-- warnings -->
|
||||
<div class="col-md-2">
|
||||
<%if warnings%>
|
||||
<span class="glyphicon glyphicon-warning-sign build-warnings"></span>
|
||||
<a href="<%:dashboard_warnings_url%>" class="alert-link build-warnings">
|
||||
<%:warnings%> warning<%:warnings_pluralise%>
|
||||
</a>
|
||||
<%/if%>
|
||||
</div>
|
||||
|
||||
<!-- build time -->
|
||||
<div class="col-md-3">
|
||||
Build time:
|
||||
|
||||
<span data-role="data-recent-build-buildtime-field">
|
||||
<%if state == 'Succeeded'%>
|
||||
<a class="alert-link" href="<%:buildtime_url%>"><%:buildtime%></a>
|
||||
<%else%>
|
||||
<%:buildtime%>
|
||||
<%/if%>
|
||||
</span>
|
||||
|
||||
<!-- rebuild button -->
|
||||
<%include tmpl='#rebuild-template'/%>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- cancelled build -->
|
||||
<script id="cancelled-build-template" type="text/x-jsrender">
|
||||
<!-- build cancelled message -->
|
||||
<div class="col-md-6">
|
||||
Build cancelled
|
||||
</div>
|
||||
|
||||
<!-- rebuild button -->
|
||||
<div class="col-md-3">
|
||||
<%include tmpl='#rebuild-template'/%>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<!-- rebuild button or no rebuild icon -->
|
||||
<script id="rebuild-template" type="text/x-jsrender">
|
||||
<%if is_default_project_build%>
|
||||
<!-- no rebuild info icon -->
|
||||
<span class="pull-right glyphicon glyphicon-question-sign get-help <%if state == 'Succeeded'%>get-help-green<%else state == 'Failed'%>get-help-red<%else%>get-help-blue<%/if%>"
|
||||
title="Builds in this project cannot be started from Toaster: they are started from the command line">
|
||||
</span>
|
||||
<%else%>
|
||||
<!-- rebuild button -->
|
||||
<span class="rebuild-btn alert-link <%if state == 'Success'%>success<%else state == 'Failed'%>danger<%else%>info<%/if%> pull-right"
|
||||
data-request-url="<%:rebuild_url%>" data-target='<%:build_targets_json%>'>
|
||||
<span class="glyphicon glyphicon-repeat"></span>
|
||||
Rebuild
|
||||
</span>
|
||||
<%/if%>
|
||||
</script>
|
||||
|
||||
<!-- cancel button or no cancel icon -->
|
||||
<script id="cancel-template" type="text/x-jsrender">
|
||||
<%if is_default_project_build%>
|
||||
<!-- no cancel icon -->
|
||||
<span class="glyphicon glyphicon-question-sign get-help get-help-blue pull-right" title="Builds in this project cannot be cancelled from Toaster: they can only be cancelled from the command line"></span>
|
||||
<%else%>
|
||||
<!-- cancel button -->
|
||||
<span class="cancel-build-btn pull-right alert-link"
|
||||
data-buildrequest-id="<%:buildrequest_id%>" data-request-url="<%:cancel_url%>">
|
||||
<span class="glyphicon glyphicon-remove-circle"></span>
|
||||
Cancel
|
||||
</span>
|
||||
<%/if%>
|
||||
</script>
|
||||
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
var ctx = {
|
||||
mrbType : "{{mrb_type}}",
|
||||
}
|
||||
|
||||
try {
|
||||
mrbSectionInit(ctx);
|
||||
} catch (e) {
|
||||
document.write("Sorry, An error has occurred loading this page");
|
||||
console.warn(e);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -48,7 +48,12 @@
|
||||
</div>
|
||||
|
||||
<div class="modal-footer">
|
||||
<button id="create-new-custom-image-btn" class="btn btn-primary btn-lg" data-original-title="" title="" disabled>Create custom image</button>
|
||||
<button id="create-new-custom-image-btn" class="btn btn-primary btn-large" disabled>
|
||||
<span data-role="submit-state">Create custom image</span>
|
||||
<span data-role="loading-state" style="display:none">
|
||||
<i class="fa-pulse icon-spinner"></i> Creating custom image...
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -133,19 +133,33 @@
|
||||
{% endcomment %}
|
||||
{% endif %}
|
||||
</dd>
|
||||
{% if package.recipe.layer_version.branch %}
|
||||
<dt>
|
||||
Layer branch
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The Git branch of the layer providing the recipe that builds this package"></span>
|
||||
{%if package.recipe.layer_version.layer.local_source_dir %}
|
||||
<dd>
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The source code of {{package.recipe.layer_version.layer.name}} is not in a Git repository, so there is no branch associated with it"></span>
|
||||
</dd>
|
||||
{% endif %}
|
||||
</dt>
|
||||
{% if not package.recipe.layer_version.layer.local_source_dir %}
|
||||
<dd>{{package.recipe.layer_version.branch}}</dd>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
<dt>
|
||||
Layer commit
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The Git commit of the layer providing the recipe that builds this package"></span>
|
||||
{%if package.recipe.layer_version.layer.local_source_dir %}
|
||||
<dd>
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The source code of {{package.recipe.layer_version.layer.name}} is not in a Git repository, so there is no commit associated with it"></span>
|
||||
</dd>
|
||||
{% endif %}
|
||||
</dt>
|
||||
|
||||
{% if not package.recipe.layer_version.layer.local_source_dir %}
|
||||
<dd class="iscommit">{{package.recipe.layer_version.commit}}</dd>
|
||||
{% endif %}
|
||||
|
||||
</dl>
|
||||
</div> <!-- end well -->
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
<h2 class="top-air" data-role="page-title"></h2>
|
||||
|
||||
{% if not build_in_progress_none_completed %}
|
||||
{% if not build_in_progress_none_completed %}
|
||||
{% url 'projectbuilds' project.id as xhr_table_url %}
|
||||
{% include 'toastertable.html' %}
|
||||
{% endif %}
|
||||
|
||||
@@ -69,21 +69,37 @@
|
||||
Recipe file
|
||||
</dt>
|
||||
<dd><code>{{object.file_path}} {% if object.pathflags %}<i>({{object.pathflags}})</i>{% endif %}</code></dd>
|
||||
{% if layer_version.branch %}
|
||||
<dt>
|
||||
<span class="glyphicon glyphicon-question-sign get-help"
|
||||
title="The Git branch of the layer providing the
|
||||
recipe"></span>
|
||||
Layer branch
|
||||
</dt>
|
||||
{% if layer_version.layer.local_source_dir %}
|
||||
<dd>
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The source
|
||||
code of {{layer_version.layer.name}} is not in a Git repository,
|
||||
so there is no branch associated with it"></span>
|
||||
</dd>
|
||||
{% else %}
|
||||
<dd>{{layer_version.branch}}</dd>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
<dt>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The Git
|
||||
commit of the layer providing the recipe"></span>
|
||||
Layer commit
|
||||
</dt>
|
||||
{% if layer_version.layer.local_source_dir %}
|
||||
<dd>
|
||||
<span class="text-muted">Not applicable</span>
|
||||
<span class="glyphicon glyphicon-question-sign get-help" title="The source
|
||||
code of {{layer_version.layer.name}} is not in a Git repository,
|
||||
so there is no commit associated with it"></span>
|
||||
</dd>
|
||||
{% else %}
|
||||
<dd class="iscommit">{{layer_version.commit}}</dd>
|
||||
{% endif %}
|
||||
{% if object.provides_set.all %}
|
||||
<dt>
|
||||
<span class="glyphicon glyphicon-question-sign get-help"
|
||||
|
||||
@@ -25,8 +25,10 @@
|
||||
{%if task.task_executed %}
|
||||
{# executed tasks outcome #}
|
||||
{% if task.logfile %}
|
||||
<a class="btn btn-default btn-lg" href="{% url 'build_artifact' build.id
|
||||
"tasklogfile" task.pk %}">Download task log</a>
|
||||
<a class="btn btn-default btn-lg"
|
||||
href="{% url 'build_artifact' build.id 'tasklogfile' task.pk %}">
|
||||
Download task log
|
||||
</a>
|
||||
{% endif %}
|
||||
{# show stack trace for failed task #}
|
||||
{% if task.outcome == task.OUTCOME_FAILED and log_head %}
|
||||
@@ -156,8 +158,10 @@ this prebuilt task is reusing"></span></a>
|
||||
{%elif task.outcome == task.OUTCOME_CACHED%}
|
||||
{% for t in task.get_related_setscene %}
|
||||
{% if forloop.last %}
|
||||
<a class="btn btn-default btn-lg" href="{% url
|
||||
'build_artifact' build.id "tasklogfile" t.pk %}">Download task log</a>
|
||||
<a class="btn btn-default btn-lg"
|
||||
href="{% url 'build_artifact' build.id "tasklogfile" t.pk %}">
|
||||
Download task log
|
||||
</a>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
|
||||
@@ -270,14 +270,6 @@ def get_dict_value(dictionary, key):
|
||||
except (KeyError, IndexError):
|
||||
return ''
|
||||
|
||||
@register.filter
|
||||
def format_build_date(completed_on):
|
||||
now = timezone.now()
|
||||
delta = now - completed_on
|
||||
|
||||
if delta.days >= 1:
|
||||
return True
|
||||
|
||||
@register.filter
|
||||
def is_shaid(text):
|
||||
""" return True if text length is 40 characters and all hex-digits
|
||||
|
||||
@@ -28,10 +28,10 @@ from django.utils import timezone
|
||||
from django.db.models import Q
|
||||
|
||||
from orm.models import Project, Release, BitbakeVersion, Package, LogMessage
|
||||
from orm.models import ReleaseLayerSourcePriority, LayerSource, Layer, Build
|
||||
from orm.models import LayerSource, Layer, Build
|
||||
from orm.models import Layer_Version, Recipe, Machine, ProjectLayer, Target
|
||||
from orm.models import CustomImageRecipe, ProjectVariable
|
||||
from orm.models import Branch, CustomImagePackage
|
||||
from orm.models import CustomImagePackage
|
||||
|
||||
import toastermain
|
||||
import inspect
|
||||
@@ -149,12 +149,12 @@ class ViewTests(TestCase):
|
||||
|
||||
def test_xhr_import_layer(self):
|
||||
"""Test xhr_importlayer API"""
|
||||
LayerSource.objects.create(sourcetype=LayerSource.TYPE_IMPORTED)
|
||||
#Test for importing an already existing layer
|
||||
args = {'vcs_url' : "git://git.example.com/test",
|
||||
'name' : "base-layer",
|
||||
'git_ref': "c12b9596afd236116b25ce26dbe0d793de9dc7ce",
|
||||
'project_id': self.project.id,
|
||||
'local_source_dir': "",
|
||||
'dir_path' : "/path/in/repository"}
|
||||
response = self.client.post(reverse('xhr_importlayer'), args)
|
||||
data = json.loads(response.content.decode('utf-8'))
|
||||
|
||||
@@ -55,6 +55,7 @@ class LayersTypeAhead(ToasterTypeAhead):
|
||||
'vcs_url' : layer_version.layer.vcs_url,
|
||||
'vcs_reference' : vcs_reference,
|
||||
'detail' : detail,
|
||||
'local_source_dir' : layer_version.layer.local_source_dir,
|
||||
}
|
||||
|
||||
results.append(needed_fields)
|
||||
|
||||
@@ -214,6 +214,9 @@ urlpatterns = patterns('toastergui.views',
|
||||
api.XhrBuildRequest.as_view(),
|
||||
name='xhr_buildrequest'),
|
||||
|
||||
url(r'^mostrecentbuilds$', api.MostRecentBuildsView.as_view(),
|
||||
name='most_recent_builds'),
|
||||
|
||||
# default redirection
|
||||
url(r'^$', RedirectView.as_view(url='landing', permanent=True)),
|
||||
)
|
||||
|
||||
@@ -161,7 +161,7 @@ def _lv_to_dict(prj, x = None):
|
||||
return {"id": x.pk,
|
||||
"name": x.layer.name,
|
||||
"tooltip": "%s | %s" % (x.layer.vcs_url,x.get_vcs_reference()),
|
||||
"detail": "(%s" % x.layer.vcs_url + (")" if x.up_branch == None else " | "+x.get_vcs_reference()+")"),
|
||||
"detail": "(%s" % x.layer.vcs_url + (")" if x.release == None else " | "+x.get_vcs_reference()+")"),
|
||||
"giturl": x.layer.vcs_url,
|
||||
"layerdetailurl" : reverse('layerdetails', args=(prj.id,x.pk)),
|
||||
"revision" : x.get_vcs_reference(),
|
||||
@@ -822,11 +822,21 @@ def _find_task_dep(task_object):
|
||||
def _find_task_revdep(task_object):
|
||||
tdeps = Task_Dependency.objects.filter(depends_on=task_object).filter(task__order__gt=0)
|
||||
tdeps = tdeps.exclude(task__outcome = Task.OUTCOME_NA).select_related("task", "task__recipe", "task__build")
|
||||
|
||||
# exclude self-dependencies to prevent infinite dependency loop
|
||||
# in generateCoveredList2()
|
||||
tdeps = tdeps.exclude(task=task_object)
|
||||
|
||||
return [tdep.task for tdep in tdeps]
|
||||
|
||||
def _find_task_revdep_list(tasklist):
|
||||
tdeps = Task_Dependency.objects.filter(depends_on__in=tasklist).filter(task__order__gt=0)
|
||||
tdeps = tdeps.exclude(task__outcome=Task.OUTCOME_NA).select_related("task", "task__recipe", "task__build")
|
||||
|
||||
# exclude self-dependencies to prevent infinite dependency loop
|
||||
# in generateCoveredList2()
|
||||
tdeps = tdeps.exclude(task=F('depends_on'))
|
||||
|
||||
return [tdep.task for tdep in tdeps]
|
||||
|
||||
def _find_task_provider(task_object):
|
||||
@@ -1288,7 +1298,7 @@ if True:
|
||||
from django.contrib.auth import authenticate, login
|
||||
from django.contrib.auth.decorators import login_required
|
||||
|
||||
from orm.models import Branch, LayerSource, ToasterSetting, Release, Machine, LayerVersionDependency
|
||||
from orm.models import LayerSource, ToasterSetting, Release, Machine, LayerVersionDependency
|
||||
from bldcontrol.models import BuildRequest
|
||||
|
||||
import traceback
|
||||
@@ -1450,11 +1460,10 @@ if True:
|
||||
freqtargets = tmp
|
||||
|
||||
layers = [{"id": x.layercommit.pk, "orderid": x.pk, "name" : x.layercommit.layer.name,
|
||||
"vcs_url": x.layercommit.layer.vcs_url, "vcs_reference" : x.layercommit.get_vcs_reference(),
|
||||
"vcs_url": x.layercommit.layer.vcs_url, "local_source_dir": x.layercommit.layer.local_source_dir, "vcs_reference" : x.layercommit.get_vcs_reference(),
|
||||
"url": x.layercommit.layer.layer_index_url, "layerdetailurl": x.layercommit.get_detailspage_url(prj.pk),
|
||||
# This branch name is actually the release
|
||||
"branch" : {"name" : x.layercommit.get_vcs_reference(),
|
||||
"layersource" : x.layercommit.up_branch.layer_source.name if x.layercommit.up_branch != None else None}
|
||||
"layersource" : x.layercommit.layer_source }
|
||||
} for x in prj.projectlayer_set.all().order_by("id")]
|
||||
|
||||
context = {
|
||||
@@ -1662,18 +1671,12 @@ if True:
|
||||
prj = Project.objects.get(pk=request.POST['project_id'])
|
||||
|
||||
# Strip trailing/leading whitespace from all values
|
||||
# put into a new dict because POST one is immutable
|
||||
# put into a new dict because POST one is immutable.
|
||||
post_data = dict()
|
||||
for key,val in request.POST.items():
|
||||
post_data[key] = val.strip()
|
||||
|
||||
|
||||
# We need to know what release the current project is so that we
|
||||
# can set the imported layer's up_branch_id
|
||||
prj_branch_name = Release.objects.get(pk=prj.release_id).branch_name
|
||||
up_branch, branch_created = Branch.objects.get_or_create(name=prj_branch_name, layer_source_id=LayerSource.TYPE_IMPORTED)
|
||||
|
||||
layer_source = LayerSource.objects.get(sourcetype=LayerSource.TYPE_IMPORTED)
|
||||
try:
|
||||
layer, layer_created = Layer.objects.get_or_create(name=post_data['name'])
|
||||
except MultipleObjectsReturned:
|
||||
@@ -1681,8 +1684,8 @@ if True:
|
||||
|
||||
if layer:
|
||||
if layer_created:
|
||||
layer.layer_source = layer_source
|
||||
layer.vcs_url = post_data['vcs_url']
|
||||
layer.vcs_url = post_data.get('vcs_url')
|
||||
layer.local_source_dir = post_data.get('local_source_dir')
|
||||
layer.up_date = timezone.now()
|
||||
layer.save()
|
||||
else:
|
||||
@@ -1692,12 +1695,24 @@ if True:
|
||||
if layer.vcs_url != post_data['vcs_url']:
|
||||
return HttpResponse(jsonfilter({"error": "hint-layer-exists-with-different-url" , "current_url" : layer.vcs_url, "current_id": layer.id }), content_type = "application/json")
|
||||
|
||||
|
||||
layer_version, version_created = Layer_Version.objects.get_or_create(layer_source=layer_source, layer=layer, project=prj, up_branch_id=up_branch.id,branch=post_data['git_ref'], commit=post_data['git_ref'], dirpath=post_data['dir_path'])
|
||||
layer_version, version_created = \
|
||||
Layer_Version.objects.get_or_create(
|
||||
layer_source=LayerSource.TYPE_IMPORTED,
|
||||
layer=layer, project=prj,
|
||||
release=prj.release,
|
||||
branch=post_data['git_ref'],
|
||||
commit=post_data['git_ref'],
|
||||
dirpath=post_data['dir_path'])
|
||||
|
||||
if layer_version:
|
||||
if not version_created:
|
||||
return HttpResponse(jsonfilter({"error": "hint-layer-version-exists", "existing_layer_version": layer_version.id }), content_type = "application/json")
|
||||
return HttpResponse(jsonfilter({"error":
|
||||
"hint-layer-version-exists",
|
||||
"existing_layer_version":
|
||||
layer_version.id }),
|
||||
content_type = "application/json")
|
||||
|
||||
layer_version.layer_source = LayerSource.TYPE_IMPORTED
|
||||
|
||||
layer_version.up_date = timezone.now()
|
||||
layer_version.save()
|
||||
@@ -1752,7 +1767,6 @@ if True:
|
||||
|
||||
return HttpResponse(jsonfilter(json_response), content_type = "application/json")
|
||||
|
||||
|
||||
@xhr_response
|
||||
def xhr_customrecipe(request):
|
||||
"""
|
||||
@@ -2179,20 +2193,33 @@ if True:
|
||||
}
|
||||
return render(request, template, context)
|
||||
|
||||
# TODO: merge with the API; the pseudo-API here is used for the deps modal
|
||||
@_template_renderer('layerdetails.html')
|
||||
def layerdetails(request, pid, layerid):
|
||||
project = Project.objects.get(pk=pid)
|
||||
layer_version = Layer_Version.objects.get(pk=layerid)
|
||||
|
||||
context = {'project' : project,
|
||||
'layerversion' : layer_version,
|
||||
'layerdeps' : {"list": [{"id": dep.id,
|
||||
"name": dep.layer.name,
|
||||
"layerdetailurl": reverse('layerdetails', args=(pid, dep.pk)),
|
||||
"vcs_url": dep.layer.vcs_url,
|
||||
"vcs_reference": dep.get_vcs_reference()} \
|
||||
for dep in layer_version.get_alldeps(project.id)]},
|
||||
'projectlayers': [player.layercommit.id for player in ProjectLayer.objects.filter(project=project)]
|
||||
project_layers = ProjectLayer.objects.filter(
|
||||
project=project).values_list("layercommit_id",
|
||||
flat=True)
|
||||
|
||||
context = {
|
||||
'project': project,
|
||||
'layer_source': LayerSource.types_dict(),
|
||||
'layerversion': layer_version,
|
||||
'layerdeps': {
|
||||
"list": [
|
||||
{
|
||||
"id": dep.id,
|
||||
"name": dep.layer.name,
|
||||
"layerdetailurl": reverse('layerdetails',
|
||||
args=(pid, dep.pk)),
|
||||
"vcs_url": dep.layer.vcs_url,
|
||||
"vcs_reference": dep.get_vcs_reference()
|
||||
}
|
||||
for dep in layer_version.get_alldeps(project.id)]
|
||||
},
|
||||
'projectlayers': list(project_layers)
|
||||
}
|
||||
|
||||
return context
|
||||
|
||||
@@ -194,7 +194,14 @@
|
||||
class or the
|
||||
<link linkend='ref-classes-package'><filename>package</filename></link>
|
||||
class.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The class also contains some commonly used functions such as
|
||||
<filename>oe_runmake</filename>, which runs
|
||||
<filename>make</filename> with the arguments specified in the
|
||||
<link linkend='var-EXTRA_OEMAKE'><filename>EXTRA_OEMAKE</filename></link>
|
||||
variable, as well as the arguments passed directly to
|
||||
<filename>oe_runmake</filename>.
|
||||
</para>
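<para>
As a purely illustrative sketch (the variable values and the
"all" target below are hypothetical, not taken from any particular
recipe), a recipe might pass extra arguments to
<filename>make</filename> through <filename>EXTRA_OEMAKE</filename>
and then call <filename>oe_runmake</filename> from one of its tasks:
<literallayout class='monospaced'>
# Arguments handed to every oe_runmake invocation
EXTRA_OEMAKE = "'CC=${CC}' 'PREFIX=${prefix}'"

do_compile () {
    # oe_runmake runs make with EXTRA_OEMAKE plus the
    # arguments given here ("all" in this sketch)
    oe_runmake all
}
</literallayout>
</para>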
|
||||
</section>
|
||||
@@ -1599,6 +1606,17 @@
|
||||
<link linkend='var-FILES'><filename>FILES</filename></link>
|
||||
variable values that contain "//", which is invalid.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>host-user-contaminated:</filename></emphasis>
|
||||
Checks that no package produced by the recipe contains any
|
||||
files outside of <filename>/home</filename> with a user or
|
||||
group ID that matches the user running BitBake.
|
||||
A match usually indicates that the files are being installed
|
||||
with an incorrect UID/GID, since target IDs are independent
|
||||
from host IDs.
|
||||
For additional information, see the section describing the
|
||||
<link linkend='ref-tasks-install'><filename>do_install</filename></link>
|
||||
task.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>incompatible-license:</filename></emphasis>
|
||||
Report when packages are excluded from being created due to
|
||||
being marked with a license that is in
|
||||
@@ -1626,6 +1644,25 @@
|
||||
<filename>do_install</filename> if the files are not
|
||||
needed in any package.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>invalid-chars:</filename></emphasis>
|
||||
Checks that the recipe metadata variables
|
||||
<link linkend='var-DESCRIPTION'><filename>DESCRIPTION</filename></link>,
|
||||
<link linkend='var-SUMMARY'><filename>SUMMARY</filename></link>,
|
||||
<link linkend='var-LICENSE'><filename>LICENSE</filename></link>,
|
||||
and
|
||||
<link linkend='var-SECTION'><filename>SECTION</filename></link>
|
||||
do not contain non-UTF-8 characters.
|
||||
Some package managers do not support such characters.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>invalid-packageconfig:</filename></emphasis>
|
||||
Checks that no undefined features are being added to
|
||||
<link linkend='var-PACKAGECONFIG'><filename>PACKAGECONFIG</filename></link>.
|
||||
For example, any name "foo" for which the following form
|
||||
does not exist:
|
||||
<literallayout class='monospaced'>
|
||||
PACKAGECONFIG[foo] = "..."
|
||||
</literallayout>
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>la:</filename></emphasis>
|
||||
Checks <filename>.la</filename> files for any <filename>TMPDIR</filename>
|
||||
paths.
|
||||
|
||||
@@ -35,43 +35,23 @@
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='ref-tasks-checkpkg'>
|
||||
<title><filename>do_checkpkg</filename></title>
|
||||
|
||||
<para>
|
||||
Provides information about the recipe including its upstream
|
||||
version and status.
|
||||
The upstream version and status reveal whether or not a version
|
||||
of the recipe exists upstream, and whether its status is not updated, updated,
|
||||
or unknown.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>checkpkg</filename> task is included as part of the
|
||||
<link linkend='ref-classes-distrodata'><filename>distrodata</filename></link>
|
||||
class.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To build the <filename>checkpkg</filename> task, use the
|
||||
<filename>bitbake</filename> command with the "-c" option and
|
||||
task name:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake core-image-minimal -c checkpkg
|
||||
</literallayout>
|
||||
By default, the results are stored in
|
||||
<link linkend='var-LOG_DIR'><filename>$LOG_DIR</filename></link>
|
||||
(e.g. <filename>$BUILD_DIR/tmp/log</filename>).
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='ref-tasks-compile'>
|
||||
<title><filename>do_compile</filename></title>
|
||||
|
||||
<para>
|
||||
Compiles the source in the compilation directory, which is pointed
|
||||
to by the
|
||||
<link linkend='var-B'><filename>B</filename></link> variable.
|
||||
Compiles the source code.
|
||||
This task runs with the current working directory set
|
||||
to
|
||||
<filename>${</filename><link linkend='var-B'><filename>B</filename></link><filename>}</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The default behavior of this task is to run the
|
||||
<filename>oe_runmake</filename> function if a makefile
|
||||
(<filename>Makefile</filename>, <filename>makefile</filename>,
|
||||
or <filename>GNUmakefile</filename>) is found.
|
||||
If no such file is found, the <filename>do_compile</filename>
|
||||
task does nothing.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
@@ -90,6 +70,20 @@
|
||||
<para>
|
||||
Configures the source by enabling and disabling any build-time and
|
||||
configuration options for the software being built.
|
||||
The task runs with the current working directory set to
|
||||
<filename>${</filename><link linkend='var-B'><filename>B</filename></link><filename>}</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The default behavior of this task is to run
|
||||
<filename>oe_runmake clean</filename> if a makefile
|
||||
(<filename>Makefile</filename>, <filename>makefile</filename>,
|
||||
or <filename>GNUmakefile</filename>) is found and
|
||||
<link linkend='var-CLEANBROKEN'><filename>CLEANBROKEN</filename></link>
|
||||
is not set to "1".
|
||||
If no such file is found or the <filename>CLEANBROKEN</filename>
|
||||
variable is set to "1", the <filename>do_configure</filename>
|
||||
task does nothing.
|
||||
</para>
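<para>
For example (an illustrative sketch only), a recipe whose
"make clean" is known to fail can skip the clean step by setting
the following in the recipe:
<literallayout class='monospaced'>
CLEANBROKEN = "1"
</literallayout>
</para>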
|
||||
</section>
|
||||
|
||||
@@ -106,18 +100,62 @@
|
||||
<title><filename>do_deploy</filename></title>
|
||||
|
||||
<para>
|
||||
Writes output files that are to be deployed to the deploy
|
||||
directory, which is defined by the
|
||||
<link linkend='var-DEPLOYDIR'><filename>DEPLOYDIR</filename></link>
|
||||
variable.
|
||||
Writes output files that are to be deployed to
|
||||
<filename>${</filename><link linkend='var-DEPLOY_DIR_IMAGE'><filename>DEPLOY_DIR_IMAGE</filename></link><filename>}</filename>.
|
||||
The task runs with the current working directory set to
|
||||
<filename>${</filename><link linkend='var-B'><filename>B</filename></link><filename>}</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>do_deploy</filename> task is a
|
||||
shared state (sstate) task, which means that the task can
|
||||
be accelerated through sstate use.
|
||||
Realize also that if the task is re-executed, any previous output
|
||||
is removed (i.e. "cleaned").
|
||||
Recipes implementing this task should inherit the
|
||||
<link linkend='ref-classes-deploy'><filename>deploy</filename></link>
|
||||
class and should write the output to
|
||||
<filename>${</filename><link linkend='var-DEPLOYDIR'><filename>DEPLOYDIR</filename></link><filename>}</filename>,
|
||||
which is not to be confused with
|
||||
<filename>${</filename><link linkend='var-DEPLOY_DIR'><filename>DEPLOY_DIR</filename></link><filename>}</filename>.
|
||||
The <filename>deploy</filename> class sets up
|
||||
<filename>do_deploy</filename> as a shared state (sstate) task that
|
||||
can be accelerated through sstate use.
|
||||
The sstate mechanism takes care of copying the output from
|
||||
<filename>${DEPLOYDIR}</filename> to
|
||||
<filename>${DEPLOY_DIR_IMAGE}</filename>.
|
||||
<note>
|
||||
<title>Caution</title>
|
||||
Do not write the output directly to
|
||||
<filename>${DEPLOY_DIR_IMAGE}</filename>, as this causes
|
||||
the sstate mechanism to malfunction.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>do_deploy</filename> task is not added as a task
|
||||
by default and consequently needs to be added manually.
|
||||
If you want the task to run after
|
||||
<link linkend='ref-tasks-compile'><filename>do_compile</filename></link>,
|
||||
you can add it by doing the following:
|
||||
<literallayout class='monospaced'>
|
||||
addtask deploy after do_compile
|
||||
</literallayout>
|
||||
Adding <filename>do_deploy</filename> after other tasks works the
|
||||
same way.
|
||||
<note>
|
||||
You do not need to add <filename>before do_build</filename>
|
||||
to the <filename>addtask</filename> command (though it is
|
||||
harmless), because the
|
||||
<link linkend='ref-classes-base'><filename>base</filename></link>
|
||||
class contains the following:
|
||||
<literallayout class='monospaced'>
|
||||
do_build[recrdeptask] += "do_deploy"
|
||||
</literallayout>
|
||||
See the
|
||||
"<ulink url='&YOCTO_DOCS_BB_URL;#dependencies'>Dependencies</ulink>"
|
||||
section in the BitBake User Manual for more information.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If the <filename>do_deploy</filename> task re-executes, any
|
||||
previous output is removed (i.e. "cleaned").
|
||||
</para>
|
||||
</section>
|
||||
|
||||
@@ -217,11 +255,50 @@
|
||||
<title><filename>do_install</filename></title>
|
||||
|
||||
<para>
|
||||
Copies files from the compilation directory, which is defined by
|
||||
the
|
||||
<link linkend='var-B'><filename>B</filename></link> variable,
|
||||
to a holding area defined by the
|
||||
<link linkend='var-D'><filename>D</filename></link> variable.
|
||||
Copies files that are to be packaged into the holding area
|
||||
<filename>${</filename><link linkend='var-D'><filename>D</filename></link><filename>}</filename>.
|
||||
This task runs with the current working directory set to
|
||||
<filename>${</filename><link linkend='var-B'><filename>B</filename></link><filename>}</filename>,
|
||||
which is the compilation directory.
|
||||
<note>
|
||||
<title>Caution</title>
|
||||
|
||||
<para>
|
||||
When installing files, be careful not to set the owner and
|
||||
group IDs of the installed files to unintended values.
|
||||
Some methods of copying files, notably when using the
|
||||
recursive <filename>cp</filename> command, can preserve the
|
||||
UID and/or GID of the original file, which is usually not
|
||||
what you want.
|
||||
The
|
||||
<link linkend='ref-classes-insane'><filename>host-user-contaminated</filename></link>
|
||||
QA check looks for files that probably have the wrong
|
||||
ownership.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Safe methods for installing files include the following:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
The <filename>install</filename> utility.
|
||||
This utility is the preferred method.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
The <filename>cp</filename> command with the
|
||||
"--no-preserve=ownership" option.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
The <filename>tar</filename> command with the
|
||||
"--no-same-owner" option.
|
||||
See the <filename>bin_package.bbclass</filename>
|
||||
file in the <filename>meta/classes</filename>
|
||||
directory of the
|
||||
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>
|
||||
for an example.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</note>
|
||||
</para>
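<para>
As a minimal sketch (the "myapp" binary name is made up for
illustration), a <filename>do_install</filename> that relies on
the <filename>install</filename> utility might look like this:
<literallayout class='monospaced'>
do_install () {
    # Create the destination inside the holding area ${D}
    install -d ${D}${bindir}
    # install sets explicit permissions and does not carry the
    # build user's UID/GID over to the installed file
    install -m 0755 myapp ${D}${bindir}/myapp
}
</literallayout>
</para>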
|
||||
</section>
|
||||
|
||||
@@ -238,8 +315,15 @@
|
||||
<title><filename>do_package</filename></title>
|
||||
|
||||
<para>
|
||||
Analyzes the content of the holding area and splits it into subsets
|
||||
based on available packages and files.
|
||||
Analyzes the content of the holding area
|
||||
<filename>${</filename><link linkend='var-D'><filename>D</filename></link><filename>}</filename>
|
||||
and splits the content into subsets based on available packages
|
||||
and files.
|
||||
This task makes use of the
|
||||
<link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>
|
||||
and
|
||||
<link linkend='var-FILES'><filename>FILES</filename></link>
|
||||
variables.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
@@ -355,7 +439,9 @@
|
||||
<para>
|
||||
Copies a subset of the files installed by the
|
||||
<link linkend='ref-tasks-install'><filename>do_install</filename></link>
|
||||
task into the sysroot to make them available to other recipes.
|
||||
task into the sysroot directory
|
||||
<filename>${</filename><link linkend='var-STAGING_DIR_HOST'><filename>STAGING_DIR_HOST</filename></link><filename>}</filename>
|
||||
to make them available to other recipes.
|
||||
Files that would typically not be needed by other recipes at build
|
||||
time are skipped.
|
||||
Skipped files include files installed into
|
||||
@@ -421,6 +507,36 @@
|
||||
<filename>bitbake -c</filename> command-line option):
|
||||
</para>
|
||||
|
||||
<section id='ref-tasks-checkpkg'>
|
||||
<title><filename>do_checkpkg</filename></title>
|
||||
|
||||
<para>
|
||||
Provides information about the recipe including its upstream
|
||||
version and status.
|
||||
The upstream version and status reveal whether or not a version
|
||||
of the recipe exists upstream, and whether its status is not updated, updated,
|
||||
or unknown.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>checkpkg</filename> task is included as part of the
|
||||
<link linkend='ref-classes-distrodata'><filename>distrodata</filename></link>
|
||||
class.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To build the <filename>checkpkg</filename> task, use the
|
||||
<filename>bitbake</filename> command with the "-c" option and
|
||||
task name:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake core-image-minimal -c checkpkg
|
||||
</literallayout>
|
||||
By default, the results are stored in
|
||||
<link linkend='var-LOG_DIR'><filename>$LOG_DIR</filename></link>
|
||||
(e.g. <filename>$BUILD_DIR/tmp/log</filename>).
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='ref-tasks-checkuri'>
|
||||
<title><filename>do_checkuri</filename></title>
|
||||
|
||||
|
||||
@@ -536,7 +536,7 @@
|
||||
By default, this directory is the same as the <link linkend='var-S'><filename>S</filename></link>
|
||||
directory, which is defined as:
|
||||
<literallayout class='monospaced'>
|
||||
S = "${WORKDIR}/${BP}/"
|
||||
S = "${WORKDIR}/${BP}"
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
@@ -2597,7 +2597,7 @@
|
||||
task.
|
||||
This location defaults to:
|
||||
<literallayout class='monospaced'>
|
||||
${WORKDIR}/image
|
||||
${<link linkend='var-WORKDIR'>WORKDIR</link>}/image
|
||||
</literallayout>
|
||||
</para>
|
||||
</glossdef>
|
||||
@@ -8035,6 +8035,31 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-MULTIMACH_HOST_SYS'><glossterm>MULTIMACH_HOST_SYS</glossterm>
|
||||
<info>
|
||||
MULTIMACH_HOST_SYS[doc] = "Separates files for different machines such that you can build for multiple host machines using the same output directories."
|
||||
</info>
|
||||
<glossdef>
|
||||
<para role="glossdeffirst">
|
||||
<!-- <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
|
||||
Serves the same purpose as
|
||||
<link linkend='var-MULTIMACH_TARGET_SYS'><filename>MULTIMACH_TARGET_SYS</filename></link>,
|
||||
but for the "HOST" system, in situations that involve a
|
||||
"HOST" and a "TARGET" system.
|
||||
See the
|
||||
<link linkend='var-STAGING_DIR_TARGET'><filename>STAGING_DIR_TARGET</filename></link>
|
||||
variable for more information.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The default value of this variable is:
|
||||
<literallayout class='monospaced'>
|
||||
${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}
|
||||
</literallayout>
|
||||
</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-MULTIMACH_TARGET_SYS'><glossterm>MULTIMACH_TARGET_SYS</glossterm>
|
||||
<info>
|
||||
MULTIMACH_TARGET_SYS[doc] = "Separates files for different machines such that you can build for multiple target machines using the same output directories."
|
||||
@@ -8042,10 +8067,33 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
|
||||
<glossdef>
|
||||
<para role="glossdeffirst">
|
||||
<!-- <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
|
||||
Separates files for different machines such that you can build
|
||||
for multiple target machines using the same output directories.
|
||||
See the <link linkend='var-STAMP'><filename>STAMP</filename></link> variable
|
||||
for an example.
|
||||
Uniquely identifies the type of the target system for
|
||||
which packages are being built.
|
||||
This variable allows output for different types of target
|
||||
systems to be put into different subdirectories of the same
|
||||
output directory.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The default value of this variable is:
|
||||
<literallayout class='monospaced'>
|
||||
${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}
|
||||
</literallayout>
|
||||
Some classes (e.g.
|
||||
<link linkend='ref-classes-cross-canadian'><filename>cross-canadian</filename></link>)
|
||||
modify the <filename>MULTIMACH_TARGET_SYS</filename> value.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
See the
|
||||
<link linkend='var-STAMP'><filename>STAMP</filename></link>
|
||||
variable for an example.
|
||||
<link linkend='var-MULTIMACH_HOST_SYS'><filename>MULTIMACH_HOST_SYS</filename></link>
|
||||
is the corresponding variable for the host system in
|
||||
situations that involve a "HOST" and a "TARGET" system.
|
||||
See the
|
||||
<link linkend='var-STAGING_DIR_TARGET'><filename>STAGING_DIR_TARGET</filename></link>
|
||||
variable for more information.
|
||||
</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
@@ -10178,6 +10226,18 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
|
||||
Thus, the <filename>RDEPENDS</filename> variable has the
|
||||
<filename>${PN}-dev</filename> package name as part of the
|
||||
variable.
|
||||
<note>
|
||||
<title>Caution</title>
|
||||
<filename>RDEPENDS_${PN}-dev</filename> includes
|
||||
<filename>${</filename><link linkend='var-PN'><filename>PN</filename></link><filename>}</filename>
|
||||
by default.
|
||||
This default is set in the BitBake configuration file
|
||||
(<filename>meta/conf/bitbake.conf</filename>).
|
||||
Be careful not to accidentally remove
|
||||
<filename>${PN}</filename> when modifying
|
||||
<filename>RDEPENDS_${PN}-dev</filename>.
|
||||
Use the "+=" operator rather than the "=" operator.
|
||||
</note>
|
||||
</para>
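<para>
As an illustrative sketch (the added package name is hypothetical),
appending preserves the default <filename>${PN}</filename> entry,
while a plain assignment would silently drop it:
<literallayout class='monospaced'>
# Safe: appends to the default value, which already contains "${PN}"
RDEPENDS_${PN}-dev += "extra-dev-helper"

# Risky: replaces the default value and removes "${PN}"
#RDEPENDS_${PN}-dev = "extra-dev-helper"
</literallayout>
</para>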
|
||||
|
||||
<para>
|
||||
@@ -10257,30 +10317,6 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-REMOVE_PKG_SUPPORT'><glossterm>REMOVE_PKG_SUPPORT</glossterm>
|
||||
<info>
|
||||
REMOVE_PKG_SUPPORT[doc] = "Allows for removal of supporting packages for images that do not contain a package manager."
|
||||
</info>
|
||||
<glossdef>
|
||||
<para role="glossdeffirst">
|
||||
<!-- <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
|
||||
For images not containing a package manager,
|
||||
<filename>REMOVE_PKG_SUPPORT</filename> allows the removal
|
||||
of supporting packages.
|
||||
Packages that will be uninstalled include:
|
||||
<filename>update-rc.d</filename>,
|
||||
<filename>base-passwd</filename>,
|
||||
<filename>shadow</filename>,
|
||||
<filename>run-postinsts</filename>, and
|
||||
<filename>update-alternatives</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Set the variable to "1" to remove these packages.
|
||||
</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-REQUIRED_DISTRO_FEATURES'><glossterm>REQUIRED_DISTRO_FEATURES</glossterm>
|
||||
<info>
|
||||
REQUIRED_DISTRO_FEATURES[doc] = "When a recipe inherits the distro_features_check class, this variable identifies distribution features that must exist in the current configuration in order for the OpenEmbedded build system to build the recipe."
|
||||
@@ -12283,18 +12319,24 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
|
||||
|
||||
<glossentry id='var-STAGING_DIR_HOST'><glossterm>STAGING_DIR_HOST</glossterm>
|
||||
<info>
|
||||
STAGING_DIR_HOST[doc] = "Specifies the path to the primary sysroot directory for which the target is being built."
|
||||
STAGING_DIR_HOST[doc] = "Specifies the path to the sysroot directory for the system that the component is built to run on."
|
||||
</info>
|
||||
<glossdef>
|
||||
<para role="glossdeffirst">
|
||||
<!-- <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
|
||||
Specifies the path to the primary sysroot directory for
|
||||
which the target is being built.
|
||||
Depending on the type of recipe and the build target, the
|
||||
recipe's value is as follows:
|
||||
Specifies the path to the sysroot directory for the system
|
||||
that the component is built to run on (the system that hosts
|
||||
the component).
|
||||
This sysroot is the one that the recipe's
|
||||
<link linkend='ref-tasks-populate_sysroot'><filename>do_populate_sysroot</filename></link>
|
||||
task copies files into.
|
||||
Depending on the type of recipe and the build target,
|
||||
<filename>STAGING_DIR_HOST</filename> can have the
|
||||
following values:
|
||||
<itemizedlist>
|
||||
<listitem><para>For recipes building for the target
|
||||
machine, the value is "${STAGING_DIR}/${MACHINE}".
|
||||
machine, the value is
|
||||
"${<link linkend='var-STAGING_DIR'>STAGING_DIR</link>}/${<link linkend='var-MACHINE'>MACHINE</link>}".
|
||||
</para></listitem>
|
||||
<listitem><para>For native recipes building
|
||||
for the build host, the value is empty given the
|
||||
@@ -12304,7 +12346,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
|
||||
<listitem><para>For native SDK
|
||||
recipes that build for the SDK
|
||||
(<filename>nativesdk</filename>), the value is
|
||||
"${STAGING_DIR}/${MULTIMACH_HOST_SYS}".
|
||||
"${STAGING_DIR}/${<link linkend='var-MULTIMACH_HOST_SYS'>MULTIMACH_HOST_SYS</link>}".
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
@@ -12313,27 +12355,29 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"

<glossentry id='var-STAGING_DIR_NATIVE'><glossterm>STAGING_DIR_NATIVE</glossterm>
<info>
STAGING_DIR_NATIVE[doc] = "Specifies the path to the sysroot directory for the build host."
STAGING_DIR_NATIVE[doc] = "Specifies the path to the sysroot directory used when building components that run on the build host itself."
</info>
<glossdef>
<para role="glossdeffirst">
<!-- <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
Specifies the path to the sysroot directory for the
build host.
Specifies the path to the sysroot directory used when
building components that run on the build host itself.
</para>
</glossdef>
</glossentry>

<glossentry id='var-STAGING_DIR_TARGET'><glossterm>STAGING_DIR_TARGET</glossterm>
<info>
STAGING_DIR_TARGET[doc] = "Specifies the path to the sysroot directory for the target for which the current recipe is being built."
STAGING_DIR_TARGET[doc] = "Specifies the path to the sysroot used for the system for which the component generates code."
</info>
<glossdef>
<para role="glossdeffirst">
<!-- <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
Specifies the path to the sysroot directory for the
target for which the current recipe is being built.
In most cases, this path is the
Specifies the path to the sysroot used for the system for
which the component generates code.
For components that do not generate code, which is the
majority, <filename>STAGING_DIR_TARGET</filename> is set
to match
<link linkend='var-STAGING_DIR_HOST'><filename>STAGING_DIR_HOST</filename></link>.
</para>

@@ -12344,10 +12388,12 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
Using terminology from GNU, the primary system is referred
to as the "HOST" and the secondary, or different, system is
referred to as the "TARGET".
Thus, the binaries run on the "HOST" system and
Thus, the binaries run on the "HOST" system
and generate binaries for the "TARGET" system.
<filename>STAGING_DIR_TARGET</filename> points to the
sysroot used for the "TARGET" system.
The <filename>STAGING_DIR_HOST</filename> variable points
to the sysroot used for the "HOST" system, while
<filename>STAGING_DIR_TARGET</filename>
points to the sysroot used for the "TARGET" system.
</para>
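<para>
The following sketch is an editor-added illustration, not part of the
original manual text. It shows how recipe Python code might read these
two sysroot paths through the BitBake datastore; the helper name is
hypothetical, while <filename>d.getVar</filename> and
<filename>bb.note</filename> are standard BitBake calls that only exist
inside the build environment:
<literallayout class='monospaced'>
# Illustration only: assumes it runs inside BitBake, where the "bb"
# module is importable and a datastore "d" is passed in by the caller.
import bb

def show_sysroots(d):
    # Sysroot that this recipe's do_populate_sysroot output is staged into
    # (the system the component runs on).
    staging_host = d.getVar('STAGING_DIR_HOST', True)
    # Sysroot for the system the component generates code for; for most
    # recipes this is the same path as STAGING_DIR_HOST.
    staging_target = d.getVar('STAGING_DIR_TARGET', True)
    bb.note("HOST sysroot:   %s" % staging_host)
    bb.note("TARGET sysroot: %s" % staging_target)
</literallayout>
</para>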
</glossdef>
</glossentry>

@@ -754,11 +754,13 @@
section in the BitBake User Manual.
</para></listitem>
<listitem><para>
The
<filename>do_deploy[dirs] = "${DEPLOYDIR} ${B}"</filename>
The <filename>do_deploy[dirs] = "${DEPLOYDIR} ${B}"</filename>
line creates <filename>${DEPLOYDIR}</filename> and
<filename>${B}</filename> before the
<filename>do_deploy</filename> task runs.
<filename>do_deploy</filename> task runs, and also sets
the current working directory of
<filename>do_deploy</filename> to
<filename>${B}</filename>.
For more information, see the
"<ulink url='&YOCTO_DOCS_BB_URL;#variable-flags'>Variable Flags</ulink>"
section in the BitBake User Manual.

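<para>
As an editor-added illustration (not part of the original text), the same
flag can be inspected or adjusted from Python through the BitBake
datastore. The function name and the extra directory are hypothetical;
<filename>d.getVarFlag</filename> and <filename>d.setVarFlag</filename>
are standard datastore calls:
<literallayout class='monospaced'>
# Illustration only: assumes BitBake's datastore "d" is available.
def tweak_deploy_dirs(d):
    # Directories created before do_deploy runs; the last entry listed
    # also becomes the task's current working directory.
    dirs = d.getVarFlag('do_deploy', 'dirs', True)
    # Hypothetical extra staging area appended for this example only.
    d.setVarFlag('do_deploy', 'dirs', '%s ${WORKDIR}/extra-deploy' % dirs)
</literallayout>
</para>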
@@ -246,6 +246,80 @@
</para>
</section>

<section id='checking-for-missing-build-time-dependencies'>
<title>Checking for Missing Build-Time Dependencies</title>

<para>
A recipe might build successfully even though some of its
build-time dependencies are missing from
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>.
Following are the two most common ways in which that can happen:
<itemizedlist>
<listitem><para>
The build-time dependency just happens to already exist in
the staging sysroot
(<link linkend='var-STAGING_DIR_HOST'><filename>STAGING_DIR_HOST</filename></link>)
by the time the recipe is built.
This situation occurs when the build-time dependency is
built earlier during recipe processing.
</para></listitem>
<listitem><para>
The component built by the recipe conditionally enables
functionality depending on whether it can find the
build-time dependency in the staging sysroot.
If the build-time dependency is missing, the corresponding
functionality is disabled.
This condition is known as a "floating dependency".
</para></listitem>
</itemizedlist>
</para>

<para>
Because dealing with the second case is more complex, this
section focuses on the first case.
The
<link linkend='ref-classes-insane'><filename>build-deps</filename></link>
QA check verifies that every library the component linked against is
declared as a build-time dependency.
If that is not the case, the first situation described in the
previous list exists, and <filename>build-deps</filename> reports
a missing build-time dependency.
</para>

<para>
Another, more manual, way to check a recipe for missing build-time
dependencies of the first type is to build with an empty staging
sysroot.
This method can also find missing build-time dependencies
that are not in the form of libraries, which the
<filename>build-deps</filename> QA check is unable to find.
</para>

<para>
An easy way to empty the staging sysroots is to simply remove
<link linkend='var-TMPDIR'><filename>TMPDIR</filename></link>,
which is usually
<filename>${</filename><link linkend='var-BUILDDIR'><filename>BUILDDIR</filename></link><filename>}/tmp</filename>,
as it includes the staging sysroots.
Another, faster method to empty the staging sysroots is to use the
<filename>scripts/wipe-sysroot</filename> script,
which removes just the staging sysroots and keeps everything else
in <filename>TMPDIR</filename>.
<note>
The <filename>scripts/</filename> directory appears in
<filename>PATH</filename> after running the build environment
initialization script (i.e.
<link linkend='structure-core-script'><filename>oe-init-build-env</filename></link>
or
<link linkend='structure-memres-core-script'><filename>oe-init-build-env-memres</filename></link>),
which means you can run
<filename>wipe-sysroot</filename> immediately.
</note>
</para>

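<para>
The short Python sketch below is an editor-added illustration of the
workflow just described. It assumes the build environment has already
been initialized so that <filename>wipe-sysroot</filename> and
<filename>bitbake</filename> are on <filename>PATH</filename>, and the
recipe name used here is only an example:
<literallayout class='monospaced'>
# Illustration only: wipe the staging sysroots, then rebuild one recipe
# from an empty sysroot to expose missing build-time dependencies.
import subprocess

def rebuild_with_empty_sysroot(recipe="strace"):
    # Remove just the staging sysroots, keeping the rest of TMPDIR.
    subprocess.run(["wipe-sysroot"], check=True)
    # Rebuild the recipe; failures here often point at entries that are
    # missing from the recipe's DEPENDS.
    subprocess.run(["bitbake", recipe], check=True)

rebuild_with_empty_sysroot()
</literallayout>
</para>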
</section>


<section id='usingpoky-debugging-dependencies'>
<title>Dependency Graphs</title>

@@ -356,8 +430,8 @@
<section id='recipe-logging-mechanisms'>
<title>Recipe Logging Mechanisms</title>
<para>
Best practices exist while writing recipes that both log build
progress and act on build conditions such as warnings and errors.
The Yocto Project provides several logging functions for producing
debugging output and reporting errors and warnings.
For Python functions, the following logging functions exist.
All of these functions log to
<filename>${T}/log.do_</filename><replaceable>task</replaceable>,
@@ -411,13 +485,13 @@
<filename>bbplain</filename>, <filename>bbnote</filename>,
<filename>bbdebug</filename>, <filename>bbwarn</filename>,
<filename>bberror</filename>, and <filename>bbfatal</filename>.
</para>

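<para>
The fragment below is an editor-added sketch, not part of the original
text, showing the Python-side logging calls in use. It assumes it runs
inside BitBake (for example, called from a recipe's Python task), where
the <filename>bb</filename> module is available:
<literallayout class='monospaced'>
# Illustration only: each call is written to the task's log file in ${T}
# and, depending on severity and log level, echoed to the console.
import bb

def report_progress(d):
    bb.plain("message the user always sees")
    bb.note("informational note for the task log")
    bb.debug(2, "extra detail shown at debug level 2 and above")
    bb.warn("something looks wrong, but the build continues")
    bb.error("an error is logged; unlike bb.fatal it does not abort the task")
    # bb.fatal("unrecoverable problem")  # would raise and stop the task
</literallayout>
</para>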
<para>
For guidance on how logging is handled in both Python and Bash recipes, see the
<filename>logging.bbclass</filename> file in the
The
<link linkend='ref-classes-logging'><filename>logging</filename></link>
class implements these functions.
See that class in the
<filename>meta/classes</filename> folder of the
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>.
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>
for information.
</para>

<section id='logging-with-python'>

@@ -48,7 +48,6 @@ RECIPE_MAINTAINER_pn-at-spi2-atk = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-at-spi2-core = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-atk = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-attr = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-augeas = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-autoconf = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-autogen-native = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-automake = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
@@ -229,7 +228,7 @@ RECIPE_MAINTAINER_pn-glib-networking = "Jussi Kukkonen <jussi.kukkonen@intel.com
|
||||
RECIPE_MAINTAINER_pn-glibc = "Richard Purdie <richard.purdie@linuxfoundation.org>"
|
||||
RECIPE_MAINTAINER_pn-glibc-initial = "Richard Purdie <richard.purdie@linuxfoundation.org>"
|
||||
RECIPE_MAINTAINER_pn-glibc-locale = "Richard Purdie <richard.purdie@linuxfoundation.org>"
|
||||
RECIPE_MAINTAINER_pn-glibc-mrace = "Richard Purdie <richard.purdie@linuxfoundation.org>"
|
||||
RECIPE_MAINTAINER_pn-glibc-mtrace = "Richard Purdie <richard.purdie@linuxfoundation.org>"
|
||||
RECIPE_MAINTAINER_pn-glibc-scripts = "Richard Purdie <richard.purdie@linuxfoundation.org>"
|
||||
RECIPE_MAINTAINER_pn-glproto = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-gmp = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
@@ -237,6 +236,7 @@ RECIPE_MAINTAINER_pn-gnome-common = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-gnome-desktop-testing = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-gnome-desktop3 = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-gnome-doc-utils = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-gnome-themes-standard = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-gnu-config = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-gnu-efi = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-gnupg = "Hongxu Jia <hongxu.jia@windriver.com>"
|
||||
@@ -277,7 +277,7 @@ RECIPE_MAINTAINER_pn-hicolor-icon-theme = "Jussi Kukkonen <jussi.kukkonen@intel.
|
||||
RECIPE_MAINTAINER_pn-hostap-conf = "Maxin B. John <maxin.john@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-hostap-utils = "Maxin B. John <maxin.john@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-hwlatdetect = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-i2ctools = "Maxin B. John <maxin.john@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-i2c-tools = "Maxin B. John <maxin.john@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-icecc-create-env-native = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-icon-naming-utils = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-icu = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
@@ -288,8 +288,8 @@ RECIPE_MAINTAINER_pn-initramfs-framework = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initramfs-live-boot = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initramfs-live-install = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initramfs-live-install-efi = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initramfs-live-install-efi-tests = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initramfs-live-install-tests = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initramfs-live-install-efi-testfs = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initramfs-live-install-testfs = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-initscripts = "Ross Burton <ross.burton@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-inputproto = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-intltool = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
@@ -312,10 +312,10 @@ RECIPE_MAINTAINER_pn-kexec-tools = "Alexander Kanavin <alexander.kanavin@intel.c
|
||||
RECIPE_MAINTAINER_pn-keymaps = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-kmod = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-kmod-native = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-l3afpad = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-lame = "Tanu Kaskinen <tanuk@iki.fi>"
|
||||
RECIPE_MAINTAINER_pn-latencytop = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-ldconfig-native = "Khem Raj <raj.khem@gmail.com>"
|
||||
RECIPE_MAINTAINER_pn-leafpad = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-less = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-liba52 = "Tanu Kaskinen <tanuk@iki.fi>"
|
||||
RECIPE_MAINTAINER_pn-libacpi = "Maxin B. John <maxin.john@intel.com>"
|
||||
@@ -596,7 +596,7 @@ RECIPE_MAINTAINER_pn-pcmciautils = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-perf = "Bruce Ashfield <bruce.ashfield@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-perl = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-perl-native = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-piglit = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-piglit = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-pigz = "Hongxu Jia <hongxu.jia@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-pinentry = "Armin Kuster <akuster808@gmail.com>"
|
||||
RECIPE_MAINTAINER_pn-pixman = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
@@ -624,9 +624,7 @@ RECIPE_MAINTAINER_pn-puzzles = "Jussi Kukkonen <jussi.kukkonen@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-native = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-async = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-dbus = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-distribute = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-docutils = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-git = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-gitdb = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-imaging = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
@@ -635,9 +633,9 @@ RECIPE_MAINTAINER_pn-python-native = "Alejandro Hernandez <alejandro.hernandez@l
|
||||
RECIPE_MAINTAINER_pn-python-nose = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-numpy = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-pexpect = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-ptyprocess = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-pycairo = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-pycurl = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-pygobject = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-pygtk = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-pyrex = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python-scons = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
@@ -647,13 +645,17 @@ RECIPE_MAINTAINER_pn-python-smartpm = "Alejandro Hernandez <alejandro.hernandez@
|
||||
RECIPE_MAINTAINER_pn-python-smmap = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3 = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-async = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-dbus = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-distribute = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-docutils = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-git = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-gitdb = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-mako = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-native = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-nose = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-numpy = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-pip = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-pygobject = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-setuptools = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-python3-smmap = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-qemu = "Aníbal Limón <anibal.limon@linux.intel.com>"
|
||||
@@ -718,6 +720,8 @@ RECIPE_MAINTAINER_pn-syslinux = "Alexander Kanavin <alexander.kanavin@intel.com>
|
||||
RECIPE_MAINTAINER_pn-sysprof = "Alexander Kanavin <alexander.kanavin@intel.com>"
|
||||
RECIPE_MAINTAINER_pn-sysstat = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-systemd = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-systemd-boot = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-systemd-bootchart = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-systemd-compat-units = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-systemd-serialgetty = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-systemd-systemctl-native = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
@@ -730,6 +734,7 @@ RECIPE_MAINTAINER_pn-tar = "Chen Qi <Qi.Chen@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-tcf-agent = "Randy Witt <randy.e.witt@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-tcl = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-tcp-wrappers = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-testexport-tarball = "Richard Purdie <richard.purdie@linuxfoundation.org>"
|
||||
RECIPE_MAINTAINER_pn-texi2html = "Robert Yang <liezhi.yang@windriver.com>"
|
||||
RECIPE_MAINTAINER_pn-texinfo = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
RECIPE_MAINTAINER_pn-texinfo-dummy-native = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
|
||||
|
||||
@@ -70,9 +70,10 @@ https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n"
CONNECTIVITY_CHECK_URIS ?= "https://www.example.com/"

SANITY_TESTED_DISTROS ?= " \
poky-1.7 \n \
poky-1.8 \n \
poky-2.0 \n \
poky-2.1 \n \
poky-2.2 \n \
Ubuntu-14.04 \n \
Ubuntu-14.10 \n \
Ubuntu-15.04 \n \
@@ -80,9 +81,10 @@ SANITY_TESTED_DISTROS ?= " \
Ubuntu-16.04 \n \
Fedora-22 \n \
Fedora-23 \n \
CentOS-7.* \n \
CentOSLinux-7.* \n \
Debian-8.* \n \
openSUSE-project-13.2 \n \
openSUSE-13.2 \n \
SUSELINUX-42.1 \n \
"
#
# OELAYOUT_ABI allows us to notify users when the format of TMPDIR changes in

meta-selftest/lib/oeqa/runtime/selftest.json (normal file, 6 lines)
@@ -0,0 +1,6 @@
{
    "test_install_package": {
        "pkg": "socat",
        "rm": true
    }
}

meta-selftest/lib/oeqa/runtime/selftest.py (normal file, 55 lines)
@@ -0,0 +1,55 @@
import os

from oeqa.oetest import oeRuntimeTest, skipModule
from oeqa.utils.commands import runCmd
from oeqa.utils.decorators import *

class Selftest(oeRuntimeTest):

    @skipUnlessPassed("test_ssh")
    @tag("selftest_package_install")
    def test_install_package(self):
        """
        Summary: Check basic package installation functionality.
        Expected: 1. Before the test socat must be installed using scp.
                  2. After the test socat must be uninstalled using ssh.
                     This can't be checked in this test.
        Product: oe-core
        Author: Mariano Lopez <mariano.lopez@intel.com>
        """

        (status, output) = self.target.run("socat -V")
        self.assertEqual(status, 0, msg="socat is not installed")

    @skipUnlessPassed("test_install_package")
    @tag("selftest_package_install")
    def test_verify_unistall(self):
        """
        Summary: Check basic package installation functionality.
        Expected: 1. test_install_package must uninstall socat.
                     This test is just to verify that.
        Product: oe-core
        Author: Mariano Lopez <mariano.lopez@intel.com>
        """

        (status, output) = self.target.run("socat -V")
        self.assertNotEqual(status, 0, msg="socat is still installed")

    @tag("selftest_sdk")
    def test_sdk(self):

        result = runCmd("env -0")
        sdk_path = search_sdk_path(result.output)
        self.assertTrue(sdk_path, msg="Can't find SDK path")

        tar_cmd = os.path.join(sdk_path, "tar")
        result = runCmd("%s --help" % tar_cmd)

def search_sdk_path(env):
    for line in env.split("\0"):
        (key, _, value) = line.partition("=")
        if key == "PATH":
            for path in value.split(":"):
                if "pokysdk" in path:
                    return path
    return ""

meta-yocto-bsp/lib/oeqa/selftest/systemd_boot.py (normal file, 56 lines)
@@ -0,0 +1,56 @@
from oeqa.selftest.base import oeSelfTest
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
from oeqa.utils.decorators import testcase
import re
import os
import sys
import logging


class Systemdboot(oeSelfTest):

    def _common_setup(self):
        """
        Common setup for test cases: 1445, XXXX
        """

        # Set EFI_PROVIDER = "systemd-boot" and MACHINE = "genericx86-64" in conf/local.conf
        features = 'EFI_PROVIDER = "systemd-boot"\n'
        features += 'MACHINE = "genericx86-64"'
        self.append_config(features)

    def _common_build(self):
        """
        Common build for test cases: 1445, XXXX
        """

        # Build a genericx86-64/efi systemd-boot image
        bitbake('mtools-native core-image-minimal')


    @testcase(1445)
    def test_efi_systemdboot_images_can_be_built(self):
        """
        Summary: Check if systemd-boot images can be built correctly
        Expected: 1. File systemd-boot.efi should be available in $poky/build/tmp/deploy/images/genericx86-64
                  2. 'systemd-boot' can be built correctly
        Product: oe-core
        Author: Jose Perez Carranza <jose.perez.carranza@intel.com>
        AutomatedBy: Jose Perez Carranza <jose.perez.carranza@intel.com>
        """

        # We'd use DEPLOY_DIR_IMAGE here, except that we need its value for
        # MACHINE="genericx86-64", which is probably not the one configured
        systemdbootfile = os.path.join(get_bb_var('DEPLOY_DIR'), 'images', 'genericx86-64', 'systemd-bootx64.efi')

        self._common_setup()

        # Ensure we're actually testing that this gets built and not that
        # it was around from an earlier build
        bitbake('-c cleansstate systemd-boot')
        runCmd('rm -f %s' % systemdbootfile)

        self._common_build()

        found = os.path.isfile(systemdbootfile)
        self.assertTrue(found, 'Systemd-Boot file %s not found' % systemdbootfile)

@@ -1,69 +0,0 @@
|
||||
#
|
||||
# Automatically generated make config: don't edit
|
||||
# Fri Nov 23 15:49:33 2007
|
||||
#
|
||||
# TARGET_alpha is not set
|
||||
# TARGET_arm is not set
|
||||
# TARGET_bfin is not set
|
||||
# TARGET_cris is not set
|
||||
# TARGET_e1 is not set
|
||||
# TARGET_frv is not set
|
||||
# TARGET_h8300 is not set
|
||||
# TARGET_hppa is not set
|
||||
# TARGET_i386=y
|
||||
# TARGET_i960 is not set
|
||||
# TARGET_ia64 is not set
|
||||
# TARGET_m68k is not set
|
||||
# TARGET_microblaze is not set
|
||||
# TARGET_mips is not set
|
||||
# TARGET_nios is not set
|
||||
# TARGET_nios2 is not set
|
||||
# TARGET_powerpc is not set
|
||||
# TARGET_sh is not set
|
||||
# TARGET_sh64 is not set
|
||||
# TARGET_sparc is not set
|
||||
# TARGET_v850 is not set
|
||||
# TARGET_vax is not set
|
||||
TARGET_x86_64=y
|
||||
|
||||
#
|
||||
# Target Architecture Features and Options
|
||||
#
|
||||
TARGET_ARCH="x86_64"
|
||||
FORCE_OPTIONS_FOR_ARCH=y
|
||||
# CONFIG_GENERIC_386 is not set
|
||||
# CONFIG_386 is not set
|
||||
# CONFIG_486 is not set
|
||||
# CONFIG_586 is not set
|
||||
# CONFIG_586MMX is not set
|
||||
# CONFIG_686 is not set
|
||||
# CONFIG_PENTIUMII is not set
|
||||
# CONFIG_PENTIUMIII is not set
|
||||
# CONFIG_PENTIUM4 is not set
|
||||
# CONFIG_K6 is not set
|
||||
# CONFIG_K7 is not set
|
||||
# CONFIG_ELAN is not set
|
||||
# CONFIG_CRUSOE is not set
|
||||
# CONFIG_WINCHIPC6 is not set
|
||||
# CONFIG_WINCHIP2 is not set
|
||||
# CONFIG_CYRIXIII is not set
|
||||
# CONFIG_NEHEMIAH is not set
|
||||
TARGET_SUBARCH=""
|
||||
|
||||
#
|
||||
# Using ELF file format
|
||||
#
|
||||
ARCH_LITTLE_ENDIAN=y
|
||||
|
||||
#
|
||||
# Using Little Endian
|
||||
#
|
||||
ARCH_HAS_MMU=y
|
||||
ARCH_USE_MMU=y
|
||||
UCLIBC_HAS_FLOATS=y
|
||||
UCLIBC_HAS_FPU=y
|
||||
DO_C99_MATH=y
|
||||
KERNEL_HEADERS="/usr/include"
|
||||
HAVE_DOT_CONFIG=y
|
||||
|
||||
UCLIBC_HAS_FENV=y
|
||||
@@ -1,69 +0,0 @@
|
||||
#
|
||||
# Automatically generated make config: don't edit
|
||||
# Fri Nov 23 15:49:33 2007
|
||||
#
|
||||
# TARGET_alpha is not set
|
||||
# TARGET_arm is not set
|
||||
# TARGET_bfin is not set
|
||||
# TARGET_cris is not set
|
||||
# TARGET_e1 is not set
|
||||
# TARGET_frv is not set
|
||||
# TARGET_h8300 is not set
|
||||
# TARGET_hppa is not set
|
||||
TARGET_i386=y
|
||||
# TARGET_i960 is not set
|
||||
# TARGET_ia64 is not set
|
||||
# TARGET_m68k is not set
|
||||
# TARGET_microblaze is not set
|
||||
# TARGET_mips is not set
|
||||
# TARGET_nios is not set
|
||||
# TARGET_nios2 is not set
|
||||
# TARGET_powerpc is not set
|
||||
# TARGET_sh is not set
|
||||
# TARGET_sh64 is not set
|
||||
# TARGET_sparc is not set
|
||||
# TARGET_v850 is not set
|
||||
# TARGET_vax is not set
|
||||
# TARGET_x86_64 is not set
|
||||
|
||||
#
|
||||
# Target Architecture Features and Options
|
||||
#
|
||||
TARGET_ARCH="i386"
|
||||
FORCE_OPTIONS_FOR_ARCH=y
|
||||
CONFIG_GENERIC_386=y
|
||||
# CONFIG_386 is not set
|
||||
# CONFIG_486 is not set
|
||||
# CONFIG_586 is not set
|
||||
# CONFIG_586MMX is not set
|
||||
# CONFIG_686 is not set
|
||||
# CONFIG_PENTIUMII is not set
|
||||
# CONFIG_PENTIUMIII is not set
|
||||
# CONFIG_PENTIUM4 is not set
|
||||
# CONFIG_K6 is not set
|
||||
# CONFIG_K7 is not set
|
||||
# CONFIG_ELAN is not set
|
||||
# CONFIG_CRUSOE is not set
|
||||
# CONFIG_WINCHIPC6 is not set
|
||||
# CONFIG_WINCHIP2 is not set
|
||||
# CONFIG_CYRIXIII is not set
|
||||
# CONFIG_NEHEMIAH is not set
|
||||
TARGET_SUBARCH=""
|
||||
|
||||
#
|
||||
# Using ELF file format
|
||||
#
|
||||
ARCH_LITTLE_ENDIAN=y
|
||||
|
||||
#
|
||||
# Using Little Endian
|
||||
#
|
||||
ARCH_HAS_MMU=y
|
||||
ARCH_USE_MMU=y
|
||||
UCLIBC_HAS_FLOATS=y
|
||||
UCLIBC_HAS_FPU=y
|
||||
DO_C99_MATH=y
|
||||
KERNEL_HEADERS="/usr/include"
|
||||
HAVE_DOT_CONFIG=y
|
||||
|
||||
UCLIBC_HAS_FENV=y
|
||||
@@ -1 +0,0 @@
FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"