Compare commits: 1.2_M3.fin...edison
569 commits
README
@@ -20,10 +20,6 @@ The Yocto Project has extensive documentation about the system including a
reference manual which can be found at:
http://yoctoproject.org/community/documentation

OpenEmbedded-Core is a layer containing the core metadata for current versions
of OpenEmbedded. It is distro-less (can build a functional image with
DISTRO = "") and contains only emulated machine support.

For information about OpenEmbedded, see the OpenEmbedded website:
For information about OpenEmbedded see their website:
http://www.openembedded.org/
@@ -40,7 +40,7 @@ from bb import cooker
from bb import ui
from bb import server

__version__ = "1.15.1"
__version__ = "1.13.3"
logger = logging.getLogger("BitBake")

@@ -165,11 +165,6 @@ Default BBFILES are the .bb files in the current directory.""")
parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
action = "store_true", dest = "revisions_changed", default = False)

parser.add_option("", "--server-only", help = "Run bitbake without UI, the frontend can connect with bitbake server itself",
action = "store_true", dest = "server_only", default = False)

parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to",
action = "store", dest = "bind", default = False)
options, args = parser.parse_args(sys.argv)

configuration = BBConfiguration(options)
@@ -191,15 +186,6 @@ Default BBFILES are the .bb files in the current directory.""")
sys.exit("FATAL: Invalid server type '%s' specified.\n"
"Valid interfaces: xmlrpc, process [default], none." % servertype)

if configuration.server_only:
if configuration.servertype != "xmlrpc":
sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n")
if not configuration.bind:
sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n")

if configuration.bind and configuration.servertype != "xmlrpc":
sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n")

# Save a logfile for cooker into the current working directory. When the
# server is daemonized this logfile will be truncated.
cooker_logfile = os.path.join(os.getcwd(), "cooker.log")
@@ -220,11 +206,8 @@ Default BBFILES are the .bb files in the current directory.""")
bb.utils.clean_environment()

server = server.BitBakeServer()
if configuration.bind:
server.initServer((configuration.bind, 0))
else:
server.initServer()

server.initServer()
idle = server.getServerIdleCB()

cooker = bb.cooker.BBCooker(configuration, idle, initialenv)
@@ -239,17 +222,14 @@ Default BBFILES are the .bb files in the current directory.""")

logger.removeHandler(handler)

if not configuration.server_only:
# Setup a connection to the server (cooker)
server_connection = server.establishConnection()
# Setup a connection to the server (cooker)
server_connection = server.establishConnection()

try:
return server.launchUI(ui_main, server_connection.connection, server_connection.events)
finally:
bb.event.ui_queue = []
server_connection.terminate()
else:
print("server address: %s, server port: %s" % (server.serverinfo.host, server.serverinfo.port))
try:
return server.launchUI(ui_main, server_connection.connection, server_connection.events)
finally:
bb.event.ui_queue = []
server_connection.terminate()

return 1
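The hunk above enforces a pair of option combinations: --server-only needs the xmlrpc server type plus a -B/--bind address, and -B alone also needs xmlrpc. Below is a minimal, standalone sketch of that cross-option validation in plain optparse; the option names mirror the hunk, but the parser and the servertype value are illustrative assumptions, not bitbake's actual code.

```python
# Illustrative sketch only: mirrors the --server-only / -B / servertype checks
# shown above, not bitbake's real option handling.
import optparse
import sys

parser = optparse.OptionParser()
parser.add_option("", "--server-only", action="store_true", dest="server_only", default=False)
parser.add_option("-B", "--bind", action="store", dest="bind", default=False)

def validate(options, servertype):
    # servertype would normally come from the configuration; the value here is assumed.
    if options.server_only:
        if servertype != "xmlrpc":
            sys.exit("FATAL: '--server-only' requires the xmlrpc servertype.")
        if not options.bind:
            sys.exit("FATAL: '--server-only' requires a name/address to bind to (-B).")
    if options.bind and servertype != "xmlrpc":
        sys.exit("FATAL: '-B'/'--bind' requires the xmlrpc servertype.")

options, args = parser.parse_args(["--server-only", "-B", "localhost"])
validate(options, "xmlrpc")   # passes; change servertype to "process" to see the fatal exit
```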
@@ -4,15 +4,10 @@
# displaying useful information, or acting against them.
# See the help output for details on available commands.

# Copyright (C) 2011 Mentor Graphics Corporation
# Copyright (C) 2012 Intel Corporation

import cmd
import logging
import os
import sys
import fnmatch
from collections import defaultdict

bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
@@ -23,7 +18,6 @@ import bb.cooker
import bb.providers
import bb.utils
from bb.cooker import state
import bb.fetch2

logger = logging.getLogger('BitBake')
@@ -42,8 +36,6 @@ def main(args):

cmds = Commands(initialenv)
if args:
# Allow user to specify e.g. show-layers instead of show_layers
args = [args[0].replace('-', '_')] + args[1:]
cmds.onecmd(' '.join(args))
else:
cmds.do_help('')
@@ -61,7 +53,6 @@ class Commands(cmd.Cmd):
self.config_data = self.cooker.configuration.data
bb.providers.logger.setLevel(logging.ERROR)
self.cooker_data = None
self.bblayers = (self.config_data.getVar('BBLAYERS', True) or "").split()

def register_idle_function(self, function, data):
pass
@@ -97,14 +88,14 @@ class Commands(cmd.Cmd):
"""display general help or help on a specified command"""
if topic:
sys.stdout.write('%s: ' % topic)
cmd.Cmd.do_help(self, topic.replace('-', '_'))
cmd.Cmd.do_help(self,topic)
else:
sys.stdout.write("usage: bitbake-layers <command> [arguments]\n\n")
sys.stdout.write("Available commands:\n")
procnames = self.get_names()
for procname in procnames:
if procname[:3] == 'do_':
sys.stdout.write("  %s\n" % procname[3:].replace('_', '-'))
sys.stdout.write("  %s\n" % procname[3:])
doc = getattr(self, procname).__doc__
if doc:
sys.stdout.write("    %s\n" % doc.splitlines()[0])
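The do_help() change above maps between dashed command names typed by the user and do_* method names that use underscores. A tiny self-contained cmd.Cmd sketch of that mapping, with a made-up command name:

```python
import cmd

class Commands(cmd.Cmd):
    # Toy shell: only the dash <-> underscore mapping is the point here.
    def do_show_layers(self, args):
        """show-layers: list configured layers"""
        print("(layer listing would go here)")

    def default(self, line):
        # Let users type "show-layers" even though the handler is do_show_layers().
        name, _, rest = line.partition(' ')
        handler = getattr(self, 'do_' + name.replace('-', '_'), None)
        if handler is not None:
            return handler(rest)
        return cmd.Cmd.default(self, line)

Commands().onecmd('show-layers')
```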
@@ -115,227 +106,41 @@ class Commands(cmd.Cmd):
logger.plain('')
logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
logger.plain('=' * 74)
for layerdir in self.bblayers:
layername = self.get_layer_name(layerdir)
layerdirs = str(self.config_data.getVar('BBLAYERS', True)).split()
for layerdir in layerdirs:
layername = '?'
layerpri = 0
for layer, _, regex, pri in self.cooker.status.bbfile_config_priorities:
if regex.match(os.path.join(layerdir, 'test')):
layername = layer
layerpri = pri
break

logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri))

def version_str(self, pe, pv, pr = None):
verstr = "%s" % pv
if pr:
verstr = "%s-%s" % (verstr, pr)
if pe:
verstr = "%s:%s" % (pe, verstr)
return verstr
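For reference, the version_str() helper above simply glues PE, PV and PR together. A couple of worked examples (the values are made up):

```python
def version_str(pe, pv, pr=None):
    # Same composition as the method above: [PE:]PV[-PR]
    verstr = "%s" % pv
    if pr:
        verstr = "%s-%s" % (verstr, pr)
    if pe:
        verstr = "%s:%s" % (pe, verstr)
    return verstr

print(version_str(None, "1.0"))        # 1.0
print(version_str(None, "1.0", "r3"))  # 1.0-r3
print(version_str("2", "1.0", "r3"))   # 2:1.0-r3
```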
def do_show_overlayed(self, args):
"""list overlayed recipes (where the same recipe exists in another layer that has a higher layer priority)
"""list overlayed recipes (where there is a recipe in another layer that has a higher layer priority)

usage: show-overlayed [-f] [-s]
usage: show_overlayed

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Note that skipped recipes that
are overlayed will also be listed, with a " (skipped)" suffix.

Options:
-f instead of the default formatting, list filenames of higher priority
recipes with the ones they overlay indented underneath
-s only list overlayed recipes where the version is the same
Highest priority recipes are listed with the recipes they overlay as subitems.
"""
self.check_prepare_cooker()

show_filenames = False
show_same_ver_only = False
for arg in args.split():
if arg == '-f':
show_filenames = True
elif arg == '-s':
show_same_ver_only = True
else:
sys.stderr.write("show-overlayed: invalid option %s\n" % arg)
self.do_help('')
return

items_listed = self.list_recipes('Overlayed recipes', None, True, show_same_ver_only, show_filenames, True)

# Check for overlayed .bbclass files
classes = defaultdict(list)
for layerdir in self.bblayers:
classdir = os.path.join(layerdir, 'classes')
if os.path.exists(classdir):
for classfile in os.listdir(classdir):
if os.path.splitext(classfile)[1] == '.bbclass':
classes[classfile].append(classdir)

# Locating classes and other files is a bit more complicated than recipes -
# layer priority is not a factor; instead BitBake uses the first matching
# file in BBPATH, which is manipulated directly by each layer's
# conf/layer.conf in turn, thus the order of layers in bblayers.conf is a
# factor - however, each layer.conf is free to either prepend or append to
# BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might
# not be exactly the order present in bblayers.conf either.
bbpath = str(self.config_data.getVar('BBPATH', True))
overlayed_class_found = False
for (classfile, classdirs) in classes.items():
if len(classdirs) > 1:
if not overlayed_class_found:
logger.plain('=== Overlayed classes ===')
overlayed_class_found = True

mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile))
if show_filenames:
logger.plain('%s' % mainfile)
else:
# We effectively have to guess the layer here
logger.plain('%s:' % classfile)
mainlayername = '?'
for layerdir in self.bblayers:
classdir = os.path.join(layerdir, 'classes')
if mainfile.startswith(classdir):
mainlayername = self.get_layer_name(layerdir)
logger.plain('  %s' % mainlayername)
for classdir in classdirs:
fullpath = os.path.join(classdir, classfile)
if fullpath != mainfile:
if show_filenames:
print('  %s' % fullpath)
else:
print('  %s' % self.get_layer_name(os.path.dirname(classdir)))

if overlayed_class_found:
items_listed = True;

if not items_listed:
logger.note('No overlayed files found')
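The comment block above is the key point of the class handling: .bbclass files are resolved by the first match along BBPATH, not by layer priority. A simplified stand-in for that first-match lookup (bb.utils.which does the real work; the layer paths here are invented for illustration):

```python
import os

def find_bbclass(bbpath, classfile):
    # First match along BBPATH wins, as the comment above describes.
    for entry in bbpath.split(':'):
        candidate = os.path.join(entry, 'classes', classfile)
        if os.path.exists(candidate):
            return candidate
    return None

# Hypothetical layer layout: whichever directory comes first in BBPATH provides the class.
bbpath = '/srv/layers/meta-custom:/srv/layers/openembedded-core/meta'
print(find_bbclass(bbpath, 'autotools.bbclass'))
```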
def do_show_recipes(self, args):
"""list available recipes, showing the layer they are provided by

usage: show-recipes [-f] [-m] [pnspec]

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Optionally you may specify
pnspec to match a specified recipe name (supports wildcards). Note that
skipped recipes will also be listed, with a " (skipped)" suffix.

Options:
-f instead of the default formatting, list filenames of higher priority
recipes with other available recipes indented underneath
-m only list where multiple recipes (in the same layer or different
layers) exist for the same recipe name
"""
self.check_prepare_cooker()

show_filenames = False
show_multi_provider_only = False
pnspec = None
title = 'Available recipes:'
for arg in args.split():
if arg == '-f':
show_filenames = True
elif arg == '-m':
show_multi_provider_only = True
elif not arg.startswith('-'):
pnspec = arg
title = 'Available recipes matching %s:' % pnspec
else:
sys.stderr.write("show-recipes: invalid option %s\n" % arg)
self.do_help('')
return
self.list_recipes(title, pnspec, False, False, show_filenames, show_multi_provider_only)
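The pnspec argument above is matched with shell-style wildcards; a one-liner showing what fnmatch does with it (the recipe names are examples only):

```python
import fnmatch

recipes = ['busybox', 'gcc', 'gcc-cross', 'glibc']
print([p for p in recipes if fnmatch.fnmatch(p, 'gcc*')])   # ['gcc', 'gcc-cross']
```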
def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only):
pkg_pn = self.cooker.status.pkg_pn
(latest_versions, preferred_versions) = bb.providers.findProviders(self.cooker.configuration.data, self.cooker.status, pkg_pn)
allproviders = bb.providers.allProviders(self.cooker.status)

# Ensure we list skipped recipes
# We are largely guessing about PN, PV and the preferred version here,
# but we have no choice since skipped recipes are not fully parsed
skiplist = self.cooker.skiplist.keys()
skiplist.sort( key=lambda fileitem: self.cooker.calc_bbfile_priority(fileitem) )
skiplist.reverse()
for fn in skiplist:
recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_')
p = recipe_parts[0]
if len(recipe_parts) > 1:
ver = (None, recipe_parts[1], None)
else:
ver = (None, 'unknown', None)
allproviders[p].append((ver, fn))
if not p in pkg_pn:
pkg_pn[p] = 'dummy'
preferred_versions[p] = (ver, fn)

def print_item(f, pn, ver, layer, ispref):
if f in skiplist:
skipped = ' (skipped)'
else:
skipped = ''
if show_filenames:
if ispref:
logger.plain("%s%s", f, skipped)
else:
logger.plain("  %s%s", f, skipped)
else:
if ispref:
logger.plain("%s:", pn)
logger.plain("  %s %s%s", layer.ljust(20), ver, skipped)

preffiles = []
items_listed = False
for p in sorted(pkg_pn):
if pnspec:
if not fnmatch.fnmatch(p, pnspec):
continue

if len(allproviders[p]) > 1 or not show_multi_provider_only:
pref = preferred_versions[p]
preffile = bb.cache.Cache.virtualfn2realfn(pref[1])[0]
if preffile not in preffiles:
preflayer = self.get_file_layer(preffile)
multilayer = False
same_ver = True
provs = []
for prov in allproviders[p]:
provfile = bb.cache.Cache.virtualfn2realfn(prov[1])[0]
provlayer = self.get_file_layer(provfile)
provs.append((provfile, provlayer, prov[0]))
if provlayer != preflayer:
multilayer = True
if prov[0] != pref[0]:
same_ver = False

if (multilayer or not show_overlayed_only) and (same_ver or not show_same_ver_only):
if not items_listed:
logger.plain('=== %s ===' % title)
items_listed = True
print_item(preffile, p, self.version_str(pref[0][0], pref[0][1]), preflayer, True)
for (provfile, provlayer, provver) in provs:
if provfile != preffile:
print_item(provfile, p, self.version_str(provver[0], provver[1]), provlayer, False)
# Ensure we don't show two entries for BBCLASSEXTENDed recipes
preffiles.append(preffile)

return items_listed

if self.cooker.overlayed:
logger.plain('Overlayed recipes:')
for f in self.cooker.overlayed.iterkeys():
logger.plain('%s' % f)
for of in self.cooker.overlayed[f]:
logger.plain('  %s' % of)
else:
logger.plain('No overlayed recipes found')
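list_recipes() above admits it is guessing PN and PV for skipped recipes from the file name alone. A small sketch of that guess, using invented paths:

```python
import os

def guess_pn_pv(fn):
    # "name_version.bb" -> (name, version); no underscore -> version unknown,
    # the same heuristic the skiplist handling above applies.
    parts = os.path.splitext(os.path.basename(fn))[0].split('_')
    return (parts[0], parts[1] if len(parts) > 1 else 'unknown')

print(guess_pn_pv('/srv/layers/meta/recipes-core/busybox/busybox_1.19.3.bb'))  # ('busybox', '1.19.3')
print(guess_pn_pv('/srv/layers/meta/recipes-core/base-files/base-files.bb'))   # ('base-files', 'unknown')
```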
def do_flatten(self, args):
"""flattens layer configuration into a separate output directory.

usage: flatten [layer1 layer2 [layer3]...] <outputdir>
usage: flatten <outputdir>

Takes the specified layers (or all layers in the current layer
configuration if none are specified) and builds a "flattened" directory
Takes the current layer configuration and builds a "flattened" directory
containing the contents of all layers, with any overlayed recipes removed
and bbappends appended to the corresponding recipes. Note that some manual
cleanup may still be necessary afterwards, in particular:
@@ -343,61 +148,21 @@ cleanup may still be necessary afterwards, in particular:
* where non-recipe files (such as patches) are overwritten (the flatten
command will show a warning for these)
* where anything beyond the normal layer setup has been added to
layer.conf (only the lowest priority number layer's layer.conf is used)
layer.conf (only the lowest priority layer's layer.conf is used)
* overridden/appended items from bbappends will need to be tidied up
* when the flattened layers do not have the same directory structure (the
flatten command should show a warning when this will cause a problem)

Warning: if you flatten several layers where another layer is intended to
be used "inbetween" them (in layer priority order) such that recipes /
bbappends in the layers interact, and then attempt to use the new output
layer together with that other layer, you may no longer get the same
build results (as the layer priority order has effectively changed).
"""
arglist = args.split()
if len(arglist) < 1:
if len(arglist) != 1:
logger.error('Please specify an output directory')
self.do_help('flatten')
return

if len(arglist) == 2:
logger.error('If you specify layers to flatten you must specify at least two')
self.do_help('flatten')
return

outputdir = arglist[-1]
if os.path.exists(outputdir) and os.listdir(outputdir):
logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
if os.path.exists(arglist[0]) and os.listdir(arglist[0]):
logger.error('Directory %s exists and is non-empty, please clear it out first' % arglist[0])
return

self.check_prepare_cooker()
layers = self.bblayers
if len(arglist) > 2:
layernames = arglist[:-1]
found_layernames = []
found_layerdirs = []
for layerdir in layers:
layername = self.get_layer_name(layerdir)
if layername in layernames:
found_layerdirs.append(layerdir)
found_layernames.append(layername)

for layername in layernames:
if not layername in found_layernames:
logger.error('Unable to find layer %s in current configuration, please run "%s show-layers" to list configured layers' % (layername, os.path.basename(sys.argv[0])))
return
layers = found_layerdirs
else:
layernames = []

# Ensure a specified path matches our list of layers
def layer_path_match(path):
for layerdir in layers:
if path.startswith(os.path.join(layerdir, '')):
return layerdir
return None

appended_recipes = []
layers = (self.config_data.getVar('BBLAYERS', True) or "").split()
for layer in layers:
overlayed = []
for f in self.cooker.overlayed.iterkeys():
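The flatten loop re-roots each layer file under the output directory by stripping the layer prefix, as the fdest lines in the next hunk show. A standalone sketch of that path computation (the directories are made up):

```python
import os

def flattened_dest(outputdir, layerdir, filepath):
    # Mirrors the fdest computation below: drop the layer prefix, re-root under outputdir.
    rel = filepath[len(layerdir):]
    return os.path.normpath(os.sep.join([outputdir, rel]))

print(flattened_dest('/tmp/flat', '/srv/layers/meta-custom',
                     '/srv/layers/meta-custom/recipes-core/images/my-image.bb'))
# /tmp/flat/recipes-core/images/my-image.bb
```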
@@ -415,7 +180,7 @@ build results (as the layer priority order has effectively changed).
ext = os.path.splitext(f1)[1]
if ext != '.bbappend':
fdest = f1full[len(layer):]
fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
fdest = os.path.normpath(os.sep.join([arglist[0],fdest]))
bb.utils.mkdirhier(os.path.dirname(fdest))
if os.path.exists(fdest):
if f1 == 'layer.conf' and root.endswith('/conf'):
@@ -430,84 +195,25 @@ build results (as the layer priority order has effectively changed).
if appends:
logger.plain('  Applying appends to %s' % fdest )
for appendname in appends:
if layer_path_match(appendname):
self.apply_append(appendname, fdest)
appended_recipes.append(f1)
self.apply_append(appendname, fdest)

# Take care of when some layers are excluded and yet we have included bbappends for those recipes
for recipename in self.cooker_data.appends.iterkeys():
if recipename not in appended_recipes:
appends = self.cooker_data.appends[recipename]
first_append = None
for appendname in appends:
layer = layer_path_match(appendname)
if layer:
if first_append:
self.apply_append(appendname, first_append)
else:
fdest = appendname[len(layer):]
fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
bb.utils.mkdirhier(os.path.dirname(fdest))
bb.utils.copyfile(appendname, fdest)
first_append = fdest

# Get the regex for the first layer in our list (which is where the conf/layer.conf file will
# have come from)
first_regex = None
layerdir = layers[0]
for layername, pattern, regex, _ in self.cooker.status.bbfile_config_priorities:
if regex.match(os.path.join(layerdir, 'test')):
first_regex = regex
break

if first_regex:
# Find the BBFILES entries that match (which will have come from this conf/layer.conf file)
bbfiles = str(self.config_data.getVar('BBFILES', True)).split()
bbfiles_layer = []
for item in bbfiles:
if first_regex.match(item):
newpath = os.path.join(outputdir, item[len(layerdir)+1:])
bbfiles_layer.append(newpath)

if bbfiles_layer:
# Check that all important layer files match BBFILES
for root, dirs, files in os.walk(outputdir):
for f1 in files:
ext = os.path.splitext(f1)[1]
if ext in ['.bb', '.bbappend']:
f1full = os.sep.join([root, f1])
entry_found = False
for item in bbfiles_layer:
if fnmatch.fnmatch(f1full, item):
entry_found = True
break
if not entry_found:
logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)

def get_file_layer(self, filename):
def get_append_layer(self, appendname):
for layer, _, regex, _ in self.cooker.status.bbfile_config_priorities:
if regex.match(filename):
for layerdir in self.bblayers:
if regex.match(os.path.join(layerdir, 'test')):
return self.get_layer_name(layerdir)
if regex.match(appendname):
return layer
return "?"

def get_layer_name(self, layerdir):
return os.path.basename(layerdir.rstrip(os.sep))

def apply_append(self, appendname, recipename):
appendfile = open(appendname, 'r')
recipefile = open(recipename, 'a')
recipefile.write('\n')
recipefile.write('##### bbappended from %s #####\n' % self.get_file_layer(appendname))
recipefile.write('##### bbappended from %s #####\n' % self.get_append_layer(appendname))
recipefile.writelines(appendfile.readlines())
recipefile.close()
appendfile.close()

def do_show_appends(self, args):
"""list bbappend files and recipe files they apply to

usage: show-appends
usage: show_appends

Recipes are listed with the bbappends that apply to them as subitems.
"""
@@ -10,39 +10,37 @@ import prserv.serv

__version__="1.0.0"

PRHOST_DEFAULT='0.0.0.0'
PRHOST_DEFAULT=''
PRPORT_DEFAULT=8585

def main():
parser = optparse.OptionParser(
version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
usage = "%prog < --start | --stop > [options]")
usage = "%prog [options]")

parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
dest="dbfile", type="string", default="prserv.sqlite3")
parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
parser.add_option("-f", "--file", help="database filename(default prserv.db)", action="store",
dest="dbfile", type="string", default="prserv.db")
parser.add_option("-l", "--log", help="log filename(default prserv.log)", action="store",
dest="logfile", type="string", default="prserv.log")
parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
action = "store", type="string", dest="loglevel", default = "INFO")
action = "store", type="string", dest="loglevel", default = "WARNING")
parser.add_option("--start", help="start daemon",
action="store_true", dest="start")
action="store_true", dest="start", default="True")
parser.add_option("--stop", help="stop daemon",
action="store_true", dest="stop")
action="store_false", dest="start")
parser.add_option("--host", help="ip address to bind", action="store",
dest="host", type="string", default=PRHOST_DEFAULT)
parser.add_option("--port", help="port number(default: 8585)", action="store",
parser.add_option("--port", help="port number(default 8585)", action="store",
dest="port", type="int", default=PRPORT_DEFAULT)

options, args = parser.parse_args(sys.argv)

prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)

if options.start:
ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
elif options.stop:
ret=prserv.serv.stop_daemon(options.host, options.port)
prserv.serv.start_daemon(options)
else:
ret=parser.print_help()
return ret
prserv.serv.stop_daemon()

if __name__ == "__main__":
try:
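In the variant above that keeps only one destination, --start and --stop are two flags writing to the same dest, so whichever appears decides the action. A minimal optparse sketch of that mechanism (the daemon calls themselves are left out):

```python
import optparse

parser = optparse.OptionParser()
parser.add_option("--start", action="store_true", dest="start", help="start daemon")
parser.add_option("--stop", action="store_false", dest="start", help="stop daemon")

options, _ = parser.parse_args(["--start"])
print(options.start)   # True  -> would start the daemon
options, _ = parser.parse_args(["--stop"])
print(options.start)   # False -> would stop it
```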
@@ -91,7 +91,7 @@ def register_idle_function(self, function, data):
cooker = bb.cooker.BBCooker(config, register_idle_function, initialenv)
config_data = cooker.configuration.data
cooker.status = config_data
cooker.handleCollections(config_data.getVar("BBFILE_COLLECTIONS", 1))
cooker.handleCollections(bb.data.getVar("BBFILE_COLLECTIONS", config_data, 1))

fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
buildfile = cooker.matchFile(fn)
@@ -108,9 +108,9 @@ if taskname.endswith("_setscene"):
if hashdata:
bb.parse.siggen.set_taskdata(hashdata["hashes"], hashdata["deps"])
for h in hashdata["hashes"]:
the_data.setVar("BBHASH_%s" % h, hashdata["hashes"][h])
bb.data.setVar("BBHASH_%s" % h, hashdata["hashes"][h], the_data)
for h in hashdata["deps"]:
the_data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h])
bb.data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h], the_data)

ret = 0
if dryrun != "True":

@@ -462,7 +462,7 @@ def main():
state_group = 2

for key in bb.data.keys(documentation):
data = documentation.getVarFlag(key, "doc")
data = bb.data.getVarFlag(key, "doc", documentation)
if not data:
continue
@@ -52,8 +52,8 @@ syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue
syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
syn region bbVarPyValue start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python
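The syntax rules above revolve around one alternation of assignment operators. The same alternation expressed as a Python regex, with the longer '??=' tried before '?=' (as in the variant of the rule that includes it):

```python
import re

ASSIGN_OP = re.compile(r'(:=|\+=|=\+|\.=|=\.|\?\?=|\?=|=)')

for line in ['FOO = "bar"', 'FOO ?= "bar"', 'FOO ??= "bar"', 'FOO += "bar"']:
    print(line, '->', ASSIGN_OP.search(line).group(1))
```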
@@ -186,7 +186,7 @@ include</literal> directive.</para>
<title>Defining Python functions into the global Python namespace</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para><screen>def get_depends(bb, d):
if d.getVar('SOMECONDITION', True):
if bb.data.getVar('SOMECONDITION', d, True):
return "dependencywithcond"
else:
return "dependency"
@@ -204,7 +204,7 @@ include</literal> directive.</para>
<section>
<title>Inheritance</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.bbclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
<para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.oeclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
</section>
<section>
<title>Tasks</title>
@@ -388,7 +388,7 @@ ftp://.*/.* http://somemirror.org/sources/ \n \
http://.*/.* http://somemirror.org/sources/ \n \
https://.*/.* http://somemirror.org/sources/ \n"</screen></para>

<para>Non-local downloaded output is placed into the directory specified by the <varname>DL_DIR</varname>. For non local archive downloads the code can verify sha256 and md5 checksums for the download to ensure the file has been downloaded correctly. These may be specified either in the form <varname>SRC_URI[md5sum]</varname> for the md5 checksum and <varname>SRC_URI[sha256sum]</varname> for the sha256 checksum or as parameters on the SRC_URI such as SRC_URI="http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d". If <varname>BB_STRICT_CHECKSUM</varname> is set, any download without a checksum will trigger an error message. In cases where multiple files are listed in SRC_URI, the name parameter is used assign names to the urls and these are then specified in the checksums in the form SRC_URI[name.sha256sum].</para>
<para>Non-local downloaded output is placed into the directory specified by the <varname>DL_DIR</varname>. For non local downloads the code can check checksums for the download to ensure the file has been downloaded correctly. These are specified in the form <varname>SRC_URI[md5sum]</varname> for the md5 checksum and <varname>SRC_URI[sha256sum]</varname> for the sha256 checksum. If <varname>BB_STRICT_CHECKSUM</varname> is set, any download without a checksum will trigger an error message. In cases where multiple files are listed in SRC_URI, the name parameter is used assign names to the urls and these are then specified in the checksums in the form SRC_URI[name.sha256sum].</para>

</section>
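The manual paragraphs above describe checksum verification of downloads via SRC_URI[md5sum] / SRC_URI[sha256sum]. Conceptually the check reduces to hashing the fetched file and comparing digests; a sketch of that idea, not the fetcher's actual code (the file name and digest are taken from the example URL in the paragraph):

```python
import hashlib

def verify_download(path, md5sum=None, sha256sum=None):
    # Hash the downloaded file and compare against the recipe-supplied digests.
    with open(path, 'rb') as f:
        data = f.read()
    if md5sum and hashlib.md5(data).hexdigest() != md5sum:
        raise ValueError("md5 mismatch for %s" % path)
    if sha256sum and hashlib.sha256(data).hexdigest() != sha256sum:
        raise ValueError("sha256 mismatch for %s" % path)
    return True

# verify_download('downloads/foobar.tar.bz2',
#                 md5sum='4a8e0f237e961fd7785d19d07fdb994d')
```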
@@ -21,24 +21,12 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.15.1"
__version__ = "1.13.3"

import sys
if sys.version_info < (2, 6, 0):
raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")

class BBHandledException(Exception):
"""
The big dilemma for generic bitbake code is what information to give the user
when an exception occurs. Any exception inheriting this base exception class
has already provided information to the user via some 'fired' message type such as
an explicitly fired event using bb.fire, or a bb.error message. If bitbake
encounters an exception derived from this class, no backtrace or other information
will be given to the user, its assumed the earlier event provided the relevant information.
"""
pass

import os
import logging

@@ -79,7 +67,7 @@ if "BBDEBUG" in os.environ:
if level:
bb.msg.set_debug_level(level)

if os.environ.get("BBFETCH2"):
if True or os.environ.get("BBFETCH2"):
from bb import fetch2 as fetch
sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
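The BBHandledException docstring above defines a contract: anything derived from it has already been reported, so callers should not print a backtrace. A small illustration of that contract using a stand-in exception class:

```python
import traceback

class HandledException(Exception):
    """Stand-in for bb.BBHandledException: the error was already shown to the
    user via an event or bb.error, so no backtrace should be printed."""

def run_task():
    raise HandledException("already reported elsewhere")

try:
    run_task()
except HandledException:
    pass                     # quiet: the earlier message carried the details
except Exception:
    traceback.print_exc()    # unexpected failure: show everything we have
```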
@@ -53,7 +53,7 @@ class FuncFailed(Exception):
self.logfile = logfile
self.name = name
if name:
self.msg = 'Function failed: %s' % name
self.msg = "Function '%s' failed" % name
else:
self.msg = "Function failed"

@@ -70,9 +70,9 @@ class TaskBase(event.Event):

def __init__(self, t, d ):
self._task = t
self._package = d.getVar("PF", 1)
self._package = bb.data.getVar("PF", d, 1)
event.Event.__init__(self)
self._message = "package %s: task %s: %s" % (d.getVar("PF", 1), t, bb.event.getName(self)[4:])
self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])

def getTask(self):
return self._task
@@ -91,16 +91,9 @@ class TaskSucceeded(TaskBase):
class TaskFailed(TaskBase):
"""Task execution failed"""

def __init__(self, task, logfile, metadata, errprinted = False):
self.logfile = logfile
self.errprinted = errprinted
super(TaskFailed, self).__init__(task, metadata)

class TaskFailedSilent(TaskBase):
"""Task execution failed (silently)"""
def __init__(self, task, logfile, metadata):
self.logfile = logfile
super(TaskFailedSilent, self).__init__(task, metadata)
super(TaskFailed, self).__init__(task, metadata)

class TaskInvalid(TaskBase):

@@ -159,6 +152,8 @@ def exec_func(func, d, dirs = None):
bb.utils.mkdirhier(adir)

ispython = flags.get('python')
if flags.get('fakeroot') and not flags.get('task'):
bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)

lockflag = flags.get('lockfiles')
if lockflag:
@@ -227,10 +222,9 @@ def exec_func_shell(function, d, runfile, cwd=None):

with open(runfile, 'w') as script:
script.write('#!/bin/sh -e\n')
data.emit_func(function, script, d)

if bb.msg.loggerVerboseLogs:
if bb.msg.loggerDefaultVerbose:
script.write("set -x\n")
data.emit_func(function, script, d)
if cwd:
script.write("cd %s\n" % cwd)
script.write("%s\n" % function)
@@ -238,10 +232,6 @@ def exec_func_shell(function, d, runfile, cwd=None):
os.chmod(runfile, 0775)

cmd = runfile
if d.getVarFlag(function, 'fakeroot'):
fakerootcmd = d.getVar('FAKEROOT', True)
if fakerootcmd:
cmd = [fakerootcmd, runfile]

if bb.msg.loggerDefaultVerbose:
logfile = LogTee(logger, sys.stdout)
@@ -296,13 +286,6 @@ def _exec_task(fn, task, d, quieterr):
prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)

class ErrorCheckHandler(logging.Handler):
def __init__(self):
self.triggered = False
logging.Handler.__init__(self, logging.ERROR)
def emit(self, record):
self.triggered = True

# Handle logfiles
si = file('/dev/null', 'r')
try:
@@ -328,9 +311,6 @@ def _exec_task(fn, task, d, quieterr):
handler.setLevel(logging.DEBUG - 2)
bblogger.addHandler(handler)

errchk = ErrorCheckHandler()
bblogger.addHandler(errchk)

localdata.setVar('BB_LOGFILE', logfn)

event.fire(TaskStarted(task, localdata), localdata)
@@ -341,12 +321,9 @@ def _exec_task(fn, task, d, quieterr):
for func in (postfuncs or '').split():
exec_func(func, localdata)
except FuncFailed as exc:
if quieterr:
event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
else:
errprinted = errchk.triggered
if not quieterr:
logger.error(str(exc))
event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
event.fire(TaskFailed(task, logfn, localdata), localdata)
return 1
finally:
sys.stdout.flush()
@@ -389,7 +366,7 @@ def exec_task(fn, task, d):
if not quieterr:
logger.error("Build of %s failed" % (task))
logger.error(format_exc())
failedevent = TaskFailed(task, None, d, True)
failedevent = TaskFailed(task, None, d)
event.fire(failedevent, d)
return 1
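The ErrorCheckHandler shown above is a tiny logging.Handler whose only job is to remember whether an ERROR-level record was emitted, so TaskFailed can report whether the error text was already printed. A standalone version of that pattern:

```python
import logging

class ErrorFlagHandler(logging.Handler):
    # Emits nothing; it only records that an ERROR (or worse) passed through.
    def __init__(self):
        logging.Handler.__init__(self, logging.ERROR)
        self.triggered = False
    def emit(self, record):
        self.triggered = True

log = logging.getLogger("demo")
flag = ErrorFlagHandler()
log.addHandler(flag)
log.error("something failed")
print(flag.triggered)   # True: the caller can tell an error-level record was logged
```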
@@ -31,6 +31,7 @@
import os
import logging
from collections import defaultdict
import bb.data
import bb.utils

logger = logging.getLogger("BitBake.Cache")
@@ -42,10 +43,10 @@ except ImportError:
logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.")

__cache_version__ = "143"
__cache_version__ = "142"

def getCacheFile(path, filename, data_hash):
return os.path.join(path, filename + "." + data_hash)
def getCacheFile(path, filename):
return os.path.join(path, filename)

# RecipeInfoCommon defines common data retrieving methods
# from meta data for caches. CoreRecipeInfo as well as other
@@ -137,9 +138,11 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
self.inherits = self.getvar('__inherit_cache', metadata)
self.summary = self.getvar('SUMMARY', metadata)
self.license = self.getvar('LICENSE', metadata)
self.section = self.getvar('SECTION', metadata)
self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)

@classmethod
def init_cacheData(cls, cachedata):
@@ -171,8 +174,10 @@ class CoreRecipeInfo(RecipeInfoCommon):

cachedata.basetaskhash = {}
cachedata.inherits = {}
cachedata.summary = {}
cachedata.license = {}
cachedata.section = {}
cachedata.fakerootenv = {}
cachedata.fakerootnoenv = {}
cachedata.fakerootdirs = {}

def add_cacheData(self, cachedata, fn):
@@ -234,8 +239,10 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.basetaskhash[identifier] = taskhash

cachedata.inherits[fn] = self.inherits
cachedata.summary[fn] = self.summary
cachedata.license[fn] = self.license
cachedata.section[fn] = self.section
cachedata.fakerootenv[fn] = self.fakerootenv
cachedata.fakerootnoenv[fn] = self.fakerootnoenv
cachedata.fakerootdirs[fn] = self.fakerootdirs

@@ -245,19 +252,18 @@ class Cache(object):
BitBake Cache implementation
"""

def __init__(self, data, data_hash, caches_array):
def __init__(self, data, caches_array):
# Pass caches_array information into Cache Constructor
# It will be used in later for deciding whether we
# need extra cache file dump/load support
self.caches_array = caches_array
self.cachedir = data.getVar("CACHE", True)
self.cachedir = bb.data.getVar("CACHE", data, True)
self.clean = set()
self.checked = set()
self.depends_cache = {}
self.data = None
self.data_fn = None
self.cacheclean = True
self.data_hash = data_hash

if self.cachedir in [None, '']:
self.has_cache = False
@@ -266,17 +272,26 @@ class Cache(object):
return

self.has_cache = True
self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat")

logger.debug(1, "Using cache in '%s'", self.cachedir)
bb.utils.mkdirhier(self.cachedir)

# If any of configuration.data's dependencies are newer than the
# cache there isn't even any point in loading it...
newest_mtime = 0
deps = bb.data.getVar("__base_depends", data)

old_mtimes = [old_mtime for _, old_mtime in deps]
old_mtimes.append(newest_mtime)
newest_mtime = max(old_mtimes)

cache_ok = True
if self.caches_array:
for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cache_ok = cache_ok and os.path.exists(cachefile)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
cache_ok = cache_ok and (bb.parse.cached_mtime_noerror(cachefile) >= newest_mtime)
cache_class.init_cacheData(self)
if cache_ok:
self.load_cachefile()
@@ -310,7 +325,7 @@ class Cache(object):
# Calculate the correct cachesize of all those cache files
for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
with open(cachefile, "rb") as cachefile:
cachesize += os.fstat(cachefile.fileno()).st_size

@@ -318,7 +333,7 @@ class Cache(object):

for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
with open(cachefile, "rb") as cachefile:
pickled = pickle.Unpickler(cachefile)
while cachefile:
@@ -336,7 +351,7 @@ class Cache(object):
current_percent = 100 * current_progress / cachesize
if current_percent > previous_percent:
previous_percent = current_percent
bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
bb.event.fire(bb.event.CacheLoadProgress(current_progress),
self.data)

previous_progress += current_progress
@@ -571,7 +586,7 @@ class Cache(object):
for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cache_class_name = cache_class.__name__
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
file_dict[cache_class_name] = open(cachefile, "wb")
pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

@@ -676,7 +691,7 @@ def init(cooker):
Files causing parsing errors are evicted from the cache.

"""
return Cache(cooker.configuration.data, cooker.configuration.data_hash)
return Cache(cooker.configuration.data)

class CacheData(object):
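One code path above decides cache validity by modification times: if any configuration dependency is newer than the cache file, the cache is not worth loading (the other path instead embeds a data hash in the cache file name). A simplified mtime check, with hypothetical file names:

```python
import os

def cache_is_current(cachefile, dep_files):
    # Usable only if the cache is at least as new as every file it depends on.
    if not os.path.exists(cachefile):
        return False
    newest_dep = max(os.path.getmtime(f) for f in dep_files) if dep_files else 0
    return os.path.getmtime(cachefile) >= newest_dep

# cache_is_current('cache/bb_cache.dat', ['conf/bblayers.conf', 'conf/local.conf'])
```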
@@ -40,7 +40,6 @@ class HobRecipeInfo(RecipeInfoCommon):
self.summary = self.getvar('SUMMARY', metadata)
self.license = self.getvar('LICENSE', metadata)
self.section = self.getvar('SECTION', metadata)
self.description = self.getvar('DESCRIPTION', metadata)

@classmethod
def init_cacheData(cls, cachedata):
@@ -48,10 +47,8 @@ class HobRecipeInfo(RecipeInfoCommon):
cachedata.summary = {}
cachedata.license = {}
cachedata.section = {}
cachedata.description = {}

def add_cacheData(self, cachedata, fn):
cachedata.summary[fn] = self.summary
cachedata.license[fn] = self.license
cachedata.section[fn] = self.section
cachedata.description[fn] = self.description
@@ -36,8 +36,8 @@ pythonparsecache = {}
shellparsecache = {}

def parser_cachefile(d):
cachedir = (d.getVar("PERSISTENT_DIR", True) or
d.getVar("CACHE", True))
cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
bb.data.getVar("CACHE", d, True))
if cachedir in [None, '']:
return None
bb.utils.mkdirhier(cachedir)
@@ -30,6 +30,11 @@ Commands are queued in a CommandQueue
|
||||
|
||||
import bb.event
|
||||
import bb.cooker
|
||||
import bb.data
|
||||
|
||||
async_cmds = {}
|
||||
sync_cmds = {}
|
||||
|
||||
|
||||
class CommandCompleted(bb.event.Event):
|
||||
pass
|
||||
@@ -56,6 +61,16 @@ class Command:
|
||||
# FIXME Add lock for this
|
||||
self.currentAsyncCommand = None
|
||||
|
||||
for attr in CommandsSync.__dict__:
|
||||
command = attr[:].lower()
|
||||
method = getattr(CommandsSync, attr)
|
||||
sync_cmds[command] = (method)
|
||||
|
||||
for attr in CommandsAsync.__dict__:
|
||||
command = attr[:].lower()
|
||||
method = getattr(CommandsAsync, attr)
|
||||
async_cmds[command] = (method)
|
||||
|
||||
def runCommand(self, commandline):
|
||||
try:
|
||||
command = commandline.pop(0)
|
||||
@@ -98,12 +113,9 @@ class Command:
|
||||
else:
|
||||
self.finishAsyncCommand("Exited with %s" % arg)
|
||||
return False
|
||||
except Exception as exc:
|
||||
except Exception:
|
||||
import traceback
|
||||
if isinstance(exc, bb.BBHandledException):
|
||||
self.finishAsyncCommand("")
|
||||
else:
|
||||
self.finishAsyncCommand(traceback.format_exc())
|
||||
self.finishAsyncCommand(traceback.format_exc())
|
||||
return False
|
||||
|
||||
def finishAsyncCommand(self, msg=None, code=None):
|
||||
@@ -150,7 +162,7 @@ class CommandsSync:
|
||||
if len(params) > 1:
|
||||
expand = params[1]
|
||||
|
||||
return command.cooker.configuration.data.getVar(varname, expand)
|
||||
return bb.data.getVar(varname, command.cooker.configuration.data, expand)
|
||||
|
||||
def setVariable(self, command, params):
|
||||
"""
|
||||
@@ -158,13 +170,7 @@ class CommandsSync:
|
||||
"""
|
||||
varname = params[0]
|
||||
value = params[1]
|
||||
command.cooker.configuration.data.setVar(varname, value)
|
||||
|
||||
def initCooker(self, command, params):
|
||||
"""
|
||||
Init the cooker to initial state with nothing parsed
|
||||
"""
|
||||
command.cooker.initialize()
|
||||
bb.data.setVar(varname, value, command.cooker.configuration.data)
|
||||
|
||||
def resetCooker(self, command, params):
|
||||
"""
|
||||
@@ -173,18 +179,6 @@ class CommandsSync:
|
||||
"""
|
||||
command.cooker.reset()
|
||||
|
||||
def getCpuCount(self, command, params):
|
||||
"""
|
||||
Get the CPU count on the bitbake server
|
||||
"""
|
||||
return bb.utils.cpu_count()
|
||||
|
||||
def triggerEvent(self, command, params):
|
||||
"""
|
||||
Trigger a certain event
|
||||
"""
|
||||
event = params[0]
|
||||
bb.event.fire(eval(event), command.cooker.configuration.data)
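Since triggerEvent eval()s the string it is given, a caller passes a constructor expression; a hypothetical invocation (the client API shape is assumed, not taken from this diff):

    server.runCommand(["triggerEvent", "bb.event.ConfigParsed()"])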
class CommandsAsync:
|
||||
"""
|
||||
@@ -242,35 +236,17 @@ class CommandsAsync:
included in the package list.
If pkg_list provided use that list (plus any extras brought in by
klass) rather than generating a tree for all packages.

Add a new option "resolve" to indicate if we need to resolve the
replacement for "virtual/xxx" like pn.
"""
klass = params[0]
resolve = False
if len(params) > 2:
pkg_list = params[1]
resolve = params[2]
elif len(params) > 1:
if len(params) > 1:
pkg_list = params[1]
else:
pkg_list = []

command.cooker.generateTargetsTree(klass, pkg_list, resolve)
command.cooker.generateTargetsTree(klass, pkg_list)
command.finishAsyncCommand()
generateTargetsTree.needcache = True
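A hypothetical front-end request for the command above, passing the new "resolve" flag as the third parameter (arguments and client API are illustrative assumptions):

    server.runCommand(["generateTargetsTree", "image.bbclass", ["busybox"], True])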
def findCoreBaseFiles(self, command, params):
|
||||
"""
|
||||
Find certain files in COREBASE directory. i.e. Layers
|
||||
"""
|
||||
subdir = params[0]
|
||||
filename = params[1]
|
||||
|
||||
command.cooker.findCoreBaseFiles(subdir, filename)
|
||||
command.finishAsyncCommand()
|
||||
findCoreBaseFiles.needcache = False
|
||||
|
||||
def findConfigFiles(self, command, params):
|
||||
"""
|
||||
Find config files which provide appropriate values
|
||||
@@ -280,7 +256,7 @@ class CommandsAsync:
|
||||
|
||||
command.cooker.findConfigFiles(varname)
|
||||
command.finishAsyncCommand()
|
||||
findConfigFiles.needcache = False
|
||||
findConfigFiles.needcache = True
|
||||
|
||||
def findFilesMatchingInDir(self, command, params):
|
||||
"""
|
||||
@@ -292,7 +268,7 @@ class CommandsAsync:
|
||||
|
||||
command.cooker.findFilesMatchingInDir(pattern, directory)
|
||||
command.finishAsyncCommand()
|
||||
findFilesMatchingInDir.needcache = False
|
||||
findFilesMatchingInDir.needcache = True
|
||||
|
||||
def findConfigFilePath(self, command, params):
|
||||
"""
|
||||
@@ -359,13 +335,3 @@ class CommandsAsync:
|
||||
else:
|
||||
command.finishAsyncCommand()
|
||||
compareRevisions.needcache = True
|
||||
|
||||
def parseConfigurationFiles(self, command, params):
|
||||
"""
|
||||
Parse the configuration files
|
||||
"""
|
||||
prefiles = params[0]
|
||||
postfiles = params[1]
|
||||
command.cooker.parseConfigurationFiles(prefiles, postfiles)
|
||||
command.finishAsyncCommand()
|
||||
parseConfigurationFiles.needcache = False
|
||||
|
||||
@@ -34,10 +34,8 @@ from cStringIO import StringIO
|
||||
from contextlib import closing
|
||||
from functools import wraps
|
||||
from collections import defaultdict
|
||||
import bb, bb.exceptions, bb.command
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
|
||||
import Queue
|
||||
import prserv.serv
|
||||
import bb, bb.exceptions
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
|
||||
|
||||
logger = logging.getLogger("BitBake")
|
||||
collectlog = logging.getLogger("BitBake.Collection")
|
||||
@@ -45,9 +43,9 @@ buildlog = logging.getLogger("BitBake.Build")
|
||||
parselog = logging.getLogger("BitBake.Parsing")
|
||||
providerlog = logging.getLogger("BitBake.Provider")
|
||||
|
||||
class NoSpecificMatch(bb.BBHandledException):
|
||||
class MultipleMatches(Exception):
|
||||
"""
|
||||
Exception raised when no or multiple file matches are found
|
||||
Exception raised when multiple file matches are found
|
||||
"""
|
||||
|
||||
class NothingToBuild(Exception):
|
||||
@@ -55,11 +53,6 @@ class NothingToBuild(Exception):
|
||||
Exception raised when there is nothing to build
|
||||
"""
|
||||
|
||||
class CollectionError(bb.BBHandledException):
|
||||
"""
|
||||
Exception raised when layer configuration is incorrect
|
||||
"""
|
||||
|
||||
class state:
|
||||
initial, parsing, running, shutdown, stop = range(5)
|
||||
|
||||
@@ -142,14 +135,10 @@ class BBCooker:
|
||||
self.configuration.data = None
|
||||
self.loadConfigurationData()
|
||||
|
||||
# Take a lock so only one copy of bitbake can run against a given build
|
||||
# directory at a time
|
||||
lockfile = self.configuration.data.expand("${TOPDIR}/bitbake.lock")
|
||||
self.lock = bb.utils.lockfile(lockfile, False, False)
|
||||
if not self.lock:
|
||||
bb.fatal("Only one copy of bitbake should be run against a build directory")
|
||||
if not self.configuration.cmd:
|
||||
self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
|
||||
|
||||
bbpkgs = self.configuration.data.getVar('BBPKGS', True)
|
||||
bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
|
||||
if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
|
||||
self.configuration.pkgs_to_build.extend(bbpkgs.split())
|
||||
|
||||
@@ -174,20 +163,11 @@ class BBCooker:
|
||||
|
||||
self.parser = None
|
||||
|
||||
def initConfigurationData(self):
|
||||
self.configuration.data = bb.data.init()
|
||||
|
||||
if not self.server_registration_cb:
|
||||
bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)
|
||||
|
||||
filtered_keys = bb.utils.approved_variables()
|
||||
bb.data.inheritFromOS(self.configuration.data, self.savedenv, filtered_keys)
|
||||
|
||||
def loadConfigurationData(self):
|
||||
self.configuration.data = bb.data.init()
|
||||
|
||||
if not self.server_registration_cb:
|
||||
self.configuration.data.setVar("BB_WORKERCONTEXT", "1")
|
||||
bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)
|
||||
|
||||
filtered_keys = bb.utils.approved_variables()
|
||||
bb.data.inheritFromOS(self.configuration.data, self.savedenv, filtered_keys)
|
||||
@@ -202,17 +182,13 @@ class BBCooker:
|
||||
sys.exit(1)
|
||||
|
||||
if not self.configuration.cmd:
|
||||
self.configuration.cmd = self.configuration.data.getVar("BB_DEFAULT_TASK", True) or "build"
|
||||
self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
|
||||
|
||||
def parseConfiguration(self):
|
||||
|
||||
# Set log file verbosity
|
||||
verboselogs = bb.utils.to_boolean(self.configuration.data.getVar("BB_VERBOSE_LOGS", "0"))
|
||||
if verboselogs:
|
||||
bb.msg.loggerVerboseLogs = True
|
||||
|
||||
# Change nice level if we're asked to
|
||||
nice = self.configuration.data.getVar("BB_NICE_LEVEL", True)
|
||||
nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
|
||||
if nice:
|
||||
curnice = os.nice(0)
|
||||
nice = int(nice) - curnice
|
||||
@@ -268,8 +244,20 @@ class BBCooker:
|
||||
# Need files parsed
|
||||
self.updateCache()
|
||||
|
||||
# Need to ensure data store is expanded
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
bb.data.expandKeys(localdata)
|
||||
|
||||
pkg_pn = self.status.pkg_pn
|
||||
(latest_versions, preferred_versions) = bb.providers.findProviders(self.configuration.data, self.status, pkg_pn)
|
||||
preferred_versions = {}
|
||||
latest_versions = {}
|
||||
|
||||
# Sort by priority
|
||||
for pn in pkg_pn:
|
||||
(last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, localdata, self.status)
|
||||
preferred_versions[pn] = (pref_ver, pref_file)
|
||||
latest_versions[pn] = (last_ver, last_file)
|
||||
|
||||
logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
|
||||
logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")
|
||||
@@ -298,7 +286,7 @@ class BBCooker:
|
||||
# this showEnvironment() code path doesn't use the cache
|
||||
self.parseConfiguration()
|
||||
self.status = bb.cache.CacheData(self.caches_array)
|
||||
self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", 1) )
|
||||
self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
|
||||
|
||||
fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
|
||||
fn = self.matchFile(fn)
|
||||
@@ -343,7 +331,6 @@ class BBCooker:
|
||||
"""
|
||||
Prepare a runqueue and taskdata object for iteration over pkgs_to_build
|
||||
"""
|
||||
bb.event.fire(bb.event.TreeDataPreparationStarted(), self.configuration.data)
|
||||
# Need files parsed
|
||||
self.updateCache()
|
||||
# If we are told to do the None task then query the default task
|
||||
@@ -360,14 +347,11 @@ class BBCooker:
|
||||
taskdata = bb.taskdata.TaskData(False, skiplist=self.skiplist)
|
||||
|
||||
runlist = []
|
||||
current = 0
|
||||
for k in pkgs_to_build:
|
||||
taskdata.add_provider(localdata, self.status, k)
|
||||
runlist.append([k, "do_%s" % task])
|
||||
current += 1
|
||||
bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(pkgs_to_build)), self.configuration.data)
|
||||
taskdata.add_unresolved(localdata, self.status)
|
||||
bb.event.fire(bb.event.TreeDataPreparationCompleted(len(pkgs_to_build)), self.configuration.data)
|
||||
|
||||
return runlist, taskdata
|
||||
|
||||
def generateTaskDepTreeData(self, pkgs_to_build, task):
|
||||
@@ -442,20 +426,7 @@ class BBCooker:
|
||||
|
||||
return depend_tree
|
||||
|
||||
def append_package(self, taskdata, depend_tree_package, package):
|
||||
if package not in depend_tree_package:
|
||||
targetid = taskdata.getrun_id(package)
|
||||
if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
|
||||
fnid = taskdata.run_targets[targetid][0]
|
||||
fn = taskdata.fn_index[fnid]
|
||||
pn = self.status.pkg_fn[fn]
|
||||
version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
|
||||
depend_tree_package[package] = {}
|
||||
depend_tree_package[package]["pn"] = pn
|
||||
depend_tree_package[package]["filename"] = fn
|
||||
depend_tree_package[package]["version"] = version
|
||||
|
||||
def generatePkgDepTreeData(self, pkgs_to_build, task, resolve=False):
|
||||
def generatePkgDepTreeData(self, pkgs_to_build, task):
|
||||
"""
|
||||
Create a dependency tree of pkgs_to_build, returning the data.
|
||||
"""
|
||||
@@ -472,7 +443,6 @@ class BBCooker:
|
||||
depend_tree["rdepends-pn"] = {}
|
||||
depend_tree["packages"] = {}
|
||||
depend_tree["rdepends-pkg"] = {}
|
||||
depend_tree["rrecs-pkg"] = {}
|
||||
|
||||
for task in xrange(len(tasks_fnid)):
|
||||
fnid = tasks_fnid[task]
|
||||
@@ -482,9 +452,6 @@ class BBCooker:
|
||||
summary = self.status.summary[fn]
|
||||
lic = self.status.license[fn]
|
||||
section = self.status.section[fn]
|
||||
description = self.status.description[fn]
|
||||
rdepends = self.status.rundeps[fn]
|
||||
rrecs = self.status.runrecs[fn]
|
||||
if pn not in depend_tree["pn"]:
|
||||
depend_tree["pn"][pn] = {}
|
||||
depend_tree["pn"][pn]["filename"] = fn
|
||||
@@ -492,8 +459,6 @@ class BBCooker:
|
||||
depend_tree["pn"][pn]["summary"] = summary
|
||||
depend_tree["pn"][pn]["license"] = lic
|
||||
depend_tree["pn"][pn]["section"] = section
|
||||
depend_tree["pn"][pn]["description"] = description
|
||||
depend_tree["pn"][pn]["packages"] = rdepends.keys()
|
||||
|
||||
if fnid not in seen_fnids:
|
||||
seen_fnids.append(fnid)
|
||||
@@ -501,44 +466,25 @@ class BBCooker:
|
||||
|
||||
depend_tree["depends"][pn] = []
|
||||
for dep in taskdata.depids[fnid]:
|
||||
if resolve:
|
||||
item = taskdata.build_names_index[dep]
|
||||
pn_provider = ""
|
||||
targetid = taskdata.getbuild_id(item)
|
||||
if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
|
||||
fnid = taskdata.build_targets[targetid][0]
|
||||
fn_provider = taskdata.fn_index[fnid]
|
||||
pn_provider = self.status.pkg_fn[fn_provider]
|
||||
else:
|
||||
pn_provider = item
|
||||
depend_tree["depends"][pn].append(pn_provider)
|
||||
else:
|
||||
depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
|
||||
depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
|
||||
|
||||
depend_tree["rdepends-pn"][pn] = []
|
||||
for rdep in taskdata.rdepids[fnid]:
|
||||
depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
|
||||
|
||||
rdepends = self.status.rundeps[fn]
|
||||
for package in rdepends:
|
||||
depend_tree["rdepends-pkg"][package] = []
|
||||
for rdepend in rdepends[package]:
|
||||
depend_tree["rdepends-pkg"][package].append(rdepend)
|
||||
if resolve:
|
||||
self.append_package(taskdata, depend_tree["packages"], rdepend)
|
||||
if not package in packages:
|
||||
packages.append(package)
|
||||
|
||||
for package in rrecs:
|
||||
depend_tree["rrecs-pkg"][package] = []
|
||||
for rrec in rrecs[package]:
|
||||
depend_tree["rrecs-pkg"][package].append(rrec)
|
||||
if resolve:
|
||||
self.append_package(taskdata, depend_tree["packages"], rrec)
|
||||
if not package in packages:
|
||||
packages.append(package)
|
||||
packages.append(package)
|
||||
|
||||
for package in packages:
|
||||
self.append_package(taskdata, depend_tree["packages"], package)
|
||||
if package not in depend_tree["packages"]:
|
||||
depend_tree["packages"][package] = {}
|
||||
depend_tree["packages"][package]["pn"] = pn
|
||||
depend_tree["packages"][package]["filename"] = fn
|
||||
depend_tree["packages"][package]["version"] = version
|
||||
|
||||
return depend_tree
|
||||
|
||||
@@ -646,7 +592,7 @@ class BBCooker:
|
||||
bb.data.expandKeys(localdata)
|
||||
|
||||
# Handle PREFERRED_PROVIDERS
|
||||
for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
|
||||
for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, True) or "").split():
|
||||
try:
|
||||
(providee, provider) = p.split(':')
|
||||
except:
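The split(':') above expects PREFERRED_PROVIDERS entries of the form providee:provider, so an illustrative setting (values assumed) would be:

    PREFERRED_PROVIDERS = "virtual/kernel:linux-yocto virtual/libc:eglibc"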
@@ -682,18 +628,6 @@ class BBCooker:
|
||||
if regex in unmatched:
|
||||
collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
|
||||
|
||||
def findCoreBaseFiles(self, subdir, configfile):
|
||||
corebase = self.configuration.data.getVar('COREBASE', True) or ""
|
||||
paths = []
|
||||
for root, dirs, files in os.walk(corebase + '/' + subdir):
|
||||
for d in dirs:
|
||||
configfilepath = os.path.join(root, d, configfile)
|
||||
if os.path.exists(configfilepath):
|
||||
paths.append(os.path.join(root, d))
|
||||
|
||||
if paths:
|
||||
bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.configuration.data)
|
||||
|
||||
def findConfigFilePath(self, configfile):
|
||||
"""
|
||||
Find the location on disk of configfile and if it exists and was parsed by BitBake
|
||||
@@ -706,8 +640,8 @@ class BBCooker:
|
||||
# Generate a list of parsed configuration files by searching the files
|
||||
# listed in the __depends and __base_depends variables with a .conf suffix.
|
||||
conffiles = []
|
||||
dep_files = self.configuration.data.getVar('__depends') or set()
|
||||
dep_files.union(self.configuration.data.getVar('__base_depends') or set())
|
||||
dep_files = bb.data.getVar('__depends', self.configuration.data) or set()
|
||||
dep_files.union(bb.data.getVar('__base_depends', self.configuration.data) or set())
|
||||
|
||||
for f in dep_files:
|
||||
if f[0].endswith(".conf"):
|
||||
@@ -735,7 +669,7 @@ class BBCooker:
|
||||
|
||||
matches = []
|
||||
p = re.compile(re.escape(filepattern))
|
||||
bbpaths = self.configuration.data.getVar('BBPATH', True).split(':')
|
||||
bbpaths = bb.data.getVar('BBPATH', self.configuration.data, True).split(':')
|
||||
for path in bbpaths:
|
||||
dirpath = os.path.join(path, directory)
|
||||
if os.path.exists(dirpath):
|
||||
@@ -757,7 +691,7 @@ class BBCooker:
|
||||
|
||||
data = self.configuration.data
|
||||
# iterate configs
|
||||
bbpaths = data.getVar('BBPATH', True).split(':')
|
||||
bbpaths = bb.data.getVar('BBPATH', data, True).split(':')
|
||||
for path in bbpaths:
|
||||
confpath = os.path.join(path, "conf", var)
|
||||
if os.path.exists(confpath):
|
||||
@@ -784,7 +718,7 @@ class BBCooker:
|
||||
|
||||
return pkg_list
|
||||
|
||||
def generateTargetsTree(self, klass=None, pkgs=[], resolve=False):
|
||||
def generateTargetsTree(self, klass=None, pkgs=[]):
|
||||
"""
|
||||
Generate a dependency tree of buildable targets
|
||||
Generate an event with the result
|
||||
@@ -799,7 +733,7 @@ class BBCooker:
|
||||
pkgs = pkgs + extra_pkgs
|
||||
|
||||
# generate a dependency tree for all our packages
|
||||
tree = self.generatePkgDepTreeData(pkgs, 'build', resolve)
|
||||
tree = self.generatePkgDepTreeData(pkgs, 'build')
|
||||
bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.configuration.data)
|
||||
|
||||
def buildWorldTargetList(self):
|
||||
@@ -862,16 +796,16 @@ class BBCooker:
|
||||
parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
|
||||
data = _parse(layerconf, data)
|
||||
|
||||
layers = (data.getVar('BBLAYERS', True) or "").split()
|
||||
layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
|
||||
|
||||
data = bb.data.createCopy(data)
|
||||
for layer in layers:
|
||||
parselog.debug(2, "Adding layer %s", layer)
|
||||
data.setVar('LAYERDIR', layer)
|
||||
bb.data.setVar('LAYERDIR', layer, data)
|
||||
data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
|
||||
data.expandVarref('LAYERDIR')
|
||||
|
||||
data.delVar('LAYERDIR')
|
||||
bb.data.delVar('LAYERDIR', data)
|
||||
|
||||
if not data.getVar("BBPATH", True):
|
||||
raise SystemExit("The BBPATH variable is not set")
|
||||
@@ -889,21 +823,18 @@ class BBCooker:
|
||||
|
||||
# Normally we only register event handlers at the end of parsing .bb files
|
||||
# We register any handlers we've found so far here...
|
||||
for var in data.getVar('__BBHANDLERS') or []:
|
||||
bb.event.register(var, data.getVar(var))
|
||||
for var in bb.data.getVar('__BBHANDLERS', data) or []:
|
||||
bb.event.register(var, bb.data.getVar(var, data))
|
||||
|
||||
if data.getVar("BB_WORKERCONTEXT", False) is None:
|
||||
bb.fetch.fetcher_init(data)
|
||||
bb.codeparser.parser_cache_init(data)
|
||||
bb.event.fire(bb.event.ConfigParsed(), data)
|
||||
bb.parse.init_parser(data)
|
||||
data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
|
||||
bb.event.fire(bb.event.ConfigParsed(), data)
|
||||
self.configuration.data = data
|
||||
self.configuration.data_hash = data.get_hash()
|
||||
|
||||
def handleCollections( self, collections ):
|
||||
"""Handle collections"""
|
||||
errors = False
|
||||
self.status.bbfile_config_priorities = []
|
||||
if collections:
|
||||
collection_priorities = {}
|
||||
@@ -912,13 +843,12 @@ class BBCooker:
|
||||
min_prio = 0
|
||||
for c in collection_list:
|
||||
# Get collection priority if defined explicitly
|
||||
priority = self.configuration.data.getVar("BBFILE_PRIORITY_%s" % c, 1)
|
||||
priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
|
||||
if priority:
|
||||
try:
|
||||
prio = int(priority)
|
||||
except ValueError:
|
||||
parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
|
||||
errors = True
|
||||
if min_prio == 0 or prio < min_prio:
|
||||
min_prio = prio
|
||||
collection_priorities[c] = prio
|
||||
@@ -926,7 +856,7 @@ class BBCooker:
|
||||
collection_priorities[c] = None
|
||||
|
||||
# Check dependencies and store information for priority calculation
|
||||
deps = self.configuration.data.getVar("LAYERDEPENDS_%s" % c, 1)
|
||||
deps = bb.data.getVar("LAYERDEPENDS_%s" % c, self.configuration.data, 1)
|
||||
if deps:
|
||||
depnamelist = []
|
||||
deplist = deps.split()
|
||||
@@ -937,7 +867,6 @@ class BBCooker:
|
||||
depver = int(depsplit[1])
|
||||
except ValueError:
|
||||
parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
|
||||
errors = True
|
||||
continue
|
||||
else:
|
||||
depver = None
|
||||
@@ -946,23 +875,19 @@ class BBCooker:
|
||||
|
||||
if dep in collection_list:
|
||||
if depver:
|
||||
layerver = self.configuration.data.getVar("LAYERVERSION_%s" % dep, 1)
|
||||
layerver = bb.data.getVar("LAYERVERSION_%s" % dep, self.configuration.data, 1)
|
||||
if layerver:
|
||||
try:
|
||||
lver = int(layerver)
|
||||
except ValueError:
|
||||
parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
|
||||
errors = True
|
||||
continue
|
||||
if lver <> depver:
|
||||
parselog.error("Layer dependency %s of layer %s is at version %d, expected %d", dep, c, lver, depver)
|
||||
errors = True
|
||||
else:
|
||||
parselog.error("Layer dependency %s of layer %s has no version, expected %d", dep, c, depver)
|
||||
errors = True
|
||||
else:
|
||||
parselog.error("Layer dependency %s of layer %s not found", dep, c)
|
||||
errors = True
|
||||
collection_depends[c] = depnamelist
|
||||
else:
|
||||
collection_depends[c] = []
|
||||
@@ -983,29 +908,24 @@ class BBCooker:
|
||||
# Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
|
||||
for c in collection_list:
|
||||
calc_layer_priority(c)
|
||||
regex = self.configuration.data.getVar("BBFILE_PATTERN_%s" % c, 1)
|
||||
regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
|
||||
if regex == None:
|
||||
parselog.error("BBFILE_PATTERN_%s not defined" % c)
|
||||
errors = True
|
||||
continue
|
||||
try:
|
||||
cre = re.compile(regex)
|
||||
except re.error:
|
||||
parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
|
||||
errors = True
|
||||
continue
|
||||
self.status.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
|
||||
if errors:
|
||||
# We've already printed the actual error(s)
|
||||
raise CollectionError("Errors during parsing layer configuration")
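The collection handling above is driven by a handful of per-layer variables; an illustrative layer.conf fragment (names and values assumed) that would pass these checks:

    BBFILE_COLLECTIONS += "mylayer"
    BBFILE_PATTERN_mylayer = "^${LAYERDIR}/"
    BBFILE_PRIORITY_mylayer = "6"
    LAYERDEPENDS_mylayer = "core otherlayer:3"
    # and in the dependency's own layer.conf:
    LAYERVERSION_otherlayer = "3"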
def buildSetVars(self):
|
||||
"""
|
||||
Setup any variables needed before starting a build
|
||||
"""
|
||||
if not self.configuration.data.getVar("BUILDNAME"):
|
||||
self.configuration.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
|
||||
self.configuration.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
|
||||
if not bb.data.getVar("BUILDNAME", self.configuration.data):
|
||||
bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
|
||||
bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)
|
||||
|
||||
def matchFiles(self, bf):
|
||||
"""
|
||||
@@ -1033,15 +953,10 @@ class BBCooker:
|
||||
"""
|
||||
matches = self.matchFiles(buildfile)
|
||||
if len(matches) != 1:
|
||||
if matches:
|
||||
msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
|
||||
if matches:
|
||||
for f in matches:
|
||||
msg += "\n %s" % f
|
||||
parselog.error(msg)
|
||||
else:
|
||||
parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
|
||||
raise NoSpecificMatch
|
||||
parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
|
||||
for f in matches:
|
||||
parselog.error(" %s" % f)
|
||||
raise MultipleMatches
|
||||
return matches[0]
|
||||
|
||||
def buildFile(self, buildfile, task):
|
||||
@@ -1057,7 +972,7 @@ class BBCooker:
|
||||
# buildFile() doesn't use the cache
|
||||
self.parseConfiguration()
|
||||
self.status = bb.cache.CacheData(self.caches_array)
|
||||
self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", 1) )
|
||||
self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
|
||||
|
||||
# If we are told to do the None task then query the default task
|
||||
if (task == None):
|
||||
@@ -1101,7 +1016,7 @@ class BBCooker:
|
||||
taskdata = bb.taskdata.TaskData(self.configuration.abort)
|
||||
taskdata.add_provider(self.configuration.data, self.status, item)
|
||||
|
||||
buildname = self.configuration.data.getVar("BUILDNAME")
|
||||
buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)
|
||||
|
||||
# Execute the runqueue
|
||||
@@ -1119,6 +1034,8 @@ class BBCooker:
|
||||
try:
|
||||
retval = rq.execute_runqueue()
|
||||
except runqueue.TaskFailure as exc:
|
||||
for fnid in exc.args:
|
||||
buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
|
||||
failures += len(exc.args)
|
||||
retval = False
|
||||
except SystemExit as exc:
|
||||
@@ -1126,7 +1043,7 @@ class BBCooker:
|
||||
return False
|
||||
|
||||
if not retval:
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.configuration.event_data)
|
||||
bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
|
||||
self.command.finishAsyncCommand()
|
||||
return False
|
||||
if retval is True:
|
||||
@@ -1147,7 +1064,6 @@ class BBCooker:
|
||||
if (task == None):
|
||||
task = self.configuration.cmd
|
||||
|
||||
universe = ('universe' in targets)
|
||||
targets = self.checkPackages(targets)
|
||||
|
||||
def buildTargetsIdle(server, rq, abort):
|
||||
@@ -1159,6 +1075,8 @@ class BBCooker:
|
||||
try:
|
||||
retval = rq.execute_runqueue()
|
||||
except runqueue.TaskFailure as exc:
|
||||
for fnid in exc.args:
|
||||
buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
|
||||
failures += len(exc.args)
|
||||
retval = False
|
||||
except SystemExit as exc:
|
||||
@@ -1166,7 +1084,7 @@ class BBCooker:
|
||||
return False
|
||||
|
||||
if not retval:
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.configuration.data)
|
||||
bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
|
||||
self.command.finishAsyncCommand()
|
||||
return False
|
||||
if retval is True:
|
||||
@@ -1175,8 +1093,8 @@ class BBCooker:
|
||||
|
||||
self.buildSetVars()
|
||||
|
||||
buildname = self.configuration.data.getVar("BUILDNAME")
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.data)
|
||||
buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)
|
||||
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
@@ -1191,8 +1109,6 @@ class BBCooker:
|
||||
taskdata.add_unresolved(localdata, self.status)
|
||||
|
||||
rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
|
||||
if universe:
|
||||
rq.rqdata.warn_multi_bb = True
|
||||
|
||||
self.server_registration_cb(buildTargetsIdle, rq)
|
||||
|
||||
@@ -1211,16 +1127,16 @@ class BBCooker:
|
||||
del self.status
|
||||
self.status = bb.cache.CacheData(self.caches_array)
|
||||
|
||||
ignore = self.configuration.data.getVar("ASSUME_PROVIDED", 1) or ""
|
||||
ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
|
||||
self.status.ignored_dependencies = set(ignore.split())
|
||||
|
||||
for dep in self.configuration.extra_assume_provided:
|
||||
self.status.ignored_dependencies.add(dep)
|
||||
|
||||
self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", 1) )
|
||||
self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
|
||||
|
||||
(filelist, masked) = self.collect_bbfiles()
|
||||
self.configuration.data.renameVar("__depends", "__base_depends")
|
||||
bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
|
||||
|
||||
self.parser = CookerParser(self, filelist, masked)
|
||||
self.state = state.parsing
|
||||
@@ -1246,7 +1162,6 @@ class BBCooker:
|
||||
pkgs_to_build.append(t)
|
||||
|
||||
if 'universe' in pkgs_to_build:
|
||||
parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
|
||||
parselog.debug(1, "collating packages for \"universe\"")
|
||||
pkgs_to_build.remove('universe')
|
||||
for t in self.status.universe_target:
|
||||
@@ -1312,7 +1227,7 @@ class BBCooker:
|
||||
if g not in newfiles:
|
||||
newfiles.append(g)
|
||||
|
||||
bbmask = self.configuration.data.getVar('BBMASK', 1)
|
||||
bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)
|
||||
|
||||
if bbmask:
|
||||
try:
|
||||
@@ -1371,11 +1286,9 @@ class BBCooker:
|
||||
# Empty the environment. The environment will be populated as
|
||||
# necessary from the data store.
|
||||
#bb.utils.empty_environment()
|
||||
prserv.serv.auto_start(self.configuration.data)
|
||||
return
|
||||
|
||||
def post_serve(self):
|
||||
prserv.serv.auto_shutdown(self.configuration.data)
|
||||
bb.event.fire(CookerExit(), self.configuration.event_data)
|
||||
|
||||
def shutdown(self):
|
||||
@@ -1387,10 +1300,6 @@ class BBCooker:
|
||||
def reparseFiles(self):
|
||||
return
|
||||
|
||||
def initialize(self):
|
||||
self.state = state.initial
|
||||
self.initConfigurationData()
|
||||
|
||||
def reset(self):
|
||||
self.state = state.initial
|
||||
self.loadConfigurationData()
|
||||
@@ -1461,7 +1370,7 @@ def _parse(fn, data, include=True):
|
||||
|
||||
@catch_parse_error
|
||||
def _inherit(bbclass, data):
|
||||
bb.parse.BBHandler.inherit([bbclass], "configuration INHERITs", 0, data)
|
||||
bb.parse.BBHandler.inherit([bbclass], data)
|
||||
return data
|
||||
|
||||
class ParsingFailure(Exception):
|
||||
@@ -1470,94 +1379,26 @@ class ParsingFailure(Exception):
|
||||
self.recipe = recipe
|
||||
Exception.__init__(self, realexception, recipe)
|
||||
|
||||
class Feeder(multiprocessing.Process):
|
||||
def __init__(self, jobs, to_parsers, quit):
|
||||
self.quit = quit
|
||||
self.jobs = jobs
|
||||
self.to_parsers = to_parsers
|
||||
multiprocessing.Process.__init__(self)
|
||||
|
||||
def run(self):
|
||||
while True:
|
||||
try:
|
||||
quit = self.quit.get_nowait()
|
||||
except Queue.Empty:
|
||||
pass
|
||||
else:
|
||||
if quit == 'cancel':
|
||||
self.to_parsers.cancel_join_thread()
|
||||
break
|
||||
|
||||
try:
|
||||
job = self.jobs.pop()
|
||||
except IndexError:
|
||||
break
|
||||
|
||||
try:
|
||||
self.to_parsers.put(job, timeout=0.5)
|
||||
except Queue.Full:
|
||||
self.jobs.insert(0, job)
|
||||
continue
|
||||
|
||||
class Parser(multiprocessing.Process):
|
||||
def __init__(self, jobs, results, quit, init):
|
||||
self.jobs = jobs
|
||||
self.results = results
|
||||
self.quit = quit
|
||||
self.init = init
|
||||
multiprocessing.Process.__init__(self)
|
||||
|
||||
def run(self):
|
||||
if self.init:
|
||||
self.init()
|
||||
|
||||
pending = []
|
||||
while True:
|
||||
try:
|
||||
self.quit.get_nowait()
|
||||
except Queue.Empty:
|
||||
pass
|
||||
else:
|
||||
self.results.cancel_join_thread()
|
||||
break
|
||||
|
||||
if pending:
|
||||
result = pending.pop()
|
||||
else:
|
||||
try:
|
||||
job = self.jobs.get(timeout=0.25)
|
||||
except Queue.Empty:
|
||||
continue
|
||||
|
||||
if job is None:
|
||||
break
|
||||
result = self.parse(*job)
|
||||
|
||||
try:
|
||||
self.results.put(result, timeout=0.25)
|
||||
except Queue.Full:
|
||||
pending.append(result)
|
||||
|
||||
def parse(self, filename, appends, caches_array):
|
||||
try:
|
||||
return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
|
||||
except Exception as exc:
|
||||
tb = sys.exc_info()[2]
|
||||
exc.recipe = filename
|
||||
exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
|
||||
return True, exc
|
||||
# Need to turn BaseExceptions into Exceptions here so we gracefully shut down
# and so that, for example, a worker thread doesn't just exit on its own in
# response to a SystemExit event.
|
||||
except BaseException as exc:
|
||||
return True, ParsingFailure(exc, filename)
|
||||
def parse_file(task):
|
||||
filename, appends, caches_array = task
|
||||
try:
|
||||
return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg, caches_array)
|
||||
except Exception as exc:
|
||||
tb = sys.exc_info()[2]
|
||||
exc.recipe = filename
|
||||
exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
|
||||
raise exc
|
||||
# Need to turn BaseExceptions into Exceptions here so we gracefully shut down
# and so that, for example, a worker thread doesn't just exit on its own in
# response to a SystemExit event.
|
||||
except BaseException as exc:
|
||||
raise ParsingFailure(exc, filename)
|
||||
|
||||
class CookerParser(object):
|
||||
def __init__(self, cooker, filelist, masked):
|
||||
self.filelist = filelist
|
||||
self.cooker = cooker
|
||||
self.cfgdata = cooker.configuration.data
|
||||
self.cfghash = cooker.configuration.data_hash
|
||||
|
||||
# Accounting statistics
|
||||
self.parsed = 0
|
||||
@@ -1573,7 +1414,7 @@ class CookerParser(object):
|
||||
self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
|
||||
multiprocessing.cpu_count())
|
||||
|
||||
self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
|
||||
self.bb_cache = bb.cache.Cache(self.cfgdata, cooker.caches_array)
|
||||
self.fromcache = []
|
||||
self.willparse = []
|
||||
for filename in self.filelist:
|
||||
@@ -1588,28 +1429,22 @@ class CookerParser(object):
|
||||
self.start()
|
||||
|
||||
def start(self):
|
||||
def init(cfg):
|
||||
parse_file.cfg = cfg
|
||||
multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cooker.configuration.data, ), exitpriority=1)
|
||||
|
||||
self.results = self.load_cached()
|
||||
self.processes = []
|
||||
|
||||
if self.toparse:
|
||||
bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
|
||||
def init():
|
||||
Parser.cfg = self.cfgdata
|
||||
multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
|
||||
|
||||
self.feeder_quit = multiprocessing.Queue(maxsize=1)
|
||||
self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
|
||||
self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
|
||||
self.result_queue = multiprocessing.Queue()
|
||||
self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
|
||||
self.feeder.start()
|
||||
for i in range(0, self.num_processes):
|
||||
parser = Parser(self.jobs, self.result_queue, self.parser_quit, init)
|
||||
parser.start()
|
||||
self.processes.append(parser)
|
||||
self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata])
|
||||
parsed = self.pool.imap(parse_file, self.willparse)
|
||||
self.pool.close()
|
||||
|
||||
self.results = itertools.chain(self.results, self.parse_generator())
|
||||
self.results = itertools.chain(self.results, parsed)
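The replacement above swaps the hand-rolled Feeder/Parser processes for a multiprocessing.Pool that is primed by an initializer and drained lazily through imap. A minimal, self-contained sketch of that pattern (names are illustrative, not BitBake's actual parser):

    import multiprocessing

    def init(cfg):
        # runs once in each worker process; stash the shared config on the function
        parse_file.cfg = cfg

    def parse_file(filename):
        # stand-in for the real recipe parse
        return True, (parse_file.cfg, filename)

    if __name__ == "__main__":
        jobs = ["a.bb", "b.bb", "c.bb"]
        pool = multiprocessing.Pool(2, init, ["cfg-object"])
        results = pool.imap(parse_file, jobs)  # lazy, ordered results
        pool.close()
        for ok, info in results:
            print("%s %s" % (ok, info))
        pool.join()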
|
||||
def shutdown(self, clean=True, force=False):
|
||||
def shutdown(self, clean=True):
|
||||
if not self.toparse:
|
||||
return
|
||||
|
||||
@@ -1619,22 +1454,9 @@ class CookerParser(object):
|
||||
self.virtuals, self.error,
|
||||
self.total)
|
||||
bb.event.fire(event, self.cfgdata)
|
||||
self.feeder_quit.put(None)
|
||||
for process in self.processes:
|
||||
self.jobs.put(None)
|
||||
else:
|
||||
self.feeder_quit.put('cancel')
|
||||
|
||||
self.parser_quit.cancel_join_thread()
|
||||
for process in self.processes:
|
||||
self.parser_quit.put(None)
|
||||
|
||||
self.jobs.cancel_join_thread()
|
||||
sys.exit(1)
|
||||
|
||||
for process in self.processes:
|
||||
process.join()
|
||||
self.feeder.join()
|
||||
self.pool.terminate()
|
||||
self.pool.join()
|
||||
|
||||
sync = threading.Thread(target=self.bb_cache.sync)
|
||||
sync.start()
|
||||
@@ -1646,22 +1468,6 @@ class CookerParser(object):
|
||||
cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
|
||||
yield not cached, infos
|
||||
|
||||
def parse_generator(self):
|
||||
while True:
|
||||
if self.parsed >= self.toparse:
|
||||
break
|
||||
|
||||
try:
|
||||
result = self.result_queue.get(timeout=0.25)
|
||||
except Queue.Empty:
|
||||
pass
|
||||
else:
|
||||
value = result[1]
|
||||
if isinstance(value, BaseException):
|
||||
raise value
|
||||
else:
|
||||
yield result
|
||||
|
||||
def parse_next(self):
|
||||
try:
|
||||
parsed, result = self.results.next()
|
||||
@@ -1669,27 +1475,27 @@ class CookerParser(object):
|
||||
self.shutdown()
|
||||
return False
|
||||
except ParsingFailure as exc:
|
||||
logger.error('Unable to parse %s: %s' %
|
||||
self.shutdown(clean=False)
|
||||
bb.fatal('Unable to parse %s: %s' %
|
||||
(exc.recipe, bb.exceptions.to_string(exc.realexception)))
|
||||
self.shutdown(clean=False)
|
||||
except (bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
|
||||
logger.error(str(exc))
|
||||
self.shutdown(clean=False)
|
||||
bb.fatal(str(exc))
|
||||
except SyntaxError as exc:
|
||||
logger.error('Unable to parse %s', exc.recipe)
|
||||
self.shutdown(clean=False)
|
||||
sys.exit(1)
|
||||
except Exception as exc:
|
||||
etype, value, tb = sys.exc_info()
|
||||
logger.error('Unable to parse %s', value.recipe,
|
||||
exc_info=(etype, value, exc.traceback))
|
||||
self.shutdown(clean=False)
|
||||
sys.exit(1)
|
||||
|
||||
self.current += 1
|
||||
self.virtuals += len(result)
|
||||
if parsed:
|
||||
self.parsed += 1
|
||||
if self.parsed % self.progress_chunk == 0:
|
||||
bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
|
||||
bb.event.fire(bb.event.ParseProgress(self.parsed),
|
||||
self.cfgdata)
|
||||
else:
|
||||
self.cached += 1
|
||||
|
||||
@@ -266,7 +266,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
|
||||
seen |= deps
|
||||
newdeps = set()
|
||||
for dep in deps:
|
||||
if d.getVarFlag(dep, "func"):
|
||||
if bb.data.getVarFlag(dep, "func", d):
|
||||
emit_var(dep, o, d, False) and o.write('\n')
|
||||
newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
|
||||
newdeps -= seen
|
||||
@@ -319,7 +319,7 @@ def generate_dependencies(d):
|
||||
deps = {}
|
||||
values = {}
|
||||
|
||||
tasklist = d.getVar('__BBTASKS') or []
|
||||
tasklist = bb.data.getVar('__BBTASKS', d) or []
|
||||
for task in tasklist:
|
||||
deps[task], values[task] = build_dependencies(task, keys, shelldeps, vardepvals, d)
|
||||
newdeps = deps[task]
|
||||
@@ -333,7 +333,7 @@ def generate_dependencies(d):
|
||||
deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, vardepvals, d)
|
||||
newdeps |= deps[dep]
|
||||
newdeps -= seen
|
||||
#print "For %s: %s" % (task, str(deps[task]))
|
||||
#print "For %s: %s" % (task, str(taskdeps[task]))
|
||||
return tasklist, deps, values
|
||||
|
||||
def inherits_class(klass, d):
|
||||
|
||||
@@ -31,7 +31,6 @@ BitBake build tools.
|
||||
import copy, re
|
||||
from collections import MutableMapping
|
||||
import logging
|
||||
import hashlib
|
||||
import bb, bb.codeparser
|
||||
from bb import utils
|
||||
from bb.COW import COWDictBase
|
||||
@@ -147,7 +146,7 @@ class DataSmart(MutableMapping):
|
||||
|
||||
return varparse
|
||||
|
||||
def expand(self, s, varname = None):
|
||||
def expand(self, s, varname):
|
||||
return self.expandWithRefs(s, varname).value
|
||||
|
||||
|
||||
@@ -305,14 +304,6 @@ class DataSmart(MutableMapping):
|
||||
|
||||
self.delVar(key)
|
||||
|
||||
def appendVar(self, key, value):
|
||||
value = (self.getVar(key, False) or "") + value
|
||||
self.setVar(key, value)
|
||||
|
||||
def prependVar(self, key, value):
|
||||
value = value + (self.getVar(key, False) or "")
|
||||
self.setVar(key, value)
|
||||
|
||||
def delVar(self, var):
|
||||
self.expand_cache = {}
|
||||
self.dict[var] = {}
|
||||
@@ -348,14 +339,6 @@ class DataSmart(MutableMapping):
|
||||
if var in self.dict and flag in self.dict[var]:
|
||||
del self.dict[var][flag]
|
||||
|
||||
def appendVarFlag(self, key, flag, value):
|
||||
value = (self.getVarFlag(key, flag, False) or "") + value
|
||||
self.setVarFlag(key, flag, value)
|
||||
|
||||
def prependVarFlag(self, key, flag, value):
|
||||
value = value + (self.getVarFlag(key, flag, False) or "")
|
||||
self.setVarFlag(key, flag, value)
|
||||
|
||||
def setVarFlags(self, var, flags):
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
@@ -460,15 +443,3 @@ class DataSmart(MutableMapping):
|
||||
|
||||
def __delitem__(self, var):
|
||||
self.delVar(var)
|
||||
|
||||
def get_hash(self):
|
||||
data = ""
|
||||
config_whitelist = set((self.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
|
||||
keys = set(key for key in iter(self) if not key.startswith("__"))
|
||||
for key in keys:
|
||||
if key in config_whitelist:
|
||||
continue
|
||||
value = self.getVar(key, False) or ""
|
||||
data = data + key + ': ' + str(value) + '\n'
|
||||
|
||||
return hashlib.md5(data).hexdigest()
|
||||
|
||||
@@ -204,27 +204,6 @@ def getName(e):
|
||||
else:
|
||||
return e.__name__
|
||||
|
||||
class OperationStarted(Event):
|
||||
"""An operation has begun"""
|
||||
def __init__(self, msg = "Operation Started"):
|
||||
Event.__init__(self)
|
||||
self.msg = msg
|
||||
|
||||
class OperationCompleted(Event):
|
||||
"""An operation has completed"""
|
||||
def __init__(self, total, msg = "Operation Completed"):
|
||||
Event.__init__(self)
|
||||
self.total = total
|
||||
self.msg = msg
|
||||
|
||||
class OperationProgress(Event):
|
||||
"""An operation is in progress"""
|
||||
def __init__(self, current, total, msg = "Operation in Progress"):
|
||||
Event.__init__(self)
|
||||
self.current = current
|
||||
self.total = total
|
||||
self.msg = msg + ": %s/%s" % (current, total);
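These base classes give long-running operations a uniform msg/current/total shape, so a front end can render any of them generically; a hypothetical handler (illustrative only):

    def show_operation(event):
        # .msg is always present; progress events also carry current/total
        if isinstance(event, OperationProgress):
            pct = 100 * event.current / max(event.total, 1)
            print("%s (%d%%)" % (event.msg, pct))
        else:
            print(event.msg)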
|
||||
|
||||
class ConfigParsed(Event):
|
||||
"""Configuration Parsing Complete"""
|
||||
|
||||
@@ -297,20 +276,14 @@ class BuildBase(Event):
|
||||
|
||||
|
||||
|
||||
class BuildStarted(BuildBase, OperationStarted):
|
||||
class BuildStarted(BuildBase):
|
||||
"""bbmake build run started"""
|
||||
def __init__(self, n, p, failures = 0):
|
||||
OperationStarted.__init__(self, "Building Started")
|
||||
BuildBase.__init__(self, n, p, failures)
|
||||
|
||||
class BuildCompleted(BuildBase, OperationCompleted):
|
||||
|
||||
class BuildCompleted(BuildBase):
|
||||
"""bbmake build run completed"""
|
||||
def __init__(self, total, n, p, failures = 0):
|
||||
if not failures:
|
||||
OperationCompleted.__init__(self, total, "Building Succeeded")
|
||||
else:
|
||||
OperationCompleted.__init__(self, total, "Building Failed")
|
||||
BuildBase.__init__(self, n, p, failures)
|
||||
|
||||
|
||||
|
||||
|
||||
class NoProvider(Event):
|
||||
@@ -356,16 +329,17 @@ class MultipleProviders(Event):
|
||||
"""
|
||||
return self._candidates
|
||||
|
||||
class ParseStarted(OperationStarted):
|
||||
class ParseStarted(Event):
|
||||
"""Recipe parsing for the runqueue has begun"""
|
||||
def __init__(self, total):
|
||||
OperationStarted.__init__(self, "Recipe parsing Started")
|
||||
Event.__init__(self)
|
||||
self.total = total
|
||||
|
||||
class ParseCompleted(OperationCompleted):
|
||||
class ParseCompleted(Event):
|
||||
"""Recipe parsing for the runqueue has completed"""
|
||||
|
||||
def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
|
||||
OperationCompleted.__init__(self, total, "Recipe parsing Completed")
|
||||
Event.__init__(self)
|
||||
self.cached = cached
|
||||
self.parsed = parsed
|
||||
self.skipped = skipped
|
||||
@@ -373,44 +347,33 @@ class ParseCompleted(OperationCompleted):
|
||||
self.masked = masked
|
||||
self.errors = errors
|
||||
self.sofar = cached + parsed
|
||||
|
||||
class ParseProgress(OperationProgress):
|
||||
"""Recipe parsing progress"""
|
||||
def __init__(self, current, total):
|
||||
OperationProgress.__init__(self, current, total, "Recipe parsing")
|
||||
|
||||
|
||||
class CacheLoadStarted(OperationStarted):
|
||||
"""Loading of the dependency cache has begun"""
|
||||
def __init__(self, total):
|
||||
OperationStarted.__init__(self, "Loading cache Started")
|
||||
self.total = total
|
||||
|
||||
class CacheLoadProgress(OperationProgress):
|
||||
"""Cache loading progress"""
|
||||
def __init__(self, current, total):
|
||||
OperationProgress.__init__(self, current, total, "Loading cache")
|
||||
class ParseProgress(Event):
|
||||
"""Recipe parsing progress"""
|
||||
|
||||
class CacheLoadCompleted(OperationCompleted):
|
||||
def __init__(self, current):
|
||||
self.current = current
|
||||
|
||||
class CacheLoadStarted(Event):
|
||||
"""Loading of the dependency cache has begun"""
|
||||
def __init__(self, total):
|
||||
Event.__init__(self)
|
||||
self.total = total
|
||||
|
||||
class CacheLoadProgress(Event):
|
||||
"""Cache loading progress"""
|
||||
def __init__(self, current):
|
||||
Event.__init__(self)
|
||||
self.current = current
|
||||
|
||||
class CacheLoadCompleted(Event):
|
||||
"""Cache loading is complete"""
|
||||
def __init__(self, total, num_entries):
|
||||
OperationCompleted.__init__(self, total, "Loading cache Completed")
|
||||
Event.__init__(self)
|
||||
self.total = total
|
||||
self.num_entries = num_entries
|
||||
|
||||
class TreeDataPreparationStarted(OperationStarted):
|
||||
"""Tree data preparation started"""
|
||||
def __init__(self):
|
||||
OperationStarted.__init__(self, "Preparing tree data Started")
|
||||
|
||||
class TreeDataPreparationProgress(OperationProgress):
|
||||
"""Tree data preparation is in progress"""
|
||||
def __init__(self, current, total):
|
||||
OperationProgress.__init__(self, current, total, "Preparing tree data")
|
||||
|
||||
class TreeDataPreparationCompleted(OperationCompleted):
|
||||
"""Tree data preparation completed"""
|
||||
def __init__(self, total):
|
||||
OperationCompleted.__init__(self, total, "Preparing tree data Completed")
|
||||
|
||||
class DepTreeGenerated(Event):
|
||||
"""
|
||||
@@ -439,14 +402,6 @@ class FilesMatchingFound(Event):
|
||||
self._pattern = pattern
|
||||
self._matches = matches
|
||||
|
||||
class CoreBaseFilesFound(Event):
|
||||
"""
|
||||
Event when a list of appropriate config files has been generated
|
||||
"""
|
||||
def __init__(self, paths):
|
||||
Event.__init__(self)
|
||||
self._paths = paths
|
||||
|
||||
class ConfigFilesFound(Event):
|
||||
"""
|
||||
Event when a list of appropriate config files has been generated
|
||||
@@ -504,16 +459,3 @@ class LogHandler(logging.Handler):
|
||||
def filter(self, record):
|
||||
record.taskpid = worker_pid
|
||||
return True
|
||||
|
||||
class RequestPackageInfo(Event):
|
||||
"""
|
||||
Event to request package information
|
||||
"""
|
||||
|
||||
class PackageInfo(Event):
|
||||
"""
|
||||
Package information for GUI
|
||||
"""
|
||||
def __init__(self, pkginfolist):
|
||||
Event.__init__(self)
|
||||
self._pkginfolist = pkginfolist
|
||||
|
||||
@@ -154,7 +154,7 @@ def fetcher_init(d):
|
||||
Calls before this must not hit the cache.
|
||||
"""
|
||||
# When to drop SCM head revisions controlled by user policy
|
||||
srcrev_policy = d.getVar('BB_SRCREV_POLICY', 1) or "clear"
|
||||
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
|
||||
if srcrev_policy == "cache":
|
||||
logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
|
||||
elif srcrev_policy == "clear":
|
||||
@@ -200,7 +200,7 @@ def fetcher_compare_revisions(d):
|
||||
def init(urls, d, setup = True):
|
||||
urldata = {}
|
||||
|
||||
fn = d.getVar('FILE', 1)
|
||||
fn = bb.data.getVar('FILE', d, 1)
|
||||
if fn in urldata_cache:
|
||||
urldata = urldata_cache[fn]
|
||||
|
||||
@@ -243,7 +243,7 @@ def verify_checksum(u, ud, d):
|
||||
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.md5_name, md5data,
|
||||
ud.sha256_name, sha256data)
|
||||
if d.getVar("BB_STRICT_CHECKSUM", True) == "1":
|
||||
if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
|
||||
raise FetchError("No checksum specified for %s." % u)
|
||||
return
|
||||
|
||||
@@ -276,7 +276,7 @@ def go(d, urls = None):
|
||||
|
||||
if m.try_premirror(u, ud, d):
|
||||
# First try fetching uri, u, from PREMIRRORS
|
||||
mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
|
||||
localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
|
||||
elif os.path.exists(ud.localfile):
|
||||
localpath = ud.localfile
|
||||
@@ -291,7 +291,7 @@ def go(d, urls = None):
|
||||
# Remove any incomplete file
|
||||
bb.utils.remove(ud.localpath)
|
||||
# Finally, try fetching uri, u, from MIRRORS
|
||||
mirrors = mirror_from_string(d.getVar('MIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
|
||||
localpath = try_mirrors (d, u, mirrors)
|
||||
if not localpath or not os.path.exists(localpath):
|
||||
raise FetchError("Unable to fetch URL %s from any source." % u)
|
||||
@@ -327,7 +327,7 @@ def checkstatus(d, urls = None):
|
||||
m = ud.method
|
||||
logger.debug(1, "Testing URL %s", u)
|
||||
# First try checking uri, u, from PREMIRRORS
|
||||
mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
|
||||
ret = try_mirrors(d, u, mirrors, True)
|
||||
if not ret:
|
||||
# Next try checking from the original uri, u
|
||||
@@ -335,7 +335,7 @@ def checkstatus(d, urls = None):
|
||||
ret = m.checkstatus(u, ud, d)
|
||||
except:
|
||||
# Finally, try checking uri, u, from MIRRORS
|
||||
mirrors = mirror_from_string(d.getVar('MIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
|
||||
ret = try_mirrors (d, u, mirrors, True)
|
||||
|
||||
if not ret:
|
||||
@@ -383,7 +383,7 @@ def get_srcrev(d):
|
||||
scms = []
|
||||
|
||||
# Only call setup_localpath on URIs which supports_srcrev()
|
||||
urldata = init(d.getVar('SRC_URI', 1).split(), d, False)
|
||||
urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
|
||||
for u in urldata:
|
||||
ud = urldata[u]
|
||||
if ud.method.supports_srcrev():
|
||||
@@ -395,8 +395,8 @@ def get_srcrev(d):
|
||||
logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
|
||||
raise ParameterError
|
||||
|
||||
if d.getVar('BB_SRCREV_POLICY', True) != "cache":
|
||||
d.setVar('__BB_DONT_CACHE', '1')
|
||||
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
|
||||
bb.data.setVar('__BB_DONT_CACHE', '1', d)
|
||||
|
||||
if len(scms) == 1:
|
||||
return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
|
||||
@@ -404,7 +404,7 @@ def get_srcrev(d):
|
||||
#
|
||||
# Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
|
||||
#
|
||||
format = d.getVar('SRCREV_FORMAT', 1)
|
||||
format = bb.data.getVar('SRCREV_FORMAT', d, 1)
|
||||
if not format:
|
||||
logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
|
||||
raise ParameterError
|
||||
@@ -539,8 +539,8 @@ class FetchData(object):
|
||||
else:
|
||||
self.md5_name = "md5sum"
|
||||
self.sha256_name = "sha256sum"
|
||||
self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
|
||||
self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
|
||||
self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
|
||||
self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
|
||||
|
||||
for m in methods:
|
||||
if m.supports(url, self, d):
|
||||
@@ -555,7 +555,7 @@ class FetchData(object):
|
||||
self.localpath = self.parm["localpath"]
|
||||
self.basename = os.path.basename(self.localpath)
|
||||
else:
|
||||
premirrors = d.getVar('PREMIRRORS', True)
|
||||
premirrors = bb.data.getVar('PREMIRRORS', d, True)
|
||||
local = ""
|
||||
if premirrors and self.url:
|
||||
aurl = self.url.split(";")[0]
|
||||
@@ -775,7 +775,7 @@ class Fetch(object):
|
||||
|
||||
latest_rev = self._build_revision(url, ud, d)
|
||||
last_rev = localcounts.get(key + '_rev')
|
||||
uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
|
||||
uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
|
||||
count = None
|
||||
if uselocalcount:
|
||||
count = Fetch.localcount_internal_helper(ud, d)
|
||||
@@ -803,7 +803,7 @@ class Fetch(object):
|
||||
|
||||
def generate_revision_key(self, url, ud, d):
|
||||
key = self._revision_key(url, ud, d)
|
||||
return "%s-%s" % (key, d.getVar("PN", True) or "")
|
||||
return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
|
||||
|
||||
from . import cvs
|
||||
from . import git
|
||||
|
||||
@@ -34,7 +34,7 @@ class Git(Fetch):
|
||||
#
|
||||
# Only enable _sortable revision if the key is set
|
||||
#
|
||||
if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True):
|
||||
if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
|
||||
self._sortable_buildindex = self._sortable_buildindex_disabled
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
@@ -220,7 +220,7 @@ class Git(Fetch):
|
||||
|
||||
def generate_revision_key(self, url, ud, d, branch=False):
|
||||
key = self._revision_key(url, ud, d, branch)
|
||||
return "%s-%s" % (key, d.getVar("PN", True) or "")
|
||||
return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
|
||||
|
||||
def _latest_revision(self, url, ud, d):
|
||||
"""
|
||||
@@ -276,7 +276,7 @@ class Git(Fetch):
|
||||
del localcounts[oldkey + '_rev']
|
||||
localcounts[key + '_rev'] = last_rev
|
||||
|
||||
uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
|
||||
uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
|
||||
count = None
|
||||
if uselocalcount:
|
||||
count = Fetch.localcount_internal_helper(ud, d)
|
||||
|
||||
@@ -28,7 +28,7 @@ from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
import os, re
|
||||
import logging
|
||||
import bb.persist_data, bb.utils
|
||||
import bb.data, bb.persist_data, bb.utils
|
||||
from bb import data
|
||||
|
||||
__version__ = "2"
|
||||
@@ -55,10 +55,7 @@ class MalformedUrl(BBFetchException):
class FetchError(BBFetchException):
"""General fetcher exception when something happens incorrectly"""
def __init__(self, message, url = None):
if url:
msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
else:
msg = "Fetcher failure: %s" % message
msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
self.url = url
BBFetchException.__init__(self, msg)
self.args = (message, url)
@@ -96,6 +93,28 @@ class ParameterError(BBFetchException):
BBFetchException.__init__(self, msg)
self.args = (message, url)

class MD5SumError(BBFetchException):
"""Exception raised when a MD5 checksum of a file does not match for a downloaded file"""
def __init__(self, path, wanted, got, url):
msg = "File: '%s' has md5 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
self.url = url
self.path = path
self.wanted = wanted
self.got = got
BBFetchException.__init__(self, msg)
self.args = (path, wanted, got, url)

class SHA256SumError(MD5SumError):
"""Exception raised when a SHA256 checksum of a file does not match for a downloaded file"""
def __init__(self, path, wanted, got, url):
msg = "File: '%s' has sha256 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
self.url = url
self.path = path
self.wanted = wanted
self.got = got
BBFetchException.__init__(self, msg)
self.args = (path, wanted, got, url)

class NetworkAccess(BBFetchException):
"""Exception raised when network access is disabled but it is required."""
def __init__(self, url, cmd):
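The @@ -96,6 +93,28 hunk above adds MD5SumError and SHA256SumError, which carry the file path, the expected and actual digests, and the URL both in the message and in args. A hypothetical usage sketch, assuming the exception class shown above is importable; the digests, path and URL are made up for illustration:

    try:
        raise MD5SumError("/downloads/foo.tar.gz",
                          "d41d8cd98f00b204e9800998ecf8427e",   # wanted, as given in the recipe
                          "0cc175b9c0f1b6a831c399e269772661",   # got, as computed from the file
                          "http://example.com/foo.tar.gz")
    except MD5SumError as exc:
        print(exc)                            # human readable mismatch message
        path, wanted, got, url = exc.args     # structured details stored by __init__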
@@ -214,7 +233,7 @@ def fetcher_init(d):
|
||||
Calls before this must not hit the cache.
|
||||
"""
|
||||
# When to drop SCM head revisions controlled by user policy
|
||||
srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
|
||||
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
|
||||
if srcrev_policy == "cache":
|
||||
logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
|
||||
elif srcrev_policy == "clear":
|
||||
@@ -259,8 +278,8 @@ def verify_checksum(u, ud, d):
verify the MD5 and SHA256 checksum for downloaded src

return value:
- True: a checksum matched
- False: neither checksum matched
- True: checksum matched
- False: checksum unmatched

if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
@@ -273,46 +292,20 @@ def verify_checksum(u, ud, d):
md5data = bb.utils.md5_file(ud.localpath)
sha256data = bb.utils.sha256_file(ud.localpath)

# If strict checking enabled and neither sum defined, raise error
strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
if (strict and ud.md5_expected == None and ud.sha256_expected == None):
raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n'
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
(ud.localpath, ud.md5_name, md5data,
ud.sha256_name, sha256data), u)

# Log missing sums so user can more easily add them
if ud.md5_expected == None:
logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
'SRC_URI[%s] = "%s"',
ud.localpath, ud.md5_name, md5data)

if ud.sha256_expected == None:
logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
'SRC_URI[%s] = "%s"',
ud.localpath, ud.sha256_name, sha256data)

md5mismatch = False
sha256mismatch = False
if (ud.md5_expected == None or ud.sha256_expected == None):
logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
ud.localpath, ud.md5_name, md5data,
ud.sha256_name, sha256data)
if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
raise FetchError("No checksum specified for %s." % u, u)
return

if ud.md5_expected != md5data:
md5mismatch = True
raise MD5SumError(ud.localpath, ud.md5_expected, md5data, u)

if ud.sha256_expected != sha256data:
sha256mismatch = True

# We want to alert the user if a checksum is defined in the recipe but
# it does not match.
msg = ""
if md5mismatch and ud.md5_expected:
msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)

if sha256mismatch and ud.sha256_expected:
msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)

if len(msg):
raise FetchError('Checksum mismatch!%s' % msg, u)

raise SHA256SumError(ud.localpath, ud.sha256_expected, sha256data, u)
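The verify_checksum hunks above compute both digests with bb.utils.md5_file and bb.utils.sha256_file, warn when a SRC_URI checksum is missing from the recipe, and fail when a supplied checksum does not match. A standalone sketch of that flow, using hashlib in place of the bb.utils helpers; it is simplified, and the exception types and messages differ from the code shown above:

    import hashlib

    def file_digest(path, algo):
        h = hashlib.new(algo)
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(65536), b""):
                h.update(chunk)
        return h.hexdigest()

    def verify(path, md5_expected=None, sha256_expected=None, strict=False):
        md5data = file_digest(path, "md5")
        sha256data = file_digest(path, "sha256")
        if md5_expected is None and sha256_expected is None:
            if strict:
                raise ValueError("No checksum specified for %s" % path)
            return  # the real code only warns here
        if md5_expected is not None and md5_expected != md5data:
            raise ValueError("md5 mismatch for %s: got %s, wanted %s" % (path, md5data, md5_expected))
        if sha256_expected is not None and sha256_expected != sha256data:
            raise ValueError("sha256 mismatch for %s: got %s, wanted %s" % (path, sha256data, sha256_expected))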
def update_stamp(u, ud, d):
|
||||
"""
|
||||
@@ -339,8 +332,8 @@ def subprocess_setup():
|
||||
|
||||
def get_autorev(d):
|
||||
# only not cache src rev in autorev case
|
||||
if d.getVar('BB_SRCREV_POLICY', True) != "cache":
|
||||
d.setVar('__BB_DONT_CACHE', '1')
|
||||
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
|
||||
bb.data.setVar('__BB_DONT_CACHE', '1', d)
|
||||
return "AUTOINC"
|
||||
|
||||
def get_srcrev(d):
|
||||
@@ -353,7 +346,7 @@ def get_srcrev(d):
|
||||
"""
|
||||
|
||||
scms = []
|
||||
fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
|
||||
fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
|
||||
urldata = fetcher.ud
|
||||
for u in urldata:
|
||||
if urldata[u].method.supports_srcrev():
|
||||
@@ -368,7 +361,7 @@ def get_srcrev(d):
|
||||
#
|
||||
# Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
|
||||
#
|
||||
format = d.getVar('SRCREV_FORMAT', True)
|
||||
format = bb.data.getVar('SRCREV_FORMAT', d, True)
|
||||
if not format:
|
||||
raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
|
||||
|
||||
@@ -403,7 +396,7 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
|
||||
'GIT_PROXY_IGNORE', 'SOCKS5_USER', 'SOCKS5_PASSWD']
|
||||
|
||||
for var in exportvars:
|
||||
val = d.getVar(var, True)
|
||||
val = bb.data.getVar(var, d, True)
|
||||
if val:
|
||||
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
|
||||
|
||||
@@ -422,11 +415,8 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
output += line

status = stdout_handle.close() or 0
signal = os.WTERMSIG(status)
if os.WIFEXITED(status):
exitstatus = os.WEXITSTATUS(status)
else:
exitstatus = 0
signal = status >> 8
exitstatus = status & 0xff

if (signal or status != 0):
for f in cleanup:
@@ -437,8 +427,8 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):

if signal:
raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
elif exitstatus:
raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))
elif status != 0:
raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))

return output
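The two runfetchcmd hunks above differ in how they decode the wait()-style status word returned when stdout_handle.close() is called on the command's output pipe: one side uses the os.WIF*/os.W* helpers, the other splits the word by hand. A small standalone sketch of the helper-based decoding; os.system is used only because it also returns a wait()-style status on Unix, which keeps the example self-contained:

    import os

    status = os.system("true")                 # wait()-style encoding on Unix
    if os.WIFEXITED(status):
        exitstatus = os.WEXITSTATUS(status)    # normal termination: the command's exit code
        signum = 0
    elif os.WIFSIGNALED(status):
        signum = os.WTERMSIG(status)           # the command was killed by this signal
        exitstatus = 0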
@@ -446,7 +436,7 @@ def check_network_access(d, info = "", url = None):
|
||||
"""
|
||||
log remote network access, and error if BB_NO_NETWORK is set
|
||||
"""
|
||||
if d.getVar("BB_NO_NETWORK", True) == "1":
|
||||
if bb.data.getVar("BB_NO_NETWORK", d, True) == "1":
|
||||
raise NetworkAccess(url, info)
|
||||
else:
|
||||
logger.debug(1, "Fetcher accessed the network with the command %s" % info)
|
||||
@@ -479,7 +469,7 @@ def try_mirrors(d, origud, mirrors, check = False):
|
||||
return found
|
||||
continue
|
||||
|
||||
if not os.path.exists(ud.donestamp) or ud.method.need_update(newuri, ud, ld):
|
||||
if ud.method.need_update(newuri, ud, ld):
|
||||
ud.method.download(newuri, ud, ld)
|
||||
if hasattr(ud.method,"build_mirror_data"):
|
||||
ud.method.build_mirror_data(newuri, ud, ld)
|
||||
@@ -532,15 +522,15 @@ def srcrev_internal_helper(ud, d, name):
return ud.parm['tag']

rev = None
pn = d.getVar("PN", True)
pn = bb.data.getVar("PN", d, True)
if name != '':
rev = d.getVar("SRCREV_%s_pn-%s" % (name, pn), True)
rev = bb.data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
if not rev:
rev = d.getVar("SRCREV_%s" % name, True)
rev = bb.data.getVar("SRCREV_%s" % name, d, True)
if not rev:
rev = d.getVar("SRCREV_pn-%s" % pn, True)
rev = bb.data.getVar("SRCREV_pn-%s" % pn, d, True)
if not rev:
rev = d.getVar("SRCREV", True)
rev = bb.data.getVar("SRCREV", d, True)
if rev == "INVALID":
raise FetchError("Please set SRCREV to a valid value", ud.url)
if rev == "AUTOINC":
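The srcrev_internal_helper hunk above resolves the revision from progressively less specific variables. A simplified sketch of that precedence, written against the newer getVar style; pick_srcrev is a hypothetical name, and the real helper also handles the tag parameter plus the INVALID and AUTOINC values as shown above:

    def pick_srcrev(d, name, pn):
        candidates = ["SRCREV_pn-%s" % pn, "SRCREV"]
        if name:
            candidates = ["SRCREV_%s_pn-%s" % (name, pn), "SRCREV_%s" % name] + candidates
        for var in candidates:
            rev = d.getVar(var, True)
            if rev:
                return rev
        return None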
@@ -575,14 +565,8 @@ class FetchData(object):
|
||||
else:
|
||||
self.md5_name = "md5sum"
|
||||
self.sha256_name = "sha256sum"
|
||||
if self.md5_name in self.parm:
|
||||
self.md5_expected = self.parm[self.md5_name]
|
||||
else:
|
||||
self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
|
||||
if self.sha256_name in self.parm:
|
||||
self.sha256_expected = self.parm[self.sha256_name]
|
||||
else:
|
||||
self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
|
||||
self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
|
||||
self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
|
||||
|
||||
self.names = self.parm.get("name",'default').split(',')
|
||||
|
||||
@@ -606,7 +590,7 @@ class FetchData(object):
|
||||
self.localpath = self.method.localpath(self.url, self, d)
|
||||
|
||||
# Note: These files should always be in DL_DIR whereas localpath may not be.
|
||||
basepath = d.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename))
|
||||
basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename), d)
|
||||
self.donestamp = basepath + '.done'
|
||||
self.lockfile = basepath + '.lock'
|
||||
|
||||
@@ -632,12 +616,12 @@ class FetchData(object):
|
||||
if "srcdate" in self.parm:
|
||||
return self.parm['srcdate']
|
||||
|
||||
pn = d.getVar("PN", True)
|
||||
pn = bb.data.getVar("PN", d, True)
|
||||
|
||||
if pn:
|
||||
return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
|
||||
return bb.data.getVar("SRCDATE_%s" % pn, d, True) or bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True)
|
||||
|
||||
return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
|
||||
return bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True)
|
||||
|
||||
class FetchMethod(object):
|
||||
"""Base class for 'fetch'ing data"""
|
||||
@@ -709,7 +693,7 @@ class FetchMethod(object):
|
||||
|
||||
dots = file.split(".")
|
||||
if dots[-1] in ['gz', 'bz2', 'Z']:
|
||||
efile = os.path.join(data.getVar('WORKDIR', True),os.path.basename('.'.join(dots[0:-1])))
|
||||
efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1])))
|
||||
else:
|
||||
efile = file
|
||||
cmd = None
|
||||
@@ -753,7 +737,7 @@ class FetchMethod(object):
|
||||
dest = os.path.join(rootdir, os.path.basename(file))
|
||||
if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
|
||||
if os.path.isdir(file):
|
||||
filesdir = os.path.realpath(data.getVar("FILESDIR", True))
|
||||
filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
|
||||
destdir = "."
|
||||
if file[0:len(filesdir)] == filesdir:
|
||||
destdir = file[len(filesdir):file.rfind('/')]
|
||||
@@ -785,7 +769,7 @@ class FetchMethod(object):
|
||||
bb.utils.mkdirhier(newdir)
|
||||
os.chdir(newdir)
|
||||
|
||||
cmd = "PATH=\"%s\" %s" % (data.getVar('PATH', True), cmd)
|
||||
cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
|
||||
bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
|
||||
ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
|
||||
|
||||
@@ -830,10 +814,10 @@ class FetchMethod(object):
|
||||
|
||||
localcount = None
|
||||
if name != '':
|
||||
pn = d.getVar("PN", True)
|
||||
localcount = d.getVar("LOCALCOUNT_" + name, True)
|
||||
pn = bb.data.getVar("PN", d, True)
|
||||
localcount = bb.data.getVar("LOCALCOUNT_" + name, d, True)
|
||||
if not localcount:
|
||||
localcount = d.getVar("LOCALCOUNT", True)
|
||||
localcount = bb.data.getVar("LOCALCOUNT", d, True)
|
||||
return localcount
|
||||
|
||||
localcount_internal_helper = staticmethod(localcount_internal_helper)
|
||||
@@ -865,7 +849,7 @@ class FetchMethod(object):
|
||||
|
||||
latest_rev = self._build_revision(url, ud, d, name)
|
||||
last_rev = localcounts.get(key + '_rev')
|
||||
uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
|
||||
uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
|
||||
count = None
|
||||
if uselocalcount:
|
||||
count = FetchMethod.localcount_internal_helper(ud, d, name)
|
||||
@@ -893,7 +877,7 @@ class FetchMethod(object):
|
||||
|
||||
def generate_revision_key(self, url, ud, d, name):
|
||||
key = self._revision_key(url, ud, d, name)
|
||||
return "%s-%s" % (key, d.getVar("PN", True) or "")
|
||||
return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
|
||||
|
||||
class Fetch(object):
|
||||
def __init__(self, urls, d, cache = True):
|
||||
@@ -903,7 +887,7 @@ class Fetch(object):
|
||||
self.d = d
|
||||
self.ud = {}
|
||||
|
||||
fn = d.getVar('FILE', True)
|
||||
fn = bb.data.getVar('FILE', d, True)
|
||||
if cache and fn in urldata_cache:
|
||||
self.ud = urldata_cache[fn]
|
||||
|
||||
@@ -919,7 +903,7 @@ class Fetch(object):
|
||||
self.ud[url] = FetchData(url, self.d)
|
||||
|
||||
self.ud[url].setup_localpath(self.d)
|
||||
return self.d.expand(self.ud[url].localpath)
|
||||
return bb.data.expand(self.ud[url].localpath, self.d)
|
||||
|
||||
def localpaths(self):
|
||||
"""
|
||||
@@ -941,8 +925,8 @@ class Fetch(object):
|
||||
if len(urls) == 0:
|
||||
urls = self.urls
|
||||
|
||||
network = self.d.getVar("BB_NO_NETWORK", True)
|
||||
premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
|
||||
network = bb.data.getVar("BB_NO_NETWORK", self.d, True)
|
||||
premirroronly = (bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) == "1")
|
||||
|
||||
for u in urls:
|
||||
ud = self.ud[u]
|
||||
@@ -953,20 +937,19 @@ class Fetch(object):
|
||||
lf = bb.utils.lockfile(ud.lockfile)
|
||||
|
||||
try:
|
||||
self.d.setVar("BB_NO_NETWORK", network)
|
||||
bb.data.setVar("BB_NO_NETWORK", network, self.d)
|
||||
|
||||
if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d):
|
||||
if not m.need_update(u, ud, self.d):
|
||||
localpath = ud.localpath
|
||||
elif m.try_premirror(u, ud, self.d):
|
||||
logger.debug(1, "Trying PREMIRRORS")
|
||||
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
|
||||
localpath = try_mirrors(self.d, ud, mirrors, False)
|
||||
|
||||
if premirroronly:
|
||||
self.d.setVar("BB_NO_NETWORK", "1")
|
||||
bb.data.setVar("BB_NO_NETWORK", "1", self.d)
|
||||
|
||||
firsterr = None
|
||||
if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)):
|
||||
if not localpath and m.need_update(u, ud, self.d):
|
||||
try:
|
||||
logger.debug(1, "Trying Upstream")
|
||||
m.download(u, ud, self.d)
|
||||
@@ -981,20 +964,16 @@ class Fetch(object):
|
||||
raise
|
||||
|
||||
except BBFetchException as e:
|
||||
logger.warn('Failed to fetch URL %s' % u)
|
||||
logger.debug(1, str(e))
|
||||
firsterr = e
|
||||
logger.warn(str(e))
|
||||
# Remove any incomplete fetch
|
||||
if os.path.isfile(ud.localpath):
|
||||
bb.utils.remove(ud.localpath)
|
||||
logger.debug(1, "Trying MIRRORS")
|
||||
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
|
||||
localpath = try_mirrors (self.d, ud, mirrors)
|
||||
|
||||
if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
|
||||
if firsterr:
|
||||
logger.error(str(firsterr))
|
||||
raise FetchError("Unable to fetch URL from any source.", u)
|
||||
raise FetchError("Unable to fetch URL %s from any source." % u, u)
|
||||
|
||||
update_stamp(u, ud, self.d)
|
||||
|
||||
@@ -1015,7 +994,7 @@ class Fetch(object):
|
||||
m = ud.method
|
||||
logger.debug(1, "Testing URL %s", u)
|
||||
# First try checking uri, u, from PREMIRRORS
|
||||
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
|
||||
ret = try_mirrors(self.d, ud, mirrors, True)
|
||||
if not ret:
|
||||
# Next try checking from the original uri, u
|
||||
@@ -1023,7 +1002,7 @@ class Fetch(object):
|
||||
ret = m.checkstatus(u, ud, self.d)
|
||||
except:
|
||||
# Finally, try checking uri, u, from MIRRORS
|
||||
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
|
||||
ret = try_mirrors (self.d, ud, mirrors, True)
|
||||
|
||||
if not ret:
|
||||
@@ -1041,7 +1020,7 @@ class Fetch(object):
|
||||
ud = self.ud[u]
|
||||
ud.setup_localpath(self.d)
|
||||
|
||||
if self.d.expand(self.localpath) is None:
|
||||
if bb.data.expand(self.localpath, self.d) is None:
|
||||
continue
|
||||
|
||||
if ud.lockfile:
|
||||
|
||||
@@ -38,12 +38,6 @@ Supported SRC_URI options are:
|
||||
who has its own routine to checkout code.
|
||||
The default is "0", set nocheckout=1 if needed.
|
||||
|
||||
- bareclone
|
||||
Create a bare clone of the source code and don't checkout the source code
|
||||
when unpacking. Set this option for the recipe who has its own routine to
|
||||
checkout code and tracking branch requirements.
|
||||
The default is "0", set bareclone=1 if needed.
|
||||
|
||||
"""
|
||||
|
||||
#Copyright (C) 2005 Richard Purdie
|
||||
@@ -74,7 +68,7 @@ class Git(FetchMethod):
|
||||
#
|
||||
# Only enable _sortable revision if the key is set
|
||||
#
|
||||
if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True):
|
||||
if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
|
||||
self._sortable_buildindex = self._sortable_buildindex_disabled
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
@@ -101,11 +95,6 @@ class Git(FetchMethod):
|
||||
|
||||
ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
|
||||
|
||||
# bareclone implies nocheckout
|
||||
ud.bareclone = ud.parm.get("bareclone","0") == "1"
|
||||
if ud.bareclone:
|
||||
ud.nocheckout = 1
|
||||
|
||||
branches = ud.parm.get("branch", "master").split(',')
|
||||
if len(branches) != len(ud.names):
|
||||
raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
|
||||
@@ -126,7 +115,7 @@ class Git(FetchMethod):
|
||||
ud.branches[name] = ud.revisions[name]
|
||||
ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)
|
||||
|
||||
gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.'))
|
||||
gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
|
||||
# for rebaseable git repo, it is necessary to keep mirror tar ball
|
||||
# per revision, so that even the revision disappears from the
|
||||
# upstream repo in the future, the mirror will remain intact and still
|
||||
@@ -157,7 +146,7 @@ class Git(FetchMethod):
|
||||
def try_premirror(self, u, ud, d):
|
||||
# If we don't do this, updating an existing checkout with only premirrors
|
||||
# is not possible
|
||||
if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
|
||||
if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
|
||||
return True
|
||||
if os.path.exists(ud.clonedir):
|
||||
return False
|
||||
@@ -231,11 +220,7 @@ class Git(FetchMethod):
|
||||
if os.path.exists(destdir):
|
||||
bb.utils.prunedir(destdir)
|
||||
|
||||
cloneflags = "-s -n"
|
||||
if ud.bareclone:
|
||||
cloneflags += " --mirror"
|
||||
|
||||
runfetchcmd("git clone %s %s/ %s" % (cloneflags, ud.clonedir, destdir), d)
|
||||
runfetchcmd("git clone -s -n %s/ %s" % (ud.clonedir, destdir), d)
|
||||
if not ud.nocheckout:
|
||||
os.chdir(destdir)
|
||||
if subdir != "":
|
||||
|
||||
@@ -1,237 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2012 Robert Yang
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os, logging, re, sys
|
||||
import bb
|
||||
logger = logging.getLogger("BitBake.Monitor")
|
||||
|
||||
def printErr(info):
|
||||
logger.error("%s\n Disk space monitor will NOT be enabled" % info)
|
||||
|
||||
def convertGMK(unit):

""" Convert the space unit G, M, K, the unit is case-insensitive """

unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
if unitG:
return int(unitG.group(1)) * (1024 ** 3)
unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
if unitM:
return int(unitM.group(1)) * (1024 ** 2)
unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
if unitK:
return int(unitK.group(1)) * 1024
unitN = re.match('([1-9][0-9]*)\s?$', unit)
if unitN:
return int(unitN.group(1))
else:
return None
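A few example conversions for the convertGMK helper above; the regular expressions accept a case-insensitive G, M or K suffix or a bare number, and anything else yields None:

    convertGMK("2G")     # 2 * 1024 ** 3
    convertGMK("500m")   # 500 * 1024 ** 2
    convertGMK("100K")   # 100 * 1024
    convertGMK("4096")   # 4096
    convertGMK("2T")     # None, unrecognised unit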
def getMountedDev(path):
|
||||
|
||||
""" Get the device mounted at the path, uses /proc/mounts """
|
||||
|
||||
# Get the mount point of the filesystem containing path
|
||||
# st_dev is the ID of device containing file
|
||||
parentDev = os.stat(path).st_dev
|
||||
currentDev = parentDev
|
||||
# When the current directory's device is different from the
|
||||
# parrent's, then the current directory is a mount point
|
||||
while parentDev == currentDev:
|
||||
mountPoint = path
|
||||
# Use dirname to get the parrent's directory
|
||||
path = os.path.dirname(path)
|
||||
# Reach the "/"
|
||||
if path == mountPoint:
|
||||
break
|
||||
parentDev= os.stat(path).st_dev
|
||||
|
||||
try:
|
||||
with open("/proc/mounts", "r") as ifp:
|
||||
for line in ifp:
|
||||
procLines = line.rstrip('\n').split()
|
||||
if procLines[1] == mountPoint:
|
||||
return procLines[0]
|
||||
except EnvironmentError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def getDiskData(BBDirs, configuration):
|
||||
|
||||
"""Prepare disk data for disk space monitor"""
|
||||
|
||||
# Save the device IDs, need the ID to be unique (the dictionary's key is
|
||||
# unique), so that when more than one directories are located in the same
|
||||
# device, we just monitor it once
|
||||
devDict = {}
|
||||
for pathSpaceInode in BBDirs.split():
|
||||
# The input format is: "dir,space,inode", dir is a must, space
|
||||
# and inode are optional
|
||||
pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
|
||||
if not pathSpaceInodeRe:
|
||||
printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
|
||||
return None
|
||||
|
||||
action = pathSpaceInodeRe.group(1)
|
||||
if action not in ("ABORT", "STOPTASKS", "WARN"):
|
||||
printErr("Unknown disk space monitor action: %s" % action)
|
||||
return None
|
||||
|
||||
path = os.path.realpath(pathSpaceInodeRe.group(2))
|
||||
if not path:
|
||||
printErr("Invalid path value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
|
||||
return None
|
||||
|
||||
# The disk space or inode is optional, but it should have a correct
|
||||
# value once it is specified
|
||||
minSpace = pathSpaceInodeRe.group(3)
|
||||
if minSpace:
|
||||
minSpace = convertGMK(minSpace)
|
||||
if not minSpace:
|
||||
printErr("Invalid disk space value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(3))
|
||||
return None
|
||||
else:
|
||||
# 0 means that it is not specified
|
||||
minSpace = None
|
||||
|
||||
minInode = pathSpaceInodeRe.group(4)
|
||||
if minInode:
|
||||
minInode = convertGMK(minInode)
|
||||
if not minInode:
|
||||
printErr("Invalid inode value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(4))
|
||||
return None
|
||||
else:
|
||||
# 0 means that it is not specified
|
||||
minInode = None
|
||||
|
||||
if minSpace is None and minInode is None:
|
||||
printErr("No disk space or inode value in found BB_DISKMON_DIRS: %s" % pathSpaceInode)
|
||||
return None
|
||||
# mkdir for the directory since it may not exist, for example the
|
||||
# DL_DIR may not exist at the very beginning
|
||||
if not os.path.exists(path):
|
||||
bb.utils.mkdirhier(path)
|
||||
mountedDev = getMountedDev(path)
|
||||
devDict[mountedDev] = action, path, minSpace, minInode
|
||||
|
||||
return devDict
|
||||
|
||||
def getInterval(configuration):
|
||||
|
||||
""" Get the disk space interval """
|
||||
|
||||
interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", 1)
|
||||
if not interval:
|
||||
# The default value is 50M and 5K.
|
||||
return 50 * 1024 * 1024, 5 * 1024
|
||||
else:
|
||||
# The disk space or inode interval is optional, but it should
|
||||
# have a correct value once it is specified
|
||||
intervalRe = re.match('([^,]*),?\s*(.*)', interval)
|
||||
if intervalRe:
|
||||
intervalSpace = intervalRe.group(1)
|
||||
if intervalSpace:
|
||||
intervalSpace = convertGMK(intervalSpace)
|
||||
if not intervalSpace:
|
||||
printErr("Invalid disk space interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(1))
|
||||
return None, None
|
||||
intervalInode = intervalRe.group(2)
|
||||
if intervalInode:
|
||||
intervalInode = convertGMK(intervalInode)
|
||||
if not intervalInode:
|
||||
printErr("Invalid disk inode interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(2))
|
||||
return None, None
|
||||
return intervalSpace, intervalInode
|
||||
else:
|
||||
printErr("Invalid interval value in BB_DISKMON_WARNINTERVAL: %s" % interval)
|
||||
return None, None
|
||||
|
||||
class diskMonitor:
|
||||
|
||||
"""Prepare the disk space monitor data"""
|
||||
|
||||
def __init__(self, configuration):
|
||||
|
||||
self.enableMonitor = False
|
||||
|
||||
BBDirs = configuration.getVar("BB_DISKMON_DIRS", 1) or None
|
||||
if BBDirs:
|
||||
self.devDict = getDiskData(BBDirs, configuration)
|
||||
if self.devDict:
|
||||
self.spaceInterval, self.inodeInterval = getInterval(configuration)
|
||||
if self.spaceInterval and self.inodeInterval:
|
||||
self.enableMonitor = True
|
||||
# These are for saving the previous disk free space and inode, we
|
||||
# use them to avoid print too many warning messages
|
||||
self.preFreeS = {}
|
||||
self.preFreeI = {}
|
||||
# This is for STOPTASKS and ABORT, to avoid print the message repeatly
|
||||
# during waiting the tasks to finish
|
||||
self.checked = {}
|
||||
for dev in self.devDict:
|
||||
self.preFreeS[dev] = 0
|
||||
self.preFreeI[dev] = 0
|
||||
self.checked[dev] = False
|
||||
if self.spaceInterval is None and self.inodeInterval is None:
|
||||
self.enableMonitor = False
|
||||
|
||||
def check(self, rq):
|
||||
|
||||
""" Take action for the monitor """
|
||||
|
||||
if self.enableMonitor:
|
||||
for dev in self.devDict:
|
||||
st = os.statvfs(self.devDict[dev][1])
|
||||
|
||||
# The free space, float point number
|
||||
freeSpace = st.f_bavail * st.f_frsize
|
||||
|
||||
if self.devDict[dev][2] and freeSpace < self.devDict[dev][2]:
|
||||
# Always show warning, the self.checked would always be False if the action is WARN
|
||||
if self.preFreeS[dev] == 0 or self.preFreeS[dev] - freeSpace > self.spaceInterval and not self.checked[dev]:
|
||||
logger.warn("The free space of %s is running low (%.3fGB left)" % (dev, freeSpace / 1024 / 1024 / 1024.0))
|
||||
self.preFreeS[dev] = freeSpace
|
||||
|
||||
if self.devDict[dev][0] == "STOPTASKS" and not self.checked[dev]:
|
||||
logger.error("No new tasks can be excuted since the disk space monitor action is \"STOPTASKS\"!")
|
||||
self.checked[dev] = True
|
||||
rq.finish_runqueue(False)
|
||||
elif self.devDict[dev][0] == "ABORT" and not self.checked[dev]:
|
||||
logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
|
||||
self.checked[dev] = True
|
||||
rq.finish_runqueue(True)
|
||||
|
||||
# The free inodes, float point number
|
||||
freeInode = st.f_favail
|
||||
|
||||
if self.devDict[dev][3] and freeInode < self.devDict[dev][3]:
|
||||
# Always show warning, the self.checked would always be False if the action is WARN
|
||||
if self.preFreeI[dev] == 0 or self.preFreeI[dev] - freeInode > self.inodeInterval and not self.checked[dev]:
|
||||
logger.warn("The free inode of %s is running low (%.3fK left)" % (dev, freeInode / 1024.0))
|
||||
self.preFreeI[dev] = freeInode
|
||||
|
||||
if self.devDict[dev][0] == "STOPTASKS" and not self.checked[dev]:
|
||||
logger.error("No new tasks can be excuted since the disk space monitor action is \"STOPTASKS\"!")
|
||||
self.checked[dev] = True
|
||||
rq.finish_runqueue(False)
|
||||
elif self.devDict[dev][0] == "ABORT" and not self.checked[dev]:
|
||||
logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
|
||||
self.checked[dev] = True
|
||||
rq.finish_runqueue(True)
|
||||
return
|
||||
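The check method above derives the remaining space and inodes for each monitored device from os.statvfs. A standalone sketch of that computation; the path is arbitrary:

    import os

    st = os.statvfs("/tmp")
    free_bytes = st.f_bavail * st.f_frsize    # space available to unprivileged users
    free_inodes = st.f_favail                 # inodes available to unprivileged users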
@@ -100,7 +100,6 @@ class BBLogFilter(object):
|
||||
|
||||
loggerDefaultDebugLevel = 0
|
||||
loggerDefaultVerbose = False
|
||||
loggerVerboseLogs = False
|
||||
loggerDefaultDomains = []
|
||||
|
||||
def init_msgconfig(verbose, debug, debug_domains = []):
|
||||
@@ -109,8 +108,6 @@ def init_msgconfig(verbose, debug, debug_domains = []):
|
||||
"""
|
||||
bb.msg.loggerDefaultDebugLevel = debug
|
||||
bb.msg.loggerDefaultVerbose = verbose
|
||||
if verbose:
|
||||
bb.msg.loggerVerboseLogs = True
|
||||
bb.msg.loggerDefaultDomains = debug_domains
|
||||
|
||||
def addDefaultlogFilter(handler):
|
||||
|
||||
@@ -37,17 +37,6 @@ logger = logging.getLogger("BitBake.Parsing")
|
||||
|
||||
class ParseError(Exception):
|
||||
"""Exception raised when parsing fails"""
|
||||
def __init__(self, msg, filename, lineno=0):
|
||||
self.msg = msg
|
||||
self.filename = filename
|
||||
self.lineno = lineno
|
||||
Exception.__init__(self, msg, filename, lineno)
|
||||
|
||||
def __str__(self):
|
||||
if self.lineno:
|
||||
return "ParseError at %s:%d: %s" % (self.filename, self.lineno, self.msg)
|
||||
else:
|
||||
return "ParseError in %s: %s" % (self.filename, self.msg)
|
||||
|
||||
class SkipPackage(Exception):
|
||||
"""Exception raised to skip this package"""
|
||||
@@ -73,9 +62,9 @@ def update_mtime(f):
|
||||
def mark_dependency(d, f):
|
||||
if f.startswith('./'):
|
||||
f = "%s/%s" % (os.getcwd(), f[2:])
|
||||
deps = d.getVar('__depends') or set()
|
||||
deps = bb.data.getVar('__depends', d) or set()
|
||||
deps.update([(f, cached_mtime(f))])
|
||||
d.setVar('__depends', deps)
|
||||
bb.data.setVar('__depends', deps, d)
|
||||
|
||||
def supports(fn, data):
|
||||
"""Returns true if we have a handler for this file, false otherwise"""
|
||||
@@ -89,7 +78,7 @@ def handle(fn, data, include = 0):
|
||||
for h in handlers:
|
||||
if h['supports'](fn, data):
|
||||
return h['handle'](fn, data, include)
|
||||
raise ParseError("not a BitBake file", fn)
|
||||
raise ParseError("%s is not a BitBake file" % fn)
|
||||
|
||||
def init(fn, data):
|
||||
for h in handlers:
|
||||
@@ -101,7 +90,7 @@ def init_parser(d):
|
||||
|
||||
def resolve_file(fn, d):
|
||||
if not os.path.isabs(fn):
|
||||
bbpath = d.getVar("BBPATH", True)
|
||||
bbpath = bb.data.getVar("BBPATH", d, True)
|
||||
newfn = bb.utils.which(bbpath, fn)
|
||||
if not newfn:
|
||||
raise IOError("file %s not found in %s" % (fn, bbpath))
|
||||
@@ -122,7 +111,7 @@ def vars_from_file(mypkg, d):
|
||||
parts = myfile[0].split('_')
|
||||
__pkgsplit_cache__[mypkg] = parts
|
||||
if len(parts) > 3:
|
||||
raise ParseError("Unable to generate default variables from filename (too many underscores)", mypkg)
|
||||
raise ParseError("Unable to generate default variables from the filename: %s (too many underscores)" % mypkg)
|
||||
exp = 3 - len(parts)
|
||||
tmplist = []
|
||||
while exp != 0:
|
||||
@@ -131,13 +120,4 @@ def vars_from_file(mypkg, d):
|
||||
parts.extend(tmplist)
|
||||
return parts
|
||||
|
||||
def get_file_depends(d):
|
||||
'''Return the dependent files'''
|
||||
dep_files = []
|
||||
depends = d.getVar('__depends', True) or set()
|
||||
depends = depends.union(d.getVar('__base_depends', True) or set())
|
||||
for (fn, _) in depends:
|
||||
dep_files.append(os.path.abspath(fn))
|
||||
return " ".join(dep_files)
|
||||
|
||||
from bb.parse.parse_py import __version__, ConfHandler, BBHandler
|
||||
|
||||
@@ -54,14 +54,14 @@ class IncludeNode(AstNode):
|
||||
"""
|
||||
Include the file and evaluate the statements
|
||||
"""
|
||||
s = data.expand(self.what_file)
|
||||
s = bb.data.expand(self.what_file, data)
|
||||
logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
|
||||
|
||||
# TODO: Cache those includes... maybe not here though
|
||||
if self.force:
|
||||
bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, "include required")
|
||||
bb.parse.ConfHandler.include(self.filename, s, data, "include required")
|
||||
else:
|
||||
bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
|
||||
bb.parse.ConfHandler.include(self.filename, s, data, False)
|
||||
|
||||
class ExportNode(AstNode):
|
||||
def __init__(self, filename, lineno, var):
|
||||
@@ -69,7 +69,7 @@ class ExportNode(AstNode):
|
||||
self.var = var
|
||||
|
||||
def eval(self, data):
|
||||
data.setVarFlag(self.var, "export", 1)
|
||||
bb.data.setVarFlag(self.var, "export", 1, data)
|
||||
|
||||
class DataNode(AstNode):
|
||||
"""
|
||||
@@ -92,7 +92,7 @@ class DataNode(AstNode):
|
||||
groupd = self.groupd
|
||||
key = groupd["var"]
|
||||
if "exp" in groupd and groupd["exp"] != None:
|
||||
data.setVarFlag(key, "export", 1)
|
||||
bb.data.setVarFlag(key, "export", 1, data)
|
||||
if "ques" in groupd and groupd["ques"] != None:
|
||||
val = self.getFunc(key, data)
|
||||
if val == None:
|
||||
@@ -100,7 +100,7 @@ class DataNode(AstNode):
|
||||
elif "colon" in groupd and groupd["colon"] != None:
|
||||
e = data.createCopy()
|
||||
bb.data.update_data(e)
|
||||
val = e.expand(groupd["value"], key + "[:=]")
|
||||
val = bb.data.expand(groupd["value"], e, key + "[:=]")
|
||||
elif "append" in groupd and groupd["append"] != None:
|
||||
val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
|
||||
elif "prepend" in groupd and groupd["prepend"] != None:
|
||||
@@ -113,11 +113,11 @@ class DataNode(AstNode):
|
||||
val = groupd["value"]
|
||||
|
||||
if 'flag' in groupd and groupd['flag'] != None:
|
||||
data.setVarFlag(key, groupd['flag'], val)
|
||||
bb.data.setVarFlag(key, groupd['flag'], val, data)
|
||||
elif groupd["lazyques"]:
|
||||
data.setVarFlag(key, "defaultval", val)
|
||||
bb.data.setVarFlag(key, "defaultval", val, data)
|
||||
else:
|
||||
data.setVar(key, val)
|
||||
bb.data.setVar(key, val, data)
|
||||
|
||||
class MethodNode(AstNode):
|
||||
def __init__(self, filename, lineno, func_name, body):
|
||||
@@ -131,12 +131,12 @@ class MethodNode(AstNode):
|
||||
if not funcname in bb.methodpool._parsed_fns:
|
||||
text = "def %s(d):\n" % (funcname) + '\n'.join(self.body)
|
||||
bb.methodpool.insert_method(funcname, text, self.filename)
|
||||
anonfuncs = data.getVar('__BBANONFUNCS') or []
|
||||
anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or []
|
||||
anonfuncs.append(funcname)
|
||||
data.setVar('__BBANONFUNCS', anonfuncs)
|
||||
bb.data.setVar('__BBANONFUNCS', anonfuncs, data)
|
||||
else:
|
||||
data.setVarFlag(self.func_name, "func", 1)
|
||||
data.setVar(self.func_name, '\n'.join(self.body))
|
||||
bb.data.setVarFlag(self.func_name, "func", 1, data)
|
||||
bb.data.setVar(self.func_name, '\n'.join(self.body), data)
|
||||
|
||||
class PythonMethodNode(AstNode):
|
||||
def __init__(self, filename, lineno, function, define, body):
|
||||
@@ -152,9 +152,9 @@ class PythonMethodNode(AstNode):
|
||||
text = '\n'.join(self.body)
|
||||
if not bb.methodpool.parsed_module(self.define):
|
||||
bb.methodpool.insert_method(self.define, text, self.filename)
|
||||
data.setVarFlag(self.function, "func", 1)
|
||||
data.setVarFlag(self.function, "python", 1)
|
||||
data.setVar(self.function, text)
|
||||
bb.data.setVarFlag(self.function, "func", 1, data)
|
||||
bb.data.setVarFlag(self.function, "python", 1, data)
|
||||
bb.data.setVar(self.function, text, data)
|
||||
|
||||
class MethodFlagsNode(AstNode):
|
||||
def __init__(self, filename, lineno, key, m):
|
||||
@@ -163,19 +163,19 @@ class MethodFlagsNode(AstNode):
|
||||
self.m = m
|
||||
|
||||
def eval(self, data):
|
||||
if data.getVar(self.key):
|
||||
if bb.data.getVar(self.key, data):
|
||||
# clean up old version of this piece of metadata, as its
|
||||
# flags could cause problems
|
||||
data.setVarFlag(self.key, 'python', None)
|
||||
data.setVarFlag(self.key, 'fakeroot', None)
|
||||
bb.data.setVarFlag(self.key, 'python', None, data)
|
||||
bb.data.setVarFlag(self.key, 'fakeroot', None, data)
|
||||
if self.m.group("py") is not None:
|
||||
data.setVarFlag(self.key, "python", "1")
|
||||
bb.data.setVarFlag(self.key, "python", "1", data)
|
||||
else:
|
||||
data.delVarFlag(self.key, "python")
|
||||
bb.data.delVarFlag(self.key, "python", data)
|
||||
if self.m.group("fr") is not None:
|
||||
data.setVarFlag(self.key, "fakeroot", "1")
|
||||
bb.data.setVarFlag(self.key, "fakeroot", "1", data)
|
||||
else:
|
||||
data.delVarFlag(self.key, "fakeroot")
|
||||
bb.data.delVarFlag(self.key, "fakeroot", data)
|
||||
|
||||
class ExportFuncsNode(AstNode):
|
||||
def __init__(self, filename, lineno, fns, classes):
|
||||
@@ -197,25 +197,25 @@ class ExportFuncsNode(AstNode):
|
||||
vars.append([allvars[0], allvars[2]])
|
||||
|
||||
for (var, calledvar) in vars:
|
||||
if data.getVar(var) and not data.getVarFlag(var, 'export_func'):
|
||||
if bb.data.getVar(var, data) and not bb.data.getVarFlag(var, 'export_func', data):
|
||||
continue
|
||||
|
||||
if data.getVar(var):
|
||||
data.setVarFlag(var, 'python', None)
|
||||
data.setVarFlag(var, 'func', None)
|
||||
if bb.data.getVar(var, data):
|
||||
bb.data.setVarFlag(var, 'python', None, data)
|
||||
bb.data.setVarFlag(var, 'func', None, data)
|
||||
|
||||
for flag in [ "func", "python" ]:
|
||||
if data.getVarFlag(calledvar, flag):
|
||||
data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag))
|
||||
if bb.data.getVarFlag(calledvar, flag, data):
|
||||
bb.data.setVarFlag(var, flag, bb.data.getVarFlag(calledvar, flag, data), data)
|
||||
for flag in [ "dirs" ]:
|
||||
if data.getVarFlag(var, flag):
|
||||
data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag))
|
||||
if bb.data.getVarFlag(var, flag, data):
|
||||
bb.data.setVarFlag(calledvar, flag, bb.data.getVarFlag(var, flag, data), data)
|
||||
|
||||
if data.getVarFlag(calledvar, "python"):
|
||||
data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n")
|
||||
if bb.data.getVarFlag(calledvar, "python", data):
|
||||
bb.data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", data)
|
||||
else:
|
||||
data.setVar(var, "\t" + calledvar + "\n")
|
||||
data.setVarFlag(var, 'export_func', '1')
|
||||
bb.data.setVar(var, "\t" + calledvar + "\n", data)
|
||||
bb.data.setVarFlag(var, 'export_func', '1', data)
|
||||
|
||||
class AddTaskNode(AstNode):
|
||||
def __init__(self, filename, lineno, func, before, after):
|
||||
@@ -229,25 +229,25 @@ class AddTaskNode(AstNode):
|
||||
if self.func[:3] != "do_":
|
||||
var = "do_" + self.func
|
||||
|
||||
data.setVarFlag(var, "task", 1)
|
||||
bbtasks = data.getVar('__BBTASKS') or []
|
||||
bb.data.setVarFlag(var, "task", 1, data)
|
||||
bbtasks = bb.data.getVar('__BBTASKS', data) or []
|
||||
if not var in bbtasks:
|
||||
bbtasks.append(var)
|
||||
data.setVar('__BBTASKS', bbtasks)
|
||||
bb.data.setVar('__BBTASKS', bbtasks, data)
|
||||
|
||||
existing = data.getVarFlag(var, "deps") or []
|
||||
existing = bb.data.getVarFlag(var, "deps", data) or []
|
||||
if self.after is not None:
|
||||
# set up deps for function
|
||||
for entry in self.after.split():
|
||||
if entry not in existing:
|
||||
existing.append(entry)
|
||||
data.setVarFlag(var, "deps", existing)
|
||||
bb.data.setVarFlag(var, "deps", existing, data)
|
||||
if self.before is not None:
|
||||
# set up things that depend on this func
|
||||
for entry in self.before.split():
|
||||
existing = data.getVarFlag(entry, "deps") or []
|
||||
existing = bb.data.getVarFlag(entry, "deps", data) or []
|
||||
if var not in existing:
|
||||
data.setVarFlag(entry, "deps", [var] + existing)
|
||||
bb.data.setVarFlag(entry, "deps", [var] + existing, data)
|
||||
|
||||
class BBHandlerNode(AstNode):
|
||||
def __init__(self, filename, lineno, fns):
|
||||
@@ -255,11 +255,11 @@ class BBHandlerNode(AstNode):
|
||||
self.hs = fns.split()
|
||||
|
||||
def eval(self, data):
|
||||
bbhands = data.getVar('__BBHANDLERS') or []
|
||||
bbhands = bb.data.getVar('__BBHANDLERS', data) or []
|
||||
for h in self.hs:
|
||||
bbhands.append(h)
|
||||
data.setVarFlag(h, "handler", 1)
|
||||
data.setVar('__BBHANDLERS', bbhands)
|
||||
bb.data.setVarFlag(h, "handler", 1, data)
|
||||
bb.data.setVar('__BBHANDLERS', bbhands, data)
|
||||
|
||||
class InheritNode(AstNode):
|
||||
def __init__(self, filename, lineno, classes):
|
||||
@@ -267,7 +267,7 @@ class InheritNode(AstNode):
|
||||
self.classes = classes
|
||||
|
||||
def eval(self, data):
|
||||
bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
|
||||
bb.parse.BBHandler.inherit(self.classes, data)
|
||||
|
||||
def handleInclude(statements, filename, lineno, m, force):
|
||||
statements.append(IncludeNode(filename, lineno, m.group(1), force))
|
||||
@@ -308,9 +308,9 @@ def handleInherit(statements, filename, lineno, m):
|
||||
|
||||
def finalize(fn, d, variant = None):
|
||||
all_handlers = {}
|
||||
for var in d.getVar('__BBHANDLERS') or []:
|
||||
for var in bb.data.getVar('__BBHANDLERS', d) or []:
|
||||
# try to add the handler
|
||||
handler = d.getVar(var)
|
||||
handler = bb.data.getVar(var, d)
|
||||
bb.event.register(var, handler)
|
||||
|
||||
bb.event.fire(bb.event.RecipePreFinalise(fn), d)
|
||||
@@ -318,18 +318,16 @@ def finalize(fn, d, variant = None):
|
||||
bb.data.expandKeys(d)
|
||||
bb.data.update_data(d)
|
||||
code = []
|
||||
for funcname in d.getVar("__BBANONFUNCS") or []:
|
||||
for funcname in bb.data.getVar("__BBANONFUNCS", d) or []:
|
||||
code.append("%s(d)" % funcname)
|
||||
bb.utils.simple_exec("\n".join(code), {"d": d})
|
||||
bb.data.update_data(d)
|
||||
|
||||
tasklist = d.getVar('__BBTASKS') or []
|
||||
tasklist = bb.data.getVar('__BBTASKS', d) or []
|
||||
bb.build.add_tasks(tasklist, d)
|
||||
|
||||
bb.parse.siggen.finalise(fn, d, variant)
|
||||
|
||||
d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
|
||||
|
||||
bb.event.fire(bb.event.RecipeParsed(fn), d)
|
||||
|
||||
def _create_variants(datastores, names, function):
|
||||
@@ -380,7 +378,7 @@ def multi_finalize(fn, d):
|
||||
try:
|
||||
finalize(fn, d)
|
||||
except bb.parse.SkipPackage as e:
|
||||
d.setVar("__SKIPPED", e.args[0])
|
||||
bb.data.setVar("__SKIPPED", e.args[0], d)
|
||||
datastores = {"": safe_d}
|
||||
|
||||
versions = (d.getVar("BBVERSIONS", True) or "").split()
|
||||
@@ -423,7 +421,7 @@ def multi_finalize(fn, d):
|
||||
try:
|
||||
finalize(fn, d)
|
||||
except bb.parse.SkipPackage as e:
|
||||
d.setVar("__SKIPPED", e.args[0])
|
||||
bb.data.setVar("__SKIPPED", e.args[0], d)
|
||||
|
||||
_create_variants(datastores, versions, verfunc)
|
||||
|
||||
@@ -452,7 +450,7 @@ def multi_finalize(fn, d):
|
||||
d.setVar("BBEXTENDVARIANT", variantmap[name])
|
||||
else:
|
||||
d.setVar("PN", "%s-%s" % (pn, name))
|
||||
bb.parse.BBHandler.inherit([extendedmap[name]], fn, 0, d)
|
||||
bb.parse.BBHandler.inherit([extendedmap[name]], d)
|
||||
|
||||
safe_d.setVar("BBCLASSEXTEND", extended)
|
||||
_create_variants(datastores, extendedmap.keys(), extendfunc)
|
||||
@@ -463,7 +461,7 @@ def multi_finalize(fn, d):
|
||||
if not onlyfinalise or variant in onlyfinalise:
|
||||
finalize(fn, variant_d, variant)
|
||||
except bb.parse.SkipPackage as e:
|
||||
variant_d.setVar("__SKIPPED", e.args[0])
|
||||
bb.data.setVar("__SKIPPED", e.args[0], variant_d)
|
||||
|
||||
if len(datastores) > 1:
|
||||
variants = filter(None, datastores.iterkeys())
|
||||
|
||||
@@ -68,8 +68,10 @@ def supports(fn, d):
|
||||
"""Return True if fn has a supported extension"""
|
||||
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
|
||||
|
||||
def inherit(files, fn, lineno, d):
|
||||
def inherit(files, d):
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
fn = ""
|
||||
lineno = 0
|
||||
for file in files:
|
||||
file = data.expand(file, d)
|
||||
if not os.path.isabs(file) and not file.endswith(".bbclass"):
|
||||
@@ -79,7 +81,7 @@ def inherit(files, fn, lineno, d):
|
||||
logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
|
||||
__inherit_cache.append( file )
|
||||
data.setVar('__inherit_cache', __inherit_cache, d)
|
||||
include(fn, file, lineno, d, "inherit")
|
||||
include(fn, file, d, "inherit")
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
|
||||
def get_statements(filename, absolute_filename, base_name):
|
||||
@@ -157,7 +159,7 @@ def handle(fn, d, include):
|
||||
return ast.multi_finalize(fn, d)
|
||||
|
||||
if oldfile:
|
||||
d.setVar("FILE", oldfile)
|
||||
bb.data.setVar("FILE", oldfile, d)
|
||||
|
||||
# we have parsed the bb class now
|
||||
if ext == ".bbclass" or ext == ".inc":
|
||||
@@ -191,6 +193,7 @@ def feeder(lineno, s, fn, root, statements):
|
||||
if lineno == IN_PYTHON_EOF:
|
||||
return
|
||||
|
||||
|
||||
if s and s[0] == '#':
|
||||
if len(__residue__) != 0 and __residue__[0][0] != "#":
|
||||
bb.error("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
|
||||
|
||||
@@ -24,41 +24,41 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import re, os
|
||||
import re, bb.data, os
|
||||
import logging
|
||||
import bb.utils
|
||||
from bb.parse import ParseError, resolve_file, ast, logger
|
||||
|
||||
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"])(?P<value>.*)(?P=apo)$")
|
||||
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
||||
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
||||
__include_regexp__ = re.compile( r"include\s+(.+)" )
|
||||
__require_regexp__ = re.compile( r"require\s+(.+)" )
|
||||
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
|
||||
__export_regexp__ = re.compile( r"export\s+(.+)" )
|
||||
|
||||
def init(data):
|
||||
topdir = data.getVar('TOPDIR')
|
||||
topdir = bb.data.getVar('TOPDIR', data)
|
||||
if not topdir:
|
||||
data.setVar('TOPDIR', os.getcwd())
|
||||
bb.data.setVar('TOPDIR', os.getcwd(), data)
|
||||
|
||||
|
||||
def supports(fn, d):
|
||||
return fn[-5:] == ".conf"
|
||||
|
||||
def include(oldfn, fn, lineno, data, error_out):
|
||||
def include(oldfn, fn, data, error_out):
|
||||
"""
|
||||
error_out: A string indicating the verb (e.g. "include", "inherit") to be
|
||||
used in a ParseError that will be raised if the file to be included could
|
||||
not be included. Specify False to avoid raising an error in this case.
|
||||
error_out If True a ParseError will be raised if the to be included
|
||||
config-files could not be included.
|
||||
"""
|
||||
if oldfn == fn: # prevent infinite recursion
|
||||
return None
|
||||
|
||||
import bb
|
||||
fn = data.expand(fn)
|
||||
oldfn = data.expand(oldfn)
|
||||
fn = bb.data.expand(fn, data)
|
||||
oldfn = bb.data.expand(oldfn, data)
|
||||
|
||||
if not os.path.isabs(fn):
|
||||
dname = os.path.dirname(oldfn)
|
||||
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", 1))
|
||||
bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
|
||||
abs_fn = bb.utils.which(bbpath, fn)
|
||||
if abs_fn:
|
||||
fn = abs_fn
|
||||
@@ -68,7 +68,7 @@ def include(oldfn, fn, lineno, data, error_out):
|
||||
ret = handle(fn, data, True)
|
||||
except IOError:
|
||||
if error_out:
|
||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
|
||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
|
||||
logger.debug(2, "CONF file '%s' not found", fn)
|
||||
|
||||
def handle(fn, data, include):
|
||||
@@ -77,7 +77,7 @@ def handle(fn, data, include):
|
||||
if include == 0:
|
||||
oldfile = None
|
||||
else:
|
||||
oldfile = data.getVar('FILE')
|
||||
oldfile = bb.data.getVar('FILE', data)
|
||||
|
||||
abs_fn = resolve_file(fn, data)
|
||||
f = open(abs_fn, 'r')
|
||||
@@ -102,10 +102,10 @@ def handle(fn, data, include):
|
||||
feeder(lineno, s, fn, statements)
|
||||
|
||||
# DONE WITH PARSING... time to evaluate
|
||||
data.setVar('FILE', abs_fn)
|
||||
bb.data.setVar('FILE', abs_fn, data)
|
||||
statements.eval(data)
|
||||
if oldfile:
|
||||
data.setVar('FILE', oldfile)
|
||||
bb.data.setVar('FILE', oldfile, data)
|
||||
|
||||
return data
|
||||
|
||||
@@ -131,7 +131,7 @@ def feeder(lineno, s, fn, statements):
|
||||
ast.handleExport(statements, fn, lineno, m)
|
||||
return
|
||||
|
||||
raise ParseError("unparsed line: '%s'" % s, fn, lineno);
|
||||
raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));
|
||||
|
||||
# Add us to the handlers list
|
||||
from bb.parse import handlers
|
||||
|
||||
@@ -41,10 +41,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
|
||||
|
||||
logger = logging.getLogger("BitBake.PersistData")
|
||||
if hasattr(sqlite3, 'enable_shared_cache'):
|
||||
try:
|
||||
sqlite3.enable_shared_cache(True)
|
||||
except sqlite3.OperationalError:
|
||||
pass
|
||||
sqlite3.enable_shared_cache(True)
|
||||
|
||||
|
||||
@total_ordering
|
||||
@@ -198,9 +195,9 @@ def connect(database):
|
||||
|
||||
def persist(domain, d):
|
||||
"""Convenience factory for SQLTable objects based upon metadata"""
|
||||
import bb.utils
|
||||
cachedir = (d.getVar("PERSISTENT_DIR", True) or
|
||||
d.getVar("CACHE", True))
|
||||
import bb.data, bb.utils
|
||||
cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
|
||||
bb.data.getVar("CACHE", d, True))
|
||||
if not cachedir:
|
||||
logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
|
||||
sys.exit(1)
|
||||
|
||||
@@ -24,53 +24,17 @@
|
||||
import re
|
||||
import logging
|
||||
from bb import data, utils
|
||||
from collections import defaultdict
|
||||
import bb
|
||||
|
||||
logger = logging.getLogger("BitBake.Provider")
|
||||
|
||||
class NoProvider(bb.BBHandledException):
|
||||
class NoProvider(Exception):
|
||||
"""Exception raised when no provider of a build dependency can be found"""
|
||||
|
||||
class NoRProvider(bb.BBHandledException):
|
||||
class NoRProvider(Exception):
|
||||
"""Exception raised when no provider of a runtime dependency can be found"""
|
||||
|
||||
|
||||
def findProviders(cfgData, dataCache, pkg_pn = None):
|
||||
"""
|
||||
Convenience function to get latest and preferred providers in pkg_pn
|
||||
"""
|
||||
|
||||
if not pkg_pn:
|
||||
pkg_pn = dataCache.pkg_pn
|
||||
|
||||
# Need to ensure data store is expanded
|
||||
localdata = data.createCopy(cfgData)
|
||||
bb.data.update_data(localdata)
|
||||
bb.data.expandKeys(localdata)
|
||||
|
||||
preferred_versions = {}
|
||||
latest_versions = {}
|
||||
|
||||
for pn in pkg_pn:
|
||||
(last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn)
|
||||
preferred_versions[pn] = (pref_ver, pref_file)
|
||||
latest_versions[pn] = (last_ver, last_file)
|
||||
|
||||
return (latest_versions, preferred_versions)
|
||||
|
||||
|
||||
def allProviders(dataCache):
|
||||
"""
|
||||
Find all providers for each pn
|
||||
"""
|
||||
all_providers = defaultdict(list)
|
||||
for (fn, pn) in dataCache.pkg_fn.items():
|
||||
ver = dataCache.pkg_pepvpr[fn]
|
||||
all_providers[pn].append((ver, fn))
|
||||
return all_providers
|
||||
|
||||
|
||||
def sortPriorities(pn, dataCache, pkg_pn = None):
|
||||
"""
|
||||
Reorder pkg_pn by file priority and default preference
|
||||
@@ -120,10 +84,10 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
|
||||
preferred_ver = None
|
||||
|
||||
localdata = data.createCopy(cfgData)
|
||||
localdata.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn))
|
||||
bb.data.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn), localdata)
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
preferred_v = localdata.getVar('PREFERRED_VERSION', True)
|
||||
preferred_v = bb.data.getVar('PREFERRED_VERSION', localdata, True)
|
||||
if preferred_v:
|
||||
m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
|
||||
if m:
|
||||
@@ -284,7 +248,7 @@ def filterProviders(providers, item, cfgData, dataCache):
|
||||
|
||||
eligible = _filterProviders(providers, item, cfgData, dataCache)
|
||||
|
||||
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, 1)
|
||||
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1)
|
||||
if prefervar:
|
||||
dataCache.preferred[item] = prefervar
|
||||
|
||||
@@ -322,7 +286,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
|
||||
pn = dataCache.pkg_fn[p]
|
||||
provides = dataCache.pn_provides[pn]
|
||||
for provide in provides:
|
||||
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, 1)
|
||||
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
|
||||
logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
|
||||
if prefervar in pns and pns[prefervar] not in preferred:
|
||||
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
|
||||
|
||||
@@ -31,7 +31,6 @@ import fcntl
import logging
import bb
from bb import msg, data, event
from bb import monitordisk

bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
@@ -188,10 +187,9 @@ class RunQueueData:
self.taskData = taskData
self.targets = targets
self.rq = rq
self.warn_multi_bb = False

self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", 1) or ""
self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", 1) or "").split()
self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or ""
self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split()

self.reset()

@@ -676,14 +674,11 @@ class RunQueueData:
prov_list[prov] = [fn]
elif fn not in prov_list[prov]:
prov_list[prov].append(fn)
error = False
for prov in prov_list:
if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
msg = "Multiple .bb files are due to be built which each provide %s (%s)." % (prov, " ".join(prov_list[prov]))
if self.warn_multi_bb:
logger.warn(msg)
else:
msg += "\n This usually means one provides something the other doesn't and should."
logger.error(msg)
error = True
logger.error("Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should.", prov, " ".join(prov_list[prov]))

# Create a whitelist usable by the stamp checks
@@ -770,15 +765,11 @@ class RunQueue:
self.cfgData = cfgData
self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)

self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile"
self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None
self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION", True) or None
self.stamppolicy = bb.data.getVar("BB_STAMP_POLICY", cfgData, True) or "perfile"
self.hashvalidate = bb.data.getVar("BB_HASHCHECK_FUNCTION", cfgData, True) or None

self.state = runQueuePrepare

# For disk space monitor
self.dm = monitordisk.diskMonitor(cfgData)

def check_stamps(self):
unchecked = {}
current = []
@@ -953,9 +944,6 @@ class RunQueue:
else:
self.rqexe = RunQueueExecuteScenequeue(self)

if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]:
self.dm.check(self)

if self.state is runQueueSceneRun:
retval = self.rqexe.execute()

@@ -970,13 +958,6 @@ class RunQueue:
if self.state is runQueueCleanUp:
self.rqexe.finish()

if self.state is runQueueComplete or self.state is runQueueFailed:
if self.rqexe.stats.failed:
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
else:
# Let's avoid the word "failed" if nothing actually did
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped)

if self.state is runQueueFailed:
if not self.rqdata.taskData.tryaltconfigs:
raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
@@ -986,6 +967,7 @@ class RunQueue:

if self.state is runQueueComplete:
# All done
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
return False

if self.state is runQueueChildProcess:
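Note (editorial): a small sketch of how the two summary lines above derive their counts from the executor statistics; stats is assumed to expose the completed, skipped and failed counters used in the hunk:

    def tasks_summary(stats):
        # With failures, "attempted" counts completed plus failed tasks.
        if stats.failed:
            return ("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed."
                    % (stats.completed + stats.failed, stats.skipped, stats.failed))
        # Without failures the wording avoids the word "failed" entirely.
        return ("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded."
                % (stats.completed, stats.skipped))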
@@ -1024,8 +1006,8 @@ class RunQueueExecute:
self.cfgData = rq.cfgData
self.rqdata = rq.rqdata

self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", 1) or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER", 1) or "speed"
self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", self.cfgData, 1) or 1)
self.scheduler = bb.data.getVar("BB_SCHEDULER", self.cfgData, 1) or "speed"

self.runq_buildable = []
self.runq_running = []
@@ -1067,13 +1049,6 @@ class RunQueueExecute:
for pipe in self.build_pipes:
self.build_pipes[pipe].read()

if len(self.failed_fnids) != 0:
self.rq.state = runQueueFailed
return

self.rq.state = runQueueComplete
return

def finish(self):
self.rq.state = runQueueCleanUp

@@ -1121,12 +1096,6 @@ class RunQueueExecute:

logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
(fn, taskname, ', '.join(fakedirs)))
else:
envvars = (self.rqdata.dataCache.fakerootnoenv[fn] or "").split()
for key, value in (var.split('=') for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value

sys.stdout.flush()
sys.stderr.flush()
@@ -1156,9 +1125,9 @@ class RunQueueExecute:
if umask:
os.umask(umask)

self.cooker.configuration.data.setVar("BB_WORKERCONTEXT", "1")
self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self)
self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn)
bb.data.setVar("BB_WORKERCONTEXT", "1", self.cooker.configuration.data)
bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data)
bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn, self.cooker.configuration.data)
bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps)
ret = 0
try:
@@ -1233,34 +1202,33 @@ class RunQueueExecuteTasks(RunQueueExecute):
for task in xrange(self.stats.total):
if task in self.rq.scenequeue_covered:
continue
logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))

if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
ok = True
for revdep in self.rqdata.runq_revdeps[task]:
if self.rqdata.runq_fnid[task] != self.rqdata.runq_fnid[revdep]:
logger.debug(1, 'Found "bad" dep %s (%s) for %s (%s)' % (revdep, self.rqdata.get_user_idstring(revdep), task, self.rqdata.get_user_idstring(task)))

ok = False
break
if ok:
found = True
self.rq.scenequeue_covered.add(task)

logger.debug(1, 'Skip list (pre setsceneverify) %s', sorted(self.rq.scenequeue_covered))

# Allow the metadata to elect for setscene tasks to run anyway
# Detect when the real task needs to be run anyway by looking to see
# if any of its dependencies within the same package are scheduled
# to be run.
covered_remove = set()
if self.rq.setsceneverify:
call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.configuration.data }
covered_remove = bb.utils.better_eval(call, locs)
for task in self.rq.scenequeue_covered:
task_fnid = self.rqdata.runq_fnid[task]
for dep in self.rqdata.runq_depends[task]:
if self.rqdata.runq_fnid[dep] == task_fnid:
if dep not in self.rq.scenequeue_covered:
covered_remove.add(task)
break

for task in covered_remove:
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task] + '_setscene'
bb.build.del_stamp(taskname, self.rqdata.dataCache, fn)
logger.debug(1, 'Not skipping task %s due to setsceneverify', task)
logger.debug(1, 'Not skipping task %s because it will have to be run anyway', task)
self.rq.scenequeue_covered.remove(task)

logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
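Note (editorial): the loop above removes a task from the setscene skip list whenever a dependency belonging to the same recipe is still scheduled to run. A standalone sketch of that rule over plain dicts and sets (the argument names are illustrative, not BitBake API):

    def uncover_same_recipe_tasks(covered, depends, fnid_of):
        # covered: set of task ids assumed covered by setscene results
        # depends: task id -> set of dependency task ids
        # fnid_of: task id -> recipe (file) id
        to_remove = set()
        for task in covered:
            for dep in depends[task]:
                if fnid_of[dep] == fnid_of[task] and dep not in covered:
                    to_remove.add(task)  # a sibling task will run, so run this one too
                    break
        return covered - to_remove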
@@ -1283,7 +1251,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
if type(obj) is type and
issubclass(obj, RunQueueScheduler))

user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True)
user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True)
if user_schedulers:
for sched in user_schedulers.split():
if not "." in sched:
@@ -1514,7 +1482,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_revdeps_squash[self.rqdata.runq_setscene.index(taskid)] = set()

#for task in xrange(len(sq_revdeps_squash)):
# print "Task %s: %s.%s is %s " % (task, self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[self.rqdata.runq_setscene[task]]], self.rqdata.runq_task[self.rqdata.runq_setscene[task]] + "_setscene", sq_revdeps_squash[task])
# print "Task %s: %s.%s is %s " % (task, self.taskData.fn_index[self.runq_fnid[self.runq_setscene[task]]], self.runq_task[self.runq_setscene[task]] + "_setscene", sq_revdeps_squash[task])

self.sq_deps = []
self.sq_revdeps = sq_revdeps_squash
@@ -1662,8 +1630,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
self.task_skip(task)
return True

startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
logger.info("Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
self.stats.total, fn, taskname))

pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)

@@ -1724,15 +1692,6 @@ class runQueueEvent(bb.event.Event):
self.stats = stats.copy()
bb.event.Event.__init__(self)

class sceneQueueEvent(runQueueEvent):
"""
Base sceneQueue event class
"""
def __init__(self, task, stats, rq, noexec=False):
runQueueEvent.__init__(self, task, stats, rq)
realtask = rq.rqdata.runq_setscene[task]
self.taskstring = rq.rqdata.get_user_idstring(realtask, "_setscene")

class runQueueTaskStarted(runQueueEvent):
"""
Event notifing a task was started
@@ -1741,14 +1700,6 @@ class runQueueTaskStarted(runQueueEvent):
runQueueEvent.__init__(self, task, stats, rq)
self.noexec = noexec

class sceneQueueTaskStarted(sceneQueueEvent):
"""
Event notifing a setscene task was started
"""
def __init__(self, task, stats, rq, noexec=False):
sceneQueueEvent.__init__(self, task, stats, rq)
self.noexec = noexec

class runQueueTaskFailed(runQueueEvent):
"""
Event notifing a task failed
@@ -1757,13 +1708,13 @@ class runQueueTaskFailed(runQueueEvent):
runQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode

class sceneQueueTaskFailed(sceneQueueEvent):
class sceneQueueTaskFailed(runQueueTaskFailed):
"""
Event notifing a setscene task failed
"""
def __init__(self, task, stats, exitcode, rq):
sceneQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode
runQueueTaskFailed.__init__(self, task, stats, exitcode, rq)
self.taskstring = rq.rqdata.get_user_idstring(task, "_setscene")

class runQueueTaskCompleted(runQueueEvent):
"""
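Note (editorial): the hunk above reparents sceneQueueTaskFailed from sceneQueueEvent to runQueueTaskFailed, so a handler that matches runQueueTaskFailed also sees setscene failures. A minimal sketch of the effect with stand-in classes:

    class runQueueEvent(object): pass
    class runQueueTaskFailed(runQueueEvent): pass
    class sceneQueueTaskFailed(runQueueTaskFailed): pass  # previously a sceneQueueEvent subclass

    # One isinstance() check now covers real-task and setscene failures alike.
    assert isinstance(sceneQueueTaskFailed(), runQueueTaskFailed)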
@@ -1771,8 +1722,8 @@ class runQueueTaskCompleted(runQueueEvent):
"""

def check_stamp_fn(fn, taskname, d):
rqexe = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY")
fn = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2")
rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d)
fnid = rqexe.rqdata.taskData.getfn_id(fn)
taskid = rqexe.rqdata.get_task_id(fnid, taskname)
if taskid is not None:
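Note (editorial): check_stamp_fn depends on the executor having stashed itself and the current recipe file in the datastore under the __RUNQUEUE_DO_NOT_USE_EXTERNALLY keys, as done in the fork_off_task hunk earlier. A rough sketch of that round trip with a plain dict standing in for the datastore (the recipe name is hypothetical):

    d = {}  # stand-in for the BitBake datastore; the real code uses setVar/getVar

    def stash_worker_context(d, rqexe, fn):
        d["__RUNQUEUE_DO_NOT_USE_EXTERNALLY"] = rqexe   # the executor object
        d["__RUNQUEUE_DO_NOT_USE_EXTERNALLY2"] = fn     # recipe file being built

    def fetch_worker_context(d):
        return (d["__RUNQUEUE_DO_NOT_USE_EXTERNALLY"],
                d["__RUNQUEUE_DO_NOT_USE_EXTERNALLY2"])

    stash_worker_context(d, object(), "example_1.0.bb")
    rqexe, fn = fetch_worker_context(d)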
@@ -163,7 +163,7 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
# remove this when you're done with debugging
# allow_reuse_address = True

def __init__(self, interface):
def __init__(self, interface = ("localhost", 0)):
"""
Constructor
"""
@@ -242,14 +242,14 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
return

class BitbakeServerInfo():
def __init__(self, host, port):
self.host = host
self.port = port
def __init__(self, server):
self.host = server.host
self.port = server.port

class BitBakeServerConnection():
def __init__(self, serverinfo, clientinfo=("localhost", 0)):
def __init__(self, serverinfo):
self.connection = _create_server(serverinfo.host, serverinfo.port)
self.events = uievent.BBUIEventQueue(self.connection, clientinfo)
self.events = uievent.BBUIEventQueue(self.connection)
for event in bb.event.ui_queue:
self.events.queue_event(event)

@@ -267,8 +267,8 @@ class BitBakeServerConnection():
pass

class BitBakeServer(object):
def initServer(self, interface = ("localhost", 0)):
self.server = BitBakeXMLRPCServer(interface)
def initServer(self):
self.server = BitBakeXMLRPCServer()

def addcooker(self, cooker):
self.cooker = cooker
@@ -278,7 +278,7 @@ class BitBakeServer(object):
return self.server.register_idle_function

def saveConnectionDetails(self):
self.serverinfo = BitbakeServerInfo(self.server.host, self.server.port)
self.serverinfo = BitbakeServerInfo(self.server)

def detach(self, cooker_logfile):
daemonize.createDaemon(self.server.serve_forever, cooker_logfile)
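Note (editorial): with these hunks the listen interface moves into BitBakeXMLRPCServer's default argument, and BitbakeServerInfo/BitBakeServerConnection accept the richer objects instead of separate host/port values. A rough sketch of the resulting call chain, assuming the classes as shown in the hunks (cooker is constructed elsewhere):

    server = BitBakeServer()
    server.initServer()                 # binds BitBakeXMLRPCServer(("localhost", 0))
    server.addcooker(cooker)
    server.saveConnectionDetails()      # wraps the live server in BitbakeServerInfo
    connection = BitBakeServerConnection(server.serverinfo)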
@@ -16,7 +16,7 @@ def init(d):
siggens = [obj for obj in globals().itervalues()
if type(obj) is type and issubclass(obj, SignatureGenerator)]

desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
for sg in siggens:
if desired == sg.name:
return sg(d)
@@ -62,13 +62,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.runtaskdeps = {}
self.gendeps = {}
self.lookupcache = {}
self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
self.taskwhitelist = None
self.init_rundepcheck(data)

def init_rundepcheck(self, data):
self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None

if self.taskwhitelist:
self.twl = re.compile(self.taskwhitelist)
else:
@@ -135,24 +131,17 @@ class SignatureGeneratorBasic(SignatureGenerator):
for task in taskdeps:
d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])

def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
# Return True if we should keep the dependency, False to drop it
# We only manipulate the dependencies for packages not in the whitelist
if self.twl and not self.twl.search(recipename):
# then process the actual dependencies
if self.twl.search(depname):
return False
return True

def get_taskhash(self, fn, task, deps, dataCache):
k = fn + "." + task
data = dataCache.basetaskhash[k]
self.runtaskdeps[k] = []
recipename = dataCache.pkg_fn[fn]
for dep in sorted(deps, key=clean_basepath):
depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
continue
# We only manipulate the dependencies for packages not in the whitelist
if self.twl and not self.twl.search(dataCache.pkg_fn[fn]):
# then process the actual dependencies
dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn')
if self.twl.search(dataCache.pkg_fn[dep_fn]):
continue
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
data = data + self.taskhash[dep]
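Note (editorial): both sides of this hunk apply the same rule, one through the rundep_check() helper and the other inline in get_taskhash(): when the recipe being hashed is not matched by BB_HASHTASK_WHITELIST, dependencies on whitelisted recipes are excluded from its task hash. A compact sketch of that rule (function and argument names are illustrative):

    import re

    def deps_for_hash(recipename, deps, dep_recipe, taskwhitelist):
        # taskwhitelist is the BB_HASHTASK_WHITELIST regex, or None
        twl = re.compile(taskwhitelist) if taskwhitelist else None
        kept = []
        for dep in deps:
            # Drop a dependency only when this recipe is outside the whitelist
            # but the dependency's recipe is inside it.
            if twl and not twl.search(recipename) and twl.search(dep_recipe[dep]):
                continue
            kept.append(dep)
        return kept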
@@ -261,13 +250,11 @@ def compare_sigfiles(a, b):

if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
if a_data['basewhitelist'] and b_data['basewhitelist']:
print "changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])
print "changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])

if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
print "taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist'])
if a_data['taskwhitelist'] and b_data['taskwhitelist']:
print "changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])
print "changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])

if a_data['taskdeps'] != b_data['taskdeps']:
print "Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))
@@ -279,8 +266,7 @@ def compare_sigfiles(a, b):
if changed:
for dep in changed:
print "List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])
if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
print "changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])
print "changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])
if added:
for dep in added:
print "Dependency on variable %s was added" % (dep)
@@ -300,27 +286,16 @@ def compare_sigfiles(a, b):
changed, added, removed = dict_diff(a, b)
if added:
for dep in added:
bdep_found = False
if removed:
for bdep in removed:
if a[dep] == b[bdep]:
#print "Dependency on task %s was replaced by %s with same hash" % (dep, bdep)
bdep_found = True
if not bdep_found:
print "Dependency on task %s was added with hash %s" % (dep, a[dep])
print "Dependency on task %s was added" % (dep)
if removed:
for dep in removed:
adep_found = False
if added:
for adep in added:
if a[adep] == b[dep]:
#print "Dependency on task %s was replaced by %s with same hash" % (adep, dep)
adep_found = True
if not adep_found:
print "Dependency on task %s was removed with hash %s" % (dep, b[dep])
print "Dependency on task %s was removed" % (dep)
if changed:
for dep in changed:
print "Hash for dependent task %s changed from %s to %s" % (dep, a[dep], b[dep])
elif 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))
print "changed items: %s" % a_data['runtaskdeps'].symmetric_difference(b_data['runtaskdeps'])

def dump_sigfile(a):
p1 = pickle.Unpickler(file(a, "rb"))
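Note (editorial): the dependency loop above treats an added task whose hash equals a removed task's hash as a rename rather than a real change, and only reports the remainder. A small worked sketch with hypothetical task names and hashes:

    a = {"x.do_compile": "aaa", "y.do_install": "bbb"}   # new run-task hashes
    b = {"z.do_compile": "aaa"}                          # old run-task hashes

    added = set(a) - set(b)
    removed = set(b) - set(a)
    for dep in sorted(added):
        # An added dependency matching a removed one by hash is only a rename.
        if any(a[dep] == b[old] for old in removed):
            continue
        print("Dependency on task %s was added with hash %s" % (dep, a[dep]))
    # Prints only the y.do_install line; x.do_compile matches z.do_compile's hash.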
@@ -1,110 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
from bb.ui.crumbs.progressbar import HobProgressBar
|
||||
from bb.ui.crumbs.hobwidget import hic
|
||||
from bb.ui.crumbs.runningbuild import RunningBuildTreeView
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
|
||||
#
|
||||
# BuildDetailsPage
|
||||
#
|
||||
|
||||
class BuildDetailsPage (HobPage):
|
||||
|
||||
def __init__(self, builder):
|
||||
super(BuildDetailsPage, self).__init__(builder, "Building ...")
|
||||
|
||||
# create visual elements
|
||||
self.create_visual_elements()
|
||||
|
||||
def create_visual_elements(self):
|
||||
# create visual elements
|
||||
self.vbox = gtk.VBox(False, 15)
|
||||
|
||||
self.progress_box = gtk.HBox(False, 5)
|
||||
self.progress_bar = HobProgressBar()
|
||||
self.progress_box.pack_start(self.progress_bar, expand=True, fill=True)
|
||||
self.stop_button = gtk.LinkButton("Stop the build process", "Stop")
|
||||
self.stop_button.connect("clicked", self.stop_button_clicked_cb)
|
||||
self.progress_box.pack_end(self.stop_button, expand=False, fill=False)
|
||||
|
||||
self.build_tv = RunningBuildTreeView(readonly=True)
|
||||
self.build_tv.set_model(self.builder.handler.build.model)
|
||||
self.scrolled_view = gtk.ScrolledWindow ()
|
||||
self.scrolled_view.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
self.scrolled_view.add(self.build_tv)
|
||||
|
||||
self.button_box = gtk.HBox(False, 5)
|
||||
self.back_button = gtk.LinkButton("Go back to Image Configuration screen", "<< Back to image configuration")
|
||||
self.back_button.connect("clicked", self.back_button_clicked_cb)
|
||||
self.button_box.pack_start(self.back_button, expand=False, fill=False)
|
||||
|
||||
def _remove_all_widget(self):
|
||||
children = self.vbox.get_children() or []
|
||||
for child in children:
|
||||
self.vbox.remove(child)
|
||||
children = self.box_group_area.get_children() or []
|
||||
for child in children:
|
||||
self.box_group_area.remove(child)
|
||||
children = self.get_children() or []
|
||||
for child in children:
|
||||
self.remove(child)
|
||||
|
||||
def show_page(self, step):
|
||||
self._remove_all_widget()
|
||||
if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
|
||||
self.title = "Building packages ..."
|
||||
else:
|
||||
self.title = "Building image ..."
|
||||
self.build_details_top = self.add_onto_top_bar(None)
|
||||
self.pack_start(self.build_details_top, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
|
||||
self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
|
||||
|
||||
self.progress_bar.reset()
|
||||
self.vbox.pack_start(self.progress_box, expand=False, fill=False)
|
||||
|
||||
self.vbox.pack_start(self.scrolled_view, expand=True, fill=True)
|
||||
|
||||
self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
|
||||
self.show_all()
|
||||
self.back_button.hide()
|
||||
|
||||
def update_progress_bar(self, title, fraction, status=True):
|
||||
self.progress_bar.update(fraction)
|
||||
self.progress_bar.set_title(title)
|
||||
self.progress_bar.set_rcstyle(status)
|
||||
|
||||
def back_button_clicked_cb(self, button):
|
||||
self.builder.show_configuration()
|
||||
|
||||
def show_back_button(self):
|
||||
self.back_button.show()
|
||||
|
||||
def stop_button_clicked_cb(self, button):
|
||||
self.builder.stop_build()
|
||||
|
||||
def hide_stop_button(self):
|
||||
self.stop_button.hide()
|
||||
@@ -1,873 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import copy
|
||||
import os
|
||||
import subprocess
|
||||
import shlex
|
||||
from bb.ui.crumbs.template import TemplateMgr
|
||||
from bb.ui.crumbs.imageconfigurationpage import ImageConfigurationPage
|
||||
from bb.ui.crumbs.recipeselectionpage import RecipeSelectionPage
|
||||
from bb.ui.crumbs.packageselectionpage import PackageSelectionPage
|
||||
from bb.ui.crumbs.builddetailspage import BuildDetailsPage
|
||||
from bb.ui.crumbs.imagedetailspage import ImageDetailsPage
|
||||
from bb.ui.crumbs.hobwidget import hwc
|
||||
from bb.ui.crumbs.hig import CrumbsDialog, BinbDialog, \
|
||||
AdvancedSettingDialog, LayerSelectionDialog, \
|
||||
DeployImageDialog, ImageSelectionDialog
|
||||
|
||||
class Configuration:
|
||||
'''Represents the data structure of configuration.'''
|
||||
|
||||
def __init__(self, params):
|
||||
# Settings
|
||||
self.curr_mach = ""
|
||||
self.curr_distro = params["distro"]
|
||||
self.dldir = params["dldir"]
|
||||
self.sstatedir = params["sstatedir"]
|
||||
self.sstatemirror = params["sstatemirror"]
|
||||
self.pmake = params["pmake"]
|
||||
self.bbthread = params["bbthread"]
|
||||
self.curr_package_format = " ".join(params["pclass"].split("package_")).strip()
|
||||
self.image_rootfs_size = params["image_rootfs_size"]
|
||||
self.image_extra_size = params["image_extra_size"]
|
||||
self.image_overhead_factor = params['image_overhead_factor']
|
||||
self.incompat_license = params["incompat_license"]
|
||||
self.curr_sdk_machine = params["sdk_machine"]
|
||||
self.extra_setting = {}
|
||||
self.toolchain_build = False
|
||||
self.image_fstypes = params["image_fstypes"].split()
|
||||
# bblayers.conf
|
||||
self.layers = params["layer"].split()
|
||||
# image/recipes/packages
|
||||
self.selected_image = None
|
||||
self.selected_recipes = []
|
||||
self.selected_packages = []
|
||||
|
||||
def load(self, template):
|
||||
self.curr_mach = template.getVar("MACHINE")
|
||||
self.curr_package_format = " ".join(template.getVar("PACKAGE_CLASSES").split("package_")).strip()
|
||||
self.curr_distro = template.getVar("DISTRO")
|
||||
self.dldir = template.getVar("DL_DIR")
|
||||
self.sstatedir = template.getVar("SSTATE_DIR")
|
||||
self.sstatemirror = template.getVar("SSTATE_MIRROR")
|
||||
self.pmake = int(template.getVar("PARALLEL_MAKE").split()[1])
|
||||
self.bbthread = int(template.getVar("BB_NUMBER_THREAD"))
|
||||
self.image_rootfs_size = int(template.getVar("IMAGE_ROOTFS_SIZE"))
|
||||
self.image_extra_size = int(template.getVar("IMAGE_EXTRA_SPACE"))
|
||||
# image_overhead_factor is read-only.
|
||||
self.incompat_license = template.getVar("INCOMPATIBLE_LICENSE")
|
||||
self.curr_sdk_machine = template.getVar("SDKMACHINE")
|
||||
self.extra_setting = eval(template.getVar("EXTRA_SETTING"))
|
||||
self.toolchain_build = eval(template.getVar("TOOLCHAIN_BUILD"))
|
||||
self.image_fstypes = template.getVar("IMAGE_FSTYPES").split()
|
||||
# bblayers.conf
|
||||
self.layers = template.getVar("BBLAYERS").split()
|
||||
# image/recipes/packages
|
||||
self.selected_image = template.getVar("__SELECTED_IMAGE__")
|
||||
self.selected_recipes = template.getVar("DEPENDS").split()
|
||||
self.selected_packages = template.getVar("IMAGE_INSTALL").split()
|
||||
|
||||
def save(self, template, filename):
|
||||
# bblayers.conf
|
||||
template.setVar("BBLAYERS", " ".join(self.layers))
|
||||
# local.conf
|
||||
template.setVar("MACHINE", self.curr_mach)
|
||||
template.setVar("DISTRO", self.curr_distro)
|
||||
template.setVar("DL_DIR", self.dldir)
|
||||
template.setVar("SSTATE_DIR", self.sstatedir)
|
||||
template.setVar("SSTATE_MIRROR", self.sstatemirror)
|
||||
template.setVar("PARALLEL_MAKE", "-j %s" % self.pmake)
|
||||
template.setVar("BB_NUMBER_THREAD", self.bbthread)
|
||||
template.setVar("PACKAGE_CLASSES", " ".join(["package_" + i for i in self.curr_package_format.split()]))
|
||||
template.setVar("IMAGE_ROOTFS_SIZE", self.image_rootfs_size)
|
||||
template.setVar("IMAGE_EXTRA_SPACE", self.image_extra_size)
|
||||
template.setVar("INCOMPATIBLE_LICENSE", self.incompat_license)
|
||||
template.setVar("SDKMACHINE", self.curr_sdk_machine)
|
||||
template.setVar("EXTRA_SETTING", self.extra_setting)
|
||||
template.setVar("TOOLCHAIN_BUILD", self.toolchain_build)
|
||||
template.setVar("IMAGE_FSTYPES", " ".join(self.image_fstypes).lstrip(" "))
|
||||
# image/recipes/packages
|
||||
self.selected_image = filename
|
||||
template.setVar("__SELECTED_IMAGE__", self.selected_image)
|
||||
template.setVar("DEPENDS", self.selected_recipes)
|
||||
template.setVar("IMAGE_INSTALL", self.selected_packages)
|
||||
|
||||
class Parameters:
|
||||
'''Represents other variables like available machines, etc.'''
|
||||
|
||||
def __init__(self, params):
|
||||
# Variables
|
||||
self.all_machines = []
|
||||
self.all_package_formats = []
|
||||
self.all_distros = []
|
||||
self.all_sdk_machines = []
|
||||
self.max_threads = params["max_threads"]
|
||||
self.all_layers = []
|
||||
self.core_base = params["core_base"]
|
||||
self.image_names = []
|
||||
self.image_addr = params["image_addr"]
|
||||
self.image_types = params["image_types"].split()
|
||||
|
||||
class Builder(gtk.Window):
|
||||
|
||||
(MACHINE_SELECTION,
|
||||
LAYER_CHANGED,
|
||||
RCPPKGINFO_POPULATING,
|
||||
RCPPKGINFO_POPULATED,
|
||||
RECIPE_SELECTION,
|
||||
PACKAGE_GENERATING,
|
||||
PACKAGE_GENERATED,
|
||||
PACKAGE_SELECTION,
|
||||
FAST_IMAGE_GENERATING,
|
||||
IMAGE_GENERATING,
|
||||
IMAGE_GENERATED,
|
||||
MY_IMAGE_OPENED,
|
||||
BACK,
|
||||
END_NOOP) = range(14)
|
||||
|
||||
(IMAGE_CONFIGURATION,
|
||||
RECIPE_DETAILS,
|
||||
BUILD_DETAILS,
|
||||
PACKAGE_DETAILS,
|
||||
IMAGE_DETAILS,
|
||||
END_TAB) = range(6)
|
||||
|
||||
__step2page__ = {
|
||||
MACHINE_SELECTION : IMAGE_CONFIGURATION,
|
||||
LAYER_CHANGED : IMAGE_CONFIGURATION,
|
||||
RCPPKGINFO_POPULATING : IMAGE_CONFIGURATION,
|
||||
RCPPKGINFO_POPULATED : IMAGE_CONFIGURATION,
|
||||
RECIPE_SELECTION : RECIPE_DETAILS,
|
||||
PACKAGE_GENERATING : BUILD_DETAILS,
|
||||
PACKAGE_GENERATED : PACKAGE_DETAILS,
|
||||
PACKAGE_SELECTION : PACKAGE_DETAILS,
|
||||
FAST_IMAGE_GENERATING : BUILD_DETAILS,
|
||||
IMAGE_GENERATING : BUILD_DETAILS,
|
||||
IMAGE_GENERATED : IMAGE_DETAILS,
|
||||
MY_IMAGE_OPENED : IMAGE_DETAILS,
|
||||
END_NOOP : None,
|
||||
}
|
||||
|
||||
def __init__(self, hobHandler, recipe_model, package_model):
|
||||
super(Builder, self).__init__()
|
||||
|
||||
# handler
|
||||
self.handler = hobHandler
|
||||
|
||||
self.template = None
|
||||
|
||||
# settings
|
||||
params = self.handler.get_parameters()
|
||||
self.configuration = Configuration(params)
|
||||
self.parameters = Parameters(params)
|
||||
|
||||
# build step
|
||||
self.current_step = None
|
||||
self.previous_step = None
|
||||
|
||||
self.stopping = False
|
||||
self.build_succeeded = True
|
||||
|
||||
# recipe model and package model
|
||||
self.recipe_model = recipe_model
|
||||
self.package_model = package_model
|
||||
|
||||
# create visual elements
|
||||
self.create_visual_elements()
|
||||
|
||||
# connect the signals to functions
|
||||
#self.connect("configure-event", self.resize_window_cb)
|
||||
self.connect("delete-event", self.destroy_window_cb)
|
||||
self.recipe_model.connect ("recipe-selection-changed", self.recipelist_changed_cb)
|
||||
self.package_model.connect("package-selection-changed", self.packagelist_changed_cb)
|
||||
self.recipe_model.connect ("recipelist-populated", self.recipelist_populated_cb)
|
||||
self.package_model.connect("packagelist-populated", self.packagelist_populated_cb)
|
||||
self.handler.connect("config-updated", self.handler_config_updated_cb)
|
||||
self.handler.connect("package-formats-updated", self.handler_package_formats_updated_cb)
|
||||
self.handler.connect("layers-updated", self.handler_layers_updated_cb)
|
||||
self.handler.connect("parsing-started", self.handler_parsing_started_cb)
|
||||
self.handler.connect("parsing", self.handler_parsing_cb)
|
||||
self.handler.connect("parsing-completed", self.handler_parsing_completed_cb)
|
||||
self.handler.build.connect("build-started", self.handler_build_started_cb)
|
||||
self.handler.build.connect("build-succeeded", self.handler_build_succeeded_cb)
|
||||
self.handler.build.connect("build-failed", self.handler_build_failed_cb)
|
||||
self.handler.build.connect("task-started", self.handler_task_started_cb)
|
||||
self.handler.connect("generating-data", self.handler_generating_data_cb)
|
||||
self.handler.connect("data-generated", self.handler_data_generated_cb)
|
||||
self.handler.connect("command-succeeded", self.handler_command_succeeded_cb)
|
||||
self.handler.connect("command-failed", self.handler_command_failed_cb)
|
||||
|
||||
self.switch_page(self.MACHINE_SELECTION)
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.set_title("HOB -- Image Creator")
|
||||
self.set_icon_name("applications-development")
|
||||
self.set_position(gtk.WIN_POS_CENTER_ALWAYS)
|
||||
self.set_resizable(True)
|
||||
window_width = self.get_screen().get_width()
|
||||
window_height = self.get_screen().get_height()
|
||||
if window_width >= hwc.MAIN_WIN_WIDTH:
|
||||
window_width = hwc.MAIN_WIN_WIDTH
|
||||
window_height = hwc.MAIN_WIN_HEIGHT
|
||||
else:
|
||||
lbl = "<b>Screen dimension mismatched</b>\nfor better usability and visual effects,"
|
||||
lbl = lbl + " the screen dimension should be 1024x768 or above."
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
self.set_size_request(window_width, window_height)
|
||||
|
||||
self.vbox = gtk.VBox(False, 0)
|
||||
self.vbox.set_border_width(0)
|
||||
self.add(self.vbox)
|
||||
|
||||
# create pages
|
||||
self.image_configuration_page = ImageConfigurationPage(self)
|
||||
self.recipe_details_page = RecipeSelectionPage(self)
|
||||
self.build_details_page = BuildDetailsPage(self)
|
||||
self.package_details_page = PackageSelectionPage(self)
|
||||
self.image_details_page = ImageDetailsPage(self)
|
||||
|
||||
self.nb = gtk.Notebook()
|
||||
self.nb.set_show_tabs(False)
|
||||
self.nb.insert_page(self.image_configuration_page, None, self.IMAGE_CONFIGURATION)
|
||||
self.nb.insert_page(self.recipe_details_page, None, self.RECIPE_DETAILS)
|
||||
self.nb.insert_page(self.build_details_page, None, self.BUILD_DETAILS)
|
||||
self.nb.insert_page(self.package_details_page, None, self.PACKAGE_DETAILS)
|
||||
self.nb.insert_page(self.image_details_page, None, self.IMAGE_DETAILS)
|
||||
self.vbox.pack_start(self.nb, expand=True, fill=True)
|
||||
|
||||
self.show_all()
|
||||
self.nb.set_current_page(0)
|
||||
|
||||
def get_split_model(self):
|
||||
return self.handler.split_model
|
||||
|
||||
def load_template(self, path):
|
||||
self.template = TemplateMgr()
|
||||
self.template.load(path)
|
||||
self.configuration.load(self.template)
|
||||
|
||||
if self.get_split_model():
|
||||
if not set(self.configuration.layers) <= set(self.parameters.all_layers):
|
||||
return False
|
||||
else:
|
||||
for layer in self.configuration.layers:
|
||||
if not os.path.exists(layer+'/conf/layer.conf'):
|
||||
return False
|
||||
|
||||
self.switch_page(self.LAYER_CHANGED)
|
||||
|
||||
self.template.destroy()
|
||||
self.template = None
|
||||
|
||||
def save_template(self, path):
|
||||
if path.rfind("/") == -1:
|
||||
filename = "default"
|
||||
path = "."
|
||||
else:
|
||||
filename = path[path.rfind("/") + 1:len(path)]
|
||||
path = path[0:path.rfind("/")]
|
||||
|
||||
self.template = TemplateMgr()
|
||||
self.template.open(filename, path)
|
||||
self.configuration.save(self.template, filename)
|
||||
|
||||
self.template.save()
|
||||
self.template.destroy()
|
||||
self.template = None
|
||||
|
||||
def switch_page(self, next_step):
|
||||
# Main Workflow (Business Logic)
|
||||
self.nb.set_current_page(self.__step2page__[next_step])
|
||||
|
||||
if next_step == self.MACHINE_SELECTION: # init step
|
||||
self.image_configuration_page.show_machine()
|
||||
|
||||
elif next_step == self.LAYER_CHANGED:
|
||||
# after layers is changd by users
|
||||
self.image_configuration_page.show_machine()
|
||||
self.handler.refresh_layers(self.configuration.layers)
|
||||
|
||||
elif next_step == self.RCPPKGINFO_POPULATING:
|
||||
# MACHINE CHANGED action or SETTINGS CHANGED
|
||||
# show the progress bar
|
||||
self.image_configuration_page.show_info_populating()
|
||||
self.generate_recipes()
|
||||
|
||||
elif next_step == self.RCPPKGINFO_POPULATED:
|
||||
self.image_configuration_page.show_info_populated()
|
||||
|
||||
elif next_step == self.RECIPE_SELECTION:
|
||||
pass
|
||||
|
||||
elif next_step == self.PACKAGE_SELECTION:
|
||||
pass
|
||||
|
||||
elif next_step == self.PACKAGE_GENERATING or next_step == self.FAST_IMAGE_GENERATING:
|
||||
# both PACKAGE_GENEATING and FAST_IMAGE_GENERATING share the same page
|
||||
self.build_details_page.show_page(next_step)
|
||||
self.generate_packages()
|
||||
|
||||
elif next_step == self.PACKAGE_GENERATED:
|
||||
pass
|
||||
|
||||
elif next_step == self.IMAGE_GENERATING:
|
||||
# after packages are generated, selected_packages need to
|
||||
# be updated in package_model per selected_image in recipe_model
|
||||
self.build_details_page.show_page(next_step)
|
||||
self.generate_image()
|
||||
|
||||
elif next_step == self.IMAGE_GENERATED:
|
||||
self.image_details_page.show_page(next_step)
|
||||
|
||||
elif next_step == self.MY_IMAGE_OPENED:
|
||||
self.image_details_page.show_page(next_step)
|
||||
|
||||
self.previous_step = self.current_step
|
||||
self.current_step = next_step
|
||||
|
||||
def set_user_config(self):
|
||||
self.handler.init_cooker()
|
||||
# set bb layers
|
||||
self.handler.set_bblayers(self.configuration.layers)
|
||||
# set local configuration
|
||||
self.handler.set_machine(self.configuration.curr_mach)
|
||||
self.handler.set_package_format(self.configuration.curr_package_format)
|
||||
self.handler.set_distro(self.configuration.curr_distro)
|
||||
self.handler.set_dl_dir(self.configuration.dldir)
|
||||
self.handler.set_sstate_dir(self.configuration.sstatedir)
|
||||
self.handler.set_sstate_mirror(self.configuration.sstatemirror)
|
||||
self.handler.set_pmake(self.configuration.pmake)
|
||||
self.handler.set_bbthreads(self.configuration.bbthread)
|
||||
self.handler.set_rootfs_size(self.configuration.image_rootfs_size)
|
||||
self.handler.set_extra_size(self.configuration.image_extra_size)
|
||||
self.handler.set_incompatible_license(self.configuration.incompat_license)
|
||||
self.handler.set_sdk_machine(self.configuration.curr_sdk_machine)
|
||||
self.handler.set_image_fstypes(self.configuration.image_fstypes)
|
||||
self.handler.set_extra_config(self.configuration.extra_setting)
|
||||
self.handler.set_extra_inherit("packageinfo")
|
||||
|
||||
def reset_recipe_model(self):
|
||||
self.recipe_model.reset()
|
||||
|
||||
def reset_package_model(self):
|
||||
self.package_model.reset()
|
||||
|
||||
def update_recipe_model(self, selected_image, selected_recipes):
|
||||
self.recipe_model.set_selected_image(selected_image)
|
||||
self.recipe_model.set_selected_recipes(selected_recipes)
|
||||
|
||||
def update_package_model(self, selected_packages):
|
||||
left = self.package_model.set_selected_packages(selected_packages)
|
||||
self.configuration.selected_packages += left
|
||||
|
||||
def generate_packages(self):
|
||||
# Build packages
|
||||
_, all_recipes = self.recipe_model.get_selected_recipes()
|
||||
self.set_user_config()
|
||||
self.handler.reset_build()
|
||||
self.handler.generate_packages(all_recipes)
|
||||
|
||||
def generate_recipes(self):
|
||||
# Parse recipes
|
||||
self.set_user_config()
|
||||
self.handler.generate_recipes()
|
||||
|
||||
def generate_image(self):
|
||||
# Build image
|
||||
self.set_user_config()
|
||||
all_packages = self.package_model.get_selected_packages()
|
||||
self.handler.reset_build()
|
||||
self.handler.generate_image(all_packages, self.configuration.toolchain_build)
|
||||
|
||||
|
||||
# Callback Functions
|
||||
def handler_config_updated_cb(self, handler, which, values):
|
||||
if which == "distro":
|
||||
self.parameters.all_distros = values
|
||||
elif which == "machine":
|
||||
self.parameters.all_machines = values
|
||||
self.image_configuration_page.update_machine_combo()
|
||||
elif which == "machine-sdk":
|
||||
self.parameters.all_sdk_machines = values
|
||||
|
||||
def handler_package_formats_updated_cb(self, handler, formats):
|
||||
self.parameters.all_package_formats = formats
|
||||
|
||||
def handler_layers_updated_cb(self, handler, layers):
|
||||
self.parameters.all_layers = layers
|
||||
|
||||
def handler_command_succeeded_cb(self, handler, initcmd):
|
||||
if initcmd == self.handler.LAYERS_REFRESH:
|
||||
self.image_configuration_page.switch_machine_combo()
|
||||
elif initcmd in [self.handler.GENERATE_RECIPES,
|
||||
self.handler.GENERATE_PACKAGES,
|
||||
self.handler.GENERATE_IMAGE]:
|
||||
self.handler.request_package_info_async()
|
||||
elif initcmd == self.handler.POPULATE_PACKAGEINFO:
|
||||
if self.current_step == self.FAST_IMAGE_GENERATING:
|
||||
self.switch_page(self.IMAGE_GENERATING)
|
||||
elif self.current_step == self.RCPPKGINFO_POPULATING:
|
||||
self.switch_page(self.RCPPKGINFO_POPULATED)
|
||||
elif self.current_step == self.PACKAGE_GENERATING:
|
||||
self.switch_page(self.PACKAGE_GENERATED)
|
||||
elif self.current_step == self.IMAGE_GENERATING:
|
||||
self.switch_page(self.IMAGE_GENERATED)
|
||||
|
||||
def handler_command_failed_cb(self, handler, msg):
|
||||
lbl = "<b>Error</b>\n"
|
||||
lbl = lbl + "%s\n\n" % msg
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_WARNING)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
self.handler.clear_busy()
|
||||
self.configuration.curr_mach = None
|
||||
self.image_configuration_page.switch_machine_combo()
|
||||
self.switch_page(self.MACHINE_SELECTION)
|
||||
|
||||
def window_sensitive(self, sensitive):
|
||||
self.set_sensitive(sensitive)
|
||||
if sensitive:
|
||||
self.get_root_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
|
||||
else:
|
||||
self.get_root_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
|
||||
|
||||
|
||||
def handler_generating_data_cb(self, handler):
|
||||
self.window_sensitive(False)
|
||||
|
||||
def handler_data_generated_cb(self, handler):
|
||||
self.window_sensitive(True)
|
||||
|
||||
def recipelist_populated_cb(self, recipe_model):
|
||||
selected_image = self.configuration.selected_image
|
||||
selected_recipes = self.configuration.selected_recipes[:]
|
||||
selected_packages = self.configuration.selected_packages[:]
|
||||
|
||||
self.recipe_model.image_list_append(selected_image,
|
||||
" ".join(selected_recipes),
|
||||
" ".join(selected_packages))
|
||||
|
||||
self.image_configuration_page.update_image_combo(self.recipe_model, selected_image)
|
||||
|
||||
self.update_recipe_model(selected_image, selected_recipes)
|
||||
|
||||
def packagelist_populated_cb(self, package_model):
|
||||
selected_packages = self.configuration.selected_packages[:]
|
||||
self.update_package_model(selected_packages)
|
||||
|
||||
def recipelist_changed_cb(self, recipe_model):
|
||||
self.recipe_details_page.refresh_selection()
|
||||
|
||||
def packagelist_changed_cb(self, package_model):
|
||||
self.package_details_page.refresh_selection()
|
||||
|
||||
def handler_parsing_started_cb(self, handler, message):
|
||||
if self.current_step != self.RCPPKGINFO_POPULATING:
|
||||
return
|
||||
|
||||
fraction = 0
|
||||
if message["eventname"] == "TreeDataPreparationStarted":
|
||||
fraction = 0.6 + fraction
|
||||
self.image_configuration_page.update_progress_bar(message["title"], fraction)
|
||||
|
||||
def handler_parsing_cb(self, handler, message):
|
||||
if self.current_step != self.RCPPKGINFO_POPULATING:
|
||||
return
|
||||
|
||||
fraction = message["current"] * 1.0/message["total"]
|
||||
if message["eventname"] == "TreeDataPreparationProgress":
|
||||
fraction = 0.6 + 0.4 * fraction
|
||||
else:
|
||||
fraction = 0.6 * fraction
|
||||
self.image_configuration_page.update_progress_bar(message["title"], fraction)
|
||||
|
||||
def handler_parsing_completed_cb(self, handler, message):
|
||||
if self.current_step != self.RCPPKGINFO_POPULATING:
|
||||
return
|
||||
|
||||
if message["eventname"] == "TreeDataPreparationCompleted":
|
||||
fraction = 1.0
|
||||
else:
|
||||
fraction = 0.6
|
||||
self.image_configuration_page.update_progress_bar(message["title"], fraction)
|
||||
|
||||
def handler_build_started_cb(self, running_build):
|
||||
if self.current_step == self.FAST_IMAGE_GENERATING:
|
||||
fraction = 0
|
||||
elif self.current_step == self.IMAGE_GENERATING:
|
||||
if self.previous_step == self.FAST_IMAGE_GENERATING:
|
||||
fraction = 0.9
|
||||
else:
|
||||
fraction = 0
|
||||
elif self.current_step == self.PACKAGE_GENERATING:
|
||||
fraction = 0
|
||||
self.build_details_page.update_progress_bar("Build Started: ", fraction)
|
||||
|
||||
def handler_build_succeeded_cb(self, running_build):
|
||||
self.build_succeeded = True
|
||||
if self.current_step == self.FAST_IMAGE_GENERATING:
|
||||
fraction = 0.9
|
||||
elif self.current_step == self.IMAGE_GENERATING:
|
||||
fraction = 1.0
|
||||
self.parameters.image_names = []
|
||||
linkname = 'hob-image-' + self.configuration.curr_mach
|
||||
for image_type in self.parameters.image_types:
|
||||
linkpath = self.parameters.image_addr + '/' + linkname + '.' + image_type
|
||||
if os.path.exists(linkpath):
|
||||
self.parameters.image_names.append(os.readlink(linkpath))
|
||||
elif self.current_step == self.PACKAGE_GENERATING:
|
||||
fraction = 1.0
|
||||
self.build_details_page.update_progress_bar("Build Completed: ", fraction)
|
||||
|
||||
def handler_build_failed_cb(self, running_build):
|
||||
self.build_succeeded = False
|
||||
if self.current_step == self.FAST_IMAGE_GENERATING:
|
||||
fraction = 0.9
|
||||
elif self.current_step == self.IMAGE_GENERATING:
|
||||
fraction = 1.0
|
||||
elif self.current_step == self.PACKAGE_GENERATING:
|
||||
fraction = 1.0
|
||||
self.build_details_page.update_progress_bar("Build Failed: ", fraction, False)
|
||||
self.build_details_page.show_back_button()
|
||||
self.build_details_page.hide_stop_button()
|
||||
self.handler.build_failed_async()
|
||||
self.stopping = False
|
||||
|
||||
def handler_task_started_cb(self, running_build, message):
|
||||
fraction = message["current"] * 1.0/message["total"]
|
||||
title = "Build packages"
|
||||
if self.current_step == self.FAST_IMAGE_GENERATING:
|
||||
if message["eventname"] == "sceneQueueTaskStarted":
|
||||
fraction = 0.27 * fraction
|
||||
elif message["eventname"] == "runQueueTaskStarted":
|
||||
fraction = 0.27 + 0.63 * fraction
|
||||
elif self.current_step == self.IMAGE_GENERATING:
|
||||
title = "Build image"
|
||||
if self.previous_step == self.FAST_IMAGE_GENERATING:
|
||||
if message["eventname"] == "sceneQueueTaskStarted":
|
||||
fraction = 0.27 + 0.63 + 0.03 * fraction
|
||||
elif message["eventname"] == "runQueueTaskStarted":
|
||||
fraction = 0.27 + 0.63 + 0.03 + 0.07 * fraction
|
||||
else:
|
||||
if message["eventname"] == "sceneQueueTaskStarted":
|
||||
fraction = 0.2 * fraction
|
||||
elif message["eventname"] == "runQueueTaskStarted":
|
||||
fraction = 0.2 + 0.8 * fraction
|
||||
elif self.current_step == self.PACKAGE_GENERATING:
|
||||
if message["eventname"] == "sceneQueueTaskStarted":
|
||||
fraction = 0.2 * fraction
|
||||
elif message["eventname"] == "runQueueTaskStarted":
|
||||
fraction = 0.2 + 0.8 * fraction
|
||||
self.build_details_page.update_progress_bar(title + ": ", fraction)
|
||||
|
||||
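Note (editorial): the weights above divide the progress bar between build phases; for a combined package-plus-image build, setscene tasks fill the first 27%, regular tasks the next 63%, and the follow-on image pass the remaining 3% and 7%, which together reach 1.0. A quick check of that arithmetic (phase labels here are illustrative only):

    def combined_build_fraction(phase, done, total):
        f = done * 1.0 / total
        if phase == "package_setscene":
            return 0.27 * f
        if phase == "package_run":
            return 0.27 + 0.63 * f
        if phase == "image_setscene":
            return 0.27 + 0.63 + 0.03 * f
        return 0.27 + 0.63 + 0.03 + 0.07 * f   # image_run

    assert abs(combined_build_fraction("image_run", 10, 10) - 1.0) < 1e-9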
def destroy_window_cb(self, widget, event):
|
||||
lbl = "<b>Do you really want to exit the Hob image creator?</b>"
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_YES, gtk.RESPONSE_YES)
|
||||
dialog.add_button(gtk.STOCK_NO, gtk.RESPONSE_NO)
|
||||
dialog.set_default_response(gtk.RESPONSE_NO)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
gtk.main_quit()
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def build_packages(self):
|
||||
_, all_recipes = self.recipe_model.get_selected_recipes()
|
||||
if not all_recipes:
|
||||
lbl = "<b>No selections made</b>\nYou have not made any selections"
|
||||
lbl = lbl + " so there isn't anything to bake at this time."
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
return
|
||||
self.switch_page(self.PACKAGE_GENERATING)
|
||||
|
||||
def build_image(self):
|
||||
selected_packages = self.package_model.get_selected_packages()
|
||||
if not selected_packages:
|
||||
lbl = "<b>No selections made</b>\nYou have not made any selections"
|
||||
lbl = lbl + " so there isn't anything to bake at this time."
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
return
|
||||
self.switch_page(self.IMAGE_GENERATING)
|
||||
|
||||
def just_bake(self):
|
||||
selected_image = self.recipe_model.get_selected_image()
|
||||
selected_packages = self.package_model.get_selected_packages() or []
|
||||
|
||||
# If no base image and no selected packages don't build anything
|
||||
if not (selected_packages or selected_image != self.recipe_model.__dummy_image__):
|
||||
lbl = "<b>No selections made</b>\nYou have not made any selections"
|
||||
lbl = lbl + " so there isn't anything to bake at this time."
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
self.switch_page(self.FAST_IMAGE_GENERATING)
|
||||
|
||||
def show_binb_dialog(self, binb):
|
||||
binb_dialog = BinbDialog("Brought in by:", binb, self)
|
||||
binb_dialog.run()
|
||||
binb_dialog.destroy()
|
||||
|
||||
def show_layer_selection_dialog(self):
|
||||
dialog = LayerSelectionDialog(title = "Layer Selection",
|
||||
layers = copy.deepcopy(self.configuration.layers),
|
||||
all_layers = self.parameters.all_layers,
|
||||
split_model = self.get_split_model(),
|
||||
parent = self,
|
||||
flags = gtk.DIALOG_MODAL
|
||||
| gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
| gtk.DIALOG_NO_SEPARATOR,
|
||||
buttons = (gtk.STOCK_OK, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
self.configuration.layers = dialog.layers
|
||||
# DO refresh layers
|
||||
if dialog.layers_changed:
|
||||
self.switch_page(self.LAYER_CHANGED)
|
||||
dialog.destroy()
|
||||
|
||||
def show_load_template_dialog(self):
|
||||
dialog = gtk.FileChooserDialog("Load Template Files", self,
|
||||
gtk.FILE_CHOOSER_ACTION_SAVE,
|
||||
(gtk.STOCK_OPEN, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
filter = gtk.FileFilter()
|
||||
filter.set_name("HOB Files")
|
||||
filter.add_pattern("*.hob")
|
||||
dialog.add_filter(filter)
|
||||
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
path = dialog.get_filename()
|
||||
self.load_template(path)
|
||||
dialog.destroy()
|
||||
|
||||
def show_save_template_dialog(self):
|
||||
dialog = gtk.FileChooserDialog("Save Template Files", self,
|
||||
gtk.FILE_CHOOSER_ACTION_SAVE,
|
||||
(gtk.STOCK_SAVE, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
dialog.set_current_name("hob")
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
path = dialog.get_filename()
|
||||
self.save_template(path)
|
||||
dialog.destroy()
|
||||
|
||||
def show_load_my_images_dialog(self):
|
||||
dialog = ImageSelectionDialog(self.parameters.image_addr, self.parameters.image_types,
|
||||
"Open My Images", self,
|
||||
gtk.FILE_CHOOSER_ACTION_SAVE,
|
||||
(gtk.STOCK_OPEN, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
if not dialog.image_names:
|
||||
lbl = "<b>No selections made</b>\nYou have not made any selections"
|
||||
crumbs_dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
crumbs_dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
crumbs_dialog.run()
|
||||
crumbs_dialog.destroy()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
self.parameters.image_addr = dialog.image_folder
|
||||
self.parameters.image_names = dialog.image_names[:]
|
||||
self.switch_page(self.MY_IMAGE_OPENED)
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
def show_adv_settings_dialog(self):
|
||||
dialog = AdvancedSettingDialog(title = "Settings",
|
||||
configuration = copy.deepcopy(self.configuration),
|
||||
all_image_types = self.parameters.image_types,
|
||||
all_package_formats = self.parameters.all_package_formats,
|
||||
all_distros = self.parameters.all_distros,
|
||||
all_sdk_machines = self.parameters.all_sdk_machines,
|
||||
max_threads = self.parameters.max_threads,
|
||||
split_model = self.get_split_model(),
|
||||
parent = self,
|
||||
flags = gtk.DIALOG_MODAL
|
||||
| gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
| gtk.DIALOG_NO_SEPARATOR,
|
||||
buttons = ("Save", gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
self.configuration = dialog.configuration
|
||||
# DO reparse recipes
|
||||
if dialog.settings_changed:
|
||||
if self.configuration.curr_mach == "":
|
||||
self.switch_page(self.MACHINE_SELECTION)
|
||||
else:
|
||||
self.switch_page(self.RCPPKGINFO_POPULATING)
|
||||
dialog.destroy()
|
||||
|
||||
def deploy_image(self, image_name):
|
||||
if not image_name:
|
||||
lbl = "<b>Please select an image to deploy.</b>"
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
image_path = os.path.join(self.parameters.image_addr, image_name)
|
||||
dialog = DeployImageDialog(title = "Usb Image Maker",
|
||||
image_path = image_path,
|
||||
parent = self,
|
||||
flags = gtk.DIALOG_MODAL
|
||||
| gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
| gtk.DIALOG_NO_SEPARATOR,
|
||||
buttons = ("Close", gtk.RESPONSE_NO,
|
||||
"Make usb image", gtk.RESPONSE_YES))
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
def runqemu_image(self, image_name):
|
||||
if not image_name:
|
||||
lbl = "<b>Please select an image to launch in QEMU.</b>"
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
dialog = gtk.FileChooserDialog("Load Kernel Files", self,
|
||||
gtk.FILE_CHOOSER_ACTION_SAVE,
|
||||
(gtk.STOCK_OPEN, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
filter = gtk.FileFilter()
|
||||
filter.set_name("Kernel Files")
|
||||
filter.add_pattern("*.bin")
|
||||
dialog.add_filter(filter)
|
||||
|
||||
dialog.set_current_folder(self.parameters.image_addr)
|
||||
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
kernel_path = dialog.get_filename()
|
||||
image_path = os.path.join(self.parameters.image_addr, image_name)
|
||||
dialog.destroy()
|
||||
|
||||
if response == gtk.RESPONSE_YES:
|
||||
source_env_path = os.path.join(self.parameters.core_base, "oe-init-build-env")
|
||||
tmp_path = os.path.join(os.getcwd(), "tmp")
|
||||
if os.path.exists(image_path) and os.path.exists(kernel_path) \
|
||||
and os.path.exists(source_env_path) and os.path.exists(tmp_path):
|
||||
cmdline = "/usr/bin/xterm -e "
|
||||
cmdline += "\" export OE_TMPDIR=" + tmp_path + "; "
|
||||
cmdline += "source " + source_env_path + " " + os.getcwd() + "; "
|
||||
cmdline += "runqemu " + kernel_path + " " + image_path + "; bash\""
|
||||
subprocess.Popen(shlex.split(cmdline))
|
||||
else:
|
||||
lbl = "<b>Path error</b>\nOne of your paths is wrong,"
|
||||
lbl = lbl + " please make sure the following paths exist:\n"
|
||||
lbl = lbl + "image path:" + image_path + "\n"
|
||||
lbl = lbl + "kernel path:" + kernel_path + "\n"
|
||||
lbl = lbl + "source environment path:" + source_env_path + "\n"
|
||||
lbl = lbl + "tmp path: " + tmp_path + "."
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
def show_packages(self, ask=True):
|
||||
_, selected_recipes = self.recipe_model.get_selected_recipes()
|
||||
if selected_recipes and ask:
|
||||
lbl = "<b>Package list may be incomplete!</b>\nDo you want to build selected recipes"
|
||||
lbl = lbl + " to get a full list (Yes) or just view the existing packages (No)?"
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
dialog.add_button(gtk.STOCK_YES, gtk.RESPONSE_YES)
|
||||
dialog.add_button(gtk.STOCK_NO, gtk.RESPONSE_NO)
|
||||
dialog.set_default_response(gtk.RESPONSE_YES)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
self.switch_page(self.PACKAGE_GENERATING)
|
||||
else:
|
||||
self.switch_page(self.PACKAGE_SELECTION)
|
||||
else:
|
||||
self.switch_page(self.PACKAGE_SELECTION)
|
||||
|
||||
def show_recipes(self):
|
||||
self.switch_page(self.RECIPE_SELECTION)
|
||||
|
||||
def initiate_new_build(self):
|
||||
self.configuration.curr_mach = ""
|
||||
self.image_configuration_page.switch_machine_combo()
|
||||
self.switch_page(self.MACHINE_SELECTION)
|
||||
|
||||
def show_configuration(self):
|
||||
self.switch_page(self.RCPPKGINFO_POPULATED)
|
||||
|
||||
def stop_build(self):
|
||||
if self.stopping:
|
||||
lbl = "<b>Force Stop build?</b>\nYou've already selected Stop once,"
|
||||
lbl = lbl + " would you like to 'Force Stop' the build?\n\n"
|
||||
lbl = lbl + "This will stop the build as quickly as possible but may"
|
||||
lbl = lbl + " well leave your build directory in an unusable state"
|
||||
lbl = lbl + " that requires manual steps to fix.\n"
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_WARNING)
|
||||
dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
|
||||
dialog.add_button("Force Stop", gtk.RESPONSE_YES)
|
||||
else:
|
||||
lbl = "<b>Stop build?</b>\n\nAre you sure you want to stop this"
|
||||
lbl = lbl + " build?\n\n'Force Stop' will stop the build as quickly as"
|
||||
lbl = lbl + " possible but may well leave your build directory in an"
|
||||
lbl = lbl + " unusable state that requires manual steps to fix.\n\n"
|
||||
lbl = lbl + "'Stop' will stop the build as soon as all in"
|
||||
lbl = lbl + " progress build tasks are finished. However if a"
|
||||
lbl = lbl + " lengthy compilation phase is in progress this may take"
|
||||
lbl = lbl + " some time."
|
||||
dialog = CrumbsDialog(self, lbl, gtk.STOCK_DIALOG_WARNING)
|
||||
dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
|
||||
dialog.add_button("Stop", gtk.RESPONSE_OK)
|
||||
dialog.add_button("Force Stop", gtk.RESPONSE_YES)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
if response != gtk.RESPONSE_CANCEL:
|
||||
self.stopping = True
|
||||
if response == gtk.RESPONSE_OK:
|
||||
self.handler.cancel_build()
|
||||
elif response == gtk.RESPONSE_YES:
|
||||
self.handler.cancel_build(True)
|
||||
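For readers who want to see what the xterm/runqemu invocation above expands to, here is a minimal standalone sketch of the same string assembly. The paths are hypothetical placeholders; only the string handling and the tokenization that gets handed to subprocess.Popen() are exercised.

```python
import shlex

def build_runqemu_cmdline(tmp_path, source_env_path, build_dir, kernel_path, image_path):
    # Mirrors the concatenation in runqemu_image(): everything runs inside one
    # xterm so the user can watch the QEMU session, then drops to a shell.
    cmdline = "/usr/bin/xterm -e "
    cmdline += "\" export OE_TMPDIR=" + tmp_path + "; "
    cmdline += "source " + source_env_path + " " + build_dir + "; "
    cmdline += "runqemu " + kernel_path + " " + image_path + "; bash\""
    return cmdline

if __name__ == "__main__":
    # Hypothetical paths, purely for illustration.
    cmd = build_runqemu_cmdline("/build/tmp", "/poky/oe-init-build-env", "/build",
                                "/images/bzImage.bin", "/images/core-image-minimal.ext3")
    # shlex.split() produces the argument list the GUI passes to subprocess.Popen().
    print(shlex.split(cmd))
```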
bitbake/lib/bb/ui/crumbs/configurator.py (new file, 346 lines)
@@ -0,0 +1,346 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gobject
import copy
import re, os
from bb import data

class Configurator(gobject.GObject):

    """
    A GObject to handle writing modified configuration values back
    to conf files.
    """
    __gsignals__ = {
        "layers-loaded" : (gobject.SIGNAL_RUN_LAST,
                           gobject.TYPE_NONE,
                           ()),
        "layers-changed" : (gobject.SIGNAL_RUN_LAST,
                            gobject.TYPE_NONE,
                            ())
    }

    def __init__(self):
        gobject.GObject.__init__(self)
        self.bblayers = None
        self.enabled_layers = {}
        self.loaded_layers = {}
        self.config = {}
        self.orig_config = {}
        self.preconf = None
        self.postconf = None

    # NOTE: cribbed from the cooker...
    def _parse(self, f, data, include=False):
        try:
            return bb.parse.handle(f, data, include)
        except (IOError, bb.parse.ParseError) as exc:
            parselog.critical("Unable to parse %s: %s" % (f, exc))
            sys.exit(1)

    def _loadConf(self, path):
        def getString(var):
            return bb.data.getVar(var, data, True) or ""

        if self.orig_config:
            del self.orig_config
            self.orig_config = {}

        data = bb.data.init()
        data = self._parse(path, data)

        # We only need to care about certain variables
        mach = getString('MACHINE')
        if mach and mach != self.config.get('MACHINE', ''):
            self.config['MACHINE'] = mach
        sdkmach = getString('SDKMACHINE')
        if sdkmach and sdkmach != self.config.get('SDKMACHINE', ''):
            self.config['SDKMACHINE'] = sdkmach
        distro = getString('DISTRO')
        if not distro:
            distro = "defaultsetup"
        if distro and distro != self.config.get('DISTRO', ''):
            self.config['DISTRO'] = distro
        bbnum = getString('BB_NUMBER_THREADS')
        if bbnum and bbnum != self.config.get('BB_NUMBER_THREADS', ''):
            self.config['BB_NUMBER_THREADS'] = bbnum
        pmake = getString('PARALLEL_MAKE')
        if pmake and pmake != self.config.get('PARALLEL_MAKE', ''):
            self.config['PARALLEL_MAKE'] = pmake
        pclass = getString('PACKAGE_CLASSES')
        if pclass and pclass != self.config.get('PACKAGE_CLASSES', ''):
            self.config['PACKAGE_CLASSES'] = pclass
        fstypes = getString('IMAGE_FSTYPES')
        if fstypes and fstypes != self.config.get('IMAGE_FSTYPES', ''):
            self.config['IMAGE_FSTYPES'] = fstypes

        # Values which aren't always set in the conf must be explicitly
        # loaded as empty values for save to work
        incompat = getString('INCOMPATIBLE_LICENSE')
        if incompat and incompat != self.config.get('INCOMPATIBLE_LICENSE', ''):
            self.config['INCOMPATIBLE_LICENSE'] = incompat
        else:
            self.config['INCOMPATIBLE_LICENSE'] = ""

        # Non-standard, namespaces, variables for GUI preferences
        toolchain = getString('HOB_BUILD_TOOLCHAIN')
        if toolchain and toolchain != self.config.get('HOB_BUILD_TOOLCHAIN', ''):
            self.config['HOB_BUILD_TOOLCHAIN'] = toolchain
        header = getString('HOB_BUILD_TOOLCHAIN_HEADERS')
        if header and header != self.config.get('HOB_BUILD_TOOLCHAIN_HEADERS', ''):
            self.config['HOB_BUILD_TOOLCHAIN_HEADERS'] = header

        self.orig_config = copy.deepcopy(self.config)

    def setConfVar(self, var, val):
        self.config[var] = val

    def getConfVar(self, var):
        if var in self.config:
            return self.config[var]
        else:
            return ""

    def _loadLayerConf(self, path):
        self.bblayers = path
        self.enabled_layers = {}
        self.loaded_layers = {}
        data = bb.data.init()
        data = self._parse(self.bblayers, data)
        layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
        for layer in layers:
            # TODO: we may be better off calling the layer by its
            # BBFILE_COLLECTIONS value?
            name = self._getLayerName(layer)
            self.loaded_layers[name] = layer

        self.enabled_layers = copy.deepcopy(self.loaded_layers)
        self.emit("layers-loaded")

    def _addConfigFile(self, path):
        conffiles = ["local.conf", "hob-pre.conf", "hob-post.conf"]
        pref, sep, filename = path.rpartition("/")

        if filename == "hob-pre.conf":
            self.preconf = path

        if filename == "hob-post.conf":
            self.postconf = path

        if filename in conffiles:
            self._loadConf(path)
        elif filename == "bblayers.conf":
            self._loadLayerConf(path)

    def _splitLayer(self, path):
        # we only care about the path up to /conf/layer.conf
        layerpath, conf, end = path.rpartition("/conf/")
        return layerpath

    def _getLayerName(self, path):
        # Should this be the collection name?
        layerpath, sep, name = path.rpartition("/")
        return name

    def disableLayer(self, layer):
        if layer in self.enabled_layers:
            del self.enabled_layers[layer]

    def addLayerConf(self, confpath):
        layerpath = self._splitLayer(confpath)
        name = self._getLayerName(layerpath)

        if not layerpath or not name:
            return None, None
        elif name not in self.enabled_layers:
            self.addLayer(name, layerpath)
            return name, layerpath
        else:
            return name, None

    def addLayer(self, name, path):
        self.enabled_layers[name] = path

    def _isLayerConfDirty(self):
        # if a different number of layers enabled to what was
        # loaded, definitely different
        if len(self.enabled_layers) != len(self.loaded_layers):
            return True

        for layer in self.loaded_layers:
            # if layer loaded but no longer present, definitely dirty
            if layer not in self.enabled_layers:
                return True

        for layer in self.enabled_layers:
            # if this layer wasn't present at load, definitely dirty
            if layer not in self.loaded_layers:
                return True
            # if this layers path has changed, definitely dirty
            if self.enabled_layers[layer] != self.loaded_layers[layer]:
                return True

        return False

    def _constructLayerEntry(self):
        """
        Returns a string representing the new layer selection
        """
        layers = self.enabled_layers.copy()
        # Construct BBLAYERS entry
        layer_entry = "BBLAYERS = \" \\\n"
        if 'meta' in layers:
            layer_entry = layer_entry + " %s \\\n" % layers['meta']
            del layers['meta']
        for layer in layers:
            layer_entry = layer_entry + " %s \\\n" % layers[layer]
        layer_entry = layer_entry + " \""

        return "".join(layer_entry)

    def writeConfFile(self, conffile, contents):
        """
        Make a backup copy of conffile and write a new file in its stead with
        the lines in the contents list.
        """
        # Create a backup of the conf file
        bkup = "%s~" % conffile
        os.rename(conffile, bkup)

        # Write the contents list object to the conf file
        with open(conffile, "w") as new:
            new.write("".join(contents))

    def updateConf(self, orig_lines, changed_values):
        new_config_lines = []
        for var in changed_values:
            # Convenience function for re.subn(). If the pattern matches
            # return a string which contains an assignment using the same
            # assignment operator as the old assignment.
            def replace_val(matchobj):
                var = matchobj.group(1) # config variable
                op = matchobj.group(2) # assignment operator
                val = changed_values[var] # new config value
                return "%s %s \"%s\"" % (var, op, val)

            pattern = '^\s*(%s)\s*([+=?.]+)(.*)' % re.escape(var)
            p = re.compile(pattern)
            cnt = 0
            replaced = False

            # Iterate over the local.conf lines and if they are a match
            # for the pattern comment out the line and append a new line
            # with the new VAR op "value" entry
            for line in orig_lines:
                new_line, replacements = p.subn(replace_val, line)
                if replacements:
                    orig_lines[cnt] = "#%s" % line
                    new_config_lines.append(new_line)
                    replaced = True
                cnt = cnt + 1

            if not replaced:
                new_config_lines.append("%s = \"%s\"\n" % (var, changed_values[var]))

        # Add the modified variables
        orig_lines.extend(new_config_lines)
        return orig_lines

    def writeConf(self):
        pre_vars = ["MACHINE", "SDKMACHINE", "DISTRO",
                    "INCOMPATIBLE_LICENSE"]
        post_vars = ["BB_NUMBER_THREADS", "PARALLEL_MAKE", "PACKAGE_CLASSES",
                     "IMAGE_FSTYPES", "HOB_BUILD_TOOLCHAIN",
                     "HOB_BUILD_TOOLCHAIN_HEADERS"]
        pre_values = {}
        post_values = {}
        changed_values = {}
        pre_lines = None
        post_lines = None

        for var in self.config:
            val = self.config[var]
            if self.orig_config.get(var, None) != val:
                changed_values[var] = val

        if not len(changed_values):
            return

        for var in changed_values:
            if var in pre_vars:
                pre_values[var] = changed_values[var]
            elif var in post_vars:
                post_values[var] = changed_values[var]

        with open(self.preconf, 'r') as pre:
            pre_lines = pre.readlines()
        pre_lines = self.updateConf(pre_lines, pre_values)
        if len(pre_lines):
            self.writeConfFile(self.preconf, pre_lines)

        with open(self.postconf, 'r') as post:
            post_lines = post.readlines()
        post_lines = self.updateConf(post_lines, post_values)
        if len(post_lines):
            self.writeConfFile(self.postconf, post_lines)

        del self.orig_config
        self.orig_config = copy.deepcopy(self.config)

    def insertTempBBPath(self, bbpath, bbfiles):
        # read the original conf into a list
        with open(self.postconf, 'r') as config:
            config_lines = config.readlines()

        if bbpath:
            config_lines.append("BBPATH := \"${BBPATH}:%s\"\n" % bbpath)
        if bbfiles:
            config_lines.append("BBFILES := \"${BBFILES} %s\"\n" % bbfiles)

        self.writeConfFile(self.postconf, config_lines)

    def writeLayerConf(self):
        # If we've not added/removed new layers don't write
        if not self._isLayerConfDirty():
            return

        # This pattern should find the existing BBLAYERS
        pattern = 'BBLAYERS\s=\s\".*\"'

        replacement = self._constructLayerEntry()

        with open(self.bblayers, "r") as f:
            contents = f.read()
            p = re.compile(pattern, re.DOTALL)
            new = p.sub(replacement, contents)

        self.writeConfFile(self.bblayers, new)

        # set loaded_layers for dirtiness tracking
        self.loaded_layers = copy.deepcopy(self.enabled_layers)

        self.emit("layers-changed")

    def configFound(self, handler, path):
        self._addConfigFile(path)

    def loadConfig(self, path):
        self._addConfigFile(path)
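The core of Configurator.updateConf() above is the regular-expression rewrite of existing assignments: matching lines are commented out and a fresh `VAR op "value"` line is appended, preserving the original assignment operator. In the GUI the full flow is loadConfig() -> setConfVar() -> writeConf(), with writeConfFile() keeping a `~` backup of the previous file. Below is a self-contained sketch of that rewrite idea, detached from BitBake; the variable names and values are made up for illustration.

```python
import re

def update_conf_lines(orig_lines, changed_values):
    # Comment out any existing assignment of a changed variable and append a
    # new 'VAR op "value"' line, reusing the operator found in the old line.
    new_lines = []
    for var, val in changed_values.items():
        pattern = re.compile(r'^\s*(%s)\s*([+=?.]+)(.*)' % re.escape(var))
        replaced = False
        for i, line in enumerate(orig_lines):
            m = pattern.match(line)
            if m:
                orig_lines[i] = "#%s" % line          # keep the old line, commented
                new_lines.append('%s %s "%s"\n' % (var, m.group(2), val))
                replaced = True
        if not replaced:
            new_lines.append('%s = "%s"\n' % (var, val))
    return orig_lines + new_lines

if __name__ == "__main__":
    conf = ['MACHINE ?= "qemux86"\n', 'BB_NUMBER_THREADS = "2"\n']
    print("".join(update_conf_lines(conf, {"MACHINE": "beagleboard"})))
```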
@@ -1,11 +1,9 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -20,33 +18,21 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
import hashlib
import os
import re
import subprocess
import shlex
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import HobWidget
from bb.ui.crumbs.progressbar import HobProgressBar

import gtk
"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUI's
In summary: spacing = 12px, border-width = 6px
"""

#
# CrumbsDialog
#
class CrumbsDialog(gtk.Dialog):
    """
    A GNOME HIG compliant dialog widget.
    Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
    """
    def __init__(self, parent=None, label="", icon=gtk.STOCK_INFO):
        super(CrumbsDialog, self).__init__("", parent, gtk.DIALOG_DESTROY_WITH_PARENT)
        gtk.Dialog.__init__(self, "", parent, gtk.DIALOG_DESTROY_WITH_PARENT)

        #self.set_property("has-separator", False) # note: deprecated in 2.22
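As the docstring notes, callers attach their own buttons to a CrumbsDialog. A minimal usage sketch follows, assuming PyGTK is installed; the import path, function name, and label are assumptions for illustration, and the call pattern mirrors the builder code earlier in this diff.

```python
import gtk
# Assumed import path for this series; adjust if CrumbsDialog lives in another crumbs module.
from bb.ui.crumbs.hig import CrumbsDialog

def show_notice(parent_window, message):
    # Same pattern the builder code uses: create, add a button, run, destroy.
    dialog = CrumbsDialog(parent_window, message, gtk.STOCK_DIALOG_INFO)
    dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
    response = dialog.run()
    dialog.destroy()
    return response
```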
@@ -73,572 +59,3 @@ class CrumbsDialog(gtk.Dialog):
|
||||
self.label.set_property("yalign", 0.00)
|
||||
self.label.show()
|
||||
first_row.add(self.label)
|
||||
|
||||
#
|
||||
# Brought-in-by Dialog
|
||||
#
|
||||
class BinbDialog(gtk.Dialog):
|
||||
"""
|
||||
A dialog widget to show "brought in by" info when a recipe/package is clicked.
|
||||
"""
|
||||
|
||||
def __init__(self, title, content, parent=None):
|
||||
super(BinbDialog, self).__init__(title, parent, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, None)
|
||||
|
||||
self.set_position(gtk.WIN_POS_MOUSE)
|
||||
self.set_resizable(False)
|
||||
self.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.DARK))
|
||||
|
||||
hbox = gtk.HBox(False, 0)
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False, padding=10)
|
||||
|
||||
label = gtk.Label(content)
|
||||
label.set_alignment(0, 0)
|
||||
label.set_line_wrap(True)
|
||||
label.modify_fg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.WHITE))
|
||||
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=10)
|
||||
self.vbox.show_all()
|
||||
|
||||
#
|
||||
# AdvancedSettings Dialog
|
||||
#
|
||||
class AdvancedSettingDialog (gtk.Dialog):
|
||||
|
||||
def __init__(self, title, configuration, all_image_types,
|
||||
all_package_formats, all_distros, all_sdk_machines,
|
||||
max_threads, split_model, parent, flags, buttons):
|
||||
super(AdvancedSettingDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
# class members from other objects
|
||||
# bitbake settings from Builder.Configuration
|
||||
self.configuration = configuration
|
||||
self.image_types = all_image_types
|
||||
self.all_package_formats = all_package_formats
|
||||
self.all_distros = all_distros
|
||||
self.all_sdk_machines = all_sdk_machines
|
||||
self.max_threads = max_threads
|
||||
self.split_model = split_model
|
||||
|
||||
# class members for internal use
|
||||
self.pkgfmt_store = None
|
||||
self.distro_combo = None
|
||||
self.dldir_text = None
|
||||
self.sstatedir_text = None
|
||||
self.sstatemirror_text = None
|
||||
self.bb_spinner = None
|
||||
self.pmake_spinner = None
|
||||
self.rootfs_size_spinner = None
|
||||
self.extra_size_spinner = None
|
||||
self.gplv3_checkbox = None
|
||||
self.toolchain_checkbox = None
|
||||
self.setting_store = None
|
||||
self.image_types_checkbuttons = {}
|
||||
|
||||
self.variables = {}
|
||||
self.variables["PACKAGE_FORMAT"] = self.configuration.curr_package_format
|
||||
self.variables["INCOMPATIBLE_LICENSE"] = self.configuration.incompat_license
|
||||
self.variables["IMAGE_FSTYPES"] = self.configuration.image_fstypes
|
||||
self.md5 = hashlib.md5(str(sorted(self.variables.items()))).hexdigest()
|
||||
self.settings_changed = False
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.set_size_request(500, 500)
|
||||
|
||||
self.nb = gtk.Notebook()
|
||||
self.nb.set_show_tabs(True)
|
||||
self.nb.append_page(self.create_image_types_page(), gtk.Label("Image types"))
|
||||
self.nb.append_page(self.create_output_page(), gtk.Label("Output"))
|
||||
self.nb.append_page(self.create_build_environment_page(), gtk.Label("Build environment"))
|
||||
self.nb.append_page(self.create_others_page(), gtk.Label("Others"))
|
||||
self.nb.set_current_page(0)
|
||||
self.vbox.pack_start(self.nb, expand=True, fill=True)
|
||||
self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def create_image_types_page(self):
|
||||
advanced_vbox = gtk.VBox(False, 15)
|
||||
advanced_vbox.set_border_width(20)
|
||||
|
||||
rows = (len(self.image_types)+1)/2
|
||||
table = gtk.Table(rows + 1, 10, True)
|
||||
advanced_vbox.pack_start(table, expand=False, fill=False)
|
||||
|
||||
tooltip = "Select image file system types that will be used."
|
||||
image = gtk.Image()
|
||||
image.show()
|
||||
image.set_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_BUTTON)
|
||||
image.set_tooltip_text(tooltip)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Select image types:</span>")
|
||||
table.attach(label, 0, 9, 0, 1)
|
||||
table.attach(image, 9, 10, 0, 1)
|
||||
|
||||
i = 1
|
||||
j = 1
|
||||
for image_type in self.image_types:
|
||||
self.image_types_checkbuttons[image_type] = gtk.CheckButton(image_type)
|
||||
self.image_types_checkbuttons[image_type].set_tooltip_text("Build an %s image" % image_type)
|
||||
table.attach(self.image_types_checkbuttons[image_type], j, j + 4, i, i + 1)
|
||||
if image_type in self.configuration.image_fstypes:
|
||||
self.image_types_checkbuttons[image_type].set_active(True)
|
||||
i += 1
|
||||
if i > rows:
|
||||
i = 1
|
||||
j = j + 4
|
||||
|
||||
return advanced_vbox
|
||||
|
||||
def create_output_page(self):
|
||||
advanced_vbox = gtk.VBox(False, 15)
|
||||
advanced_vbox.set_border_width(20)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Packaging Format:</span>")
|
||||
tooltip = "Select package formats that will be used. "
|
||||
tooltip += "The first format will be used for final image"
|
||||
pkgfmt_widget, self.pkgfmt_store = HobWidget.gen_pkgfmt_widget(self.configuration.curr_package_format, self.all_package_formats, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(pkgfmt_widget, expand=False, fill=False)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Image Rootfs Size: (MB)</span>")
|
||||
tooltip = "Sets the size of your target image.\nThis is the basic size of your target image, unless your selected package size exceeds this value, or you set value to \"Image Extra Size\"."
|
||||
rootfs_size_widget, self.rootfs_size_spinner = HobWidget.gen_spinner_widget(int(self.configuration.image_rootfs_size*1.0/1024), 0, 1024, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(rootfs_size_widget, expand=False, fill=False)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Image Extra Size: (MB)</span>")
|
||||
tooltip = "Sets the extra free space of your target image.\nDefaultly, system will reserve 30% of your image size as your free space. If your image contains zypper, it will bring in 50MB more space. The maximum free space is 1024MB."
|
||||
extra_size_widget, self.extra_size_spinner = HobWidget.gen_spinner_widget(int(self.configuration.image_extra_size*1.0/1024), 0, 1024, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(extra_size_widget, expand=False, fill=False)
|
||||
|
||||
self.gplv3_checkbox = gtk.CheckButton("Exclude GPLv3 packages")
|
||||
self.gplv3_checkbox.set_tooltip_text("Check this box to prevent GPLv3 packages from being included in your image")
|
||||
if "GPLv3" in self.configuration.incompat_license.split():
|
||||
self.gplv3_checkbox.set_active(True)
|
||||
else:
|
||||
self.gplv3_checkbox.set_active(False)
|
||||
advanced_vbox.pack_start(self.gplv3_checkbox, expand=False, fill=False)
|
||||
|
||||
sub_hbox = gtk.HBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_hbox, expand=False, fill=False)
|
||||
self.toolchain_checkbox = gtk.CheckButton("Build Toolchain")
|
||||
self.toolchain_checkbox.set_tooltip_text("Check this box to build the related toolchain with your image")
|
||||
self.toolchain_checkbox.set_active(self.configuration.toolchain_build)
|
||||
sub_hbox.pack_start(self.toolchain_checkbox, expand=False, fill=False)
|
||||
|
||||
tooltip = "This is the Host platform you would like to run the toolchain"
|
||||
sdk_machine_widget, self.sdk_machine_combo = HobWidget.gen_combo_widget(self.configuration.curr_sdk_machine, self.all_sdk_machines, tooltip)
|
||||
sub_hbox.pack_start(sdk_machine_widget, expand=False, fill=False)
|
||||
|
||||
return advanced_vbox
|
||||
|
||||
def create_build_environment_page(self):
|
||||
advanced_vbox = gtk.VBox(False, 15)
|
||||
advanced_vbox.set_border_width(20)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Select Distro:</span>")
|
||||
tooltip = "This is the Yocto distribution you would like to use"
|
||||
distro_widget, self.distro_combo = HobWidget.gen_combo_widget(self.configuration.curr_distro, self.all_distros, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(distro_widget, expand=False, fill=False)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">BB_NUMBER_THREADS:</span>")
|
||||
tooltip = "Sets the number of threads that bitbake tasks can run simultaneously"
|
||||
bbthread_widget, self.bb_spinner = HobWidget.gen_spinner_widget(self.configuration.bbthread, 1, self.max_threads, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(bbthread_widget, expand=False, fill=False)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">PARALLEL_MAKE:</span>")
|
||||
tooltip = "Sets the make parallism, as known as 'make -j'"
|
||||
pmake_widget, self.pmake_spinner = HobWidget.gen_spinner_widget(self.configuration.pmake, 1, self.max_threads, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(pmake_widget, expand=False, fill=False)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Set Download Directory:</span>")
|
||||
tooltip = "Select a folder that caches the upstream project source code"
|
||||
dldir_widget, self.dldir_text = HobWidget.gen_entry_widget(self.split_model, self.configuration.dldir, self, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(dldir_widget, expand=False, fill=False)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Select SSTATE Directory:</span>")
|
||||
tooltip = "Select a folder that caches your prebuilt results"
|
||||
sstatedir_widget, self.sstatedir_text = HobWidget.gen_entry_widget(self.split_model, self.configuration.sstatedir, self, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(sstatedir_widget, expand=False, fill=False)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Select SSTATE Mirror:</span>")
|
||||
tooltip = "Select the prebuilt mirror that will fasten your build speed"
|
||||
sstatemirror_widget, self.sstatemirror_text = HobWidget.gen_entry_widget(self.split_model, self.configuration.sstatemirror, self, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(sstatemirror_widget, expand=False, fill=False)
|
||||
|
||||
return advanced_vbox
|
||||
|
||||
def create_others_page(self):
|
||||
advanced_vbox = gtk.VBox(False, 15)
|
||||
advanced_vbox.set_border_width(20)
|
||||
|
||||
sub_vbox = gtk.VBox(False, 5)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=True, fill=True)
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\">Add your own variables:</span>")
|
||||
tooltip = "This is the key/value pair for your extra settings"
|
||||
setting_widget, self.setting_store = HobWidget.gen_editable_settings(self.configuration.extra_setting, tooltip)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(setting_widget, expand=True, fill=True)
|
||||
|
||||
return advanced_vbox
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
self.variables = {}
|
||||
|
||||
self.configuration.curr_package_format = ""
|
||||
it = self.pkgfmt_store.get_iter_first()
|
||||
while it:
|
||||
value = self.pkgfmt_store.get_value(it, 2)
|
||||
if value:
|
||||
self.configuration.curr_package_format += (self.pkgfmt_store.get_value(it, 1) + " ")
|
||||
it = self.pkgfmt_store.iter_next(it)
|
||||
self.configuration.curr_package_format = self.configuration.curr_package_format.strip()
|
||||
self.variables["PACKAGE_FORMAT"] = self.configuration.curr_package_format
|
||||
|
||||
self.configuration.curr_distro = self.distro_combo.get_active_text()
|
||||
self.configuration.dldir = self.dldir_text.get_text()
|
||||
self.configuration.sstatedir = self.sstatedir_text.get_text()
|
||||
self.configuration.sstatemirror = self.sstatemirror_text.get_text()
|
||||
self.configuration.bbthread = self.bb_spinner.get_value_as_int()
|
||||
self.configuration.pmake = self.pmake_spinner.get_value_as_int()
|
||||
self.configuration.image_rootfs_size = self.rootfs_size_spinner.get_value_as_int() * 1024
|
||||
self.configuration.image_extra_size = self.extra_size_spinner.get_value_as_int() * 1024
|
||||
|
||||
self.configuration.image_fstypes = []
|
||||
for image_type in self.image_types:
|
||||
if self.image_types_checkbuttons[image_type].get_active():
|
||||
self.configuration.image_fstypes.append(image_type)
|
||||
self.variables["IMAGE_FSTYPES"] = self.configuration.image_fstypes
|
||||
|
||||
if self.gplv3_checkbox.get_active():
|
||||
if "GPLv3" not in self.configuration.incompat_license.split():
|
||||
self.configuration.incompat_license += " GPLv3"
|
||||
else:
|
||||
if "GPLv3" in self.configuration.incompat_license.split():
|
||||
self.configuration.incompat_license = self.configuration.incompat_license.split().remove("GPLv3")
|
||||
self.configuration.incompat_license = " ".join(self.configuration.incompat_license or [])
|
||||
self.configuration.incompat_license = self.configuration.incompat_license.strip()
|
||||
self.variables["INCOMPATIBLE_LICENSE"] = self.configuration.incompat_license
|
||||
|
||||
self.configuration.toolchain_build = self.toolchain_checkbox.get_active()
|
||||
|
||||
self.configuration.extra_setting = {}
|
||||
it = self.setting_store.get_iter_first()
|
||||
while it:
|
||||
key = self.setting_store.get_value(it, 0)
|
||||
value = self.setting_store.get_value(it, 1)
|
||||
self.configuration.extra_setting[key] = value
|
||||
self.variables[key] = value
|
||||
it = self.setting_store.iter_next(it)
|
||||
|
||||
md5 = hashlib.md5(str(sorted(self.variables.items()))).hexdigest()
|
||||
self.settings_changed = (self.md5 != md5)
|
||||
|
||||
#
|
||||
# DeployImageDialog
|
||||
#
|
||||
class DeployImageDialog (gtk.Dialog):
|
||||
|
||||
__dummy_usb__ = "--select a usb drive--"
|
||||
|
||||
def __init__(self, title, image_path, parent, flags, buttons):
|
||||
super(DeployImageDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
self.image_path = image_path
|
||||
|
||||
self.create_visual_elements()
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.set_border_width(20)
|
||||
self.set_default_size(500, 250)
|
||||
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0.0, 0.5)
|
||||
markup = "<span font_desc='12'>The image to be written into usb drive:</span>"
|
||||
label.set_markup(markup)
|
||||
self.vbox.pack_start(label, expand=False, fill=False, padding=2)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
tv = gtk.TextView()
|
||||
tv.set_editable(False)
|
||||
tv.set_wrap_mode(gtk.WRAP_WORD)
|
||||
tv.set_cursor_visible(False)
|
||||
buf = gtk.TextBuffer()
|
||||
buf.set_text(self.image_path)
|
||||
tv.set_buffer(buf)
|
||||
scroll.add(tv)
|
||||
self.vbox.pack_start(scroll, expand=True, fill=True)
|
||||
|
||||
self.usb_desc = gtk.Label()
|
||||
self.usb_desc.set_alignment(0.0, 0.5)
|
||||
markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
|
||||
self.usb_desc.set_markup(markup)
|
||||
|
||||
self.usb_combo = gtk.combo_box_new_text()
|
||||
self.usb_combo.connect("changed", self.usb_combo_changed_cb)
|
||||
model = self.usb_combo.get_model()
|
||||
model.clear()
|
||||
self.usb_combo.append_text(self.__dummy_usb__)
|
||||
for usb in self.find_all_usb_devices():
|
||||
self.usb_combo.append_text("/dev/" + usb)
|
||||
self.usb_combo.set_active(0)
|
||||
self.vbox.pack_start(self.usb_combo, expand=True, fill=True)
|
||||
self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)
|
||||
|
||||
self.progress_bar = HobProgressBar()
|
||||
self.vbox.pack_start(self.progress_bar, expand=False, fill=False)
|
||||
separator = gtk.HSeparator()
|
||||
self.vbox.pack_start(separator, expand=False, fill=True, padding=10)
|
||||
|
||||
self.vbox.show_all()
|
||||
self.progress_bar.hide()
|
||||
|
||||
def popen_read(self, cmd):
|
||||
return os.popen("%s 2>/dev/null" % cmd).read().strip()
|
||||
|
||||
def find_all_usb_devices(self):
|
||||
usb_devs = [ os.readlink(u)
|
||||
for u in self.popen_read('ls /dev/disk/by-id/usb*').split()
|
||||
if not re.search(r'part\d+', u) ]
|
||||
return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]
|
||||
|
||||
def get_usb_info(self, dev):
|
||||
return "%s %s" % \
|
||||
(self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
|
||||
self.popen_read('cat /sys/class/block/%s/device/model' % dev))
|
||||
|
||||
def usb_combo_changed_cb(self, usb_combo):
|
||||
combo_item = self.usb_combo.get_active_text()
|
||||
if not combo_item or combo_item == self.__dummy_usb__:
|
||||
markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
|
||||
self.usb_desc.set_markup(markup)
|
||||
else:
|
||||
markup = "<span font_desc='12'>" + self.get_usb_info(combo_item.lstrip("/dev/")) + "</span>"
|
||||
self.usb_desc.set_markup(markup)
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
if response_id == gtk.RESPONSE_YES:
|
||||
combo_item = self.usb_combo.get_active_text()
|
||||
if combo_item and combo_item != self.__dummy_usb__:
|
||||
cmdline = "/usr/bin/xterm -e "
|
||||
cmdline += "\"sudo dd if=" + self.image_path + " of=" + combo_item + "; bash\""
|
||||
subprocess.Popen(args=shlex.split(cmdline))
|
||||
|
||||
def update_progress_bar(self, title, fraction, status=True):
|
||||
self.progress_bar.update(fraction)
|
||||
self.progress_bar.set_title(title)
|
||||
self.progress_bar.set_rcstyle(status)
|
||||
|
||||
def write_file(self, ifile, ofile):
|
||||
self.progress_bar.reset()
|
||||
self.progress_bar.show()
|
||||
|
||||
f_from = os.open(ifile, os.O_RDONLY)
|
||||
f_to = os.open(ofile, os.O_WRONLY)
|
||||
|
||||
total_size = os.stat(ifile).st_size
|
||||
written_size = 0
|
||||
|
||||
while True:
|
||||
buf = os.read(f_from, 1024*1024)
|
||||
if not buf:
|
||||
break
|
||||
os.write(f_to, buf)
|
||||
written_size += 1024*1024
|
||||
self.update_progress_bar("Writing to usb:", written_size * 1.0/total_size)
|
||||
|
||||
self.update_progress_bar("Writing completed:", 1.0)
|
||||
os.close(f_from)
|
||||
os.close(f_to)
|
||||
self.progress_bar.hide()
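write_file() above streams the selected image onto the raw device in 1 MiB chunks and feeds the written/total ratio to the progress bar. A standalone sketch of the same loop, without the GTK plumbing, is shown below; the paths are placeholders and O_CREAT is added only so the sketch also works on a plain file.

```python
import os

def copy_with_progress(ifile, ofile, chunk=1024 * 1024):
    """Yield the completed fraction after each chunk, like write_file() above."""
    total = os.stat(ifile).st_size
    written = 0
    f_from = os.open(ifile, os.O_RDONLY)
    f_to = os.open(ofile, os.O_WRONLY | os.O_CREAT)
    try:
        while True:
            buf = os.read(f_from, chunk)
            if not buf:
                break
            os.write(f_to, buf)
            written += len(buf)
            yield written * 1.0 / total   # what update_progress_bar() receives
    finally:
        os.close(f_from)
        os.close(f_to)
```

Iterating the generator, e.g. `for frac in copy_with_progress(src, dst): ...`, plays the role of the update_progress_bar() calls in the dialog.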
|
||||
#
|
||||
# LayerSelectionDialog
|
||||
#
|
||||
class LayerSelectionDialog (gtk.Dialog):
|
||||
|
||||
def __init__(self, title, layers, all_layers, split_model,
|
||||
parent, flags, buttons):
|
||||
super(LayerSelectionDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
# class members from other objects
|
||||
self.layers = layers
|
||||
self.all_layers = all_layers
|
||||
self.split_model = split_model
|
||||
self.layers_changed = False
|
||||
|
||||
# class members for internal use
|
||||
self.layer_store = None
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.set_border_width(20)
|
||||
self.set_default_size(400, 250)
|
||||
|
||||
hbox_top = gtk.HBox()
|
||||
self.set_border_width(12)
|
||||
self.vbox.pack_start(hbox_top, expand=False, fill=False)
|
||||
|
||||
if self.split_model:
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\" font_desc='12'>Select Layers:</span>\n(Available layers under '${COREBASE}/layers/' directory)")
|
||||
else:
|
||||
label = HobWidget.gen_label_widget("<span weight=\"bold\" font_desc='12'>Select Layers:</span>")
|
||||
hbox_top.pack_start(label, expand=False, fill=False)
|
||||
|
||||
tooltip = "Layer is a collection of bb files and conf files"
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_BUTTON)
|
||||
image.set_tooltip_text(tooltip)
|
||||
hbox_top.pack_end(image, expand=False, fill=False)
|
||||
|
||||
layer_widget, self.layer_store = HobWidget.gen_layer_widget(self.split_model, self.layers, self.all_layers, self, None)
|
||||
|
||||
self.vbox.pack_start(layer_widget, expand=True, fill=True)
|
||||
|
||||
separator = gtk.HSeparator()
|
||||
self.vbox.pack_start(separator, False, True, 5)
|
||||
separator.show()
|
||||
|
||||
hbox_button = gtk.HBox()
|
||||
self.vbox.pack_end(hbox_button, expand=False, fill=False)
|
||||
hbox_button.show()
|
||||
|
||||
label = HobWidget.gen_label_widget("<i>'meta' is Core layer for Yocto images</i>\n"
|
||||
"<span weight=\"bold\">Please do not remove it</span>")
|
||||
hbox_button.pack_start(label, expand=False, fill=False)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
model = self.layer_store
|
||||
it = model.get_iter_first()
|
||||
layers = []
|
||||
while it:
|
||||
if self.split_model:
|
||||
inc = model.get_value(it, 1)
|
||||
if inc:
|
||||
layers.append(model.get_value(it, 0))
|
||||
else:
|
||||
layers.append(model.get_value(it, 0))
|
||||
it = model.iter_next(it)
|
||||
|
||||
self.layers_changed = (self.layers != layers)
|
||||
self.layers = layers
|
||||
|
||||
class ImageSelectionDialog (gtk.Dialog):
|
||||
|
||||
def __init__(self, image_folder, image_types, title, parent, flags, buttons):
|
||||
super(ImageSelectionDialog, self).__init__(title, parent, flags, buttons)
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
self.image_folder = image_folder
|
||||
self.image_types = image_types
|
||||
self.image_names = []
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
|
||||
self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
|
||||
self.fill_image_store()
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.set_border_width(20)
|
||||
self.set_default_size(600, 300)
|
||||
self.vbox.set_spacing(10)
|
||||
|
||||
hbox = gtk.HBox(False, 10)
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
entry = gtk.Entry()
|
||||
entry.set_text(self.image_folder)
|
||||
table = gtk.Table(1, 10, True)
|
||||
table.set_size_request(560, -1)
|
||||
hbox.pack_start(table, expand=False, fill=False)
|
||||
table.attach(entry, 0, 9, 0, 1)
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
|
||||
open_button = gtk.Button()
|
||||
open_button.set_image(image)
|
||||
open_button.connect("clicked", self.select_path_cb, self, entry)
|
||||
table.attach(open_button, 9, 10, 0, 1)
|
||||
|
||||
imgtv_widget, self.imgsel_tv = HobWidget.gen_imgtv_widget(400, 160)
|
||||
self.vbox.pack_start(imgtv_widget, expand=True, fill=True)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def select_path_cb(self, action, parent, entry):
|
||||
dialog = gtk.FileChooserDialog("", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
|
||||
(gtk.STOCK_OK, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
path = dialog.get_filename()
|
||||
entry.set_text(path)
|
||||
self.image_folder = path
|
||||
self.fill_image_store()
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
def fill_image_store(self):
|
||||
self.image_store.clear()
|
||||
imageset = set()
|
||||
for root, dirs, files in os.walk(self.image_folder):
|
||||
for f in files:
|
||||
for image_type in self.image_types:
|
||||
if f.endswith('.' + image_type):
|
||||
imageset.add(f.rsplit('.' + image_type)[0])
|
||||
|
||||
for image in imageset:
|
||||
self.image_store.set(self.image_store.append(), 0, image, 1, False)
|
||||
|
||||
self.imgsel_tv.set_model(self.image_store)
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
self.image_names = []
|
||||
if response_id == gtk.RESPONSE_YES:
|
||||
iter = self.image_store.get_iter_first()
|
||||
while iter:
|
||||
path = self.image_store.get_path(iter)
|
||||
if self.image_store[path][1]:
|
||||
for root, dirs, files in os.walk(self.image_folder):
|
||||
for f in files:
|
||||
if f.startswith(self.image_store[path][0] + '.'):
|
||||
self.image_names.append(f)
|
||||
break
|
||||
iter = self.image_store.iter_next(iter)
|
||||
|
||||
@@ -1,35 +0,0 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

class HobColors:
    WHITE = "#ffffff"
    PALE_GREEN = "#aaffaa"
    ORANGE = "#ff7c24"
    PALE_RED = "#ffaaaa"
    GRAY = "#aaaaaa"
    LIGHT_GRAY = "#dddddd"
    DARK = "#3c3b37"
    BLACK = "#000000"
    LIGHT_ORANGE = "#f7a787"

    OK = WHITE
    RUNNING = PALE_GREEN
    WARNING = ORANGE
    ERROR = PALE_RED
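The aliases at the bottom of the class are what the build views key on. A small stand-in mapping is sketched below, with the hex values copied from the listing above; the state names are illustrative, and views elsewhere in this series wrap such values in gtk.gdk.Color().

```python
# Minimal stand-in for the HobColors aliases above, usable without GTK.
STATE_COLORS = {
    "ok":      "#ffffff",   # HobColors.OK      == WHITE
    "running": "#aaffaa",   # HobColors.RUNNING == PALE_GREEN
    "warning": "#ff7c24",   # HobColors.WARNING == ORANGE
    "error":   "#ffaaaa",   # HobColors.ERROR   == PALE_RED
}

def row_colour(state):
    # Fall back to white for states without a dedicated colour.
    return STATE_COLORS.get(state, "#ffffff")
```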
@@ -4,7 +4,6 @@
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -21,7 +20,10 @@

import gobject
import logging
from bb.ui.crumbs.runningbuild import RunningBuild
import tempfile
import datetime

progress_total = 0

class HobHandler(gobject.GObject):

@@ -29,176 +31,147 @@ class HobHandler(gobject.GObject):
|
||||
This object does BitBake event handling for the hob gui.
|
||||
"""
|
||||
__gsignals__ = {
|
||||
"layers-updated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"package-formats-updated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"config-updated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
|
||||
"command-succeeded" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_INT,)),
|
||||
"command-failed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,)),
|
||||
"generating-data" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
"data-generated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
"parsing-started" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"parsing" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"parsing-completed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"machines-updated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"sdk-machines-updated": (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"distros-updated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"package-formats-found" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
"config-found" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,)),
|
||||
"generating-data" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
"data-generated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
"fatal-error" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,)),
|
||||
"command-failed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,)),
|
||||
"reload-triggered" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,)),
|
||||
}
|
||||
|
||||
(CFG_AVAIL_LAYERS, CFG_PATH_LAYERS, CFG_FILES_DISTRO, CFG_FILES_MACH, CFG_FILES_SDKMACH, FILES_MATCH_CLASS, PARSE_CONFIG, PARSE_BBFILES, GENERATE_TGTS, GENERATE_PACKAGEINFO, BUILD_TARGET_RECIPES, BUILD_TARGET_IMAGE, CMD_END) = range(13)
|
||||
(LAYERS_REFRESH, GENERATE_RECIPES, GENERATE_PACKAGES, GENERATE_IMAGE, POPULATE_PACKAGEINFO) = range(5)
|
||||
(CFG_PATH_LOCAL, CFG_PATH_PRE, CFG_PATH_POST, CFG_PATH_LAYERS, CFG_FILES_DISTRO, CFG_FILES_MACH, CFG_FILES_SDK, FILES_MATCH_CLASS, GENERATE_TGTS, REPARSE_FILES, BUILD_IMAGE) = range(11)
|
||||
|
||||
def __init__(self, server, server_addr, client_addr, recipe_model, package_model):
|
||||
super(HobHandler, self).__init__()
|
||||
def __init__(self, taskmodel, server):
|
||||
gobject.GObject.__init__(self)
|
||||
|
||||
self.build = RunningBuild(sequential=True)
|
||||
|
||||
self.recipe_model = recipe_model
|
||||
self.package_model = package_model
|
||||
|
||||
self.commands_async = []
|
||||
self.generating = False
|
||||
self.current_phase = None
|
||||
self.current_command = None
|
||||
self.building = False
|
||||
self.recipe_queue = []
|
||||
self.package_queue = []
|
||||
self.build_toolchain = False
|
||||
self.build_toolchain_headers = False
|
||||
self.generating = False
|
||||
self.build_queue = []
|
||||
self.current_phase = None
|
||||
self.bbpath_ok = False
|
||||
self.bbfiles_ok = False
|
||||
self.build_type = "image"
|
||||
self.image_dir = os.path.join(tempfile.gettempdir(), 'hob-images')
|
||||
|
||||
self.model = taskmodel
|
||||
self.server = server
|
||||
self.error_msg = ""
|
||||
self.initcmd = None
|
||||
|
||||
self.split_model = False
|
||||
if server_addr and client_addr:
|
||||
self.split_model = (server_addr != client_addr)
|
||||
self.reset_server() # reset server if server was found just now
|
||||
self.server_addr = server_addr
|
||||
deploy_dir = self.server.runCommand(["getVariable", "DEPLOY_DIR"])
|
||||
self.image_out_dir = os.path.join(deploy_dir, "images")
|
||||
self.image_output_types = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"]).split(" ")
|
||||
self.bbpath = self.server.runCommand(["getVariable", "BBPATH"])
|
||||
self.bbfiles = self.server.runCommand(["getVariable", "BBFILES"])
|
||||
|
||||
def kick(self):
|
||||
import xmlrpclib
|
||||
try:
|
||||
# kick the while thing off
|
||||
if self.split_model:
|
||||
self.commands_async.append(self.CFG_AVAIL_LAYERS)
|
||||
else:
|
||||
self.commands_async.append(self.CFG_PATH_LAYERS)
|
||||
self.commands_async.append(self.CFG_FILES_DISTRO)
|
||||
self.commands_async.append(self.CFG_FILES_MACH)
|
||||
self.commands_async.append(self.CFG_FILES_SDKMACH)
|
||||
self.commands_async.append(self.FILES_MATCH_CLASS)
|
||||
self.run_next_command()
|
||||
return True
|
||||
except xmlrpclib.Fault as x:
|
||||
print("XMLRPC Fault getting commandline:\n %s" % x)
|
||||
return False
|
||||
|
||||
def set_busy(self):
|
||||
if not self.generating:
|
||||
def run_next_command(self):
|
||||
if self.current_command and not self.generating:
|
||||
self.emit("generating-data")
|
||||
self.generating = True
|
||||
|
||||
def clear_busy(self):
|
||||
if self.generating:
|
||||
self.emit("data-generated")
|
||||
self.generating = False
|
||||
|
||||
def run_next_command(self, initcmd=None):
|
||||
if initcmd != None:
|
||||
self.initcmd = initcmd
|
||||
|
||||
if self.commands_async:
|
||||
self.set_busy()
|
||||
next_command = self.commands_async.pop(0)
|
||||
else:
|
||||
self.clear_busy()
|
||||
if self.initcmd != None:
|
||||
self.emit("command-succeeded", self.initcmd)
|
||||
return
|
||||
|
||||
if next_command == self.CFG_AVAIL_LAYERS:
|
||||
self.server.runCommand(["findCoreBaseFiles", "layers", "conf/layer.conf"])
|
||||
elif next_command == self.CFG_PATH_LAYERS:
|
||||
if self.current_command == self.CFG_PATH_LOCAL:
|
||||
self.current_command = self.CFG_PATH_PRE
|
||||
self.server.runCommand(["findConfigFilePath", "hob-pre.conf"])
|
||||
elif self.current_command == self.CFG_PATH_PRE:
|
||||
self.current_command = self.CFG_PATH_POST
|
||||
self.server.runCommand(["findConfigFilePath", "hob-post.conf"])
|
||||
elif self.current_command == self.CFG_PATH_POST:
|
||||
self.current_command = self.CFG_PATH_LAYERS
|
||||
self.server.runCommand(["findConfigFilePath", "bblayers.conf"])
|
||||
elif next_command == self.CFG_FILES_DISTRO:
|
||||
elif self.current_command == self.CFG_PATH_LAYERS:
|
||||
self.current_command = self.CFG_FILES_DISTRO
|
||||
self.server.runCommand(["findConfigFiles", "DISTRO"])
|
||||
elif next_command == self.CFG_FILES_MACH:
|
||||
elif self.current_command == self.CFG_FILES_DISTRO:
|
||||
self.current_command = self.CFG_FILES_MACH
|
||||
self.server.runCommand(["findConfigFiles", "MACHINE"])
|
||||
elif next_command == self.CFG_FILES_SDKMACH:
|
||||
elif self.current_command == self.CFG_FILES_MACH:
|
||||
self.current_command = self.CFG_FILES_SDK
|
||||
self.server.runCommand(["findConfigFiles", "MACHINE-SDK"])
|
||||
elif next_command == self.FILES_MATCH_CLASS:
|
||||
elif self.current_command == self.CFG_FILES_SDK:
|
||||
self.current_command = self.FILES_MATCH_CLASS
|
||||
self.server.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
|
||||
elif next_command == self.PARSE_CONFIG:
|
||||
self.server.runCommand(["parseConfigurationFiles", "", ""])
|
||||
elif next_command == self.PARSE_BBFILES:
|
||||
self.server.runCommand(["parseFiles"])
|
||||
elif next_command == self.GENERATE_TGTS:
|
||||
self.server.runCommand(["generateTargetsTree", "classes/image.bbclass", [], True])
|
||||
elif next_command == self.GENERATE_PACKAGEINFO:
|
||||
self.server.runCommand(["triggerEvent", "bb.event.RequestPackageInfo()"])
|
||||
elif next_command == self.BUILD_TARGET_RECIPES:
|
||||
self.clear_busy()
|
||||
elif self.current_command == self.FILES_MATCH_CLASS:
|
||||
self.current_command = self.GENERATE_TGTS
|
||||
self.server.runCommand(["generateTargetsTree", "classes/image.bbclass"])
|
||||
elif self.current_command == self.GENERATE_TGTS:
|
||||
if self.generating:
|
||||
self.emit("data-generated")
|
||||
self.generating = False
|
||||
self.current_command = None
|
||||
elif self.current_command == self.REPARSE_FILES:
|
||||
if self.build_queue:
|
||||
self.current_command = self.BUILD_IMAGE
|
||||
else:
|
||||
self.current_command = self.CFG_PATH_LAYERS
|
||||
self.server.runCommand(["resetCooker"])
|
||||
self.server.runCommand(["reparseFiles"])
|
||||
elif self.current_command == self.BUILD_IMAGE:
|
||||
if self.generating:
|
||||
self.emit("data-generated")
|
||||
self.generating = False
|
||||
self.building = True
|
||||
self.server.runCommand(["buildTargets", self.recipe_queue, "build"])
|
||||
self.recipe_queue = []
|
||||
elif next_command == self.BUILD_TARGET_IMAGE:
|
||||
self.clear_busy()
|
||||
self.building = True
|
||||
targets = ["hob-image"]
|
||||
self.server.runCommand(["setVariable", "LINGUAS_INSTALL", ""])
|
||||
self.server.runCommand(["setVariable", "PACKAGE_INSTALL", " ".join(self.package_queue)])
|
||||
if self.toolchain_build:
|
||||
pkgs = self.package_queue + [i+'-dev' for i in self.package_queue] + [i+'-dbg' for i in self.package_queue]
|
||||
self.server.runCommand(["setVariable", "TOOLCHAIN_TARGET_TASK", " ".join(pkgs)])
|
||||
targets.append("hob-toolchain")
|
||||
self.server.runCommand(["buildTargets", targets, "build"])
|
||||
self.server.runCommand(["buildTargets", self.build_queue, "build"])
|
||||
self.build_queue = []
|
||||
self.current_command = None
|
||||
|
||||
def handle_event(self, event):
|
||||
def handle_event(self, event, running_build, pbar):
|
||||
if not event:
|
||||
return
|
||||
return
|
||||
|
||||
# If we're running a build, use the RunningBuild event handler
|
||||
if self.building:
|
||||
self.current_phase = "building"
|
||||
self.build.handle_event(event)
|
||||
|
||||
if isinstance(event, bb.event.PackageInfo):
|
||||
self.package_model.populate(event._pkginfolist)
|
||||
self.run_next_command()
|
||||
|
||||
elif(isinstance(event, logging.LogRecord)):
|
||||
if event.levelno >= logging.ERROR:
|
||||
self.error_msg += event.msg + '\n'
|
||||
|
||||
running_build.handle_event(event)
|
||||
elif isinstance(event, bb.event.TargetsTreeGenerated):
|
||||
self.current_phase = "data generation"
|
||||
if event._model:
|
||||
self.recipe_model.populate(event._model)
|
||||
elif isinstance(event, bb.event.CoreBaseFilesFound):
|
||||
self.current_phase = "configuration lookup"
|
||||
paths = event._paths
|
||||
self.emit('layers-updated', paths)
|
||||
self.model.populate(event._model)
|
||||
elif isinstance(event, bb.event.ConfigFilesFound):
|
||||
self.current_phase = "configuration lookup"
|
||||
var = event._variable
|
||||
values = event._values
|
||||
values.sort()
|
||||
self.emit("config-updated", var, values)
|
||||
if var == "distro":
|
||||
distros = event._values
|
||||
distros.sort()
|
||||
self.emit("distros-updated", distros)
|
||||
elif var == "machine":
|
||||
machines = event._values
|
||||
machines.sort()
|
||||
self.emit("machines-updated", machines)
|
||||
elif var == "machine-sdk":
|
||||
sdk_machines = event._values
|
||||
sdk_machines.sort()
|
||||
self.emit("sdk-machines-updated", sdk_machines)
|
||||
elif isinstance(event, bb.event.ConfigFilePathFound):
|
||||
self.current_phase = "configuration lookup"
|
||||
path = event._path
|
||||
self.emit("config-found", path)
|
||||
elif isinstance(event, bb.event.FilesMatchingFound):
|
||||
self.current_phase = "configuration lookup"
|
||||
# FIXME: hard coding, should at least be a variable shared between
|
||||
@@ -210,84 +183,48 @@ class HobHandler(gobject.GObject):
            fs, sep, format = classname.rpartition("_")
            formats.append(format)
            formats.sort()
            self.emit("package-formats-updated", formats)
            self.emit("package-formats-found", formats)
        elif isinstance(event, bb.command.CommandCompleted):
            self.current_phase = None
            self.run_next_command()

        elif isinstance(event, bb.event.NoProvider):
            if event._runtime:
                r = "R"
            else:
                r = ""
            if event._dependees:
                self.error_msg += " Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r)
            else:
                self.error_msg += " Nothing %sPROVIDES '%s'" % (r, event._item)
            if event._reasons:
                for reason in event._reasons:
                    self.error_msg += " %s" % reason

            self.commands_async = []
            self.emit("command-failed", self.error_msg)
            self.error_msg = ""

        elif isinstance(event, bb.command.CommandFailed):
            self.commands_async = []
            if self.error_msg:
                self.emit("command-failed", self.error_msg)
                self.error_msg = ""
        elif isinstance(event, (bb.event.ParseStarted,
                                bb.event.CacheLoadStarted,
                                bb.event.TreeDataPreparationStarted,
                                )):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = 0
            message["total"] = None
            message["title"] = "Parsing recipes: "
            self.emit("parsing-started", message)
        elif isinstance(event, (bb.event.ParseProgress,
                                bb.event.CacheLoadProgress,
                                bb.event.TreeDataPreparationProgress)):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = event.current
            message["total"] = event.total
            message["title"] = "Parsing recipes: "
            self.emit("parsing", message)
        elif isinstance(event, (bb.event.ParseCompleted,
                                bb.event.CacheLoadCompleted,
                                bb.event.TreeDataPreparationCompleted)):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = event.total
            message["total"] = event.total
            message["title"] = "Parsing recipes: "
            self.emit("parsing-completed", message)

            self.emit("command-failed", event.error)
        elif isinstance(event, bb.event.CacheLoadStarted):
            self.current_phase = "cache loading"
            bb.ui.crumbs.hobeventhandler.progress_total = event.total
            pbar.set_text("Loading cache: %s/%s" % (0, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.CacheLoadProgress):
            self.current_phase = "cache loading"
            pbar.set_text("Loading cache: %s/%s" % (event.current, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.CacheLoadCompleted):
            self.current_phase = None
            pbar.set_text("Loading...")
        elif isinstance(event, bb.event.ParseStarted):
            self.current_phase = "recipe parsing"
            if event.total == 0:
                return
            bb.ui.crumbs.hobeventhandler.progress_total = event.total
            pbar.set_text("Processing recipes: %s/%s" % (0, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.ParseProgress):
            self.current_phase = "recipe parsing"
            pbar.set_text("Processing recipes: %s/%s" % (event.current, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.ParseCompleted):
            self.current_phase = None
            pbar.set_fraction(1.0)
            pbar.set_text("Loading...")
        elif isinstance(event, logging.LogRecord):
            format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
            if event.levelno >= format.CRITICAL:
                self.emit("fatal-error", event.getMessage(), self.current_phase)
            return

    def init_cooker(self):
        self.server.runCommand(["initCooker"])

    def refresh_layers(self, bblayers):
        self.server.runCommand(["initCooker"])
        self.server.runCommand(["setVariable", "BBLAYERS", " ".join(bblayers)])
        self.commands_async.append(self.PARSE_CONFIG)
        self.commands_async.append(self.CFG_FILES_DISTRO)
        self.commands_async.append(self.CFG_FILES_MACH)
        self.commands_async.append(self.CFG_FILES_SDKMACH)
        self.commands_async.append(self.FILES_MATCH_CLASS)
        self.run_next_command(self.LAYERS_REFRESH)

    def set_extra_inherit(self, bbclass):
        inherits = self.server.runCommand(["getVariable", "INHERIT"]) or ""
        inherits = inherits + " " + bbclass
        self.server.runCommand(["setVariable", "INHERIT", inherits])

    def set_bblayers(self, bblayers):
        self.server.runCommand(["setVariable", "BBLAYERS", " ".join(bblayers)])
    def event_handle_idle_func (self, eventHandler, running_build, pbar):
        # Consume as many messages as we can in the time available to us
        event = eventHandler.getEvent()
        while event:
            self.handle_event(event, running_build, pbar)
            event = eventHandler.getEvent()
        return True

    def set_machine(self, machine):
        self.server.runCommand(["setVariable", "MACHINE", machine])
@@ -295,78 +232,62 @@ class HobHandler(gobject.GObject):
    def set_sdk_machine(self, sdk_machine):
        self.server.runCommand(["setVariable", "SDKMACHINE", sdk_machine])

    def set_image_fstypes(self, image_fstypes):
        self.server.runCommand(["setVariable", "IMAGE_FSTYPES", " ".join(image_fstypes).lstrip(" ")])

    def set_distro(self, distro):
        self.server.runCommand(["setVariable", "DISTRO", distro])

    def set_package_format(self, format):
        package_classes = ""
        for pkgfmt in format.split():
            package_classes += ("package_%s" % pkgfmt + " ")
        self.server.runCommand(["setVariable", "PACKAGE_CLASSES", package_classes])
        self.server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_%s" % format])

    def reload_data(self, config=None):
        img = self.model.selected_image
        selected_packages, _ = self.model.get_selected_packages()
        self.emit("reload-triggered", img, " ".join(selected_packages))
        self.current_command = self.REPARSE_FILES
        self.run_next_command()

    def set_bbthreads(self, threads):
        self.server.runCommand(["setVariable", "BB_NUMBER_THREADS", threads])

    def set_pmake(self, threads):
        pmake = "-j %s" % threads
        self.server.runCommand(["setVariable", "PARALLEL_MAKE", pmake])
        self.server.runCommand(["setVariable", "BB_NUMBER_THREADS", pmake])

    def set_dl_dir(self, directory):
        self.server.runCommand(["setVariable", "DL_DIR", directory])

    def set_sstate_dir(self, directory):
        self.server.runCommand(["setVariable", "SSTATE_DIR", directory])

    def set_sstate_mirror(self, url):
        self.server.runCommand(["setVariable", "SSTATE_MIRROR", url])

    def set_extra_size(self, image_extra_size):
        self.server.runCommand(["setVariable", "IMAGE_ROOTFS_EXTRA_SPACE", str(image_extra_size)])

    def set_rootfs_size(self, image_rootfs_size):
        self.server.runCommand(["setVariable", "IMAGE_ROOTFS_SIZE", str(image_rootfs_size)])

    def set_incompatible_license(self, incompat_license):
        self.server.runCommand(["setVariable", "INCOMPATIBLE_LICENSE", incompat_license])

    def set_extra_config(self, extra_setting):
        for key in extra_setting.keys():
            value = extra_setting[key]
            self.server.runCommand(["setVariable", key, value])

    def request_package_info_async(self):
        self.commands_async.append(self.GENERATE_PACKAGEINFO)
        self.run_next_command(self.POPULATE_PACKAGEINFO)

    def generate_recipes(self):
        self.commands_async.append(self.PARSE_CONFIG)
        self.commands_async.append(self.GENERATE_TGTS)
        self.run_next_command(self.GENERATE_RECIPES)

    def generate_packages(self, tgts):
    def build_targets(self, tgts, configurator, build_type="image"):
        self.build_type = build_type
        targets = []
        nbbp = None
        nbbf = None
        targets.extend(tgts)
        self.recipe_queue = targets
        self.commands_async.append(self.PARSE_CONFIG)
        self.commands_async.append(self.PARSE_BBFILES)
        self.commands_async.append(self.BUILD_TARGET_RECIPES)
        self.run_next_command(self.GENERATE_PACKAGES)
        if self.build_toolchain and self.build_toolchain_headers:
            targets.append("meta-toolchain-sdk")
        elif self.build_toolchain:
            targets.append("meta-toolchain")
        self.build_queue = targets

    def generate_image(self, tgts, toolchain_build=False):
        self.package_queue = tgts
        self.toolchain_build = toolchain_build
        self.commands_async.append(self.PARSE_CONFIG)
        self.commands_async.append(self.PARSE_BBFILES)
        self.commands_async.append(self.BUILD_TARGET_IMAGE)
        self.run_next_command(self.GENERATE_IMAGE)
        if not self.bbpath_ok:
            if self.image_dir in self.bbpath.split(":"):
                self.bbpath_ok = True
            else:
                nbbp = self.image_dir

    def build_failed_async(self):
        self.initcmd = None
        self.commands_async = []
        self.building = False
        if not self.bbfiles_ok:
            import re
            pattern = "%s/\*.bb" % self.image_dir

            for files in self.bbfiles.split(" "):
                if re.match(pattern, files):
                    self.bbfiles_ok = True

            if not self.bbfiles_ok:
                nbbf = "%s/*.bb" % self.image_dir

        if nbbp or nbbf:
            configurator.insertTempBBPath(nbbp, nbbf)
            self.bbpath_ok = True
            self.bbfiles_ok = True

        self.current_command = self.REPARSE_FILES
        self.run_next_command()

    def cancel_build(self, force=False):
        if force:
@@ -377,83 +298,46 @@ class HobHandler(gobject.GObject):
            # leave the workdir in a usable state
            self.server.runCommand(["stateShutdown"])

    def reset_server(self):
        self.server.runCommand(["resetCooker"])
    def set_incompatible_license(self, incompatible):
        self.server.runCommand(["setVariable", "INCOMPATIBLE_LICENSE", incompatible])

    def reset_build(self):
        self.build.reset()
    def toggle_toolchain(self, enabled):
        if self.build_toolchain != enabled:
            self.build_toolchain = enabled

    def get_parameters(self):
        # retrieve the parameters from bitbake
        params = {}
        params["core_base"] = self.server.runCommand(["getVariable", "COREBASE"]) or ""
        hob_layer = params["core_base"] + "/meta-hob"
        params["layer"] = (self.server.runCommand(["getVariable", "BBLAYERS"]) or "") + " " + hob_layer
        params["dldir"] = self.server.runCommand(["getVariable", "DL_DIR"]) or ""
        params["machine"] = self.server.runCommand(["getVariable", "MACHINE"]) or ""
        params["distro"] = self.server.runCommand(["getVariable", "DISTRO"]) or "defaultsetup"
        params["pclass"] = self.server.runCommand(["getVariable", "PACKAGE_CLASSES"]) or ""
        params["sstatedir"] = self.server.runCommand(["getVariable", "SSTATE_DIR"]) or ""
        params["sstatemirror"] = self.server.runCommand(["getVariable", "SSTATE_MIRROR"]) or ""
    def toggle_toolchain_headers(self, enabled):
        if self.build_toolchain_headers != enabled:
            self.build_toolchain_headers = enabled

        num_threads = self.server.runCommand(["getCpuCount"])
        if not num_threads:
            num_threads = 1
            max_threads = 65536
        else:
            num_threads = int(num_threads)
            max_threads = 16 * num_threads
        params["max_threads"] = max_threads
    def set_fstypes(self, fstypes):
        self.server.runCommand(["setVariable", "IMAGE_FSTYPES", fstypes])

        bbthread = self.server.runCommand(["getVariable", "BB_NUMBER_THREADS"])
        if not bbthread:
            bbthread = num_threads
        else:
            bbthread = int(bbthread)
        params["bbthread"] = bbthread
    def add_image_output_type(self, output_type):
        if output_type not in self.image_output_types:
            self.image_output_types.append(output_type)
        fstypes = " ".join(self.image_output_types).lstrip(" ")
        self.set_fstypes(fstypes)
        return self.image_output_types

        pmake = self.server.runCommand(["getVariable", "PARALLEL_MAKE"])
        if not pmake:
            pmake = num_threads
        elif isinstance(pmake, int):
            pass
        else:
            pmake = int(pmake.lstrip("-j "))
        params["pmake"] = pmake
    def remove_image_output_type(self, output_type):
        if output_type in self.image_output_types:
            ind = self.image_output_types.index(output_type)
            self.image_output_types.pop(ind)
        fstypes = " ".join(self.image_output_types).lstrip(" ")
        self.set_fstypes(fstypes)
        return self.image_output_types

        image_addr = self.server.runCommand(["getVariable", "DEPLOY_DIR_IMAGE"]) or ""
        if self.server_addr:
            image_addr = "http://" + self.server_addr + ":" + image_addr
        params["image_addr"] = image_addr
    def get_image_deploy_dir(self):
        return self.image_out_dir

        image_extra_size = self.server.runCommand(["getVariable", "IMAGE_ROOTFS_EXTRA_SPACE"])
        if not image_extra_size:
            image_extra_size = 0
        else:
            image_extra_size = int(image_extra_size)
        params["image_extra_size"] = image_extra_size
    def make_temp_dir(self):
        bb.utils.mkdirhier(self.image_dir)

        image_rootfs_size = self.server.runCommand(["getVariable", "IMAGE_ROOTFS_SIZE"])
        if not image_rootfs_size:
            image_rootfs_size = 0
        else:
            image_rootfs_size = int(image_rootfs_size)
        params["image_rootfs_size"] = image_rootfs_size
    def remove_temp_dir(self):
        bb.utils.remove(self.image_dir, True)

        image_overhead_factor = self.server.runCommand(["getVariable", "IMAGE_OVERHEAD_FACTOR"])
        if not image_overhead_factor:
            image_overhead_factor = 1
        else:
            image_overhead_factor = float(image_overhead_factor)
        params['image_overhead_factor'] = image_overhead_factor

        params["incompat_license"] = self.server.runCommand(["getVariable", "INCOMPATIBLE_LICENSE"]) or ""
        params["sdk_machine"] = self.server.runCommand(["getVariable", "SDKMACHINE"]) or self.server.runCommand(["getVariable", "SDK_ARCH"]) or ""

        #params["image_types"] = self.server.runCommand(["getVariable", "IMAGE_TYPES"]) or ""
        params["image_fstypes"] = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"]) or ""
        """
        A workaround
        """
        params["image_types"] = "jffs2 sum.jffs2 cramfs ext2 ext2.gz ext2.bz2 ext3 ext3.gz ext2.lzma btrfs live squashfs squashfs-lzma ubi tar tar.gz tar.bz2 tar.xz cpio cpio.gz cpio.xz cpio.lzma"
        return params
    def get_temp_recipe_path(self, name):
        timestamp = datetime.date.today().isoformat()
        image_file = "hob-%s-variant-%s.bb" % (name, timestamp)
        recipepath = os.path.join(self.image_dir, image_file)
        return recipepath

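For context, event_handle_idle_func() above follows the usual glib idle-callback pattern: it drains queued BitBake events and returns True so it stays scheduled. A minimal sketch of how such a handler is typically attached to the main loop is shown below; it is illustrative only and not part of this change, and the handler, eventHandler, running_build and pbar names are assumptions.

# Illustrative sketch (not part of this diff): hook the idle function into the
# glib main loop so BitBake events are processed whenever the UI is idle.
import gobject

def attach_event_loop(handler, eventHandler, running_build, pbar):
    # event_handle_idle_func returns True, so glib keeps rescheduling it
    # until the source is removed with gobject.source_remove().
    source_id = gobject.idle_add(handler.event_handle_idle_func,
                                 eventHandler, running_build, pbar)
    return source_id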
@@ -1,765 +0,0 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
|
||||
#
|
||||
# PackageListModel
|
||||
#
|
||||
class PackageListModel(gtk.TreeStore):
|
||||
"""
|
||||
This class defines an gtk.TreeStore subclass which will convert the output
|
||||
of the bb.event.TargetsTreeGenerated event into a gtk.TreeStore whilst also
|
||||
providing convenience functions to access gtk.TreeModel subclasses which
|
||||
provide filtered views of the data.
|
||||
"""
|
||||
(COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_BINB, COL_INC) = range(11)
|
||||
|
||||
__gsignals__ = {
|
||||
"packagelist-populated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
"package-selection-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.contents = None
|
||||
self.images = None
|
||||
self.pkgs_size = 0
|
||||
self.pn_path = {}
|
||||
self.pkg_path = {}
|
||||
|
||||
gtk.TreeStore.__init__ (self,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_BOOLEAN)
|
||||
|
||||
|
||||
"""
|
||||
Find the model path for the item_name
|
||||
Returns the path in the model or None
|
||||
"""
|
||||
def find_path_for_item(self, item_name):
|
||||
if item_name not in self.pkg_path.keys():
|
||||
return None
|
||||
else:
|
||||
return self.pkg_path[item_name]
|
||||
|
||||
def find_item_for_path(self, item_path):
|
||||
return self[item_path][self.COL_NAME]
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is an item specified by filter
|
||||
"""
|
||||
def tree_model_filter(self, model, it, filter):
|
||||
for key in filter.keys():
|
||||
if model.get_value(it, key) not in filter[key]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModelSort
|
||||
containing only the items specified by filter
|
||||
"""
|
||||
def tree_model(self, filter):
|
||||
model = self.filter_new()
|
||||
model.set_visible_func(self.tree_model_filter, filter)
|
||||
|
||||
sort = gtk.TreeModelSort(model)
|
||||
sort.set_sort_column_id(PackageListModel.COL_NAME, gtk.SORT_ASCENDING)
|
||||
sort.set_default_sort_func(None)
|
||||
return sort
|
||||
|
||||
def convert_vpath_to_path(self, view_model, view_path):
|
||||
# view_model is the model sorted
|
||||
# get the path of the model filtered
|
||||
filtered_model_path = view_model.convert_path_to_child_path(view_path)
|
||||
# get the model filtered
|
||||
filtered_model = view_model.get_model()
|
||||
# get the path of the original model
|
||||
path = filtered_model.convert_path_to_child_path(filtered_model_path)
|
||||
return path
|
||||
|
||||
def convert_path_to_vpath(self, view_model, path):
|
||||
name = self.find_item_for_path(path)
|
||||
it = view_model.get_iter_first()
|
||||
while it:
|
||||
child_it = view_model.iter_children(it)
|
||||
while child_it:
|
||||
view_name = view_model.get_value(child_it, self.COL_NAME)
|
||||
if view_name == name:
|
||||
view_path = view_model.get_path(child_it)
|
||||
return view_path
|
||||
child_it = view_model.iter_next(child_it)
|
||||
it = view_model.iter_next(it)
|
||||
return None
|
||||
|
||||
"""
|
||||
The populate() function takes as input the data from a
|
||||
bb.event.PackageInfo event and populates the package list.
|
||||
Once the population is done it emits gsignal packagelist-populated
|
||||
to notify any listeners that the model is ready
|
||||
"""
|
||||
def populate(self, pkginfolist):
|
||||
self.clear()
|
||||
self.pkgs_size = 0
|
||||
self.pn_path = {}
|
||||
self.pkg_path = {}
|
||||
|
||||
for pkginfo in pkginfolist:
|
||||
pn = pkginfo['PN']
|
||||
pv = pkginfo['PV']
|
||||
pr = pkginfo['PR']
|
||||
if pn in self.pn_path.keys():
|
||||
pniter = self.get_iter(self.pn_path[pn])
|
||||
else:
|
||||
pniter = self.append(None)
|
||||
self.set(pniter, self.COL_NAME, pn + '-' + pv + '-' + pr,
|
||||
self.COL_INC, False)
|
||||
self.pn_path[pn] = self.get_path(pniter)
|
||||
|
||||
pkg = pkginfo['PKG']
|
||||
pkgv = pkginfo['PKGV']
|
||||
pkgr = pkginfo['PKGR']
|
||||
pkgsize = pkginfo['PKGSIZE_%s' % pkg] if 'PKGSIZE_%s' % pkg in pkginfo.keys() else "0"
|
||||
pkg_rename = pkginfo['PKG_%s' % pkg] if 'PKG_%s' % pkg in pkginfo.keys() else ""
|
||||
section = pkginfo['SECTION_%s' % pkg] if 'SECTION_%s' % pkg in pkginfo.keys() else ""
|
||||
summary = pkginfo['SUMMARY_%s' % pkg] if 'SUMMARY_%s' % pkg in pkginfo.keys() else ""
|
||||
rdep = pkginfo['RDEPENDS_%s' % pkg] if 'RDEPENDS_%s' % pkg in pkginfo.keys() else ""
|
||||
rrec = pkginfo['RRECOMMENDS_%s' % pkg] if 'RRECOMMENDS_%s' % pkg in pkginfo.keys() else ""
|
||||
rprov = pkginfo['RPROVIDES_%s' % pkg] if 'RPROVIDES_%s' % pkg in pkginfo.keys() else ""
|
||||
|
||||
if 'ALLOW_EMPTY_%s' % pkg in pkginfo.keys():
|
||||
allow_empty = pkginfo['ALLOW_EMPTY_%s' % pkg]
|
||||
elif 'ALLOW_EMPTY' in pkginfo.keys():
|
||||
allow_empty = pkginfo['ALLOW_EMPTY']
|
||||
else:
|
||||
allow_empty = ""
|
||||
|
||||
if pkgsize == "0" and not allow_empty:
|
||||
continue
|
||||
|
||||
if len(pkgsize) > 3:
|
||||
size = '%.1f' % (int(pkgsize)*1.0/1024) + ' MB'
|
||||
else:
|
||||
size = pkgsize + ' KB'
|
||||
|
||||
it = self.append(pniter)
|
||||
self.pkg_path[pkg] = self.get_path(it)
|
||||
self.set(it, self.COL_NAME, pkg, self.COL_VER, pkgv,
|
||||
self.COL_REV, pkgr, self.COL_RNM, pkg_rename,
|
||||
self.COL_SEC, section, self.COL_SUM, summary,
|
||||
self.COL_RDEP, rdep + ' ' + rrec,
|
||||
self.COL_RPROV, rprov, self.COL_SIZE, size,
|
||||
self.COL_BINB, "", self.COL_INC, False)
|
||||
|
||||
self.emit("packagelist-populated")
|
||||
|
||||
"""
|
||||
Check whether the item at item_path is included or not
|
||||
"""
|
||||
def path_included(self, item_path):
|
||||
return self[item_path][self.COL_INC]
|
||||
|
||||
"""
|
||||
Update the model, send out the notification.
|
||||
"""
|
||||
def selection_change_notification(self):
|
||||
self.emit("package-selection-changed")
|
||||
|
||||
"""
|
||||
Mark a certain package as selected.
|
||||
All its dependencies are marked as selected.
|
||||
The recipe provides the package is marked as selected.
|
||||
If user explicitly selects a recipe, all its providing packages are selected
|
||||
"""
|
||||
def include_item(self, item_path, binb=""):
|
||||
if self.path_included(item_path):
|
||||
return
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_rdep = self[item_path][self.COL_RDEP]
|
||||
|
||||
self[item_path][self.COL_INC] = True
|
||||
|
||||
self.selection_change_notification()
|
||||
|
||||
it = self.get_iter(item_path)
|
||||
|
||||
# If user explicitly selects a recipe, all its providing packages are selected.
|
||||
if not self[item_path][self.COL_VER] and binb == "User Selected":
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
child_path = self.get_path(child_it)
|
||||
child_included = self.path_included(child_path)
|
||||
if not child_included:
|
||||
self.include_item(child_path, binb="User Selected")
|
||||
child_it = self.iter_next(child_it)
|
||||
return
|
||||
|
||||
# The recipe provides the package is also marked as selected
|
||||
parent_it = self.iter_parent(it)
|
||||
if parent_it:
|
||||
parent_path = self.get_path(parent_it)
|
||||
self[parent_path][self.COL_INC] = True
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if binb and not binb in item_bin:
|
||||
item_bin.append(binb)
|
||||
self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
|
||||
|
||||
if item_rdep:
|
||||
# Ensure all of the items deps are included and, where appropriate,
|
||||
# add this item to their COL_BINB
|
||||
for dep in item_rdep.split(" "):
|
||||
if dep.startswith('('):
|
||||
continue
|
||||
# If the contents model doesn't already contain dep, add it
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_included = self.path_included(dep_path)
|
||||
|
||||
if dep_included and not dep in item_bin:
|
||||
# don't set the COL_BINB to this item if the target is an
|
||||
# item in our own COL_BINB
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if not item_name in dep_bin:
|
||||
dep_bin.append(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
elif not dep_included:
|
||||
self.include_item(dep_path, binb=item_name)
|
||||
|
||||
"""
|
||||
Mark a certain package as de-selected.
|
||||
All other packages that depends on this package are marked as de-selected.
|
||||
If none of the packages provided by the recipe, the recipe should be marked as de-selected.
|
||||
If user explicitly de-select a recipe, all its providing packages are de-selected.
|
||||
"""
|
||||
def exclude_item(self, item_path):
|
||||
if not self.path_included(item_path):
|
||||
return
|
||||
|
||||
self[item_path][self.COL_INC] = False
|
||||
|
||||
self.selection_change_notification()
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_rdep = self[item_path][self.COL_RDEP]
|
||||
it = self.get_iter(item_path)
|
||||
|
||||
# If user explicitly de-select a recipe, all its providing packages are de-selected.
|
||||
if not self[item_path][self.COL_VER]:
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
child_path = self.get_path(child_it)
|
||||
child_included = self[child_path][self.COL_INC]
|
||||
if child_included:
|
||||
self.exclude_item(child_path)
|
||||
child_it = self.iter_next(child_it)
|
||||
return
|
||||
|
||||
# If none of the packages provided by the recipe, the recipe should be marked as de-selected.
|
||||
parent_it = self.iter_parent(it)
|
||||
peer_iter = self.iter_children(parent_it)
|
||||
enabled = 0
|
||||
while peer_iter:
|
||||
peer_path = self.get_path(peer_iter)
|
||||
if self[peer_path][self.COL_INC]:
|
||||
enabled = 1
|
||||
break
|
||||
peer_iter = self.iter_next(peer_iter)
|
||||
if not enabled:
|
||||
parent_path = self.get_path(parent_it)
|
||||
self[parent_path][self.COL_INC] = False
|
||||
|
||||
# All packages that depends on this package are de-selected.
|
||||
if item_rdep:
|
||||
for dep in item_rdep.split(" "):
|
||||
if dep.startswith('('):
|
||||
continue
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if item_name in dep_bin:
|
||||
dep_bin.remove(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if item_bin:
|
||||
for binb in item_bin:
|
||||
binb_path = self.find_path_for_item(binb)
|
||||
if not binb_path:
|
||||
continue
|
||||
self.exclude_item(binb_path)
|
||||
|
||||
"""
|
||||
Package model may be incomplete, therefore when calling the
|
||||
set_selected_packages(), some packages will not be set included.
|
||||
Return the un-set packages list.
|
||||
"""
|
||||
def set_selected_packages(self, packagelist):
|
||||
left = []
|
||||
for pn in packagelist:
|
||||
if pn in self.pkg_path.keys():
|
||||
path = self.pkg_path[pn]
|
||||
self.include_item(item_path=path,
|
||||
binb="User Selected")
|
||||
else:
|
||||
left.append(pn)
|
||||
|
||||
return left
|
||||
|
||||
def get_selected_packages(self):
|
||||
packagelist = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
if self.get_value(child_it, self.COL_INC):
|
||||
name = self.get_value(child_it, self.COL_NAME)
|
||||
packagelist.append(name)
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return packagelist
|
||||
|
||||
"""
|
||||
Return the selected package size, unit is KB.
|
||||
"""
|
||||
def get_packages_size(self):
|
||||
packages_size = 0
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
if self.get_value(child_it, self.COL_INC):
|
||||
str_size = self.get_value(child_it, self.COL_SIZE)
|
||||
if not str_size:
|
||||
continue
|
||||
|
||||
unit = str_size.split()
|
||||
if unit[1] == 'MB':
|
||||
size = float(unit[0])*1024
|
||||
else:
|
||||
size = float(unit[0])
|
||||
packages_size += size
|
||||
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
return "%f" % packages_size
|
||||
|
||||
"""
|
||||
Empty self.contents by setting the include of each entry to None
|
||||
"""
|
||||
def reset(self):
|
||||
self.pkgs_size = 0
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
self.set(it, self.COL_INC, False)
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
self.set(child_it,
|
||||
self.COL_INC, False,
|
||||
self.COL_BINB, "")
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
|
||||
self.selection_change_notification()
|
||||
|
||||
#
|
||||
# RecipeListModel
|
||||
#
|
||||
class RecipeListModel(gtk.ListStore):
|
||||
"""
|
||||
This class defines an gtk.ListStore subclass which will convert the output
|
||||
of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
|
||||
providing convenience functions to access gtk.TreeModel subclasses which
|
||||
provide filtered views of the data.
|
||||
"""
|
||||
(COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_INSTALL, COL_PN) = range(11)
|
||||
|
||||
__dummy_image__ = "--select a base image--"
|
||||
|
||||
__gsignals__ = {
|
||||
"recipelist-populated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
"recipe-selection-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
}
|
||||
|
||||
"""
|
||||
"""
|
||||
def __init__(self):
|
||||
gtk.ListStore.__init__ (self,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING)
|
||||
|
||||
"""
|
||||
Find the model path for the item_name
|
||||
Returns the path in the model or None
|
||||
"""
|
||||
def find_path_for_item(self, item_name):
|
||||
if self.non_target_name(item_name) or item_name not in self.pn_path.keys():
|
||||
return None
|
||||
else:
|
||||
return self.pn_path[item_name]
|
||||
|
||||
def find_item_for_path(self, item_path):
|
||||
return self[item_path][self.COL_NAME]
|
||||
|
||||
"""
|
||||
Helper method to determine whether name is a target pn
|
||||
"""
|
||||
def non_target_name(self, name):
|
||||
if name and ('-native' in name):
|
||||
return True
|
||||
return False
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is an item specified by filter
|
||||
"""
|
||||
def tree_model_filter(self, model, it, filter):
|
||||
name = model.get_value(it, self.COL_NAME)
|
||||
if self.non_target_name(name):
|
||||
return False
|
||||
|
||||
for key in filter.keys():
|
||||
if model.get_value(it, key) not in filter[key]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def sort_func(self, model, iter1, iter2):
|
||||
val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
|
||||
val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
|
||||
return val1 > val2
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModelSort
|
||||
containing only the items which are items specified by filter
|
||||
"""
|
||||
def tree_model(self, filter):
|
||||
model = self.filter_new()
|
||||
model.set_visible_func(self.tree_model_filter, filter)
|
||||
|
||||
sort = gtk.TreeModelSort(model)
|
||||
sort.set_default_sort_func(self.sort_func)
|
||||
return sort
|
||||
|
||||
def convert_vpath_to_path(self, view_model, view_path):
|
||||
filtered_model_path = view_model.convert_path_to_child_path(view_path)
|
||||
filtered_model = view_model.get_model()
|
||||
|
||||
# get the path of the original model
|
||||
path = filtered_model.convert_path_to_child_path(filtered_model_path)
|
||||
return path
|
||||
|
||||
def convert_path_to_vpath(self, view_model, path):
|
||||
it = view_model.get_iter_first()
|
||||
while it:
|
||||
name = self.find_item_for_path(path)
|
||||
view_name = view_model.get_value(it, RecipeListModel.COL_NAME)
|
||||
if view_name == name:
|
||||
view_path = view_model.get_path(it)
|
||||
return view_path
|
||||
it = view_model.iter_next(it)
|
||||
return None
|
||||
|
||||
def map_runtime(self, event_model, runtime, rdep_type, name):
|
||||
if rdep_type not in ['pkg', 'pn'] or runtime not in ['rdepends', 'rrecs']:
|
||||
return
|
||||
package_depends = event_model["%s-%s" % (runtime, rdep_type)].get(name, [])
|
||||
pn_depends = []
|
||||
for package_depend in package_depends:
|
||||
if 'task-' not in package_depend and package_depend in event_model["packages"].keys():
|
||||
pn_depends.append(event_model["packages"][package_depend]["pn"])
|
||||
else:
|
||||
pn_depends.append(package_depend)
|
||||
return list(set(pn_depends))
|
||||
|
||||
def subpkg_populate(self, event_model, pkg, desc, lic, group, atype, pn):
|
||||
pn_depends = self.map_runtime(event_model, "rdepends", "pkg", pkg)
|
||||
pn_depends += self.map_runtime(event_model, "rrecs", "pkg", pkg)
|
||||
self.set(self.append(), self.COL_NAME, pkg, self.COL_DESC, desc,
|
||||
self.COL_LIC, lic, self.COL_GROUP, group,
|
||||
self.COL_DEPS, " ".join(pn_depends), self.COL_BINB, "",
|
||||
self.COL_TYPE, atype, self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, pn)
|
||||
|
||||
"""
|
||||
The populate() function takes as input the data from a
|
||||
bb.event.TargetsTreeGenerated event and populates the RecipeList.
|
||||
Once the population is done it emits gsignal recipelist-populated
|
||||
to notify any listeners that the model is ready
|
||||
"""
|
||||
def populate(self, event_model):
|
||||
# First clear the model, in case repopulating
|
||||
self.clear()
|
||||
|
||||
# dummy image for prompt
|
||||
self.set(self.append(), self.COL_NAME, self.__dummy_image__,
|
||||
self.COL_DESC, "",
|
||||
self.COL_LIC, "", self.COL_GROUP, "",
|
||||
self.COL_DEPS, "", self.COL_BINB, "",
|
||||
self.COL_TYPE, "image", self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, self.__dummy_image__)
|
||||
|
||||
for item in event_model["pn"]:
|
||||
name = item
|
||||
desc = event_model["pn"][item]["description"]
|
||||
lic = event_model["pn"][item]["license"]
|
||||
group = event_model["pn"][item]["section"]
|
||||
install = []
|
||||
|
||||
if ('task-' in name):
|
||||
if ('lib32-' in name or 'lib64-' in name):
|
||||
atype = 'mltask'
|
||||
else:
|
||||
atype = 'task'
|
||||
for pkg in event_model["pn"][name]["packages"]:
|
||||
self.subpkg_populate(event_model, pkg, desc, lic, group, atype, name)
|
||||
continue
|
||||
|
||||
elif ('-image-' in name):
|
||||
atype = 'image'
|
||||
depends = event_model["depends"].get(item, [])
|
||||
rdepends = self.map_runtime(event_model, 'rdepends', 'pn', name)
|
||||
depends = depends + rdepends
|
||||
install = event_model["rdepends-pn"].get(item, [])
|
||||
|
||||
elif ('meta-' in name):
|
||||
atype = 'toolchain'
|
||||
|
||||
elif (name == 'dummy-image' or name == 'dummy-toolchain'):
|
||||
atype = 'dummy'
|
||||
|
||||
else:
|
||||
if ('lib32-' in name or 'lib64-' in name):
|
||||
atype = 'mlrecipe'
|
||||
else:
|
||||
atype = 'recipe'
|
||||
depends = event_model["depends"].get(item, [])
|
||||
depends += self.map_runtime(event_model, 'rdepends', 'pn', item)
|
||||
for pkg in event_model["pn"][name]["packages"]:
|
||||
depends += self.map_runtime(event_model, 'rdepends', 'pkg', item)
|
||||
depends += self.map_runtime(event_model, 'rrecs', 'pkg', item)
|
||||
|
||||
self.set(self.append(), self.COL_NAME, item, self.COL_DESC, desc,
|
||||
self.COL_LIC, lic, self.COL_GROUP, group,
|
||||
self.COL_DEPS, " ".join(depends), self.COL_BINB, "",
|
||||
self.COL_TYPE, atype, self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, " ".join(install), self.COL_PN, item)
|
||||
|
||||
self.pn_path = {}
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
pn = self.get_value(it, self.COL_NAME)
|
||||
path = self.get_path(it)
|
||||
self.pn_path[pn] = path
|
||||
it = self.iter_next(it)
|
||||
|
||||
self.emit("recipelist-populated")
|
||||
|
||||
"""
|
||||
Update the model, send out the notification.
|
||||
"""
|
||||
def selection_change_notification(self):
|
||||
self.emit("recipe-selection-changed")
|
||||
|
||||
def path_included(self, item_path):
|
||||
return self[item_path][self.COL_INC]
|
||||
|
||||
"""
|
||||
Append a certain image into the combobox
|
||||
"""
|
||||
def image_list_append(self, name, deps, install):
|
||||
# check whether a certain image is there
|
||||
if not name or self.find_path_for_item(name):
|
||||
return
|
||||
it = self.append()
|
||||
self.set(it, self.COL_NAME, name, self.COL_DESC, "",
|
||||
self.COL_LIC, "", self.COL_GROUP, "",
|
||||
self.COL_DEPS, deps, self.COL_BINB, "",
|
||||
self.COL_TYPE, "image", self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, install,
|
||||
self.COL_PN, name)
|
||||
self.pn_path[name] = self.get_path(it)
|
||||
|
||||
"""
|
||||
Add this item, and any of its dependencies, to the image contents
|
||||
"""
|
||||
def include_item(self, item_path, binb="", image_contents=False):
|
||||
if self.path_included(item_path):
|
||||
return
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_deps = self[item_path][self.COL_DEPS]
|
||||
|
||||
self[item_path][self.COL_INC] = True
|
||||
self.selection_change_notification()
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if binb and not binb in item_bin:
|
||||
item_bin.append(binb)
|
||||
self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
|
||||
|
||||
# We want to do some magic with things which are brought in by the
|
||||
# base image so tag them as so
|
||||
if image_contents:
|
||||
self[item_path][self.COL_IMG] = True
|
||||
|
||||
if item_deps:
|
||||
# Ensure all of the items deps are included and, where appropriate,
|
||||
# add this item to their COL_BINB
|
||||
for dep in item_deps.split(" "):
|
||||
# If the contents model doesn't already contain dep, add it
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_included = self.path_included(dep_path)
|
||||
|
||||
if dep_included and not dep in item_bin:
|
||||
# don't set the COL_BINB to this item if the target is an
|
||||
# item in our own COL_BINB
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if not item_name in dep_bin:
|
||||
dep_bin.append(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
elif not dep_included:
|
||||
self.include_item(dep_path, binb=item_name, image_contents=image_contents)
|
||||
|
||||
def exclude_item(self, item_path):
|
||||
if not self.path_included(item_path):
|
||||
return
|
||||
|
||||
self[item_path][self.COL_INC] = False
|
||||
|
||||
self.selection_change_notification()
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_deps = self[item_path][self.COL_DEPS]
|
||||
if item_deps:
|
||||
for dep in item_deps.split(" "):
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if item_name in dep_bin:
|
||||
dep_bin.remove(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if item_bin:
|
||||
for binb in item_bin:
|
||||
binb_path = self.find_path_for_item(binb)
|
||||
if not binb_path:
|
||||
continue
|
||||
self.exclude_item(binb_path)
|
||||
|
||||
def reset(self):
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
self.set(it,
|
||||
self.COL_INC, False,
|
||||
self.COL_BINB, "",
|
||||
self.COL_IMG, False)
|
||||
it = self.iter_next(it)
|
||||
|
||||
self.selection_change_notification()
|
||||
|
||||
"""
|
||||
Returns two lists. One of user selected recipes and the other containing
|
||||
all selected recipes
|
||||
"""
|
||||
def get_selected_recipes(self):
|
||||
allrecipes = []
|
||||
userrecipes = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
name = self.get_value(it, self.COL_PN)
|
||||
type = self.get_value(it, self.COL_TYPE)
|
||||
if type != "image":
|
||||
allrecipes.append(name)
|
||||
sel = "User Selected" in self.get_value(it, self.COL_BINB)
|
||||
if sel:
|
||||
userrecipes.append(name)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return list(set(userrecipes)), list(set(allrecipes))
|
||||
|
||||
def set_selected_recipes(self, recipelist):
|
||||
for pn in recipelist:
|
||||
if pn in self.pn_path.keys():
|
||||
path = self.pn_path[pn]
|
||||
self.include_item(item_path=path,
|
||||
binb="User Selected")
|
||||
|
||||
def get_selected_image(self):
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
name = self.get_value(it, self.COL_PN)
|
||||
type = self.get_value(it, self.COL_TYPE)
|
||||
if type == "image":
|
||||
sel = "User Selected" in self.get_value(it, self.COL_BINB)
|
||||
if sel:
|
||||
return name
|
||||
it = self.iter_next(it)
|
||||
return None
|
||||
|
||||
def set_selected_image(self, img):
|
||||
if img == None:
|
||||
return
|
||||
path = self.find_path_for_item(img)
|
||||
self.include_item(item_path=path,
|
||||
binb="User Selected",
|
||||
image_contents=True)
|
||||
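Both list models expose the same filtering API: tree_model(filter) takes a dict that maps a column constant to the values a row must have in order to remain visible, and returns a sorted, filtered view suitable for a gtk.TreeView. The sketch below is illustrative only and assumes an already-populated RecipeListModel instance named recipe_model; it is not taken from this change.

# Illustrative sketch (not part of this diff): show only plain recipes in a view.
import gtk

def recipe_view(recipe_model):
    # Keep rows whose COL_TYPE is one of the listed values.
    filtered = recipe_model.tree_model({RecipeListModel.COL_TYPE: ['recipe', 'mlrecipe']})
    view = gtk.TreeView(filtered)
    col = gtk.TreeViewColumn("Recipe", gtk.CellRendererText(),
                             text=RecipeListModel.COL_NAME)
    view.append_column(col)
    return view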
@@ -1,87 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import hwc
|
||||
|
||||
#
|
||||
# HobPage: the super class for all Hob-related pages
|
||||
#
|
||||
class HobPage (gtk.VBox):
|
||||
|
||||
def __init__(self, builder, title = None):
|
||||
super(HobPage, self).__init__(False, 0)
|
||||
self.builder = builder
|
||||
self.builder_width, self.builder_height = self.builder.size_request()
|
||||
|
||||
if title == None:
|
||||
self.title = "HOB -- Image Creator"
|
||||
else:
|
||||
self.title = title
|
||||
|
||||
self.box_group_area = gtk.VBox(False, 15)
|
||||
self.box_group_area.set_size_request(self.builder_width - 73 - 73, self.builder_height - 88 - 15 - 15)
|
||||
self.group_align = gtk.Alignment(xalign = 0, yalign=0.5, xscale=1, yscale=1)
|
||||
self.group_align.set_padding(15, 15, 73, 73)
|
||||
self.group_align.add(self.box_group_area)
|
||||
self.box_group_area.set_homogeneous(False)
|
||||
|
||||
|
||||
def add_onto_top_bar(self, widget = None, padding = 0):
|
||||
# the top button occupies 1/7 of the page height
|
||||
# setup an event box
|
||||
eventbox = gtk.EventBox()
|
||||
style = eventbox.get_style().copy()
|
||||
style.bg[gtk.STATE_NORMAL] = eventbox.get_colormap().alloc_color(HobColors.LIGHT_GRAY, False, False)
|
||||
eventbox.set_style(style)
|
||||
eventbox.set_size_request(-1, 88)
|
||||
|
||||
hbox = gtk.HBox()
|
||||
|
||||
label = gtk.Label()
|
||||
label.set_markup("<span font_desc=\'14\'>%s</span>" % self.title)
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=20)
|
||||
|
||||
if widget != None:
|
||||
# add the widget in the event box
|
||||
hbox.pack_end(widget, expand=False, fill=False, padding=padding)
|
||||
eventbox.add(hbox)
|
||||
|
||||
return eventbox
|
||||
|
||||
def span_tag(self, px="12px", weight="normal", forground="#1c1c1c"):
|
||||
span_tag = "weight=\'%s\' foreground=\'%s\' font_desc=\'%s\'" % (weight, forground, px)
|
||||
return span_tag
|
||||
|
||||
def append_toolbar_button(self, toolbar, buttonname, icon_disp, icon_hovor, tip, cb):
|
||||
# Create a button and append it on the toolbar according to button name
|
||||
icon = gtk.Image()
|
||||
icon_display = icon_disp
|
||||
icon_hover = icon_hovor
|
||||
pix_buffer = gtk.gdk.pixbuf_new_from_file(icon_display)
|
||||
icon.set_from_pixbuf(pix_buffer)
|
||||
tip_text = tip
|
||||
button = toolbar.append_element(gtk.TOOLBAR_CHILD_RADIOBUTTON, None,
|
||||
buttonname, tip_text, "Private text", icon,
|
||||
cb, None)
|
||||
return toolbar, button
|
||||
335
bitbake/lib/bb/ui/crumbs/hobprefs.py
Normal file
@@ -0,0 +1,335 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import glib
|
||||
from bb.ui.crumbs.configurator import Configurator
|
||||
|
||||
class HobPrefs(gtk.Dialog):
|
||||
"""
|
||||
"""
|
||||
def empty_combo_text(self, combo_text):
|
||||
model = combo_text.get_model()
|
||||
if model:
|
||||
model.clear()
|
||||
|
||||
def output_type_toggled_cb(self, check, handler):
|
||||
ot = check.get_label()
|
||||
enabled = check.get_active()
|
||||
if enabled:
|
||||
self.selected_image_types = handler.add_image_output_type(ot)
|
||||
else:
|
||||
self.selected_image_types = handler.remove_image_output_type(ot)
|
||||
|
||||
self.configurator.setConfVar('IMAGE_FSTYPES', "%s" % " ".join(self.selected_image_types).lstrip(" "))
|
||||
self.reload_required = True
|
||||
|
||||
def sdk_machine_combo_changed_cb(self, combo, handler):
|
||||
sdk_mach = combo.get_active_text()
|
||||
if sdk_mach != self.curr_sdk_mach:
|
||||
self.curr_sdk_mach = sdk_mach
|
||||
self.configurator.setConfVar('SDKMACHINE', sdk_mach)
|
||||
handler.set_sdk_machine(sdk_mach)
|
||||
|
||||
def update_sdk_machines(self, handler, sdk_machines):
|
||||
active = 0
|
||||
# disconnect the signal handler before updating the combo model
|
||||
if self.sdk_machine_handler_id:
|
||||
self.sdk_machine_combo.disconnect(self.sdk_machine_handler_id)
|
||||
self.sdk_machine_handler_id = None
|
||||
|
||||
self.empty_combo_text(self.sdk_machine_combo)
|
||||
for sdk_machine in sdk_machines:
|
||||
self.sdk_machine_combo.append_text(sdk_machine)
|
||||
if sdk_machine == self.curr_sdk_mach:
|
||||
self.sdk_machine_combo.set_active(active)
|
||||
active = active + 1
|
||||
|
||||
self.sdk_machine_handler_id = self.sdk_machine_combo.connect("changed", self.sdk_machine_combo_changed_cb, handler)
|
||||
|
||||
def distro_combo_changed_cb(self, combo, handler):
|
||||
distro = combo.get_active_text()
|
||||
if distro != self.curr_distro:
|
||||
self.curr_distro = distro
|
||||
self.configurator.setConfVar('DISTRO', distro)
|
||||
handler.set_distro(distro)
|
||||
self.reload_required = True
|
||||
|
||||
def update_distros(self, handler, distros):
|
||||
active = 0
|
||||
# disconnect the signal handler before updating combo model
|
||||
if self.distro_handler_id:
|
||||
self.distro_combo.disconnect(self.distro_handler_id)
|
||||
self.distro_handler_id = None
|
||||
|
||||
self.empty_combo_text(self.distro_combo)
|
||||
for distro in distros:
|
||||
self.distro_combo.append_text(distro)
|
||||
if distro == self.curr_distro:
|
||||
self.distro_combo.set_active(active)
|
||||
active = active + 1
|
||||
|
||||
self.distro_handler_id = self.distro_combo.connect("changed", self.distro_combo_changed_cb, handler)
|
||||
|
||||
def package_format_combo_changed_cb(self, combo, handler):
|
||||
package_format = combo.get_active_text()
|
||||
if package_format != self.curr_package_format:
|
||||
self.curr_package_format = package_format
|
||||
self.configurator.setConfVar('PACKAGE_CLASSES', 'package_%s' % package_format)
|
||||
handler.set_package_format(package_format)
|
||||
self.reload_required = True
|
||||
|
||||
def update_package_formats(self, handler, formats):
|
||||
active = 0
|
||||
# disconnect the signal handler before updating the model
|
||||
if self.package_handler_id:
|
||||
self.package_combo.disconnect(self.package_handler_id)
|
||||
self.package_handler_id = None
|
||||
|
||||
self.empty_combo_text(self.package_combo)
|
||||
for format in formats:
|
||||
self.package_combo.append_text(format)
|
||||
if format == self.curr_package_format:
|
||||
self.package_combo.set_active(active)
|
||||
active = active + 1
|
||||
|
||||
self.package_handler_id = self.package_combo.connect("changed", self.package_format_combo_changed_cb, handler)
|
||||
|
||||
def include_gplv3_cb(self, toggle):
|
||||
excluded = toggle.get_active()
|
||||
orig_incompatible = self.configurator.getConfVar('INCOMPATIBLE_LICENSE')
|
||||
new_incompatible = ""
|
||||
if excluded:
|
||||
if not orig_incompatible:
|
||||
new_incompatible = "GPLv3"
|
||||
elif not orig_incompatible.find('GPLv3'):
|
||||
new_incompatible = "%s GPLv3" % orig_incompatible
|
||||
else:
|
||||
new_incompatible = orig_incompatible.replace('GPLv3', '')
|
||||
|
||||
if new_incompatible != orig_incompatible:
|
||||
self.handler.set_incompatible_license(new_incompatible)
|
||||
self.configurator.setConfVar('INCOMPATIBLE_LICENSE', new_incompatible)
|
||||
self.reload_required = True
|
||||
|
||||
def change_bb_threads_cb(self, spinner):
|
||||
val = spinner.get_value_as_int()
|
||||
self.handler.set_bbthreads(val)
|
||||
self.configurator.setConfVar('BB_NUMBER_THREADS', val)
|
||||
|
||||
def change_make_threads_cb(self, spinner):
|
||||
val = spinner.get_value_as_int()
|
||||
self.handler.set_pmake(val)
|
||||
self.configurator.setConfVar('PARALLEL_MAKE', "-j %s" % val)
|
||||
|
||||
def toggle_toolchain_cb(self, check):
|
||||
enabled = check.get_active()
|
||||
toolchain = '0'
|
||||
if enabled:
|
||||
toolchain = '1'
|
||||
self.handler.toggle_toolchain(enabled)
|
||||
self.configurator.setConfVar('HOB_BUILD_TOOLCHAIN', toolchain)
|
||||
|
||||
def toggle_headers_cb(self, check):
|
||||
enabled = check.get_active()
|
||||
headers = '0'
|
||||
if enabled:
|
||||
headers = '1'
|
||||
self.handler.toggle_toolchain_headers(enabled)
|
||||
self.configurator.setConfVar('HOB_BUILD_TOOLCHAIN_HEADERS', headers)
|
||||
|
||||
def set_parent_window(self, parent):
|
||||
self.set_transient_for(parent)
|
||||
|
||||
def write_changes(self):
|
||||
self.configurator.writeConf()
|
||||
|
||||
def prefs_response_cb(self, dialog, response):
|
||||
if self.reload_required:
|
||||
glib.idle_add(self.handler.reload_data)
|
||||
self.reload_required = False
|
||||
|
||||
def __init__(self, configurator, handler, curr_sdk_mach, curr_distro, pclass,
|
||||
pmake, bbthread, selected_image_types, all_image_types,
|
||||
gplv3disabled, build_toolchain, build_toolchain_headers):
|
||||
"""
|
||||
"""
|
||||
gtk.Dialog.__init__(self, "Preferences", None,
|
||||
gtk.DIALOG_DESTROY_WITH_PARENT,
|
||||
(gtk.STOCK_CLOSE, gtk.RESPONSE_OK))
|
||||
|
||||
self.set_border_width(6)
|
||||
self.vbox.set_property("spacing", 12)
|
||||
self.action_area.set_property("spacing", 12)
|
||||
self.action_area.set_property("border-width", 6)
|
||||
|
||||
self.handler = handler
|
||||
self.configurator = configurator
|
||||
|
||||
self.curr_sdk_mach = curr_sdk_mach
|
||||
self.curr_distro = curr_distro
|
||||
self.curr_package_format = pclass
|
||||
self.pmake = pmake
|
||||
self.bbthread = bbthread
|
||||
self.selected_image_types = selected_image_types.split(" ")
|
||||
self.gplv3disabled = gplv3disabled
|
||||
self.build_toolchain = build_toolchain
|
||||
self.build_toolchain_headers = build_toolchain_headers
|
||||
|
||||
self.reload_required = False
|
||||
self.distro_handler_id = None
|
||||
self.sdk_machine_handler_id = None
|
||||
self.package_handler_id = None
|
||||
|
||||
left = gtk.SizeGroup(gtk.SIZE_GROUP_HORIZONTAL)
|
||||
right = gtk.SizeGroup(gtk.SIZE_GROUP_HORIZONTAL)
|
||||
|
||||
label = gtk.Label()
|
||||
label.set_markup("<b>Policy</b>")
|
||||
label.show()
|
||||
frame = gtk.Frame()
|
||||
frame.set_label_widget(label)
|
||||
frame.set_shadow_type(gtk.SHADOW_NONE)
|
||||
frame.show()
|
||||
self.vbox.pack_start(frame)
|
||||
pbox = gtk.VBox(False, 12)
|
||||
pbox.show()
|
||||
frame.add(pbox)
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.show()
|
||||
pbox.pack_start(hbox, expand=False, fill=False, padding=6)
|
||||
# Distro selector
|
||||
label = gtk.Label("Distribution:")
|
||||
label.show()
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=6)
|
||||
self.distro_combo = gtk.combo_box_new_text()
|
||||
self.distro_combo.set_tooltip_text("Select the Yocto distribution you would like to use")
|
||||
self.distro_combo.show()
|
||||
hbox.pack_start(self.distro_combo, expand=False, fill=False, padding=6)
|
||||
# Exclude GPLv3
|
||||
check = gtk.CheckButton("Exclude GPLv3 packages")
|
||||
check.set_tooltip_text("Check this box to prevent GPLv3 packages from being included in your image")
|
||||
check.show()
|
||||
check.set_active(self.gplv3disabled)
|
||||
check.connect("toggled", self.include_gplv3_cb)
|
||||
hbox.pack_start(check, expand=False, fill=False, padding=6)
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.show()
|
||||
pbox.pack_start(hbox, expand=False, fill=False, padding=6)
|
||||
# Package format selector
|
||||
label = gtk.Label("Package format:")
|
||||
label.show()
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=6)
|
||||
self.package_combo = gtk.combo_box_new_text()
|
||||
self.package_combo.set_tooltip_text("""The package format is that used in creation
|
||||
of the root filesystem and also dictates the package manager used in your image""")
|
||||
self.package_combo.show()
|
||||
hbox.pack_start(self.package_combo, expand=False, fill=False, padding=6)
|
||||
if all_image_types:
|
||||
# Image output type selector
|
||||
label = gtk.Label("Image output types:")
|
||||
label.show()
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=6)
|
||||
chk_cnt = 3
|
||||
for it in all_image_types.split(" "):
|
||||
chk_cnt = chk_cnt + 1
|
||||
if chk_cnt % 6 == 0:
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.show()
|
||||
pbox.pack_start(hbox, expand=False, fill=False, padding=6)
|
||||
chk = gtk.CheckButton(it)
|
||||
if it in self.selected_image_types:
|
||||
chk.set_active(True)
|
||||
chk.set_tooltip_text("Build an %s image" % it)
|
||||
chk.connect("toggled", self.output_type_toggled_cb, handler)
|
||||
chk.show()
|
||||
hbox.pack_start(chk, expand=False, fill=False, padding=3)
|
||||
# BitBake
|
||||
label = gtk.Label()
|
||||
label.set_markup("<b>BitBake</b>")
|
||||
label.show()
|
||||
frame = gtk.Frame()
|
||||
frame.set_label_widget(label)
|
||||
frame.set_shadow_type(gtk.SHADOW_NONE)
|
||||
frame.show()
|
||||
self.vbox.pack_start(frame)
|
||||
pbox = gtk.VBox(False, 12)
|
||||
pbox.show()
|
||||
frame.add(pbox)
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.show()
|
||||
pbox.pack_start(hbox, expand=False, fill=False, padding=6)
|
||||
label = gtk.Label("BitBake threads:")
|
||||
label.show()
|
||||
# NOTE: may be a good idea in future to intelligently cap the maximum
|
||||
# values but we need more data to make an educated decision, for now
|
||||
# set a high maximum as a value for upper bounds is required by the
|
||||
# gtk.Adjustment
|
||||
spin_max = 30 # seems like a high enough arbitrary number
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=6)
|
||||
bbadj = gtk.Adjustment(value=self.bbthread, lower=1, upper=spin_max, step_incr=1)
|
||||
bbspinner = gtk.SpinButton(adjustment=bbadj, climb_rate=1, digits=0)
|
||||
bbspinner.show()
|
||||
bbspinner.connect("value-changed", self.change_bb_threads_cb)
|
||||
hbox.pack_start(bbspinner, expand=False, fill=False, padding=6)
|
||||
label = gtk.Label("Make threads:")
|
||||
label.show()
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=6)
|
||||
madj = gtk.Adjustment(value=self.pmake, lower=1, upper=spin_max, step_incr=1)
|
||||
makespinner = gtk.SpinButton(adjustment=madj, climb_rate=1, digits=0)
|
||||
makespinner.connect("value-changed", self.change_make_threads_cb)
|
||||
makespinner.show()
|
||||
hbox.pack_start(makespinner, expand=False, fill=False, padding=6)
|
||||
# Toolchain
|
||||
label = gtk.Label()
|
||||
label.set_markup("<b>External Toolchain</b>")
|
||||
label.show()
|
||||
frame = gtk.Frame()
|
||||
frame.set_label_widget(label)
|
||||
frame.set_shadow_type(gtk.SHADOW_NONE)
|
||||
frame.show()
|
||||
self.vbox.pack_start(frame)
|
||||
pbox = gtk.VBox(False, 12)
|
||||
pbox.show()
|
||||
frame.add(pbox)
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.show()
|
||||
pbox.pack_start(hbox, expand=False, fill=False, padding=6)
|
||||
toolcheck = gtk.CheckButton("Build external development toolchain with image")
|
||||
toolcheck.show()
|
||||
toolcheck.set_active(self.build_toolchain)
|
||||
toolcheck.connect("toggled", self.toggle_toolchain_cb)
|
||||
hbox.pack_start(toolcheck, expand=False, fill=False, padding=6)
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.show()
|
||||
pbox.pack_start(hbox, expand=False, fill=False, padding=6)
|
||||
label = gtk.Label("Toolchain host:")
|
||||
label.show()
|
||||
hbox.pack_start(label, expand=False, fill=False, padding=6)
|
||||
self.sdk_machine_combo = gtk.combo_box_new_text()
|
||||
self.sdk_machine_combo.set_tooltip_text("Select the host architecture of the external toolchain")
|
||||
self.sdk_machine_combo.show()
|
||||
hbox.pack_start(self.sdk_machine_combo, expand=False, fill=False, padding=6)
|
||||
# headerscheck = gtk.CheckButton("Include development headers with toolchain")
|
||||
# headerscheck.show()
|
||||
# headerscheck.set_active(self.build_toolchain_headers)
|
||||
# headerscheck.connect("toggled", self.toggle_headers_cb)
|
||||
# hbox.pack_start(headerscheck, expand=False, fill=False, padding=6)
|
||||
self.connect("response", self.prefs_response_cb)
|
||||
@@ -1,805 +0,0 @@
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
import os.path
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
|
||||
class hwc:
|
||||
|
||||
MAIN_WIN_WIDTH = 1024
|
||||
MAIN_WIN_HEIGHT = 700
|
||||
|
||||
class hic:
|
||||
|
||||
HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("ui/icons/"))
|
||||
|
||||
ICON_RCIPE_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_display.png'))
|
||||
ICON_RCIPE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_hover.png'))
|
||||
ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_display.png'))
|
||||
ICON_PACKAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_hover.png'))
|
||||
ICON_LAYERS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_display.png'))
|
||||
ICON_LAYERS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_hover.png'))
|
||||
ICON_TEMPLATES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('templates/templates_display.png'))
|
||||
ICON_TEMPLATES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('templates/templates_hover.png'))
|
||||
ICON_IMAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_display.png'))
|
||||
ICON_IMAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_hover.png'))
|
||||
ICON_SETTINGS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_display.png'))
|
||||
ICON_SETTINGS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_hover.png'))
|
||||
ICON_INFO_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
|
||||
ICON_INFO_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_hover.png'))
|
||||
ICON_INDI_CONFIRM_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/confirmation.png'))
|
||||
ICON_INDI_ERROR_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/error.png'))
|
||||
|
||||
class HobWidget:
|
||||
@classmethod
|
||||
def resize_widget(cls, screen, widget, widget_width, widget_height):
|
||||
screen_width, screen_height = screen.get_size_request()
|
||||
ratio_height = screen_width * hwc.MAIN_WIN_HEIGHT/hwc.MAIN_WIN_WIDTH
|
||||
if ratio_height < screen_height:
|
||||
screen_height = ratio_height
|
||||
widget_width = widget_width * screen_width/hwc.MAIN_WIN_WIDTH
|
||||
widget_height = widget_height * screen_height/hwc.MAIN_WIN_HEIGHT
|
||||
widget.set_size_request(widget_width, widget_height)
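To make the scaling in resize_widget concrete, here is a worked example with assumed numbers (a 1280x800 screen and a 400x300 widget; none of these values come from the code):

screen_width, screen_height = 1280, 800               # assumed screen size
widget_width, widget_height = 400, 300                # assumed widget design size
ratio_height = screen_width * 700 / 1024              # 875, not below 800, so no clamp
widget_width = widget_width * screen_width / 1024     # 400 -> 500
widget_height = widget_height * screen_height / 700   # 300 -> 342 (integer division)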
|
||||
|
||||
@classmethod
|
||||
def _toggle_cb(cls, cell, path, model, column):
|
||||
it = model.get_iter(path)
|
||||
val = model.get_value(it, column)
|
||||
val = not val
|
||||
model.set(it, column, val)
|
||||
|
||||
@classmethod
|
||||
def _pkgfmt_up_clicked_cb(cls, button, tree_selection):
|
||||
(model, it) = tree_selection.get_selected()
|
||||
if not it:
|
||||
return
|
||||
path = model.get_path(it)
|
||||
if path[0] <= 0:
|
||||
return
|
||||
|
||||
pre_it = model.get_iter_first()
|
||||
if not pre_it:
|
||||
return
|
||||
else:
|
||||
while model.iter_next(pre_it) :
|
||||
if model.get_value(model.iter_next(pre_it), 1) != model.get_value(it, 1):
|
||||
pre_it = model.iter_next(pre_it)
|
||||
else:
|
||||
break
|
||||
|
||||
cur_index = model.get_value(it, 0)
|
||||
pre_index = cur_index
|
||||
if pre_it:
|
||||
model.set(pre_it, 0, pre_index)
|
||||
cur_index = cur_index - 1
|
||||
model.set(it, 0, cur_index)
|
||||
|
||||
@classmethod
|
||||
def _pkgfmt_down_clicked_cb(cls, button, tree_selection):
|
||||
(model, it) = tree_selection.get_selected()
|
||||
if not it:
|
||||
return
|
||||
next_it = model.iter_next(it)
|
||||
if not next_it:
|
||||
return
|
||||
cur_index = model.get_value(it, 0)
|
||||
next_index = cur_index
|
||||
model.set(next_it, 0, next_index)
|
||||
cur_index = cur_index + 1
|
||||
model.set(it, 0, cur_index)
|
||||
|
||||
@classmethod
|
||||
def _tree_selection_changed_cb(cls, tree_selection, button1, button2):
|
||||
(model, it) = tree_selection.get_selected()
|
||||
inc = model.get_value(it, 2)
|
||||
if inc:
|
||||
button1.set_sensitive(True)
|
||||
button2.set_sensitive(True)
|
||||
else:
|
||||
button1.set_sensitive(False)
|
||||
button2.set_sensitive(False)
|
||||
|
||||
@classmethod
|
||||
def _sort_func(cls, model, iter1, iter2, data):
|
||||
val1 = model.get_value(iter1, 0)
|
||||
val2 = model.get_value(iter2, 0)
|
||||
inc1 = model.get_value(iter1, 2)
|
||||
inc2 = model.get_value(iter2, 2)
|
||||
if inc1 != inc2:
|
||||
return inc2 - inc1
|
||||
else:
|
||||
return val1 - val2
|
||||
|
||||
@classmethod
|
||||
def gen_pkgfmt_widget(cls, curr_package_format, all_package_format, tooltip=""):
|
||||
pkgfmt_hbox = gtk.HBox(False, 15)
|
||||
|
||||
pkgfmt_store = gtk.ListStore(int, str, gobject.TYPE_BOOLEAN)
|
||||
for format in curr_package_format.split():
|
||||
pkgfmt_store.set(pkgfmt_store.append(), 1, format, 2, True)
|
||||
for format in all_package_format:
|
||||
if format not in curr_package_format:
|
||||
pkgfmt_store.set(pkgfmt_store.append(), 1, format, 2, False)
|
||||
pkgfmt_tree = gtk.TreeView(pkgfmt_store)
|
||||
pkgfmt_tree.set_headers_clickable(True)
|
||||
pkgfmt_tree.set_headers_visible(False)
|
||||
tree_selection = pkgfmt_tree.get_selection()
|
||||
tree_selection.set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
col = gtk.TreeViewColumn('NO')
|
||||
col.set_sort_column_id(0)
|
||||
col.set_sort_order(gtk.SORT_ASCENDING)
|
||||
col.set_clickable(False)
|
||||
col1 = gtk.TreeViewColumn('TYPE')
|
||||
col1.set_min_width(130)
|
||||
col1.set_max_width(140)
|
||||
col2 = gtk.TreeViewColumn('INCLUDED')
|
||||
col2.set_min_width(60)
|
||||
col2.set_max_width(70)
|
||||
pkgfmt_tree.append_column(col1)
|
||||
pkgfmt_tree.append_column(col2)
|
||||
cell = gtk.CellRendererText()
|
||||
cell1 = gtk.CellRendererText()
|
||||
cell1.set_property('width-chars', 10)
|
||||
cell2 = gtk.CellRendererToggle()
|
||||
cell2.set_property('activatable', True)
|
||||
cell2.connect("toggled", cls._toggle_cb, pkgfmt_store, 2)
|
||||
col.pack_start(cell, True)
|
||||
col1.pack_start(cell1, True)
|
||||
col2.pack_end(cell2, True)
|
||||
col.set_attributes(cell, text=0)
|
||||
col1.set_attributes(cell1, text=1)
|
||||
col2.set_attributes(cell2, active=2)
|
||||
|
||||
pkgfmt_store.set_sort_func(0, cls._sort_func, None)
|
||||
pkgfmt_store.set_sort_column_id(0, gtk.SORT_ASCENDING)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.add(pkgfmt_tree)
|
||||
scroll.set_size_request(200,60)
|
||||
pkgfmt_hbox.pack_start(scroll, False, False, 0)
|
||||
|
||||
vbox = gtk.VBox(False, 5)
|
||||
pkgfmt_hbox.pack_start(vbox, False, False, 15)
|
||||
|
||||
up = gtk.Button()
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_GO_UP, gtk.ICON_SIZE_MENU)
|
||||
up.set_image(image)
|
||||
up.set_size_request(50,30)
|
||||
up.connect("clicked", cls._pkgfmt_up_clicked_cb, tree_selection)
|
||||
vbox.pack_start(up, False, False, 5)
|
||||
|
||||
down = gtk.Button()
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_GO_DOWN, gtk.ICON_SIZE_MENU)
|
||||
down.set_image(image)
|
||||
down.set_size_request(50,30)
|
||||
down.connect("clicked", cls._pkgfmt_down_clicked_cb, tree_selection)
|
||||
vbox.pack_start(down, False, False, 5)
|
||||
tree_selection.connect("changed", cls._tree_selection_changed_cb, up, down)
|
||||
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_BUTTON)
|
||||
image.set_tooltip_text(tooltip)
|
||||
pkgfmt_hbox.pack_start(image, expand=False, fill=False)
|
||||
|
||||
pkgfmt_hbox.show_all()
|
||||
|
||||
return pkgfmt_hbox, pkgfmt_store
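A hedged usage sketch for gen_pkgfmt_widget; the format lists are examples, and reading the store back relies on column 1 holding the format name and column 2 the included flag, as set above:

hbox, store = HobWidget.gen_pkgfmt_widget("rpm deb", ["rpm", "deb", "ipk"],
                                          tooltip="Top-to-bottom order sets priority")
included = [row[1] for row in store if row[2]]   # e.g. ["rpm", "deb"]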
|
||||
|
||||
@classmethod
|
||||
def gen_combo_widget(cls, curr_item, all_item, tooltip=""):
|
||||
hbox = gtk.HBox(False, 10)
|
||||
combo = gtk.combo_box_new_text()
|
||||
hbox.pack_start(combo, expand=False, fill=False)
|
||||
|
||||
index = 0
|
||||
for item in all_item or []:
|
||||
combo.append_text(item)
|
||||
if item == curr_item:
|
||||
combo.set_active(index)
|
||||
index += 1
|
||||
|
||||
image = gtk.Image()
|
||||
image.show()
|
||||
image.set_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_BUTTON)
|
||||
image.set_tooltip_text(tooltip)
|
||||
|
||||
hbox.pack_start(image, expand=False, fill=False)
|
||||
|
||||
hbox.show_all()
|
||||
|
||||
return hbox, combo
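An illustrative call to gen_combo_widget; the distribution names are assumptions made for the example:

row, combo = HobWidget.gen_combo_widget("poky", ["poky", "poky-lsb"],
                                        tooltip="Select the distribution")
current = combo.get_active_text()   # -> "poky"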
|
||||
|
||||
@classmethod
|
||||
def gen_label_widget(cls, content):
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0, 0)
|
||||
label.set_markup(content)
|
||||
label.show()
|
||||
return label
|
||||
|
||||
@classmethod
|
||||
def _select_path_cb(cls, action, parent, entry):
|
||||
dialog = gtk.FileChooserDialog("", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
|
||||
(gtk.STOCK_OK, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
path = dialog.get_filename()
|
||||
entry.set_text(path)
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
@classmethod
|
||||
def gen_entry_widget(cls, split_model, content, parent, tooltip=""):
|
||||
hbox = gtk.HBox(False, 10)
|
||||
entry = gtk.Entry()
|
||||
entry.set_text(content)
|
||||
|
||||
if split_model:
|
||||
hbox.pack_start(entry, expand=True, fill=True)
|
||||
else:
|
||||
table = gtk.Table(1, 10, True)
|
||||
hbox.pack_start(table, expand=True, fill=True)
|
||||
table.attach(entry, 0, 9, 0, 1)
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
|
||||
open_button = gtk.Button()
|
||||
open_button.set_image(image)
|
||||
open_button.connect("clicked", cls._select_path_cb, parent, entry)
|
||||
table.attach(open_button, 9, 10, 0, 1)
|
||||
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_BUTTON)
|
||||
image.set_tooltip_text(tooltip)
|
||||
hbox.pack_start(image, expand=False, fill=False)
|
||||
|
||||
hbox.show_all()
|
||||
|
||||
return hbox, entry
|
||||
|
||||
@classmethod
|
||||
def gen_spinner_widget(cls, content, lower, upper, tooltip=""):
|
||||
hbox = gtk.HBox(False, 10)
|
||||
adjust = gtk.Adjustment(value=content, lower=lower, upper=upper, step_incr=1)
|
||||
spinner = gtk.SpinButton(adjustment=adjust, climb_rate=1, digits=0)
|
||||
|
||||
spinner.set_value(content)
|
||||
hbox.pack_start(spinner, expand=False, fill=False)
|
||||
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_BUTTON)
|
||||
image.set_tooltip_text(tooltip)
|
||||
hbox.pack_start(image, expand=False, fill=False)
|
||||
|
||||
hbox.show_all()
|
||||
|
||||
return hbox, spinner
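A usage sketch for gen_spinner_widget, assuming a thread-count style setting:

row, spinner = HobWidget.gen_spinner_widget(4, 1, 30, tooltip="Number of BitBake threads")
threads = spinner.get_value_as_int()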
|
||||
|
||||
@classmethod
|
||||
def conf_error(cls, parent, lbl):
|
||||
dialog = CrumbsDialog(parent, lbl)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
@classmethod
|
||||
def _add_layer_cb(cls, action, layer_store, parent):
|
||||
dialog = gtk.FileChooserDialog("Add new layer", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
|
||||
(gtk.STOCK_OK, gtk.RESPONSE_YES,
|
||||
gtk.STOCK_CANCEL, gtk.RESPONSE_NO))
|
||||
label = gtk.Label("Select the layer you wish to add")
|
||||
label.show()
|
||||
dialog.set_extra_widget(label)
|
||||
response = dialog.run()
|
||||
path = dialog.get_filename()
|
||||
dialog.destroy()
|
||||
|
||||
lbl = "<b>Error</b>\nUnable to load layer <i>%s</i> because " % path
|
||||
if response == gtk.RESPONSE_YES:
|
||||
import os
|
||||
import os.path
|
||||
layers = []
|
||||
it = layer_store.get_iter_first()
|
||||
while it:
|
||||
layers.append(layer_store.get_value(it, 0))
|
||||
it = layer_store.iter_next(it)
|
||||
|
||||
if not path:
|
||||
lbl += "it is an invalid path."
|
||||
elif not os.path.exists(path+"/conf/layer.conf"):
|
||||
lbl += "there is no layer.conf inside the directory."
|
||||
elif path in layers:
|
||||
lbl += "it is already in loaded layers."
|
||||
else:
|
||||
layer_store.append([path])
|
||||
return
|
||||
cls.conf_error(parent, lbl)
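_add_layer_cb folds its three validation rules into the error message built above. The same rules restated as a standalone sketch (the helper name is hypothetical):

import os.path

def layer_add_error(path, loaded_layers):
    # Returns an error fragment, or None when the layer can be added.
    if not path:
        return "it is an invalid path."
    if not os.path.exists(os.path.join(path, "conf", "layer.conf")):
        return "there is no layer.conf inside the directory."
    if path in loaded_layers:
        return "it is already in loaded layers."
    return None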
|
||||
|
||||
@classmethod
|
||||
def _del_layer_cb(cls, action, tree_selection, layer_store):
|
||||
model, iter = tree_selection.get_selected()
|
||||
if iter:
|
||||
layer_store.remove(iter)
|
||||
|
||||
@classmethod
|
||||
def _toggle_layer_cb(cls, cell, path, layer_store):
|
||||
name = layer_store[path][0]
|
||||
toggle = not layer_store[path][1]
|
||||
layer_store[path][1] = toggle
|
||||
|
||||
@classmethod
|
||||
def gen_layer_widget(cls, split_model, layers, layers_avail, window, tooltip=""):
|
||||
hbox = gtk.HBox(False, 10)
|
||||
|
||||
layer_tv = gtk.TreeView()
|
||||
layer_tv.set_rules_hint(True)
|
||||
layer_tv.set_headers_visible(False)
|
||||
tree_selection = layer_tv.get_selection()
|
||||
tree_selection.set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
col0= gtk.TreeViewColumn('Path')
|
||||
cell0 = gtk.CellRendererText()
|
||||
cell0.set_padding(5,2)
|
||||
col0.pack_start(cell0, True)
|
||||
col0.set_attributes(cell0, text=0)
|
||||
layer_tv.append_column(col0)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.add(layer_tv)
|
||||
|
||||
table_layer = gtk.Table(2, 10, False)
|
||||
hbox.pack_start(table_layer, expand=True, fill=True)
|
||||
|
||||
if split_model:
|
||||
table_layer.attach(scroll, 0, 10, 0, 2)
|
||||
|
||||
layer_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
|
||||
for layer in layers:
|
||||
layer_store.set(layer_store.append(), 0, layer, 1, True)
|
||||
for layer in layers_avail:
|
||||
if layer not in layers:
|
||||
layer_store.set(layer_store.append(), 0, layer, 1, False)
|
||||
|
||||
col1 = gtk.TreeViewColumn('Included')
|
||||
layer_tv.append_column(col1)
|
||||
|
||||
cell1 = gtk.CellRendererToggle()
|
||||
cell1.connect("toggled", cls._toggle_layer_cb, layer_store)
|
||||
col1.pack_start(cell1, True)
|
||||
col1.set_attributes(cell1, active=1)
|
||||
|
||||
else:
|
||||
table_layer.attach(scroll, 0, 10, 0, 1)
|
||||
|
||||
layer_store = gtk.ListStore(gobject.TYPE_STRING)
|
||||
for layer in layers:
|
||||
layer_store.set(layer_store.append(), 0, layer)
|
||||
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_ADD,gtk.ICON_SIZE_MENU)
|
||||
add_button = gtk.Button()
|
||||
add_button.set_image(image)
|
||||
add_button.connect("clicked", cls._add_layer_cb, layer_store, window)
|
||||
table_layer.attach(add_button, 0, 5, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 0)
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_REMOVE,gtk.ICON_SIZE_MENU)
|
||||
del_button = gtk.Button()
|
||||
del_button.set_image(image)
|
||||
del_button.connect("clicked", cls._del_layer_cb, tree_selection, layer_store)
|
||||
table_layer.attach(del_button, 5, 10, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 0)
|
||||
layer_tv.set_model(layer_store)
|
||||
|
||||
hbox.show_all()
|
||||
|
||||
return hbox, layer_store
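An illustrative call to gen_layer_widget in the non-split-model case; the window and layer path are placeholders:

win = gtk.Window()
widget, layer_store = HobWidget.gen_layer_widget(False, ["/home/user/poky/meta"], [], win)
win.add(widget)
layer_paths = [row[0] for row in layer_store]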
|
||||
|
||||
@classmethod
|
||||
def _toggle_single_cb(cls, cell, select_path, treeview, toggle_column):
|
||||
model = treeview.get_model()
|
||||
if not model:
|
||||
return
|
||||
iter = model.get_iter_first()
|
||||
while iter:
|
||||
path = model.get_path(iter)
|
||||
model[path][toggle_column] = False
|
||||
iter = model.iter_next(iter)
|
||||
|
||||
model[select_path][toggle_column] = True
|
||||
|
||||
@classmethod
|
||||
def gen_imgtv_widget(cls, col0_width, col1_width):
|
||||
vbox = gtk.VBox(False, 10)
|
||||
|
||||
imgsel_tv = gtk.TreeView()
|
||||
imgsel_tv.set_rules_hint(True)
|
||||
imgsel_tv.set_headers_visible(False)
|
||||
tree_selection = imgsel_tv.get_selection()
|
||||
tree_selection.set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
col0= gtk.TreeViewColumn('Image name')
|
||||
cell0 = gtk.CellRendererText()
|
||||
cell0.set_padding(5,2)
|
||||
col0.pack_start(cell0, True)
|
||||
col0.set_attributes(cell0, text=0)
|
||||
col0.set_max_width(col0_width)
|
||||
col0.set_min_width(col0_width)
|
||||
imgsel_tv.append_column(col0)
|
||||
|
||||
col1= gtk.TreeViewColumn('Select')
|
||||
cell1 = gtk.CellRendererToggle()
|
||||
cell1.set_padding(5,2)
|
||||
cell1.connect("toggled", cls._toggle_single_cb, imgsel_tv, 1)
|
||||
col1.pack_start(cell1, True)
|
||||
col1.set_attributes(cell1, active=1)
|
||||
col1.set_max_width(col1_width)
|
||||
col1.set_min_width(col1_width)
|
||||
imgsel_tv.append_column(col1)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.add(imgsel_tv)
|
||||
|
||||
vbox.pack_start(scroll, expand=True, fill=True)
|
||||
|
||||
return vbox, imgsel_tv
|
||||
|
||||
@classmethod
|
||||
def gen_images_widget(cls, col0_width, col1_width, col2_width):
|
||||
vbox = gtk.VBox(False, 10)
|
||||
|
||||
imgsel_tv = gtk.TreeView()
|
||||
imgsel_tv.set_rules_hint(True)
|
||||
imgsel_tv.set_headers_visible(False)
|
||||
tree_selection = imgsel_tv.get_selection()
|
||||
tree_selection.set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
col0= gtk.TreeViewColumn('Image name')
|
||||
cell0 = gtk.CellRendererText()
|
||||
cell0.set_padding(5,2)
|
||||
col0.pack_start(cell0, True)
|
||||
col0.set_attributes(cell0, text=0)
|
||||
col0.set_max_width(col0_width)
|
||||
col0.set_min_width(col0_width)
|
||||
imgsel_tv.append_column(col0)
|
||||
|
||||
col1= gtk.TreeViewColumn('Image size')
|
||||
cell1 = gtk.CellRendererText()
|
||||
cell1.set_padding(5,2)
|
||||
col1.pack_start(cell1, True)
|
||||
col1.set_attributes(cell1, text=1)
|
||||
col1.set_max_width(col1_width)
|
||||
col1.set_min_width(col1_width)
|
||||
imgsel_tv.append_column(col1)
|
||||
|
||||
col2= gtk.TreeViewColumn('Select')
|
||||
cell2 = gtk.CellRendererToggle()
|
||||
cell2.set_padding(5,2)
|
||||
cell2.connect("toggled", cls._toggle_single_cb, imgsel_tv, 2)
|
||||
col2.pack_start(cell2, True)
|
||||
col2.set_attributes(cell2, active=2)
|
||||
col2.set_max_width(col2_width)
|
||||
col2.set_min_width(col2_width)
|
||||
imgsel_tv.append_column(col2)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.add(imgsel_tv)
|
||||
|
||||
vbox.pack_start(scroll, expand=True, fill=True)
|
||||
|
||||
return vbox, imgsel_tv
|
||||
|
||||
@classmethod
|
||||
def _on_add_item_clicked(cls, button, model):
|
||||
new_item = ["##KEY##", "##VALUE##"]
|
||||
|
||||
iter = model.append()
|
||||
model.set (iter,
|
||||
0, new_item[0],
|
||||
1, new_item[1],
|
||||
)
|
||||
|
||||
|
||||
@classmethod
|
||||
def _on_remove_item_clicked(cls, button, treeview):
|
||||
|
||||
selection = treeview.get_selection()
|
||||
model, iter = selection.get_selected()
|
||||
|
||||
if iter:
|
||||
path = model.get_path(iter)[0]
|
||||
model.remove(iter)
|
||||
|
||||
@classmethod
|
||||
def _on_cell_edited(cls, cell, path_string, new_text, model):
|
||||
it = model.get_iter_from_string(path_string)
|
||||
column = cell.get_data("column")
|
||||
model.set(it, column, new_text)
|
||||
|
||||
|
||||
@classmethod
|
||||
def gen_editable_settings(cls, setting, tooltip=""):
|
||||
setting_hbox = gtk.HBox(False, 10)
|
||||
|
||||
vbox = gtk.VBox(False, 10)
|
||||
setting_hbox.pack_start(vbox, expand=True, fill=True)
|
||||
|
||||
setting_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
|
||||
for key in setting.keys():
|
||||
setting_store.set(setting_store.append(), 0, key, 1, setting[key])
|
||||
|
||||
setting_tree = gtk.TreeView(setting_store)
|
||||
setting_tree.set_headers_visible(True)
|
||||
setting_tree.set_size_request(300, 100)
|
||||
|
||||
col = gtk.TreeViewColumn('Key')
|
||||
col.set_min_width(100)
|
||||
col.set_max_width(150)
|
||||
col.set_resizable(True)
|
||||
col1 = gtk.TreeViewColumn('Value')
|
||||
col1.set_min_width(100)
|
||||
col1.set_max_width(150)
|
||||
col1.set_resizable(True)
|
||||
setting_tree.append_column(col)
|
||||
setting_tree.append_column(col1)
|
||||
cell = gtk.CellRendererText()
|
||||
cell.set_property('width-chars', 10)
|
||||
cell.set_property('editable', True)
|
||||
cell.set_data("column", 0)
|
||||
cell.connect("edited", cls._on_cell_edited, setting_store)
|
||||
cell1 = gtk.CellRendererText()
|
||||
cell1.set_property('width-chars', 10)
|
||||
cell1.set_property('editable', True)
|
||||
cell1.set_data("column", 1)
|
||||
cell1.connect("edited", cls._on_cell_edited, setting_store)
|
||||
col.pack_start(cell, True)
|
||||
col1.pack_end(cell1, True)
|
||||
col.set_attributes(cell, text=0)
|
||||
col1.set_attributes(cell1, text=1)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scroll.add(setting_tree)
|
||||
vbox.pack_start(scroll, expand=True, fill=True)
|
||||
|
||||
# some buttons
|
||||
hbox = gtk.HBox(True, 4)
|
||||
vbox.pack_start(hbox, False, False)
|
||||
|
||||
button = gtk.Button(stock=gtk.STOCK_ADD)
|
||||
button.connect("clicked", cls._on_add_item_clicked, setting_store)
|
||||
hbox.pack_start(button)
|
||||
|
||||
button = gtk.Button(stock=gtk.STOCK_REMOVE)
|
||||
button.connect("clicked", cls._on_remove_item_clicked, setting_tree)
|
||||
hbox.pack_start(button)
|
||||
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_BUTTON)
|
||||
image.set_tooltip_text(tooltip)
|
||||
setting_hbox.pack_start(image, expand=False, fill=False)
|
||||
|
||||
return setting_hbox, setting_store
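A sketch of how gen_editable_settings might be called and read back; the variable name and value are examples only:

box, store = HobWidget.gen_editable_settings({"IMAGE_FSTYPES": "ext3 tar.bz2"},
                                             tooltip="Extra configuration variables")
edited = dict((row[0], row[1]) for row in store)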
|
||||
|
||||
class HobViewTable (gtk.VBox):
|
||||
"""
|
||||
A VBox to contain the table for different recipe views and package view
|
||||
"""
|
||||
def __init__(self, columns, reset_clicked_cb=None, toggled_cb=None):
|
||||
gtk.VBox.__init__(self, False, 6)
|
||||
self.table_tree = gtk.TreeView()
|
||||
self.table_tree.set_headers_visible(True)
|
||||
self.table_tree.set_headers_clickable(True)
|
||||
self.table_tree.set_enable_search(True)
|
||||
self.table_tree.set_search_column(0)
|
||||
self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
for i in range(len(columns)):
|
||||
col = gtk.TreeViewColumn(columns[i]['col_name'])
|
||||
col.set_clickable(True)
|
||||
col.set_resizable(True)
|
||||
col.set_sort_column_id(columns[i]['col_id'])
|
||||
col.set_min_width(columns[i]['col_min'])
|
||||
col.set_max_width(columns[i]['col_max'])
|
||||
self.table_tree.append_column(col)
|
||||
|
||||
if columns[i]['col_style'] == 'toggle':
|
||||
cell = gtk.CellRendererToggle()
|
||||
cell.set_property('activatable', True)
|
||||
cell.connect("toggled", toggled_cb, self.table_tree)
|
||||
col.pack_end(cell, True)
|
||||
col.set_attributes(cell, active=columns[i]['col_id'])
|
||||
elif columns[i]['col_style'] == 'text':
|
||||
cell = gtk.CellRendererText()
|
||||
col.pack_start(cell, True)
|
||||
col.set_attributes(cell, text=columns[i]['col_id'])
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.add(self.table_tree)
|
||||
self.pack_start(scroll, True, True, 0)
|
||||
|
||||
hbox = gtk.HBox(False, 5)
|
||||
button = gtk.Button("Reset")
|
||||
button.connect('clicked', reset_clicked_cb)
|
||||
hbox.pack_end(button, False, False, 0)
|
||||
|
||||
self.pack_start(hbox, False, False, 0)
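HobViewTable reads a fixed set of keys from each column descriptor in the loop above (col_name, col_id, col_style, col_min, col_max). An illustrative descriptor list and constructor call; the callbacks are stubs for the example:

columns = [
    {'col_name': 'Package',  'col_id': 0, 'col_style': 'text',   'col_min': 100, 'col_max': 300},
    {'col_name': 'Included', 'col_id': 1, 'col_style': 'toggle', 'col_min': 50,  'col_max': 80},
]

def reset_cb(button):
    pass

def toggled_cb(cell, path, tree):
    pass

table = HobViewTable(columns, reset_clicked_cb=reset_cb, toggled_cb=toggled_cb)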
|
||||
|
||||
class HobViewBar (gtk.EventBox):
|
||||
"""
|
||||
A EventBox with the specified gray background color is associated with a notebook.
|
||||
And the toolbar to simulate the tabs.
|
||||
"""
|
||||
|
||||
def __init__(self, notebook):
|
||||
if not notebook:
|
||||
return
|
||||
self.notebook = notebook
|
||||
|
||||
# setup an event box
|
||||
gtk.EventBox.__init__(self)
|
||||
self.set_border_width(2)
|
||||
style = self.get_style().copy()
|
||||
style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color (HobColors.GRAY, False, False)
|
||||
self.set_style(style)
|
||||
|
||||
hbox = gtk.HBox()
|
||||
self.add(hbox)
|
||||
|
||||
# setup a tool bar in the event box
|
||||
self.toolbar = gtk.Toolbar()
|
||||
self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
|
||||
self.toolbar.set_style(gtk.TOOLBAR_TEXT)
|
||||
self.toolbar.set_border_width(5)
|
||||
|
||||
self.toolbuttons = []
|
||||
for index in range(self.notebook.get_n_pages()):
|
||||
child = self.notebook.get_nth_page(index)
|
||||
label = self.notebook.get_tab_label_text(child)
|
||||
tip_text = 'switch to ' + label + ' page'
|
||||
toolbutton = self.toolbar.append_element(gtk.TOOLBAR_CHILD_RADIOBUTTON, None,
|
||||
label, tip_text, "Private text", None,
|
||||
self.toolbutton_cb, index)
|
||||
toolbutton.set_size_request(200, 100)
|
||||
self.toolbuttons.append(toolbutton)
|
||||
|
||||
# set the default current page
|
||||
self.modify_toolbuttons_bg(0)
|
||||
self.notebook.set_current_page(0)
|
||||
|
||||
self.toolbar.append_space()
|
||||
|
||||
# add the tool bar into the event box
|
||||
hbox.pack_start(self.toolbar, expand=False, fill=False)
|
||||
|
||||
self.search = gtk.Entry()
|
||||
self.align = gtk.Alignment(xalign=0.5, yalign=0.5)
|
||||
self.align.add(self.search)
|
||||
hbox.pack_end(self.align, expand=False, fill=False)
|
||||
|
||||
self.label = gtk.Label(" Search: ")
|
||||
self.label.set_alignment(0.5, 0.5)
|
||||
hbox.pack_end(self.label, expand=False, fill=False)
|
||||
|
||||
def toolbutton_cb(self, widget, index):
|
||||
if index >= self.notebook.get_n_pages():
|
||||
return
|
||||
self.notebook.set_current_page(index)
|
||||
self.modify_toolbuttons_bg(index)
|
||||
|
||||
def modify_toolbuttons_bg(self, index):
|
||||
if index >= len(self.toolbuttons):
|
||||
return
|
||||
for i in range(0, len(self.toolbuttons)):
|
||||
toolbutton = self.toolbuttons[i]
|
||||
if i == index:
|
||||
self.modify_toolbutton_bg(toolbutton, True)
|
||||
else:
|
||||
self.modify_toolbutton_bg(toolbutton)
|
||||
|
||||
def modify_toolbutton_bg(self, toolbutton, active=False):
|
||||
if active:
|
||||
toolbutton.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.WHITE))
|
||||
toolbutton.modify_bg(gtk.STATE_ACTIVE, gtk.gdk.Color(HobColors.WHITE))
|
||||
toolbutton.modify_bg(gtk.STATE_SELECTED, gtk.gdk.Color(HobColors.WHITE))
|
||||
toolbutton.modify_bg(gtk.STATE_PRELIGHT, gtk.gdk.Color(HobColors.WHITE))
|
||||
else:
|
||||
toolbutton.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.GRAY))
|
||||
toolbutton.modify_bg(gtk.STATE_ACTIVE, gtk.gdk.Color(HobColors.GRAY))
|
||||
toolbutton.modify_bg(gtk.STATE_SELECTED, gtk.gdk.Color(HobColors.GRAY))
|
||||
toolbutton.modify_bg(gtk.STATE_PRELIGHT, gtk.gdk.Color(HobColors.GRAY))
|
||||
|
||||
class HobXpmLabelButtonBox(gtk.EventBox):
|
||||
""" label: name of buttonbox
|
||||
description: the simple description
|
||||
"""
|
||||
|
||||
def __init__(self, display_file="", hover_file="", label="", description=""):
|
||||
gtk.EventBox.__init__(self)
|
||||
self._base_state_flags = gtk.STATE_NORMAL
|
||||
self.set_events(gtk.gdk.MOTION_NOTIFY | gtk.gdk.BUTTON_PRESS | gtk.gdk.EXPOSE)
|
||||
|
||||
self.connect("expose-event", self.cb)
|
||||
self.connect("enter-notify-event", self.pointer_enter_cb)
|
||||
self.connect("leave-notify-event", self.pointer_leave_cb)
|
||||
|
||||
self.icon_hover = gtk.Image()
|
||||
self.icon_hover.set_name("icon_image")
|
||||
if type(hover_file) == str:
|
||||
pixbuf = gtk.gdk.pixbuf_new_from_file(hover_file)
|
||||
self.icon_hover.set_from_pixbuf(pixbuf)
|
||||
|
||||
self.icon_display = gtk.Image()
|
||||
self.icon_display.set_name("icon_image")
|
||||
if type(display_file) == str:
|
||||
pixbuf = gtk.gdk.pixbuf_new_from_file(display_file)
|
||||
self.icon_display.set_from_pixbuf(pixbuf)
|
||||
|
||||
self.tb = gtk.Table(2, 10, True)
|
||||
self.tb.set_row_spacing(1, False)
|
||||
self.tb.set_col_spacing(1, False)
|
||||
self.add(self.tb)
|
||||
self.tb.attach(self.icon_display, 0, 2, 0, 2, 0, 0)
|
||||
self.tb.attach(self.icon_hover, 0, 2, 0, 2, 0, 0)
|
||||
|
||||
lbl = gtk.Label()
|
||||
lbl.set_alignment(0.0, 0.5)
|
||||
lbl.set_markup("<span foreground=\'#1C1C1C\' font_desc=\'18px\'>%s</span>" % label)
|
||||
self.tb.attach(lbl, 2, 10, 0, 1)
|
||||
|
||||
lbl = gtk.Label()
|
||||
lbl.set_alignment(0.0, 0.5)
|
||||
lbl.set_markup("<span foreground=\'#1C1C1C\' font_desc=\'14px\'>%s</span>" % description)
|
||||
self.tb.attach(lbl, 2, 10, 1, 2)
|
||||
|
||||
def pointer_enter_cb(self, *args):
|
||||
#if not self.is_focus():
|
||||
self.set_state(gtk.STATE_PRELIGHT)
|
||||
self._base_state_flags = gtk.STATE_PRELIGHT
|
||||
self.icon_hover.show()
|
||||
self.icon_display.hide()
|
||||
|
||||
def pointer_leave_cb(self, *args):
|
||||
self.set_state(gtk.STATE_NORMAL)
|
||||
self._base_state_flags = gtk.STATE_NORMAL
|
||||
self.icon_display.show()
|
||||
self.icon_hover.hide()
|
||||
|
||||
def cb(self, w,e):
|
||||
""" Hide items - first time """
|
||||
pass
|
||||
|
||||
@@ -1,358 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import glib
|
||||
from bb.ui.crumbs.progressbar import HobProgressBar
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import hic, HobXpmLabelButtonBox
|
||||
from bb.ui.crumbs.hoblistmodel import RecipeListModel
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
|
||||
from bb.ui.crumbs.hig import CrumbsDialog, BinbDialog, \
|
||||
AdvancedSettingDialog, LayerSelectionDialog
|
||||
|
||||
#
|
||||
# ImageConfigurationPage
|
||||
#
|
||||
class ImageConfigurationPage (HobPage):
|
||||
|
||||
__dummy_machine__ = "--select a machine--"
|
||||
|
||||
def __init__(self, builder):
|
||||
super(ImageConfigurationPage, self).__init__(builder, "Image configuration")
|
||||
|
||||
self.image_combo_id = None
|
||||
self.create_visual_elements()
|
||||
|
||||
def create_visual_elements(self):
|
||||
# create visual elements
|
||||
self.toolbar = gtk.Toolbar()
|
||||
self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
|
||||
self.toolbar.set_style(gtk.TOOLBAR_BOTH)
|
||||
|
||||
_, template_button = self.append_toolbar_button(self.toolbar,
|
||||
"Template",
|
||||
hic.ICON_TEMPLATES_DISPLAY_FILE,
|
||||
hic.ICON_TEMPLATES_HOVER_FILE,
|
||||
"Load a hob building template saved before",
|
||||
self.template_button_clicked_cb)
|
||||
_, my_images_button = self.append_toolbar_button(self.toolbar,
|
||||
"My images",
|
||||
hic.ICON_IMAGES_DISPLAY_FILE,
|
||||
hic.ICON_IMAGES_HOVER_FILE,
|
||||
"Open images built out previously for running or deployment",
|
||||
self.my_images_button_clicked_cb)
|
||||
_, settings_button = self.append_toolbar_button(self.toolbar,
|
||||
"Settings",
|
||||
hic.ICON_SETTINGS_DISPLAY_FILE,
|
||||
hic.ICON_SETTINGS_HOVER_FILE,
|
||||
"Other advanced settings for build",
|
||||
self.settings_button_clicked_cb)
|
||||
|
||||
self.config_top_button = self.add_onto_top_bar(self.toolbar)
|
||||
|
||||
self.gtable = gtk.Table(40, 40, True)
|
||||
self.create_config_machine()
|
||||
self.create_config_baseimg()
|
||||
self.config_build_button = self.create_config_build_button()
|
||||
|
||||
def _remove_all_widget(self):
|
||||
children = self.gtable.get_children() or []
|
||||
for child in children:
|
||||
self.gtable.remove(child)
|
||||
children = self.box_group_area.get_children() or []
|
||||
for child in children:
|
||||
self.box_group_area.remove(child)
|
||||
children = self.get_children() or []
|
||||
for child in children:
|
||||
self.remove(child)
|
||||
|
||||
def _pack_components(self):
|
||||
self._remove_all_widget()
|
||||
self.pack_start(self.config_top_button, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
|
||||
self.box_group_area.pack_start(self.gtable, expand=True, fill=True)
|
||||
self.box_group_area.pack_end(self.config_build_button, expand=False, fill=False)
|
||||
|
||||
def show_machine(self):
|
||||
self._pack_components()
|
||||
self.set_config_machine_layout()
|
||||
self.show_all()
|
||||
self.progress_bar.reset()
|
||||
self.progress_bar.hide()
|
||||
self.config_build_button.hide_all()
|
||||
|
||||
def update_progress_bar(self, title, fraction, status=True):
|
||||
self.progress_bar.update(fraction)
|
||||
self.progress_bar.set_title(title)
|
||||
self.progress_bar.set_rcstyle(status)
|
||||
|
||||
def show_info_populating(self):
|
||||
self._pack_components()
|
||||
self.set_config_machine_layout()
|
||||
self.show_all()
|
||||
self.config_build_button.hide_all()
|
||||
|
||||
def show_info_populated(self):
|
||||
self._pack_components()
|
||||
self.set_config_machine_layout()
|
||||
self.set_config_baseimg_layout()
|
||||
self.show_all()
|
||||
self.progress_bar.reset()
|
||||
self.progress_bar.hide()
|
||||
|
||||
def create_config_machine(self):
|
||||
self.machine_title = gtk.Label()
|
||||
self.machine_title.set_alignment(0.0, 0.5)
|
||||
mark = "<span %s>Select a machine</span>" % self.span_tag('24px', 'bold')
|
||||
self.machine_title.set_markup(mark)
|
||||
|
||||
self.machine_title_desc = gtk.Label()
|
||||
self.machine_title_desc.set_alignment(0, 0.5)
|
||||
mark = ("<span %s>This is the profile of the target machine for which you"
|
||||
" are building the image.\n</span>") % (self.span_tag(px='14px'))
|
||||
self.machine_title_desc.set_markup(mark)
|
||||
|
||||
self.machine_combo = gtk.combo_box_new_text()
|
||||
self.machine_combo.connect("changed", self.machine_combo_changed_cb)
|
||||
|
||||
icon_file = hic.ICON_LAYERS_DISPLAY_FILE
|
||||
hover_file = hic.ICON_LAYERS_HOVER_FILE
|
||||
self.layer_button = HobXpmLabelButtonBox(icon_file, hover_file,
|
||||
"Layers", "Add support for machines, software, etc")
|
||||
self.layer_button.connect("button-release-event", self.layer_button_clicked_cb)
|
||||
|
||||
icon_file = hic.ICON_INFO_DISPLAY_FILE
|
||||
self.layer_info_icon = gtk.Image()
|
||||
pix_buffer = gtk.gdk.pixbuf_new_from_file(icon_file)
|
||||
self.layer_info_icon.set_from_pixbuf(pix_buffer)
|
||||
markup = "Layers are a powerful mechanism to extend the Yocto Project "
|
||||
markup += "with your own functionality.\n"
|
||||
markup += "For more on layers, check:\n"
|
||||
markup += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
|
||||
markup += "poky-ref-manual.html#usingpoky-changes-layers."
|
||||
self.layer_info_icon.set_tooltip_markup(markup)
|
||||
|
||||
self.progress_bar = HobProgressBar()
|
||||
self.machine_separator = gtk.HSeparator()
|
||||
|
||||
def set_config_machine_layout(self):
|
||||
self.gtable.attach(self.machine_title, 0, 40, 0, 4)
|
||||
self.gtable.attach(self.machine_title_desc, 0, 40, 4, 6)
|
||||
self.gtable.attach(self.machine_combo, 0, 12, 6, 9)
|
||||
self.gtable.attach(self.layer_button, 12, 36, 6, 10)
|
||||
self.gtable.attach(self.layer_info_icon, 36, 40, 6, 9)
|
||||
self.gtable.attach(self.progress_bar, 0, 40, 13, 17)
|
||||
self.gtable.attach(self.machine_separator, 0, 40, 12, 13)
|
||||
|
||||
def create_config_baseimg(self):
|
||||
self.image_title = gtk.Label()
|
||||
self.image_title.set_alignment(0, 1.0)
|
||||
mark = "<span %s>Select a base image</span>" % self.span_tag('24px', 'bold')
|
||||
self.image_title.set_markup(mark)
|
||||
|
||||
self.image_title_desc = gtk.Label()
|
||||
self.image_title_desc.set_alignment(0, 0.5)
|
||||
mark = ("<span %s>Base images are a starting point for the type of image you want. "
|
||||
"You can build them as \n"
|
||||
"they are or customize them to your specific needs.\n</span>") % self.span_tag('14px')
|
||||
self.image_title_desc.set_markup(mark)
|
||||
|
||||
self.image_combo = gtk.combo_box_new_text()
|
||||
self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
|
||||
|
||||
self.image_desc = gtk.Label()
|
||||
self.image_desc.set_alignment(0, 0)
|
||||
self.image_desc.set_line_wrap(True)
|
||||
|
||||
# button to view recipes
|
||||
icon_file = hic.ICON_RCIPE_DISPLAY_FILE
|
||||
hover_file = hic.ICON_RCIPE_HOVER_FILE
|
||||
self.view_recipes_button = HobXpmLabelButtonBox(icon_file, hover_file,
|
||||
"View Recipes", "Add/remove recipes and collections")
|
||||
self.view_recipes_button.connect("button-release-event", self.view_recipes_button_clicked_cb)
|
||||
|
||||
# button to view packages
|
||||
icon_file = hic.ICON_PACKAGES_DISPLAY_FILE
|
||||
hover_file = hic.ICON_PACKAGES_HOVER_FILE
|
||||
self.view_packages_button = HobXpmLabelButtonBox(icon_file, hover_file,
|
||||
"View Packages", "Add/remove packages")
|
||||
self.view_packages_button.connect("button-release-event", self.view_packages_button_clicked_cb)
|
||||
|
||||
self.image_separator = gtk.HSeparator()
|
||||
|
||||
def set_config_baseimg_layout(self):
|
||||
self.gtable.attach(self.image_title, 0, 40, 13, 17)
|
||||
self.gtable.attach(self.image_title_desc, 0, 40, 17, 22)
|
||||
self.gtable.attach(self.image_combo, 0, 12, 22, 25)
|
||||
self.gtable.attach(self.image_desc, 14, 38, 22, 27)
|
||||
self.gtable.attach(self.view_recipes_button, 0, 20, 28, 32)
|
||||
self.gtable.attach(self.view_packages_button, 20, 40, 28, 32)
|
||||
self.gtable.attach(self.image_separator, 0, 40, 35, 36)
|
||||
|
||||
def create_config_build_button(self):
|
||||
# Create the "Build packages" and "Just bake" buttons at the bottom
|
||||
button_box = gtk.HBox(False, 5)
|
||||
|
||||
# create button "Just bake"
|
||||
just_bake_button = gtk.Button()
|
||||
label = gtk.Label()
|
||||
mark = "<span %s>Just bake</span>" % self.span_tag('24px', 'bold')
|
||||
label.set_markup(mark)
|
||||
|
||||
just_bake_button.set_image(label)
|
||||
just_bake_button.set_size_request(205, 49)
|
||||
just_bake_button.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.ORANGE))
|
||||
just_bake_button.modify_bg(gtk.STATE_PRELIGHT, gtk.gdk.Color(HobColors.ORANGE))
|
||||
just_bake_button.modify_bg(gtk.STATE_SELECTED, gtk.gdk.Color(HobColors.ORANGE))
|
||||
just_bake_button.set_tooltip_text("Build the image to produce your target image")
|
||||
just_bake_button.set_flags(gtk.CAN_DEFAULT)
|
||||
just_bake_button.grab_default()
|
||||
just_bake_button.connect("clicked", self.just_bake_button_clicked_cb)
|
||||
button_box.pack_end(just_bake_button, expand=False, fill=False)
|
||||
|
||||
label = gtk.Label(" or ")
|
||||
button_box.pack_end(label, expand=False, fill=False)
|
||||
|
||||
# create button "Build Packages"
|
||||
build_packages_button = gtk.LinkButton("Build packages first based on recipe selection "
"for later customization of the packages in the target image", "Build Packages")
|
||||
build_packages_button.connect("clicked", self.build_packages_button_clicked_cb)
|
||||
button_box.pack_end(build_packages_button, expand=False, fill=False)
|
||||
|
||||
return button_box
|
||||
|
||||
def machine_combo_changed_cb(self, machine_combo):
|
||||
combo_item = machine_combo.get_active_text()
|
||||
if not combo_item or combo_item == self.__dummy_machine__:
|
||||
self.builder.switch_page(self.builder.MACHINE_SELECTION)
|
||||
else:
|
||||
self.builder.configuration.curr_mach = combo_item
|
||||
# Do reparse recipes
|
||||
self.builder.switch_page(self.builder.RCPPKGINFO_POPULATING)
|
||||
|
||||
def update_machine_combo(self):
|
||||
all_machines = [self.__dummy_machine__] + self.builder.parameters.all_machines
|
||||
|
||||
model = self.machine_combo.get_model()
|
||||
model.clear()
|
||||
for machine in all_machines:
|
||||
self.machine_combo.append_text(machine)
|
||||
self.machine_combo.set_active(0)
|
||||
|
||||
def switch_machine_combo(self):
|
||||
model = self.machine_combo.get_model()
|
||||
active = 0
|
||||
while active < len(model):
|
||||
if model[active][0] == self.builder.configuration.curr_mach:
|
||||
self.machine_combo.set_active(active)
|
||||
return
|
||||
active += 1
|
||||
self.machine_combo.set_active(0)
|
||||
|
||||
def image_combo_changed_idle_cb(self, selected_image, selected_recipes, selected_packages):
|
||||
self.builder.update_recipe_model(selected_image, selected_recipes)
|
||||
self.builder.update_package_model(selected_packages)
|
||||
self.builder.window_sensitive(True)
|
||||
|
||||
def image_combo_changed_cb(self, combo):
|
||||
self.builder.window_sensitive(False)
|
||||
selected_image = self.image_combo.get_active_text()
|
||||
if not selected_image:
|
||||
return
|
||||
|
||||
selected_recipes = []
|
||||
|
||||
image_path = self.builder.recipe_model.pn_path[selected_image]
|
||||
image_iter = self.builder.recipe_model.get_iter(image_path)
|
||||
selected_packages = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_INSTALL).split()
|
||||
|
||||
mark = ("<span %s>%s</span>\n") % (self.span_tag('14px'), self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_DESC))
|
||||
self.image_desc.set_markup(mark)
|
||||
|
||||
self.builder.recipe_model.reset()
|
||||
self.builder.package_model.reset()
|
||||
|
||||
glib.idle_add(self.image_combo_changed_idle_cb, selected_image, selected_recipes, selected_packages)
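The expensive model updates are pushed to an idle callback with glib.idle_add so the combo's changed handler returns quickly. A minimal sketch of the same pattern on its own (the payload is an example):

import glib

def deferred_update(payload):
    # ... perform the expensive model update here ...
    return False    # returning False removes the idle handler after one run

glib.idle_add(deferred_update, "example payload")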
|
||||
|
||||
def _image_combo_connect_signal(self):
|
||||
if not self.image_combo_id:
|
||||
self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
|
||||
|
||||
def _image_combo_disconnect_signal(self):
|
||||
if self.image_combo_id:
|
||||
self.image_combo.disconnect(self.image_combo_id)
|
||||
self.image_combo_id = None
|
||||
|
||||
def update_image_combo(self, recipe_model, selected_image):
|
||||
# Update the image combo according to the images in the recipe_model
|
||||
# populate image combo
|
||||
filter = {RecipeListModel.COL_TYPE : ['image']}
|
||||
image_model = recipe_model.tree_model(filter)
|
||||
active = 0
|
||||
cnt = 0
|
||||
|
||||
it = image_model.get_iter_first()
|
||||
self._image_combo_disconnect_signal()
|
||||
model = self.image_combo.get_model()
|
||||
model.clear()
|
||||
# append and set active
|
||||
while it:
|
||||
path = image_model.get_path(it)
|
||||
image_name = image_model[path][recipe_model.COL_NAME]
|
||||
self.image_combo.append_text(image_name)
|
||||
if image_name == selected_image:
|
||||
active = cnt
|
||||
it = image_model.iter_next(it)
|
||||
cnt = cnt + 1
|
||||
self._image_combo_connect_signal()
|
||||
|
||||
self.image_combo.set_active(-1)
|
||||
self.image_combo.set_active(active)
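update_image_combo disconnects the changed handler while repopulating so append_text does not fire image_combo_changed_cb for every row. A minimal sketch of the same guard, using handler blocking as an assumed equivalent to the disconnect/reconnect pair above:

combo = gtk.combo_box_new_text()

def on_changed(widget):
    pass

handler_id = combo.connect("changed", on_changed)
combo.handler_block(handler_id)        # guard while repopulating
combo.get_model().clear()
for name in ["core-image-minimal", "core-image-sato"]:
    combo.append_text(name)
combo.handler_unblock(handler_id)
combo.set_active(0)                    # fires on_changed once, as intended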
|
||||
|
||||
def layer_button_clicked_cb(self, event, data):
|
||||
# Create a layer selection dialog
|
||||
self.builder.show_layer_selection_dialog()
|
||||
|
||||
def view_recipes_button_clicked_cb(self, event, data):
|
||||
self.builder.show_recipes()
|
||||
|
||||
def view_packages_button_clicked_cb(self, event, data):
|
||||
self.builder.show_packages()
|
||||
|
||||
def just_bake_button_clicked_cb(self, button):
|
||||
self.builder.just_bake()
|
||||
|
||||
def build_packages_button_clicked_cb(self, button):
|
||||
self.builder.build_packages()
|
||||
|
||||
def template_button_clicked_cb(self, button):
|
||||
self.builder.show_load_template_dialog()
|
||||
|
||||
def my_images_button_clicked_cb(self, button):
|
||||
self.builder.show_load_my_images_dialog()
|
||||
|
||||
def settings_button_clicked_cb(self, button):
|
||||
# Create an advanced settings dialog
|
||||
self.builder.show_adv_settings_dialog()
|
||||
@@ -1,294 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gobject
|
||||
import gtk
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import hic, HobWidget
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
|
||||
#
|
||||
# ImageDetailsPage
|
||||
#
|
||||
class ImageDetailsPage (HobPage):
|
||||
|
||||
class DetailBox (gtk.EventBox):
|
||||
def __init__(self, varlist, vallist, icon = None, button = None, color = HobColors.LIGHT_GRAY):
|
||||
gtk.EventBox.__init__(self)
|
||||
|
||||
# set color
|
||||
style = self.get_style().copy()
|
||||
style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(color, False, False)
|
||||
self.set_style(style)
|
||||
|
||||
self.hbox = gtk.HBox()
|
||||
self.hbox.set_border_width(15)
|
||||
self.add(self.hbox)
|
||||
|
||||
# pack the icon and the text on the left
|
||||
row = len(varlist)
|
||||
self.table = gtk.Table(row, 20, True)
|
||||
self.table.set_size_request(100, -1)
|
||||
self.hbox.pack_start(self.table, expand=True, fill=True, padding=15)
|
||||
|
||||
colid = 0
|
||||
if icon != None:
|
||||
self.table.attach(icon, colid, colid + 2, 0, 1)
|
||||
colid = colid + 2
|
||||
for line in range(0, row):
|
||||
self.table.attach(self.text2label(varlist[line], vallist[line]), colid, 20, line, line + 1)
|
||||
|
||||
# pack the button on the right
|
||||
if button != None:
|
||||
self.hbox.pack_end(button, expand=False, fill=False)
|
||||
|
||||
def text2label(self, variable, value):
|
||||
# append the name:value to the left box
|
||||
# such as "Name: hob-core-minimal-variant-2011-12-15-beagleboard"
|
||||
markup = "<span weight=\'bold\'>%s</span>" % variable
|
||||
markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % value
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0.0, 0.5)
|
||||
label.set_markup(markup)
|
||||
return label
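An illustrative DetailBox construction with matching variable and value lists; the values are placeholders:

varlist = ["Machine: ", "Base image: "]
vallist = ["beagleboard", "core-image-minimal"]
box = ImageDetailsPage.DetailBox(varlist, vallist)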
|
||||
|
||||
def __init__(self, builder):
|
||||
super(ImageDetailsPage, self).__init__(builder, "Image details")
|
||||
|
||||
self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
|
||||
self.create_visual_elements()
|
||||
|
||||
def create_visual_elements(self):
|
||||
# create visual elements
|
||||
# create the toolbar
|
||||
self.toolbar = gtk.Toolbar()
|
||||
self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
|
||||
self.toolbar.set_style(gtk.TOOLBAR_BOTH)
|
||||
|
||||
_, my_images_button = self.append_toolbar_button(self.toolbar,
|
||||
"My images",
|
||||
hic.ICON_IMAGES_DISPLAY_FILE,
|
||||
hic.ICON_IMAGES_HOVER_FILE,
|
||||
"Open images built out previously for running or deployment",
|
||||
self.my_images_button_clicked_cb)
|
||||
|
||||
self.details_top_buttons = self.add_onto_top_bar(self.toolbar)
|
||||
|
||||
def _remove_all_widget(self):
|
||||
children = self.get_children() or []
|
||||
for child in children:
|
||||
self.remove(child)
|
||||
children = self.box_group_area.get_children() or []
|
||||
for child in children:
|
||||
self.box_group_area.remove(child)
|
||||
|
||||
def _size_to_string(self, size):
|
||||
if len(str(int(size))) > 6:
|
||||
size_str = '%.1f' % (size*1.0/(1024*1024)) + ' MB'
|
||||
elif len(str(int(size))) > 3:
|
||||
size_str = '%.1f' % (size*1.0/1024) + ' KB'
|
||||
else:
|
||||
size_str = str(size) + ' B'
|
||||
return size_str
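_size_to_string picks the unit from the digit count: up to three digits stay in bytes, four to six become KB, seven or more become MB. A standalone restatement with example values (illustrative only):

def size_to_string(size):
    digits = len(str(int(size)))
    if digits > 6:
        return '%.1f MB' % (size / (1024.0 * 1024))
    elif digits > 3:
        return '%.1f KB' % (size / 1024.0)
    return '%d B' % size

# size_to_string(512) -> '512 B', size_to_string(2048) -> '2.0 KB',
# size_to_string(3145728) -> '3.0 MB'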
|
||||
|
||||
def show_page(self, step):
|
||||
build_succeeded = (step == self.builder.IMAGE_GENERATED)
|
||||
image_addr = self.builder.parameters.image_addr
|
||||
image_names = self.builder.parameters.image_names
|
||||
if build_succeeded:
|
||||
image_addr = self.builder.parameters.image_addr
|
||||
image_names = self.builder.parameters.image_names
|
||||
machine = self.builder.configuration.curr_mach
|
||||
base_image = self.builder.recipe_model.get_selected_image()
|
||||
layers = self.builder.configuration.layers
|
||||
pkg_num = "%s" % len(self.builder.package_model.get_selected_packages())
|
||||
else:
|
||||
pkg_num = "N/A"
|
||||
|
||||
self._remove_all_widget()
|
||||
self.pack_start(self.details_top_buttons, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
|
||||
if build_succeeded:
|
||||
# building is the previous step
|
||||
icon = gtk.Image()
|
||||
pixmap_path = hic.ICON_INDI_CONFIRM_FILE
|
||||
color = HobColors.RUNNING
|
||||
pix_buffer = gtk.gdk.pixbuf_new_from_file(pixmap_path)
|
||||
icon.set_from_pixbuf(pix_buffer)
|
||||
varlist = [""]
|
||||
vallist = ["Your image is ready"]
|
||||
build_result = self.DetailBox(varlist=varlist, vallist=vallist, icon=icon, button=None, color=color)
|
||||
self.box_group_area.pack_start(build_result, expand=False, fill=False)
|
||||
|
||||
# Name
|
||||
self.image_store.clear()
|
||||
for image_name in image_names:
|
||||
image_size = self._size_to_string(os.stat(os.path.join(image_addr, image_name)).st_size)
|
||||
self.image_store.set(self.image_store.append(), 0, image_name, 1, image_size, 2, False)
|
||||
images_widget, treeview = HobWidget.gen_images_widget(600, 200, 100)
|
||||
treeview.set_model(self.image_store)
|
||||
self.box_group_area.pack_start(images_widget, expand=False, fill=False)
|
||||
|
||||
# Machine, Base image and Layers
|
||||
layer_num_limit = 15
|
||||
varlist = ["Machine: ", "Base image: ", "Layers: "]
|
||||
vallist = []
|
||||
if build_succeeded:
|
||||
vallist.append(machine)
|
||||
vallist.append(base_image)
|
||||
i = 0
|
||||
for layer in layers:
|
||||
varlist.append(" - ")
|
||||
if i > layer_num_limit:
|
||||
break
|
||||
i += 1
|
||||
vallist.append("")
|
||||
i = 0
|
||||
for layer in layers:
|
||||
if i > layer_num_limit:
|
||||
break
|
||||
elif i == layer_num_limit:
|
||||
vallist.append("and more...")
|
||||
else:
|
||||
vallist.append(layer)
|
||||
i += 1
|
||||
|
||||
edit_config_button = gtk.LinkButton("Change settings for the build", "Edit configuration")
|
||||
edit_config_button.connect("clicked", self.edit_config_button_clicked_cb)
|
||||
setting_detail = self.DetailBox(varlist=varlist, vallist=vallist, icon=None, button=edit_config_button)
|
||||
self.box_group_area.pack_start(setting_detail, expand=False, fill=False)
|
||||
|
||||
# Packages included, and Total image size
|
||||
varlist = ["Packages included: ", "Total image size: "]
|
||||
vallist = []
|
||||
vallist.append(pkg_num)
|
||||
vallist.append(image_size)
|
||||
if build_succeeded:
|
||||
edit_packages_button = gtk.LinkButton("Change package selection for customization", "Edit packages")
|
||||
edit_packages_button.connect("clicked", self.edit_packages_button_clicked_cb)
|
||||
else: # get to this page from "My images"
|
||||
edit_packages_button = None
|
||||
package_detail = self.DetailBox(varlist=varlist, vallist=vallist, icon=None, button=edit_packages_button)
|
||||
self.box_group_area.pack_start(package_detail, expand=False, fill=False)
|
||||
if build_succeeded:
|
||||
buttonlist = ["Build new image", "Save as template", "Run image", "Deploy image"]
|
||||
else: # get to this page from "My images"
|
||||
buttonlist = ["Build new image", "Run image", "Deploy image"]
|
||||
details_bottom_buttons = self.create_bottom_buttons(buttonlist)
|
||||
self.box_group_area.pack_end(details_bottom_buttons, expand=False, fill=False)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def create_bottom_buttons(self, buttonlist):
|
||||
# Create the buttons at the bottom
|
||||
bottom_buttons = gtk.HBox(False, 5)
|
||||
created = False
|
||||
|
||||
# create button "Deploy image"
|
||||
name = "Deploy image"
|
||||
if name in buttonlist:
|
||||
deploy_button = gtk.Button()
|
||||
label = gtk.Label()
|
||||
mark = "<span %s>Deploy image</span>" % self.span_tag('24px', 'bold')
|
||||
label.set_markup(mark)
|
||||
deploy_button.set_image(label)
|
||||
deploy_button.set_size_request(205, 49)
|
||||
deploy_button.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.ORANGE))
|
||||
deploy_button.modify_bg(gtk.STATE_PRELIGHT, gtk.gdk.Color(HobColors.ORANGE))
|
||||
deploy_button.modify_bg(gtk.STATE_SELECTED, gtk.gdk.Color(HobColors.ORANGE))
|
||||
deploy_button.set_tooltip_text("Deploy the image to your target board")
|
||||
deploy_button.set_flags(gtk.CAN_DEFAULT)
|
||||
deploy_button.grab_default()
|
||||
deploy_button.connect("clicked", self.deploy_button_clicked_cb)
|
||||
bottom_buttons.pack_end(deploy_button, expand=False, fill=False)
|
||||
created = True
|
||||
|
||||
name = "Run image"
|
||||
if name in buttonlist:
|
||||
if created == True:
|
||||
# separator
|
||||
label = gtk.Label(" or ")
|
||||
bottom_buttons.pack_end(label, expand=False, fill=False)
|
||||
|
||||
# create button "Run image"
|
||||
run_button = gtk.LinkButton("Launch and boot the image in the QEMU emulator", "Run image")
|
||||
run_button.connect("clicked", self.run_button_clicked_cb)
|
||||
bottom_buttons.pack_end(run_button, expand=False, fill=False)
|
||||
created = True
|
||||
|
||||
name = "Save as template"
|
||||
if name in buttonlist:
|
||||
if created == True:
|
||||
# separator
|
||||
label = gtk.Label(" or ")
|
||||
bottom_buttons.pack_end(label, expand=False, fill=False)
|
||||
|
||||
# create button "Save as template"
|
||||
save_button = gtk.LinkButton("Save the hob build template for future use", "Save as template")
|
||||
save_button.connect("clicked", self.save_button_clicked_cb)
|
||||
bottom_buttons.pack_end(save_button, expand=False, fill=False)
|
||||
created = True
|
||||
|
||||
name = "Build new image"
|
||||
if name in buttonlist:
|
||||
# create button "Build new image"
|
||||
build_new_button = gtk.LinkButton("Initiate another new build from the beginning", "Build new image")
|
||||
build_new_button.connect("clicked", self.build_new_button_clicked_cb)
|
||||
bottom_buttons.pack_start(build_new_button, expand=False, fill=False)
|
||||
|
||||
return bottom_buttons
|
||||
|
||||
def _get_selected_image(self):
|
||||
image_name = ""
|
||||
iter = self.image_store.get_iter_first()
|
||||
while iter:
|
||||
path = self.image_store.get_path(iter)
|
||||
if self.image_store[path][2]:
|
||||
image_name = self.image_store[path][0]
|
||||
break
|
||||
iter = self.image_store.iter_next(iter)
|
||||
|
||||
return image_name
|
||||
|
||||
def save_button_clicked_cb(self, button):
|
||||
self.builder.show_save_template_dialog()
|
||||
|
||||
def deploy_button_clicked_cb(self, button):
|
||||
image_name = self._get_selected_image()
|
||||
self.builder.deploy_image(image_name)
|
||||
|
||||
def run_button_clicked_cb(self, button):
|
||||
image_name = self._get_selected_image()
|
||||
self.builder.runqemu_image(image_name)
|
||||
|
||||
def build_new_button_clicked_cb(self, button):
|
||||
self.builder.initiate_new_build()
|
||||
|
||||
def edit_config_button_clicked_cb(self, button):
|
||||
self.builder.show_configuration()
|
||||
|
||||
def edit_packages_button_clicked_cb(self, button):
|
||||
self.builder.show_packages(ask=False)
|
||||
|
||||
def my_images_button_clicked_cb(self, button):
|
||||
self.builder.show_load_my_images_dialog()
|
||||
bitbake/lib/bb/ui/crumbs/layereditor.py (new normal file, 153 lines)
@@ -0,0 +1,153 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gobject
|
||||
import gtk
|
||||
from bb.ui.crumbs.configurator import Configurator
|
||||
from bb.ui.crumbs.hig import CrumbsDialog
|
||||
|
||||
class LayerEditor(gtk.Dialog):
|
||||
"""
|
||||
Gtk+ Widget for enabling and disabling layers.
|
||||
Layers are added by using an open dialog to locate the layer's layer.conf file.
|
||||
Disabled layers are deleted from conf/bblayers.conf
|
||||
"""
|
||||
def __init__(self, configurator, parent=None):
|
||||
gtk.Dialog.__init__(self, "Layers", None,
|
||||
gtk.DIALOG_DESTROY_WITH_PARENT,
|
||||
(gtk.STOCK_CLOSE, gtk.RESPONSE_OK))
|
||||
|
||||
# We want to show a little more of the treeview in the default,
|
||||
# emptier, case
|
||||
self.set_size_request(-1, 300)
|
||||
self.set_border_width(6)
|
||||
self.vbox.set_property("spacing", 0)
|
||||
self.action_area.set_property("border-width", 6)
|
||||
|
||||
self.configurator = configurator
|
||||
self.newly_added = {}
|
||||
|
||||
# Label to inform users that meta is enabled but that you can't
|
||||
# disable it as it'd be a *bad* idea
|
||||
msg = "As the core of the build system the <i>meta</i> layer must always be included and therefore can't be viewed or edited here."
|
||||
lbl = gtk.Label()
|
||||
lbl.show()
|
||||
lbl.set_use_markup(True)
|
||||
lbl.set_markup(msg)
|
||||
lbl.set_line_wrap(True)
|
||||
lbl.set_justify(gtk.JUSTIFY_FILL)
|
||||
self.vbox.pack_start(lbl, expand=False, fill=False, padding=6)
|
||||
|
||||
# Create a treeview in which to list layers
|
||||
# ListStore of Name, Path, Enabled
|
||||
self.layer_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
|
||||
self.tv = gtk.TreeView(self.layer_store)
|
||||
self.tv.set_headers_visible(True)
|
||||
|
||||
col0 = gtk.TreeViewColumn('Name')
|
||||
self.tv.append_column(col0)
|
||||
col1 = gtk.TreeViewColumn('Path')
|
||||
self.tv.append_column(col1)
|
||||
col2 = gtk.TreeViewColumn('Enabled')
|
||||
self.tv.append_column(col2)
|
||||
|
||||
cell0 = gtk.CellRendererText()
|
||||
col0.pack_start(cell0, True)
|
||||
col0.set_attributes(cell0, text=0)
|
||||
cell1 = gtk.CellRendererText()
|
||||
col1.pack_start(cell1, True)
|
||||
col1.set_attributes(cell1, text=1)
|
||||
cell2 = gtk.CellRendererToggle()
|
||||
cell2.connect("toggled", self._toggle_layer_cb)
|
||||
col2.pack_start(cell2, True)
|
||||
col2.set_attributes(cell2, active=2)
|
||||
|
||||
self.tv.show()
|
||||
self.vbox.pack_start(self.tv, expand=True, fill=True, padding=0)
|
||||
|
||||
tb = gtk.Toolbar()
|
||||
tb.set_icon_size(gtk.ICON_SIZE_SMALL_TOOLBAR)
|
||||
tb.set_style(gtk.TOOLBAR_BOTH)
|
||||
tb.set_tooltips(True)
|
||||
tb.show()
|
||||
icon = gtk.Image()
|
||||
icon.set_from_stock(gtk.STOCK_ADD, gtk.ICON_SIZE_SMALL_TOOLBAR)
|
||||
icon.show()
|
||||
tb.insert_item("Add Layer", "Add new layer", None, icon,
|
||||
self._find_layer_cb, None, -1)
|
||||
self.vbox.pack_start(tb, expand=False, fill=False, padding=0)
|
||||
|
||||
def set_parent_window(self, parent):
|
||||
self.set_transient_for(parent)
|
||||
|
||||
def load_current_layers(self, data):
|
||||
for layer, path in self.configurator.enabled_layers.items():
|
||||
if layer != 'meta':
|
||||
self.layer_store.append([layer, path, True])
|
||||
|
||||
def save_current_layers(self):
|
||||
self.configurator.writeLayerConf()
|
||||
|
||||
def _toggle_layer_cb(self, cell, path):
|
||||
name = self.layer_store[path][0]
|
||||
toggle = not self.layer_store[path][2]
|
||||
if toggle:
|
||||
self.configurator.addLayer(name, path)
|
||||
else:
|
||||
self.configurator.disableLayer(name)
|
||||
self.layer_store[path][2] = toggle
|
||||
|
||||
def _find_layer_cb(self, button):
|
||||
self.find_layer(self)
|
||||
|
||||
def find_layer(self, parent):
|
||||
def conf_error(parent, lbl):
|
||||
dialog = CrumbsDialog(parent, lbl)
|
||||
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
dialog = gtk.FileChooserDialog("Add new layer", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_OPEN,
|
||||
(gtk.STOCK_CANCEL, gtk.RESPONSE_NO,
|
||||
gtk.STOCK_OPEN, gtk.RESPONSE_YES))
|
||||
label = gtk.Label("Select the layer.conf of the layer you wish to add")
|
||||
label.show()
|
||||
dialog.set_extra_widget(label)
|
||||
response = dialog.run()
|
||||
path = dialog.get_filename()
|
||||
dialog.destroy()
|
||||
|
||||
lbl = "<b>Error</b>\nUnable to load layer <i>%s</i> because " % path
|
||||
if response == gtk.RESPONSE_YES:
|
||||
# FIXME: verify we've actually got a layer conf?
|
||||
if path.endswith("layer.conf"):
|
||||
name, layerpath = self.configurator.addLayerConf(path)
|
||||
if name and layerpath:
|
||||
self.newly_added[name] = layerpath
|
||||
self.layer_store.append([name, layerpath, True])
|
||||
return
|
||||
elif name:
|
||||
return
|
||||
else:
|
||||
lbl += "there was a problem parsing the layer.conf."
|
||||
else:
|
||||
lbl += "it is not a layer.conf file."
|
||||
conf_error(parent, lbl)
|
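# --- Illustrative only: a hypothetical driver for the LayerEditor dialog above.
# It assumes PyGTK plus the Hob crumbs modules are importable and that
# Configurator() can be constructed without arguments (an assumption, not
# something shown in this diff).
import gtk
from bb.ui.crumbs.configurator import Configurator
from bb.ui.crumbs.layereditor import LayerEditor

configurator = Configurator()
editor = LayerEditor(configurator)
editor.load_current_layers(None)      # the data argument is unused by the method
if editor.run() == gtk.RESPONSE_OK:   # gtk.Dialog.run() blocks until a response
    editor.save_current_layers()      # writes conf/bblayers.conf via the configurator
editor.destroy()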
||||
@@ -1,226 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import glib
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import HobViewBar, HobViewTable
|
||||
from bb.ui.crumbs.hoblistmodel import PackageListModel
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
|
||||
#
|
||||
# PackageSelectionPage
|
||||
#
|
||||
class PackageSelectionPage (HobPage):
|
||||
|
||||
pages = [
|
||||
{
|
||||
'name' : 'All packages',
|
||||
'filter' : {},
|
||||
'columns' : [{
|
||||
'col_name' : 'Name',
|
||||
'col_id' : PackageListModel.COL_NAME,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'size',
|
||||
'col_id' : PackageListModel.COL_SIZE,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 500
|
||||
}, {
|
||||
'col_name' : 'Included',
|
||||
'col_id' : PackageListModel.COL_INC,
|
||||
'col_style': 'toggle',
|
||||
'col_min' : 50,
|
||||
'col_max' : 50
|
||||
}]
|
||||
}, {
|
||||
'name' : 'Included',
|
||||
'filter' : { PackageListModel.COL_INC : [True] },
|
||||
'columns' : [{
|
||||
'col_name' : 'Name',
|
||||
'col_id' : PackageListModel.COL_NAME,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 300
|
||||
}, {
|
||||
'col_name' : 'Brought by',
|
||||
'col_id' : PackageListModel.COL_BINB,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 350
|
||||
}, {
|
||||
'col_name' : 'size',
|
||||
'col_id' : PackageListModel.COL_SIZE,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 300
|
||||
}, {
|
||||
'col_name' : 'Included',
|
||||
'col_id' : PackageListModel.COL_INC,
|
||||
'col_style': 'toggle',
|
||||
'col_min' : 50,
|
||||
'col_max' : 50
|
||||
}]
|
||||
}
|
||||
]
|
||||
|
||||
def __init__(self, builder):
|
||||
super(PackageSelectionPage, self).__init__(builder, "Package Selection")
|
||||
|
||||
# set invisible members
|
||||
self.package_model = self.builder.package_model
|
||||
|
||||
# create visual elements
|
||||
self.create_visual_elements()
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.label = gtk.Label("Packages included: 0\nSelected packages size: 0 MB")
|
||||
self.eventbox = self.add_onto_top_bar(self.label, 73)
|
||||
self.pack_start(self.eventbox, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
|
||||
# set visible members
|
||||
self.grid = gtk.Table(10, 1, True)
|
||||
self.grid.set_col_spacings(3)
|
||||
|
||||
self.ins = gtk.Notebook()
|
||||
self.ins.set_show_tabs(False)
|
||||
self.tables = [] # we need to modify the tables when the dialog is shown
|
||||
# append the tab
|
||||
for i in range(len(self.pages)):
|
||||
columns = self.pages[i]['columns']
|
||||
tab = HobViewTable(columns, self.reset_clicked_cb, self.table_toggled_cb)
|
||||
filter = self.pages[i]['filter']
|
||||
tab.table_tree.set_model(self.package_model.tree_model(filter))
|
||||
label = gtk.Label(self.pages[i]['name'])
|
||||
self.ins.append_page(tab, label)
|
||||
self.tables.append(tab)
|
||||
|
||||
self.grid.attach(self.ins, 0, 1, 1, 10, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND, 1, 1)
|
||||
# a black bar associated with the notebook
|
||||
self.topbar = HobViewBar(self.ins)
|
||||
self.grid.attach(self.topbar, 0, 1, 0, 1, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND, 1, 1)
|
||||
# set the search entry for each table
|
||||
for tab in self.tables:
|
||||
tab.table_tree.set_search_entry(self.topbar.search)
|
||||
|
||||
inctab_tree_view = self.tables[len(self.pages)-1].table_tree
|
||||
inctab_tree_selection = inctab_tree_view.get_selection()
|
||||
inctab_tree_selection.connect("changed", self.tree_selection_cb, inctab_tree_view)
|
||||
|
||||
# add all into the dialog
|
||||
self.box_group_area.add(self.grid)
|
||||
|
||||
button_box = gtk.HBox(False, 5)
|
||||
self.box_group_area.pack_start(button_box, expand=False, fill=False)
|
||||
|
||||
self.build_image_button = gtk.Button()
|
||||
label = gtk.Label()
|
||||
mark = "<span %s>Build image</span>" % self.span_tag('24px', 'bold')
|
||||
label.set_markup(mark)
|
||||
self.build_image_button.set_image(label)
|
||||
self.build_image_button.set_size_request(205, 49)
|
||||
self.build_image_button.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.ORANGE))
|
||||
self.build_image_button.modify_bg(gtk.STATE_PRELIGHT, gtk.gdk.Color(HobColors.ORANGE))
|
||||
self.build_image_button.modify_bg(gtk.STATE_SELECTED, gtk.gdk.Color(HobColors.ORANGE))
|
||||
self.build_image_button.set_tooltip_text("Build the image for your target")
|
||||
self.build_image_button.set_flags(gtk.CAN_DEFAULT)
|
||||
self.build_image_button.grab_default()
|
||||
self.build_image_button.connect("clicked", self.build_image_clicked_cb)
|
||||
button_box.pack_end(self.build_image_button, expand=False, fill=False)
|
||||
|
||||
self.back_button = gtk.LinkButton("Go back to Image Configuration screen", "<< Back to image configuration")
|
||||
self.back_button.connect("clicked", self.back_button_clicked_cb)
|
||||
button_box.pack_start(self.back_button, expand=False, fill=False)
|
||||
|
||||
def tree_selection_cb(self, tree_selection, tree_view):
|
||||
tree_model = tree_view.get_model()
|
||||
path, column = tree_view.get_cursor()
|
||||
if not path or column == tree_view.get_column(2):
|
||||
return
|
||||
|
||||
it = tree_model.get_iter(path)
|
||||
binb = tree_model.get_value(it, PackageListModel.COL_BINB)
|
||||
if binb:
|
||||
self.builder.show_binb_dialog(binb)
|
||||
|
||||
def build_image_clicked_cb(self, button):
|
||||
self.builder.build_image()
|
||||
|
||||
def back_button_clicked_cb(self, button):
|
||||
self.builder.show_configuration()
|
||||
|
||||
def _expand_all(self):
|
||||
for tab in self.tables:
|
||||
tab.table_tree.expand_all()
|
||||
|
||||
def refresh_selection(self):
|
||||
self._expand_all()
|
||||
|
||||
self.builder.configuration.selected_packages = self.package_model.get_selected_packages()
|
||||
selected_packages_num = len(self.builder.configuration.selected_packages)
|
||||
selected_packages_size = float(self.package_model.get_packages_size())
|
||||
selected_packages_size_str = self._size_to_string(selected_packages_size)
|
||||
|
||||
image_overhead_factor = self.builder.configuration.image_overhead_factor
|
||||
image_rootfs_size = self.builder.configuration.image_rootfs_size
|
||||
image_extra_size = self.builder.configuration.image_extra_size
|
||||
base_size = image_overhead_factor * selected_packages_size
|
||||
image_total_size = max(base_size, image_rootfs_size) + image_extra_size
|
||||
image_total_size_str = self._size_to_string(image_total_size)
|
||||
|
||||
self.label.set_text("Packages included: %s\nSelected packages size: %s\nTotal image size: %s" %
|
||||
(selected_packages_num, selected_packages_size_str, image_total_size_str))
|
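# --- A worked example (illustrative numbers) of the size estimate computed above:
#       total = max(image_overhead_factor * selected_packages_size,
#                   image_rootfs_size) + image_extra_size        (all sizes in KB)
image_overhead_factor = 1.3
selected_packages_size = 51200.0   # 50 MB of selected packages
image_rootfs_size = 8192           # minimum rootfs size
image_extra_size = 10240           # extra free space requested

base_size = image_overhead_factor * selected_packages_size        # 66560.0 KB
image_total_size = max(base_size, image_rootfs_size) + image_extra_size
print(image_total_size)                                           # 76800.0 KB (75.0 MB)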
||||
|
||||
"""
|
||||
Helper function to convert the package size to string format.
|
||||
The unit of size is KB
|
||||
"""
|
||||
def _size_to_string(self, size):
|
||||
if len(str(int(size))) > 3:
|
||||
size_str = '%.1f' % (size*1.0/1024) + ' MB'
|
||||
else:
|
||||
size_str = str(size) + ' KB'
|
||||
return size_str
|
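# --- Standalone copy of the helper above, just to show the KB/MB switch:
def size_to_string(size):
    if len(str(int(size))) > 3:        # four or more digits -> report in MB
        return '%.1f' % (size * 1.0 / 1024) + ' MB'
    return str(size) + ' KB'

print(size_to_string(512))       # 512 KB
print(size_to_string(76800.0))   # 75.0 MB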
||||
|
||||
# Callback functions
|
||||
def reset_clicked_cb(self, button):
|
||||
self.package_model.reset()
|
||||
self.builder.reset_package_model()
|
||||
|
||||
def toggle_item_idle_cb(self, path):
|
||||
if not self.package_model.path_included(path):
|
||||
self.package_model.include_item(item_path=path, binb="User Selected")
|
||||
else:
|
||||
self.package_model.exclude_item(item_path=path)
|
||||
|
||||
self.builder.window_sensitive(True)
|
||||
|
||||
def table_toggled_cb(self, cell, view_path, view_tree):
|
||||
# Click to include a package
|
||||
self.builder.window_sensitive(False)
|
||||
view_model = view_tree.get_model()
|
||||
path = self.package_model.convert_vpath_to_path(view_model, view_path)
|
||||
glib.idle_add(self.toggle_item_idle_cb, path)
|
||||
@@ -1,52 +0,0 @@
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011 Intel Corporation
|
||||
#
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
|
||||
class HobProgressBar (gtk.ProgressBar):
|
||||
def __init__(self):
|
||||
gtk.ProgressBar.__init__(self)
|
||||
self.set_rcstyle(True)
|
||||
self.percentage = 0
|
||||
|
||||
def set_rcstyle(self, status):
|
||||
rcstyle = gtk.RcStyle()
|
||||
rcstyle.fg[2] = gtk.gdk.Color(HobColors.BLACK)
|
||||
if status:
|
||||
rcstyle.bg[3] = gtk.gdk.Color(HobColors.RUNNING)
|
||||
else:
|
||||
rcstyle.bg[3] = gtk.gdk.Color(HobColors.ERROR)
|
||||
self.modify_style(rcstyle)
|
||||
|
||||
def set_title(self, text=None):
|
||||
if not text:
|
||||
text = ""
|
||||
text += " %.0f%%" % self.percentage
|
||||
self.set_text(text)
|
||||
|
||||
def reset(self):
|
||||
self.set_fraction(0)
|
||||
self.set_text("")
|
||||
self.set_rcstyle(True)
|
||||
self.percentage = 0
|
||||
|
||||
def update(self, fraction):
|
||||
self.percentage = int(fraction * 100)
|
||||
self.set_fraction(fraction)
|
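# --- A minimal sketch (PyGTK assumed) of driving the HobProgressBar shown above:
pbar = HobProgressBar()
pbar.update(0.42)                    # remembers 42% and moves the bar
pbar.set_title("Parsing recipes")    # label becomes "Parsing recipes 42%"
pbar.set_rcstyle(False)              # switch the bar to the error colour
pbar.reset()                         # back to 0% with the normal colour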
||||
@@ -1,221 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import glib
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import HobWidget, HobViewBar, HobViewTable
|
||||
from bb.ui.crumbs.hoblistmodel import RecipeListModel
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
|
||||
#
|
||||
# RecipeSelectionPage
|
||||
#
|
||||
class RecipeSelectionPage (HobPage):
|
||||
pages = [
|
||||
{
|
||||
'name' : 'Recipe',
|
||||
'filter' : { RecipeListModel.COL_TYPE : ['recipe'] },
|
||||
'columns' : [{
|
||||
'col_name' : 'Recipe',
|
||||
'col_id' : RecipeListModel.COL_NAME,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'License',
|
||||
'col_id' : RecipeListModel.COL_LIC,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'Group',
|
||||
'col_id' : RecipeListModel.COL_GROUP,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'Included',
|
||||
'col_id' : RecipeListModel.COL_INC,
|
||||
'col_style': 'toggle',
|
||||
'col_min' : 50,
|
||||
'col_max' : 50
|
||||
}]
|
||||
}, {
|
||||
'name' : 'Recipe Collection',
|
||||
'filter' : { RecipeListModel.COL_TYPE : ['task'] },
|
||||
'columns' : [{
|
||||
'col_name' : 'Recipe Collection',
|
||||
'col_id' : RecipeListModel.COL_NAME,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'Description',
|
||||
'col_id' : RecipeListModel.COL_DESC,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'Included',
|
||||
'col_id' : RecipeListModel.COL_INC,
|
||||
'col_style': 'toggle',
|
||||
'col_min' : 50,
|
||||
'col_max' : 50
|
||||
}]
|
||||
}, {
|
||||
'name' : 'Included',
|
||||
'filter' : { RecipeListModel.COL_INC : [True],
|
||||
RecipeListModel.COL_TYPE : ['recipe', 'task'] },
|
||||
'columns' : [{
|
||||
'col_name' : 'Recipe',
|
||||
'col_id' : RecipeListModel.COL_NAME,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'Brought by',
|
||||
'col_id' : RecipeListModel.COL_BINB,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 500
|
||||
}, {
|
||||
'col_name' : 'Included',
|
||||
'col_id' : RecipeListModel.COL_INC,
|
||||
'col_style': 'toggle',
|
||||
'col_min' : 50,
|
||||
'col_max' : 50
|
||||
}]
|
||||
}
|
||||
]
|
||||
|
||||
def __init__(self, builder = None):
|
||||
super(RecipeSelectionPage, self).__init__(builder, "Recipe Selection")
|
||||
|
||||
# set invisible members
|
||||
self.recipe_model = self.builder.recipe_model
|
||||
|
||||
# create visual elements
|
||||
self.create_visual_elements()
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.label = gtk.Label("Recipes included: %s" % len(self.builder.configuration.selected_recipes))
|
||||
self.eventbox = self.add_onto_top_bar(self.label, 73)
|
||||
self.pack_start(self.eventbox, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
|
||||
# set visible members
|
||||
self.grid = gtk.Table(10, 1, True)
|
||||
self.grid.set_col_spacings(3)
|
||||
|
||||
# draw the left part of the window
|
||||
# a notebook
|
||||
self.ins = gtk.Notebook()
|
||||
self.ins.set_show_tabs(False)
|
||||
self.tables = [] # we need to modify the tables when the dialog is shown
|
||||
# append the tabs in order
|
||||
for i in range(len(self.pages)):
|
||||
columns = self.pages[i]['columns']
|
||||
tab = HobViewTable(columns, self.reset_clicked_cb, self.table_toggled_cb)
|
||||
filter = self.pages[i]['filter']
|
||||
tab.table_tree.set_model(self.recipe_model.tree_model(filter))
|
||||
label = gtk.Label(self.pages[i]['name'])
|
||||
self.ins.append_page(tab, label)
|
||||
self.tables.append(tab)
|
||||
|
||||
self.grid.attach(self.ins, 0, 1, 1, 10, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND)
|
||||
# a black bar associated with the notebook
|
||||
self.topbar = HobViewBar(self.ins)
|
||||
self.grid.attach(self.topbar, 0, 1, 0, 1, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND)
|
||||
# set the search entry for each table
|
||||
for tab in self.tables:
|
||||
tab.table_tree.set_search_entry(self.topbar.search)
|
||||
|
||||
inctab_tree_view = self.tables[len(self.pages)-1].table_tree
|
||||
inctab_tree_selection = inctab_tree_view.get_selection()
|
||||
inctab_tree_selection.connect("changed", self.tree_selection_cb, inctab_tree_view)
|
||||
|
||||
# add all into the window
|
||||
self.box_group_area.add(self.grid)
|
||||
|
||||
button_box = gtk.HBox(False, 5)
|
||||
self.box_group_area.pack_end(button_box, expand=False, fill=False)
|
||||
|
||||
self.build_packages_button = gtk.Button()
|
||||
label = gtk.Label()
|
||||
mark = "<span %s>Build packages</span>" % self.span_tag('24px', 'bold')
|
||||
label.set_markup(mark)
|
||||
self.build_packages_button.set_image(label)
|
||||
self.build_packages_button.set_size_request(205, 49)
|
||||
self.build_packages_button.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(HobColors.ORANGE))
|
||||
self.build_packages_button.modify_bg(gtk.STATE_PRELIGHT, gtk.gdk.Color(HobColors.ORANGE))
|
||||
self.build_packages_button.modify_bg(gtk.STATE_SELECTED, gtk.gdk.Color(HobColors.ORANGE))
|
||||
self.build_packages_button.set_tooltip_text("Build packages for customization")
|
||||
self.build_packages_button.set_flags(gtk.CAN_DEFAULT)
|
||||
self.build_packages_button.grab_default()
|
||||
self.build_packages_button.connect("clicked", self.build_packages_clicked_cb)
|
||||
button_box.pack_end(self.build_packages_button, expand=False, fill=False)
|
||||
|
||||
self.back_button = gtk.LinkButton("Go back to Image Configuration screen", "<< Back to image configuration")
|
||||
self.back_button.connect("clicked", self.back_button_clicked_cb)
|
||||
button_box.pack_start(self.back_button, expand=False, fill=False)
|
||||
|
||||
def tree_selection_cb(self, tree_selection, tree_view):
|
||||
tree_model = tree_view.get_model()
|
||||
path, column = tree_view.get_cursor()
|
||||
if not path or column == tree_view.get_column(2):
|
||||
return
|
||||
|
||||
it = tree_model.get_iter(path)
|
||||
binb = tree_model.get_value(it, RecipeListModel.COL_BINB)
|
||||
if binb:
|
||||
self.builder.show_binb_dialog(binb)
|
||||
|
||||
def build_packages_clicked_cb(self, button):
|
||||
self.builder.build_packages()
|
||||
|
||||
def back_button_clicked_cb(self, button):
|
||||
self.builder.show_configuration()
|
||||
|
||||
def refresh_selection(self):
|
||||
self.builder.configuration.selected_image = self.recipe_model.get_selected_image()
|
||||
_, self.builder.configuration.selected_recipes = self.recipe_model.get_selected_recipes()
|
||||
self.label.set_text("Recipes included: %s" % len(self.builder.configuration.selected_recipes))
|
||||
|
||||
# Callback functions
|
||||
def reset_clicked_cb(self, button):
|
||||
self.builder.reset_recipe_model()
|
||||
|
||||
def toggle_item_idle_cb(self, path):
|
||||
if not self.recipe_model.path_included(path):
|
||||
self.recipe_model.include_item(item_path=path, binb="User Selected", image_contents=False)
|
||||
else:
|
||||
self.recipe_model.exclude_item(item_path=path)
|
||||
|
||||
self.builder.window_sensitive(True)
|
||||
|
||||
def table_toggled_cb(self, cell, view_path, view_tree):
|
||||
# Click to include a recipe
|
||||
self.builder.window_sensitive(False)
|
||||
view_model = view_tree.get_model()
|
||||
path = self.recipe_model.convert_vpath_to_path(view_model, view_path)
|
||||
glib.idle_add(self.toggle_item_idle_cb, path)
|
||||
@@ -25,7 +25,12 @@ import logging
|
||||
import time
|
||||
import urllib
|
||||
import urllib2
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
|
||||
class Colors(object):
|
||||
OK = "#ffffff"
|
||||
RUNNING = "#aaffaa"
|
||||
WARNING = "#f88017"
|
||||
ERROR = "#ffaaaa"
|
||||
|
||||
class RunningBuildModel (gtk.TreeStore):
|
||||
(COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
|
||||
@@ -53,10 +58,7 @@ class RunningBuild (gobject.GObject):
|
||||
()),
|
||||
'build-complete' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'task-started' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
())
|
||||
}
|
||||
pids_to_task = {}
|
||||
tasks_to_iter = {}
|
||||
@@ -106,13 +108,13 @@ class RunningBuild (gobject.GObject):
|
||||
|
||||
if event.levelno >= logging.ERROR:
|
||||
icon = "dialog-error"
|
||||
color = HobColors.ERROR
|
||||
color = Colors.ERROR
|
||||
elif event.levelno >= logging.WARNING:
|
||||
icon = "dialog-warning"
|
||||
color = HobColors.WARNING
|
||||
color = Colors.WARNING
|
||||
else:
|
||||
icon = None
|
||||
color = HobColors.OK
|
||||
color = Colors.OK
|
||||
|
||||
# if we know which package we belong to, we'll append onto its list.
|
||||
# otherwise, we'll jump to the top of the master list
|
||||
@@ -150,7 +152,7 @@ class RunningBuild (gobject.GObject):
|
||||
None,
|
||||
"Package: %s" % (package),
|
||||
None,
|
||||
HobColors.OK,
|
||||
Colors.OK,
|
||||
0))
|
||||
self.tasks_to_iter[(package, None)] = parent
|
||||
|
||||
@@ -158,7 +160,7 @@ class RunningBuild (gobject.GObject):
|
||||
# such.
|
||||
# @todo if parent is already in error, don't mark it green
|
||||
self.model.set(parent, self.model.COL_ICON, "gtk-execute",
|
||||
self.model.COL_COLOR, HobColors.RUNNING)
|
||||
self.model.COL_COLOR, Colors.RUNNING)
|
||||
|
||||
# Add an entry in the model for this task
|
||||
i = self.model.append (parent, (None,
|
||||
@@ -166,7 +168,7 @@ class RunningBuild (gobject.GObject):
|
||||
task,
|
||||
"Task: %s" % (task),
|
||||
"gtk-execute",
|
||||
HobColors.RUNNING,
|
||||
Colors.RUNNING,
|
||||
0))
|
||||
|
||||
# update the parent's active task count
|
||||
@@ -177,6 +179,10 @@ class RunningBuild (gobject.GObject):
|
||||
# that we need to attach to a task.
|
||||
self.tasks_to_iter[(package, task)] = i
|
||||
|
||||
# If we don't handle these the GUI does not proceed
|
||||
elif isinstance(event, bb.build.TaskInvalid):
|
||||
return
|
||||
|
||||
elif isinstance(event, bb.build.TaskBase):
|
||||
current = self.tasks_to_iter[(package, task)]
|
||||
parent = self.tasks_to_iter[(package, None)]
|
||||
@@ -188,20 +194,20 @@ class RunningBuild (gobject.GObject):
|
||||
if isinstance(event, bb.build.TaskFailed):
|
||||
# Mark the task and parent as failed
|
||||
icon = "dialog-error"
|
||||
color = HobColors.ERROR
|
||||
color = Colors.ERROR
|
||||
|
||||
logfile = event.logfile
|
||||
if logfile and os.path.exists(logfile):
|
||||
with open(logfile) as f:
|
||||
logdata = f.read()
|
||||
self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', HobColors.OK, 0))
|
||||
self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', Colors.OK, 0))
|
||||
|
||||
for i in (current, parent):
|
||||
self.model.set(i, self.model.COL_ICON, icon,
|
||||
self.model.COL_COLOR, color)
|
||||
else:
|
||||
icon = None
|
||||
color = HobColors.OK
|
||||
color = Colors.OK
|
||||
|
||||
# Mark the task as inactive
|
||||
self.model.set(current, self.model.COL_ICON, icon,
|
||||
@@ -213,7 +219,7 @@ class RunningBuild (gobject.GObject):
|
||||
if self.model.get(parent, self.model.COL_ICON) != 'dialog-error':
|
||||
self.model.set(parent, self.model.COL_ICON, icon)
|
||||
if num_active == 0:
|
||||
self.model.set(parent, self.model.COL_COLOR, HobColors.OK)
|
||||
self.model.set(parent, self.model.COL_COLOR, Colors.OK)
|
||||
|
||||
# Clear the iters and the pids since when the task goes away the
|
||||
# pid will no longer be used for messages
|
||||
@@ -228,12 +234,8 @@ class RunningBuild (gobject.GObject):
|
||||
None,
|
||||
"Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
|
||||
None,
|
||||
HobColors.OK,
|
||||
Colors.OK,
|
||||
0))
|
||||
if pbar:
|
||||
pbar.update(0, None, bb.event.getName(event))
|
||||
pbar.set_title()
|
||||
|
||||
elif isinstance(event, bb.event.BuildCompleted):
|
||||
failures = int (event._failures)
|
||||
self.model.prepend(None, (None,
|
||||
@@ -241,7 +243,7 @@ class RunningBuild (gobject.GObject):
|
||||
None,
|
||||
"Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
|
||||
None,
|
||||
HobColors.OK,
|
||||
Colors.OK,
|
||||
0))
|
||||
|
||||
# Emit the appropriate signal depending on the number of failures
|
||||
@@ -252,8 +254,6 @@ class RunningBuild (gobject.GObject):
|
||||
# Emit a generic "build-complete" signal for things wishing to
|
||||
# handle when the build is finished
|
||||
self.emit("build-complete")
|
||||
if pbar:
|
||||
pbar.set_text(event.msg)
|
||||
|
||||
elif isinstance(event, bb.command.CommandFailed):
|
||||
if event.error.startswith("Exited with"):
|
||||
@@ -280,15 +280,6 @@ class RunningBuild (gobject.GObject):
|
||||
pbar.update(event.current, self.progress_total)
|
||||
elif isinstance(event, bb.event.ParseCompleted) and pbar:
|
||||
pbar.hide()
|
||||
#using runqueue events as many as possible to update the progress bar
|
||||
elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
|
||||
message = {}
|
||||
message["eventname"] = bb.event.getName(event)
|
||||
num_of_completed = event.stats.completed + event.stats.failed
|
||||
message["current"] = num_of_completed
|
||||
message["total"] = event.stats.total
|
||||
message["title"] = ""
|
||||
self.emit("task-started", message)
|
||||
|
||||
return
|
||||
|
||||
|
||||
bitbake/lib/bb/ui/crumbs/tasklistmodel.py (new normal file, 620 lines)
@@ -0,0 +1,620 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import os  # used by loadRecipe() and writeRecipe() below
import re
|
||||
|
||||
class BuildRep(gobject.GObject):
|
||||
|
||||
def __init__(self, userpkgs, allpkgs, base_image=None):
|
||||
gobject.GObject.__init__(self)
|
||||
self.base_image = base_image
|
||||
self.allpkgs = allpkgs
|
||||
self.userpkgs = userpkgs
|
||||
|
||||
def loadRecipe(self, pathname):
|
||||
contents = []
|
||||
packages = ""
|
||||
base_image = ""
|
||||
|
||||
with open(pathname, 'r') as f:
|
||||
contents = f.readlines()
|
||||
|
||||
pkg_pattern = r"^\s*(IMAGE_INSTALL)\s*([+=.?]+)\s*(\".*?\")"
|
||||
img_pattern = r"^\s*(require)\s+(\S+\.bb)"
|
||||
|
||||
for line in contents:
|
||||
matchpkg = re.search(pkg_pattern, line)
|
||||
matchimg = re.search(img_pattern, line)
|
||||
if matchpkg:
|
||||
packages = packages + matchpkg.group(3).strip('"')
|
||||
if matchimg:
|
||||
base_image = os.path.basename(matchimg.group(2)).split(".")[0]
|
||||
|
||||
self.base_image = base_image
|
||||
self.userpkgs = packages
|
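# --- Illustrative only: the kind of recipe lines the two patterns above are
# meant to pick out (the recipe path and package list here are made up).
import re

pkg_pattern = r"^\s*(IMAGE_INSTALL)\s*([+=.?]+)\s*(\".*?\")"
img_pattern = r"^\s*(require)\s+(\S+\.bb)"

print(re.search(pkg_pattern, 'IMAGE_INSTALL += "dropbear vim"').group(3).strip('"'))
# -> dropbear vim
print(re.search(img_pattern, 'require recipes-core/images/core-image-minimal.bb').group(2))
# -> recipes-core/images/core-image-minimal.bb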
||||
|
||||
def writeRecipe(self, writepath, model):
|
||||
template = """
|
||||
# Recipe generated by the HOB
|
||||
|
||||
require %s
|
||||
|
||||
IMAGE_INSTALL += "%s"
|
||||
"""
|
||||
|
||||
empty_template = """
|
||||
# Recipe generated by the HOB
|
||||
|
||||
inherit core-image
|
||||
|
||||
IMAGE_INSTALL = "%s"
|
||||
"""
|
||||
if self.base_image and not self.base_image == "empty":
|
||||
meta_path = model.find_image_path(self.base_image)
|
||||
recipe = template % (meta_path, self.userpkgs)
|
||||
else:
|
||||
recipe = empty_template % self.allpkgs
|
||||
|
||||
if os.path.exists(writepath):
|
||||
os.rename(writepath, "%s~" % writepath)
|
||||
|
||||
with open(writepath, 'w') as r:
|
||||
r.write(recipe)
|
||||
|
||||
return writepath
|
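# --- What writeRecipe() emits when a base image is selected (values illustrative):
template = "\n# Recipe generated by the HOB\n\nrequire %s\n\nIMAGE_INSTALL += \"%s\"\n"
print(template % ("recipes-core/images/core-image-minimal.bb", "dropbear vim"))
# require recipes-core/images/core-image-minimal.bb
#
# IMAGE_INSTALL += "dropbear vim"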
||||
|
||||
class TaskListModel(gtk.ListStore):
|
||||
"""
|
||||
This class defines a gtk.ListStore subclass which will convert the output
|
||||
of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
|
||||
providing convenience functions to access gtk.TreeModel subclasses which
|
||||
provide filtered views of the data.
|
||||
"""
|
||||
(COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_PATH, COL_PN) = range(11)
|
||||
|
||||
__gsignals__ = {
|
||||
"tasklist-populated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
"contents-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_INT,)),
|
||||
"image-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,)),
|
||||
}
|
||||
|
||||
"""
|
||||
"""
|
||||
def __init__(self):
|
||||
self.contents = None
|
||||
self.tasks = None
|
||||
self.packages = None
|
||||
self.images = None
|
||||
self.selected_image = None
|
||||
|
||||
gtk.ListStore.__init__ (self,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING)
|
||||
|
||||
"""
|
||||
Helper method to determine whether name is not a real target pn (a -native, -cross or virtual/ item)
|
||||
"""
|
||||
def non_target_name(self, name):
|
||||
if ('-native' in name) or ('-cross' in name) or name.startswith('virtual/'):
|
||||
return True
|
||||
return False
|
||||
|
||||
def contents_changed_cb(self, tree_model, path, it=None):
|
||||
pkg_cnt = self.contents.iter_n_children(None)
|
||||
self.emit("contents-changed", pkg_cnt)
|
||||
|
||||
def contents_model_filter(self, model, it):
|
||||
if not model.get_value(it, self.COL_INC) or model.get_value(it, self.COL_TYPE) == 'image':
|
||||
return False
|
||||
name = model.get_value(it, self.COL_NAME)
|
||||
if self.non_target_name(name):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModel
|
||||
containing only the items which are to be included in the
|
||||
image
|
||||
"""
|
||||
def contents_model(self):
|
||||
if not self.contents:
|
||||
self.contents = self.filter_new()
|
||||
self.contents.set_visible_func(self.contents_model_filter)
|
||||
self.contents.connect("row-inserted", self.contents_changed_cb)
|
||||
self.contents.connect("row-deleted", self.contents_changed_cb)
|
||||
return self.contents
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is a task
|
||||
"""
|
||||
def task_model_filter(self, model, it):
|
||||
if model.get_value(it, self.COL_TYPE) == 'task':
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModel
|
||||
containing only the items which are tasks
|
||||
"""
|
||||
def tasks_model(self):
|
||||
if not self.tasks:
|
||||
self.tasks = self.filter_new()
|
||||
self.tasks.set_visible_func(self.task_model_filter)
|
||||
return self.tasks
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is an image
|
||||
"""
|
||||
def image_model_filter(self, model, it):
|
||||
if model.get_value(it, self.COL_TYPE) == 'image':
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModel
|
||||
containing only the items which are images
|
||||
"""
|
||||
def images_model(self):
|
||||
if not self.images:
|
||||
self.images = self.filter_new()
|
||||
self.images.set_visible_func(self.image_model_filter)
|
||||
return self.images
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is a package
|
||||
"""
|
||||
def package_model_filter(self, model, it):
|
||||
if model.get_value(it, self.COL_TYPE) != 'package':
|
||||
return False
|
||||
else:
|
||||
name = model.get_value(it, self.COL_NAME)
|
||||
if self.non_target_name(name):
|
||||
return False
|
||||
return True
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModel
|
||||
containing only the items which are packages
|
||||
"""
|
||||
def packages_model(self):
|
||||
if not self.packages:
|
||||
self.packages = self.filter_new()
|
||||
self.packages.set_visible_func(self.package_model_filter)
|
||||
return self.packages
|
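# --- Hypothetical sketch of consuming the four filtered views above (PyGTK
# assumed; the model must first be filled by populate(), shown below):
model = TaskListModel()
contents = model.contents_model()   # included, non-image, target items only
tasks = model.tasks_model()         # rows whose COL_TYPE is 'task'
images = model.images_model()       # rows whose COL_TYPE is 'image'
packages = model.packages_model()   # target rows whose COL_TYPE is 'package'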
||||
|
||||
"""
|
||||
The populate() function takes as input the data from a
|
||||
bb.event.TargetsTreeGenerated event and populates the TaskList.
|
||||
Once the population is done it emits gsignal tasklist-populated
|
||||
to notify any listeners that the model is ready
|
||||
"""
|
||||
def populate(self, event_model):
|
||||
# First clear the model, in case repopulating
|
||||
self.clear()
|
||||
for item in event_model["pn"]:
|
||||
atype = 'package'
|
||||
name = item
|
||||
summary = event_model["pn"][item]["summary"]
|
||||
lic = event_model["pn"][item]["license"]
|
||||
group = event_model["pn"][item]["section"]
|
||||
filename = event_model["pn"][item]["filename"]
|
||||
if ('task-' in name):
|
||||
atype = 'task'
|
||||
elif ('-image-' in name):
|
||||
atype = 'image'
|
||||
|
||||
# Create a combined list of build and runtime dependencies and
|
||||
# then remove any duplicate entries and any entries for -dev
|
||||
# packages
|
||||
depends = event_model["depends"].get(item, [])
|
||||
rdepends = event_model["rdepends-pn"].get(item, [])
|
||||
packages = {}
|
||||
for pkg in event_model["packages"]:
|
||||
if event_model["packages"][pkg]["pn"] == name:
|
||||
deps = []
|
||||
deps.extend(depends)
|
||||
deps.extend(event_model["rdepends-pkg"].get(pkg, []))
|
||||
deps.extend(rdepends)
|
||||
deps = self.squish(deps)
|
||||
# rdepends-pn includes pn-dev
|
||||
if ("%s-dev" % item) in deps:
|
||||
deps.remove("%s-dev" % item)
|
||||
# the dependency list can include the package itself
|
||||
if pkg in deps:
|
||||
deps.remove(pkg)
|
||||
packages[pkg] = deps
|
||||
|
||||
for p in packages:
|
||||
self.set(self.append(), self.COL_NAME, p, self.COL_DESC, summary,
|
||||
self.COL_LIC, lic, self.COL_GROUP, group,
|
||||
self.COL_DEPS, " ".join(packages[p]), self.COL_BINB, "",
|
||||
self.COL_TYPE, atype, self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_PATH, filename,
|
||||
self.COL_PN, item)
|
||||
|
||||
self.emit("tasklist-populated")
|
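# --- A minimal, hypothetical event_model accepted by populate(), built only
# from the keys the method reads; real bb.event.TargetsTreeGenerated payloads
# carry more entries and real metadata (all values below are illustrative).
event_model = {
    "pn": {
        "dropbear": {"summary": "A lightweight SSH server",
                     "license": "MIT",
                     "section": "console/network",
                     "filename": "meta/recipes-core/dropbear/dropbear_2011.54.bb"},
    },
    "depends": {"dropbear": ["zlib"]},
    "rdepends-pn": {"dropbear": []},
    "rdepends-pkg": {},
    "packages": {"dropbear": {"pn": "dropbear"}},
}
model = TaskListModel()
model.populate(event_model)   # emits "tasklist-populated" when done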
||||
|
||||
"""
|
||||
Load a BuildRep into the model
|
||||
"""
|
||||
def load_image_rep(self, rep):
|
||||
# Unset everything
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
path = self.get_path(it)
|
||||
self[path][self.COL_INC] = False
|
||||
self[path][self.COL_IMG] = False
|
||||
it = self.iter_next(it)
|
||||
|
||||
# Iterate the images and disable them all
|
||||
it = self.images.get_iter_first()
|
||||
while it:
|
||||
path = self.images.convert_path_to_child_path(self.images.get_path(it))
|
||||
name = self[path][self.COL_NAME]
|
||||
if name == rep.base_image:
|
||||
self.include_item(path, image_contents=True)
|
||||
else:
|
||||
self[path][self.COL_INC] = False
|
||||
it = self.images.iter_next(it)
|
||||
|
||||
# Mark all of the additional packages for inclusion
|
||||
packages = rep.userpkgs.split(" ")
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
path = self.get_path(it)
|
||||
name = self[path][self.COL_NAME]
|
||||
if name in packages:
|
||||
self.include_item(path, binb="User Selected")
|
||||
packages.remove(name)
|
||||
it = self.iter_next(it)
|
||||
|
||||
self.emit("image-changed", rep.base_image)
|
||||
|
||||
"""
|
||||
squish lst so that it doesn't contain any duplicate entries
|
||||
"""
|
||||
def squish(self, lst):
|
||||
seen = {}
|
||||
for l in lst:
|
||||
seen[l] = 1
|
||||
return seen.keys()
|
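# --- squish() above de-duplicates via a dict, so the order of the result is
# not guaranteed; a standalone equivalent for illustration:
def squish(lst):
    seen = {}
    for l in lst:
        seen[l] = 1
    return list(seen.keys())

print(sorted(squish(["a", "b", "a", "c", "b"])))   # ['a', 'b', 'c']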
||||
|
||||
"""
|
||||
Mark the item at path as not included
|
||||
NOTE:
|
||||
path should be a gtk.TreeModelPath into self (not a filtered model)
|
||||
"""
|
||||
def remove_item_path(self, path):
|
||||
self[path][self.COL_BINB] = ""
|
||||
self[path][self.COL_INC] = False
|
||||
|
||||
"""
|
||||
Recursively called to mark the item at opath and any package which
|
||||
depends on it for removal.
|
||||
NOTE: This method dumbly removes user selected packages and since we don't
|
||||
do significant reverse dependency tracking it's easier and simpler to save
|
||||
the items marked as user selected and re-add them once the removal sweep is
|
||||
complete.
|
||||
"""
|
||||
def mark(self, opath):
|
||||
usersel = {}
|
||||
removed = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
# The name of the item we're removing, so that we can use it to find
|
||||
# other items which either depend on it, or were brought in by it
|
||||
marked_name = self[opath][self.COL_NAME]
|
||||
|
||||
# Remove the passed item
|
||||
self.remove_item_path(opath)
|
||||
|
||||
# Remove all dependent packages, update binb
|
||||
while it:
|
||||
path = self.get_path(it)
|
||||
it = self.iter_next(it)
|
||||
|
||||
inc = self[path][self.COL_INC]
|
||||
deps = self[path][self.COL_DEPS]
|
||||
binb = self[path][self.COL_BINB].split(', ')
|
||||
itype = self[path][self.COL_TYPE]
|
||||
itname = self[path][self.COL_NAME]
|
||||
|
||||
# We ignore anything that isn't a package
|
||||
if not itype == "package":
|
||||
continue
|
||||
|
||||
# If the user added this item and it's not the item we're removing
|
||||
# we should keep it and its dependencies, the easiest way to do so
|
||||
# is to save its name and re-mark it for inclusion once dependency
|
||||
# processing is complete
|
||||
if "User Selected" in binb:
|
||||
usersel[itname] = self[path][self.COL_IMG]
|
||||
|
||||
# If the iterated item is included and depends on the removed
|
||||
# item it should also be removed.
|
||||
# FIXME: need to ensure partial name matching doesn't happen
|
||||
if inc and marked_name in deps and itname not in removed:
|
||||
# found a dependency, remove it
|
||||
removed.append(itname)
|
||||
self.mark(path)
|
||||
|
||||
# If the iterated item was brought in by the removed (passed) item
|
||||
# try and find an alternative dependee and update the binb column
|
||||
if inc and marked_name in binb:
|
||||
binb.remove(marked_name)
|
||||
self[path][self.COL_BINB] = ', '.join(binb).lstrip(', ')
|
||||
|
||||
# Re-add any removed user selected items
|
||||
for u in usersel:
|
||||
npath = self.find_path_for_item(u)
|
||||
self.include_item(item_path=npath,
|
||||
binb="User Selected",
|
||||
image_contents=usersel[u])
|
||||
"""
|
||||
Remove items from contents if they have an empty COL_BINB (brought in by)
|
||||
caused by all packages they are a dependency of being removed.
|
||||
If the item isn't a package we leave it included.
|
||||
"""
|
||||
def sweep_up(self):
|
||||
it = self.contents.get_iter_first()
|
||||
while it:
|
||||
binb = self.contents.get_value(it, self.COL_BINB)
|
||||
itype = self.contents.get_value(it, self.COL_TYPE)
|
||||
remove = False
|
||||
|
||||
if itype == 'package' and not binb:
|
||||
oit = self.contents.convert_iter_to_child_iter(it)
|
||||
opath = self.get_path(oit)
|
||||
self.mark(opath)
|
||||
remove = True
|
||||
|
||||
# When we remove a package from the contents model we alter the
|
||||
# model, so continuing to iterate is bad. *Furthermore* it's
|
||||
# likely that the removal has affected an already iterated item
|
||||
# so we should start from the beginning anyway.
|
||||
# Only when we've managed to iterate the entire contents model
|
||||
# without removing any items do we allow the loop to exit.
|
||||
if remove:
|
||||
it = self.contents.get_iter_first()
|
||||
else:
|
||||
it = self.contents.iter_next(it)
|
||||
|
||||
"""
|
||||
Check whether the item at item_path is included or not
|
||||
"""
|
||||
def contents_includes_path(self, item_path):
|
||||
return self[item_path][self.COL_INC]
|
||||
|
||||
"""
|
||||
Add this item, and any of its dependencies, to the image contents
|
||||
"""
|
||||
def include_item(self, item_path, binb="", image_contents=False):
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_deps = self[item_path][self.COL_DEPS]
|
||||
|
||||
self[item_path][self.COL_INC] = True
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if binb and not binb in item_bin:
|
||||
item_bin.append(binb)
|
||||
self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
|
||||
|
||||
# We want to do some magic with things which are brought in by the
|
||||
# base image, so tag them as such
|
||||
if image_contents:
|
||||
self[item_path][self.COL_IMG] = True
|
||||
if self[item_path][self.COL_TYPE] == 'image':
|
||||
self.selected_image = item_name
|
||||
|
||||
if item_deps:
|
||||
# Ensure all of the items deps are included and, where appropriate,
|
||||
# add this item to their COL_BINB
|
||||
for dep in item_deps.split(" "):
|
||||
# Look up dep in the model; if it isn't present, skip it
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_included = self.contents_includes_path(dep_path)
|
||||
|
||||
if dep_included and not dep in item_bin:
|
||||
# don't set the COL_BINB to this item if the target is an
|
||||
# item in our own COL_BINB
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if not item_name in dep_bin:
|
||||
dep_bin.append(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
elif not dep_included:
|
||||
self.include_item(dep_path, binb=item_name, image_contents=image_contents)
|
||||
|
||||
"""
|
||||
Find the model path for the item_name
|
||||
Returns the path in the model or None
|
||||
"""
|
||||
def find_path_for_item(self, item_name):
|
||||
# We don't include virtual/* or *-native items in the model so save a
|
||||
# heavy iteration loop by exiting early for these items
|
||||
if self.non_target_name(item_name):
|
||||
return None
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if (self.get_value(it, self.COL_NAME) == item_name):
|
||||
return self.get_path(it)
|
||||
else:
|
||||
it = self.iter_next(it)
|
||||
return None
|
||||
|
||||
"""
|
||||
Empty self.contents by setting the COL_INC of each entry to False
|
||||
"""
|
||||
def reset(self):
|
||||
# Deselect images - slightly more complex logic so that we don't
|
||||
# have to iterate all of the contents of the main model, instead
|
||||
# just iterate the images model.
|
||||
if self.selected_image:
|
||||
iit = self.images.get_iter_first()
|
||||
while iit:
|
||||
pit = self.images.convert_iter_to_child_iter(iit)
|
||||
self.set(pit, self.COL_INC, False)
|
||||
iit = self.images.iter_next(iit)
|
||||
self.selected_image = None
|
||||
|
||||
it = self.contents.get_iter_first()
|
||||
while it:
|
||||
oit = self.contents.convert_iter_to_child_iter(it)
|
||||
self.set(oit,
|
||||
self.COL_INC, False,
|
||||
self.COL_BINB, "",
|
||||
self.COL_IMG, False)
|
||||
# As we've just removed the first item...
|
||||
it = self.contents.get_iter_first()
|
||||
|
||||
"""
|
||||
Returns two lists. One of user selected packages and the other containing
|
||||
all selected packages
|
||||
"""
|
||||
def get_selected_packages(self):
|
||||
allpkgs = []
|
||||
userpkgs = []
|
||||
|
||||
it = self.contents.get_iter_first()
|
||||
while it:
|
||||
sel = "User Selected" in self.contents.get_value(it, self.COL_BINB)
|
||||
name = self.contents.get_value(it, self.COL_NAME)
|
||||
allpkgs.append(name)
|
||||
if sel:
|
||||
userpkgs.append(name)
|
||||
it = self.contents.iter_next(it)
|
||||
return userpkgs, allpkgs
|
||||
|
||||
"""
|
||||
Return a squished (uniquified) list of the PN's of all selected items
|
||||
"""
|
||||
def get_selected_pn(self):
|
||||
pns = []
|
||||
|
||||
it = self.contents.get_iter_first()
|
||||
while it:
|
||||
if self.contents.get_value(it, self.COL_BINB):
|
||||
pns.append(self.contents.get_value(it, self.COL_PN))
|
||||
it = self.contents.iter_next(it)
|
||||
|
||||
return self.squish(pns)
|
||||
|
||||
def image_contents_removed(self):
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
sel = self.get_value(it, self.COL_INC)
|
||||
img = self.get_value(it, self.COL_IMG)
|
||||
if img and not sel:
|
||||
return True
|
||||
it = self.iter_next(it)
|
||||
return False
|
||||
|
||||
def get_build_rep(self):
|
||||
userpkgs, allpkgs = self.get_selected_packages()
|
||||
# If base image contents have been removed start from an empty rootfs
|
||||
if not self.selected_image or self.image_contents_removed():
|
||||
image = "empty"
|
||||
else:
|
||||
image = self.selected_image
|
||||
|
||||
return BuildRep(" ".join(userpkgs), " ".join(allpkgs), image)
|
||||
|
||||
def find_reverse_depends(self, pn):
|
||||
revdeps = []
|
||||
it = self.contents.get_iter_first()
|
||||
|
||||
while it:
|
||||
name = self.contents.get_value(it, self.COL_NAME)
|
||||
itype = self.contents.get_value(it, self.COL_TYPE)
|
||||
deps = self.contents.get_value(it, self.COL_DEPS)
|
||||
|
||||
it = self.contents.iter_next(it)
|
||||
|
||||
if not itype == 'package':
|
||||
continue
|
||||
|
||||
if pn in deps:
|
||||
revdeps.append(name)
|
||||
|
||||
if pn in revdeps:
|
||||
revdeps.remove(pn)
|
||||
return revdeps
|
||||
|
||||
def set_selected_image(self, img):
|
||||
self.selected_image = img
|
||||
path = self.find_path_for_item(img)
|
||||
self.include_item(item_path=path,
|
||||
binb="User Selected",
|
||||
image_contents=True)
|
||||
|
||||
self.emit("image-changed", self.selected_image)
|
||||
|
||||
def set_selected_packages(self, pkglist):
|
||||
selected = pkglist
|
||||
it = self.get_iter_first()
|
||||
|
||||
while it:
|
||||
name = self.get_value(it, self.COL_NAME)
|
||||
if name in pkglist:
|
||||
pkglist.remove(name)
|
||||
path = self.get_path(it)
|
||||
self.include_item(item_path=path,
|
||||
binb="User Selected")
|
||||
if len(pkglist) == 0:
|
||||
return
|
||||
it = self.iter_next(it)
|
||||
|
||||
def find_image_path(self, image):
|
||||
it = self.images.get_iter_first()
|
||||
|
||||
while it:
|
||||
image_name = self.images.get_value(it, self.COL_NAME)
|
||||
if image_name == image:
|
||||
path = self.images.get_value(it, self.COL_PATH)
|
||||
meta_pattern = r"(\S*)/(meta*/)(\S*)"  # note: 'meta*' only matches a directory literally named met/meta/metaa..., not meta-* layers
|
||||
meta_match = re.search(meta_pattern, path)
|
||||
if meta_match:
|
||||
_, lyr, bbrel = path.partition(meta_match.group(2))
|
||||
if bbrel:
|
||||
path = bbrel
|
||||
return path
|
||||
it = self.images.iter_next(it)
|
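# --- Illustrative only: how the meta_pattern rewrite above trims an absolute
# recipe path down to the part after the "meta/" layer directory.
import re

path = "/home/user/poky/meta/recipes-core/images/core-image-minimal.bb"
meta_match = re.search(r"(\S*)/(meta*/)(\S*)", path)
if meta_match:
    _, lyr, bbrel = path.partition(meta_match.group(2))
    print(bbrel)   # recipes-core/images/core-image-minimal.bb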
||||
@@ -1,180 +0,0 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011 Intel Corporation
|
||||
#
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gobject
|
||||
import os
|
||||
import re
|
||||
|
||||
class File(gobject.GObject):
|
||||
|
||||
def __init__(self, pathfilename, suffix):
|
||||
if not pathfilename.endswith(suffix):
|
||||
pathfilename = "%s%s" % (pathfilename, suffix)
|
||||
gobject.GObject.__init__(self)
|
||||
self.pathfilename = pathfilename
|
||||
|
||||
def readFile(self):
|
||||
if not os.path.isfile(self.pathfilename):
|
||||
return None
|
||||
if not os.path.exists(self.pathfilename):
|
||||
return None
|
||||
|
||||
with open(self.pathfilename, 'r') as f:
|
||||
contents = f.readlines()
|
||||
f.close()
|
||||
|
||||
return contents
|
||||
|
||||
def writeFile(self, contents):
|
||||
if os.path.exists(self.pathfilename):
|
||||
orig = "%s.orig" % self.pathfilename
|
||||
if os.path.exists(orig):
|
||||
os.remove(orig)
|
||||
os.rename(self.pathfilename, orig)
|
||||
|
||||
with open(self.pathfilename, 'w') as f:
|
||||
f.write(contents)
|
||||
f.close()
|
||||
|
||||
class ConfigFile(File):
|
||||
"""
|
||||
This object saves a general config file (e.g. bblayers.conf or local.conf). It is also the base class for the template and image recipe file classes.
|
||||
"""
|
||||
def __init__(self, pathfilename, suffix = None, header = None):
|
||||
if suffix:
|
||||
File.__init__(self, pathfilename, suffix)
|
||||
else:
|
||||
File.__init__(self, pathfilename, ".conf")
|
||||
if header:
|
||||
self.header = header
|
||||
else:
|
||||
self.header = "# Config generated by the HOB\n\n"
|
||||
self.dictionary = {}
|
||||
|
||||
def setVar(self, var, val):
|
||||
if isinstance(val, list):
|
||||
liststr = ""
|
||||
if val:
|
||||
i = 0
|
||||
for value in val:
|
||||
if i < len(val) - 1:
|
||||
liststr += "%s " % value
|
||||
else:
|
||||
liststr += "%s" % value
|
||||
i += 1
|
||||
self.dictionary[var] = liststr
|
||||
else:
|
||||
self.dictionary[var] = val
|
||||
|
||||
def save(self):
|
||||
contents = self.header
|
||||
for var, val in self.dictionary.items():
|
||||
contents += "%s = \"%s\"\n" % (var, val)
|
||||
File.writeFile(self, contents)
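As a rough illustration (file name and values are made up), saving a ConfigFile populated through setVar() writes a .conf file of simple VAR = "value" assignments, with list values joined by spaces:

    conf = ConfigFile("/tmp/example-local")    # ".conf" suffix is appended automatically
    conf.setVar("MACHINE", "qemux86")
    conf.setVar("BBLAYERS", ["/path/meta", "/path/meta-yocto"])
    conf.save()
    # /tmp/example-local.conf now contains something like:
    #   # Config generated by the HOB
    #   MACHINE = "qemux86"
    #   BBLAYERS = "/path/meta /path/meta-yocto"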
|
||||
|
||||
class HobTemplateFile(ConfigFile):
|
||||
"""
|
||||
This object saves and loads the Hob-specific template (.hob) file.
|
||||
"""
|
||||
def __init__(self, pathfilename):
|
||||
ConfigFile.__init__(self, pathfilename, ".hob", "# Hob Template generated by the HOB\n\n")
|
||||
|
||||
def getVar(self, var):
|
||||
if var in self.dictionary:
|
||||
return self.dictionary[var]
|
||||
else:
|
||||
return ""
|
||||
|
||||
def load(self):
|
||||
contents = ConfigFile.readFile(self)
|
||||
self.dictionary.clear()
|
||||
|
||||
pattern = "^\s*(\S+)\s*=\s*(\".*?\")"
|
||||
|
||||
for line in contents:
|
||||
match = re.search(pattern, line)
|
||||
if match:
|
||||
var = match.group(1)
|
||||
val = match.group(2).strip('"')
|
||||
self.dictionary[var] = val
|
||||
return self.dictionary
|
||||
|
||||
class RecipeFile(ConfigFile):
|
||||
"""
|
||||
This object represents the generated image .bb recipe file.
|
||||
"""
|
||||
def __init__(self, pathfilename):
|
||||
ConfigFile.__init__(self, pathfilename, ".bb", "# Recipe generated by the HOB\n\ninherit core-image\n")
|
||||
|
||||
class TemplateMgr(gobject.GObject):
|
||||
|
||||
__gLocalVars__ = ["MACHINE", "PACKAGE_CLASSES", "DISTRO", "DL_DIR", "SSTATE_DIR", "SSTATE_MIRROR", "PARALLEL_MAKE", "BB_NUMBER_THREAD"]
|
||||
__gBBLayersVars__ = ["BBLAYERS"]
|
||||
__gRecipeVars__ = ["DEPENDS", "IMAGE_INSTALL"]
|
||||
|
||||
def __init__(self):
|
||||
gobject.GObject.__init__(self)
|
||||
self.template_hob = None
|
||||
self.bblayers_conf = None
|
||||
self.local_conf = None
|
||||
self.image_bb = None
|
||||
|
||||
def open(self, filename, path):
|
||||
self.template_hob = HobTemplateFile("%s/%s%s%s" % (path, "template-", filename, ".hob"))
|
||||
self.bblayers_conf = ConfigFile("%s/%s%s%s" % (path, "bblayers-", filename, ".conf"))
|
||||
self.local_conf = ConfigFile("%s/%s%s%s" % (path, "local-", filename, ".conf"))
|
||||
self.image_bb = RecipeFile("%s/%s%s%s" % (path, "hob-image-", filename, ".bb"))
|
||||
|
||||
def setVar(self, var, val):
|
||||
if var in TemplateMgr.__gLocalVars__:
|
||||
self.local_conf.setVar(var, val)
|
||||
if var in TemplateMgr.__gBBLayersVars__:
|
||||
self.bblayers_conf.setVar(var, val)
|
||||
if var in TemplateMgr.__gRecipeVars__:
|
||||
self.image_bb.setVar(var, val)
|
||||
|
||||
self.template_hob.setVar(var, val)
|
||||
|
||||
def save(self):
|
||||
self.local_conf.save()
|
||||
self.bblayers_conf.save()
|
||||
self.image_bb.save()
|
||||
self.template_hob.save()
|
||||
|
||||
def load(self, path):
|
||||
self.template_hob = HobTemplateFile(path)
|
||||
self.dictionary = self.template_hob.load()
|
||||
|
||||
def getVar(self, var):
|
||||
return self.template_hob.getVar(var)
|
||||
|
||||
def destroy(self):
|
||||
if self.template_hob:
|
||||
del self.template_hob
|
||||
template_hob = None
|
||||
if self.bblayers_conf:
|
||||
del self.bblayers_conf
|
||||
self.bblayers_conf = None
|
||||
if self.local_conf:
|
||||
del self.local_conf
|
||||
self.local_conf = None
|
||||
if self.image_bb:
|
||||
del self.image_bb
|
||||
self.image_bb = None
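Putting the class above together, a hedged usage sketch of how a caller such as Hob might drive TemplateMgr (the template name, path and values are examples only):

    mgr = TemplateMgr()
    mgr.open("mytemplate", "/tmp")       # creates template-mytemplate.hob, bblayers-*.conf,
                                         # local-*.conf and hob-image-*.bb under /tmp
    mgr.setVar("MACHINE", "qemux86")                 # routed to local-*.conf and the .hob file
    mgr.setVar("IMAGE_INSTALL", ["task-core-boot"])  # routed to the generated image .bb
    mgr.save()

    mgr2 = TemplateMgr()
    mgr2.load("/tmp/template-mytemplate.hob")
    print(mgr2.getVar("MACHINE"))        # -> qemux86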
|
||||
1111	bitbake/lib/bb/ui/hob.py (Executable file → Normal file)
[Image file diffs omitted: only the previous image dimensions/sizes (3.9–7.1 KiB) were recorded, not the filenames.]
@@ -64,18 +64,11 @@ def new_progress(msg, maxval):
|
||||
else:
|
||||
return NonInteractiveProgress(msg, maxval)
|
||||
|
||||
def pluralise(singular, plural, qty):
|
||||
if(qty == 1):
|
||||
return singular % qty
|
||||
else:
|
||||
return plural % qty
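For example (format strings are illustrative):

    pluralise("%s task failed", "%s tasks failed", 1)   # -> "1 task failed"
    pluralise("%s task failed", "%s tasks failed", 3)   # -> "3 tasks failed"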
|
||||
|
||||
def main(server, eventHandler):
|
||||
|
||||
# Get values of variables which control our output
|
||||
includelogs = server.runCommand(["getVariable", "BBINCLUDELOGS"])
|
||||
loglines = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
|
||||
consolelogfile = server.runCommand(["getVariable", "BB_CONSOLELOG"])
|
||||
|
||||
helper = uihelper.BBUIHelper()
|
||||
|
||||
@@ -84,11 +77,6 @@ def main(server, eventHandler):
|
||||
bb.msg.addDefaultlogFilter(console)
|
||||
console.setFormatter(format)
|
||||
logger.addHandler(console)
|
||||
if consolelogfile:
|
||||
consolelog = logging.FileHandler(consolelogfile)
|
||||
bb.msg.addDefaultlogFilter(consolelog)
|
||||
consolelog.setFormatter(format)
|
||||
logger.addHandler(consolelog)
|
||||
|
||||
try:
|
||||
cmdline = server.runCommand(["getCmdLineAction"])
|
||||
@@ -110,17 +98,11 @@ def main(server, eventHandler):
|
||||
parseprogress = None
|
||||
cacheprogress = None
|
||||
shutdown = 0
|
||||
interrupted = False
|
||||
return_value = 0
|
||||
errors = 0
|
||||
warnings = 0
|
||||
taskfailures = []
|
||||
while True:
|
||||
try:
|
||||
event = eventHandler.waitEvent(0.25)
|
||||
if event is None:
|
||||
if shutdown > 1:
|
||||
break
|
||||
continue
|
||||
helper.eventHandler(event)
|
||||
if isinstance(event, bb.runqueue.runQueueExitWait):
|
||||
@@ -135,15 +117,13 @@ def main(server, eventHandler):
|
||||
|
||||
if isinstance(event, logging.LogRecord):
|
||||
if event.levelno >= format.ERROR:
|
||||
errors = errors + 1
|
||||
return_value = 1
|
||||
elif event.levelno == format.WARNING:
|
||||
warnings = warnings + 1
|
||||
# For "normal" logging conditions, don't show note logs from tasks
|
||||
# but do show them if the user has changed the default log level to
|
||||
# include verbose/debug messages
|
||||
#if logger.getEffectiveLevel() > format.VERBOSE:
|
||||
if event.taskpid != 0 and event.levelno <= format.NOTE:
|
||||
continue
|
||||
continue
|
||||
logger.handle(event)
|
||||
continue
|
||||
|
||||
@@ -152,7 +132,7 @@ def main(server, eventHandler):
|
||||
logfile = event.logfile
|
||||
if logfile and os.path.exists(logfile):
|
||||
print("ERROR: Logfile of failure stored in: %s" % logfile)
|
||||
if includelogs and not event.errprinted:
|
||||
if 1 or includelogs:
|
||||
print("Log data follows:")
|
||||
f = open(logfile, "r")
|
||||
lines = []
|
||||
@@ -202,19 +182,18 @@ def main(server, eventHandler):
|
||||
print("Loaded %d entries from dependency cache." % event.num_entries)
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.command.CommandCompleted):
|
||||
break
|
||||
if isinstance(event, bb.command.CommandFailed):
|
||||
return_value = event.exitcode
|
||||
errors = errors + 1
|
||||
logger.error("Command execution failed: %s", event.error)
|
||||
shutdown = 2
|
||||
continue
|
||||
break
|
||||
if isinstance(event, bb.command.CommandExit):
|
||||
if not return_value:
|
||||
return_value = event.exitcode
|
||||
continue
|
||||
if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
|
||||
shutdown = 2
|
||||
continue
|
||||
if isinstance(event, bb.cooker.CookerExit):
|
||||
break
|
||||
if isinstance(event, bb.event.MultipleProviders):
|
||||
logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
|
||||
event._item,
|
||||
@@ -223,7 +202,6 @@ def main(server, eventHandler):
|
||||
continue
|
||||
if isinstance(event, bb.event.NoProvider):
|
||||
return_value = 1
|
||||
errors = errors + 1
|
||||
if event._runtime:
|
||||
r = "R"
|
||||
else:
|
||||
@@ -238,10 +216,6 @@ def main(server, eventHandler):
|
||||
logger.error("%s", reason)
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
|
||||
logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.runqueue.runQueueTaskStarted):
|
||||
if event.noexec:
|
||||
tasktype = 'noexec task'
|
||||
@@ -255,16 +229,10 @@ def main(server, eventHandler):
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.runqueue.runQueueTaskFailed):
|
||||
taskfailures.append(event.taskstring)
|
||||
logger.error("Task %s (%s) failed with exit code '%s'",
|
||||
event.taskid, event.taskstring, event.exitcode)
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
|
||||
logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
|
||||
event.taskid, event.taskstring, event.exitcode)
|
||||
continue
|
||||
|
||||
# ignore
|
||||
if isinstance(event, (bb.event.BuildBase,
|
||||
bb.event.StampUpdate,
|
||||
@@ -272,10 +240,7 @@ def main(server, eventHandler):
|
||||
bb.event.RecipeParsed,
|
||||
bb.event.RecipePreFinalise,
|
||||
bb.runqueue.runQueueEvent,
|
||||
bb.runqueue.runQueueExitWait,
|
||||
bb.event.OperationStarted,
|
||||
bb.event.OperationCompleted,
|
||||
bb.event.OperationProgress)):
|
||||
bb.runqueue.runQueueExitWait)):
|
||||
continue
|
||||
|
||||
logger.error("Unknown event: %s", event)
|
||||
@@ -285,34 +250,15 @@ def main(server, eventHandler):
|
||||
if ioerror.args[0] == 4:
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
if shutdown == 2:
|
||||
print("\nThird Keyboard Interrupt, exit.\n")
|
||||
break
|
||||
if shutdown == 1:
|
||||
print("\nSecond Keyboard Interrupt, stopping...\n")
|
||||
server.runCommand(["stateStop"])
|
||||
if shutdown == 0:
|
||||
interrupted = True
|
||||
print("\nKeyboard Interrupt, closing down...\n")
|
||||
server.runCommand(["stateShutdown"])
|
||||
shutdown = shutdown + 1
|
||||
pass
|
||||
|
||||
summary = ""
|
||||
if taskfailures:
|
||||
summary += pluralise("\nSummary: %s task failed:",
|
||||
"\nSummary: %s tasks failed:", len(taskfailures))
|
||||
for failure in taskfailures:
|
||||
summary += "\n %s" % failure
|
||||
if warnings:
|
||||
summary += pluralise("\nSummary: There was %s WARNING message shown.",
|
||||
"\nSummary: There were %s WARNING messages shown.", warnings)
|
||||
if return_value:
|
||||
summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
|
||||
"\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
|
||||
if summary:
|
||||
print(summary)
|
||||
|
||||
if interrupted:
|
||||
print("Execution was interrupted, returning a non-zero exit code.")
|
||||
if return_value == 0:
|
||||
return_value = 1
|
||||
|
||||
return return_value
|
||||
|
||||
@@ -28,14 +28,13 @@ import socket, threading, pickle
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
|
||||
class BBUIEventQueue:
|
||||
def __init__(self, BBServer, clientinfo=("localhost, 0")):
|
||||
def __init__(self, BBServer):
|
||||
|
||||
self.eventQueue = []
|
||||
self.eventQueueLock = threading.Lock()
|
||||
self.eventQueueNotify = threading.Event()
|
||||
|
||||
self.BBServer = BBServer
|
||||
self.clientinfo = clientinfo
|
||||
|
||||
self.t = threading.Thread()
|
||||
self.t.setDaemon(True)
|
||||
@@ -73,7 +72,7 @@ class BBUIEventQueue:
|
||||
|
||||
def startCallbackHandler(self):
|
||||
|
||||
server = UIXMLRPCServer(self.clientinfo)
|
||||
server = UIXMLRPCServer()
|
||||
self.host, self.port = server.socket.getsockname()
|
||||
|
||||
server.register_function( self.system_quit, "event.quit" )
|
||||
@@ -99,7 +98,7 @@ class BBUIEventQueue:
|
||||
|
||||
class UIXMLRPCServer (SimpleXMLRPCServer):
|
||||
|
||||
def __init__( self, interface ):
|
||||
def __init__( self, interface = ("localhost", 0) ):
|
||||
self.quit = False
|
||||
SimpleXMLRPCServer.__init__( self,
|
||||
interface,
|
||||
|
||||
@@ -32,52 +32,11 @@ class BBUIHelper:
|
||||
if isinstance(event, bb.build.TaskSucceeded):
|
||||
del self.running_tasks[event.pid]
|
||||
self.needUpdate = True
|
||||
if isinstance(event, bb.build.TaskFailed) or isinstance(event, bb.build.TaskFailedSilent):
|
||||
if isinstance(event, bb.build.TaskFailed):
|
||||
del self.running_tasks[event.pid]
|
||||
self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
|
||||
self.needUpdate = True
|
||||
|
||||
def getTasks(self):
|
||||
self.needUpdate = False
|
||||
return (self.running_tasks, self.failed_tasks)
|
||||
|
||||
def findServerDetails(self):
|
||||
import sys
|
||||
import optparse
|
||||
from bb.server.xmlrpc import BitbakeServerInfo, BitBakeServerConnection
|
||||
host = ""
|
||||
port = 0
|
||||
bind = ""
|
||||
parser = optparse.OptionParser(
|
||||
usage = """%prog -H host -P port -B bindaddr""")
|
||||
|
||||
parser.add_option("-H", "--host", help = "Bitbake server's IP address",
|
||||
action = "store", dest = "host", default = None)
|
||||
|
||||
parser.add_option("-P", "--port", help = "Bitbake server's Port number",
|
||||
action = "store", dest = "port", default = None)
|
||||
|
||||
parser.add_option("-B", "--bind", help = "Hob2 local bind address",
|
||||
action = "store", dest = "bind", default = None)
|
||||
|
||||
options, args = parser.parse_args(sys.argv)
|
||||
for key, val in options.__dict__.items():
|
||||
if key == 'host' and val:
|
||||
host = val
|
||||
elif key == 'port' and val:
|
||||
port = int(val)
|
||||
elif key == 'bind' and val:
|
||||
bind = val
|
||||
|
||||
if not host or not port or not bind:
|
||||
parser.print_usage()
|
||||
sys.exit(1)
|
||||
|
||||
serverinfo = BitbakeServerInfo(host, port)
|
||||
clientinfo = (bind, 0)
|
||||
connection = BitBakeServerConnection(serverinfo, clientinfo)
|
||||
|
||||
server = connection.connection
|
||||
eventHandler = connection.events
|
||||
|
||||
return server, eventHandler, host, bind
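The findServerDetails() helper in the hunk above parses the host, port and local bind address from the command line. A hedged sketch of an invocation, assuming a BitBake XML-RPC server is already running and using placeholder addresses:

    import sys
    from bb.ui import uihelper

    # Placeholder argv, as if launched "hob2 -H 192.168.7.1 -P 8200 -B 192.168.7.2".
    sys.argv = ["hob2", "-H", "192.168.7.1", "-P", "8200", "-B", "192.168.7.2"]

    helper = uihelper.BBUIHelper()
    server, eventHandler, host, bind = helper.findServerDetails()
    # host == "192.168.7.1", bind == "192.168.7.2"; missing options make it
    # print the usage text and exit(1), as in the code above.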
|
||||
|
||||
|
||||
@@ -25,7 +25,6 @@ import errno
|
||||
import logging
|
||||
import bb
|
||||
import bb.msg
|
||||
import multiprocessing
|
||||
from commands import getstatusoutput
|
||||
from contextlib import contextmanager
|
||||
|
||||
@@ -563,7 +562,7 @@ def filter_environment(good_vars):
|
||||
|
||||
def create_interactive_env(d):
|
||||
for k in preserved_envvars_exported_interactive():
|
||||
os.setenv(k, d.getVar(k, True))
|
||||
os.setenv(k, bb.data.getVar(k, d, True))
|
||||
|
||||
def approved_variables():
|
||||
"""
|
||||
@@ -602,9 +601,9 @@ def build_environment(d):
|
||||
"""
|
||||
import bb.data
|
||||
for var in bb.data.keys(d):
|
||||
export = d.getVarFlag(var, "export")
|
||||
export = bb.data.getVarFlag(var, "export", d)
|
||||
if export:
|
||||
os.environ[var] = d.getVar(var, True) or ""
|
||||
os.environ[var] = bb.data.getVar(var, d, True) or ""
|
||||
|
||||
def remove(path, recurse=False):
|
||||
"""Equivalent to rm -f or rm -rf"""
|
||||
@@ -863,6 +862,3 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
|
||||
if checkvalues.issubset(val):
|
||||
return truevalue
|
||||
return falsevalue
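contains() returns truevalue only when every item in checkvalues is present in the space-separated variable. An illustrative call as it might appear in metadata Python code (the variable and option strings are examples):

    # d is the usual datastore; "--enable-x11" is returned only if both features are set.
    extra_conf = bb.utils.contains("DISTRO_FEATURES", "x11 opengl",
                                   "--enable-x11", "--disable-x11", d)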
|
||||
|
||||
def cpu_count():
|
||||
return multiprocessing.cpu_count()
|
||||
|
||||
@@ -7,8 +7,5 @@ def init_logger(logfile, loglevel):
|
||||
numeric_level = getattr(logging, loglevel.upper(), None)
|
||||
if not isinstance(numeric_level, int):
|
||||
raise ValueError('Invalid log level: %s' % loglevel)
|
||||
FORMAT = '%(asctime)-15s %(message)s'
|
||||
logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
|
||||
logging.basicConfig(level=numeric_level, filename=logfile)
|
||||
|
||||
class NotFoundError(StandardError):
|
||||
pass
|
||||
@@ -1,233 +1,86 @@
|
||||
import logging
|
||||
import os.path
|
||||
import errno
|
||||
import prserv
|
||||
import sys
|
||||
import warnings
|
||||
import sqlite3
|
||||
|
||||
try:
|
||||
import sqlite3
|
||||
except ImportError:
|
||||
from pysqlite2 import dbapi2 as sqlite3
|
||||
|
||||
logger = logging.getLogger("BitBake.PRserv")
|
||||
|
||||
sqlversion = sqlite3.sqlite_version_info
|
||||
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
|
||||
raise Exception("sqlite3 version 3.3.0 or later is required.")
|
||||
|
||||
class PRTable():
|
||||
def __init__(self, conn, table, nohist):
|
||||
self.conn = conn
|
||||
self.nohist = nohist
|
||||
if nohist:
|
||||
self.table = "%s_nohist" % table
|
||||
else:
|
||||
self.table = "%s_hist" % table
|
||||
class NotFoundError(StandardError):
|
||||
pass
|
||||
|
||||
class PRTable():
|
||||
def __init__(self,cursor,table):
|
||||
self.cursor = cursor
|
||||
self.table = table
|
||||
|
||||
#create the table
|
||||
self._execute("CREATE TABLE IF NOT EXISTS %s \
|
||||
(version TEXT NOT NULL, \
|
||||
pkgarch TEXT NOT NULL, \
|
||||
checksum TEXT NOT NULL, \
|
||||
value INTEGER, \
|
||||
PRIMARY KEY (version, pkgarch, checksum));" % self.table)
|
||||
PRIMARY KEY (version,checksum));"
|
||||
% table)
|
||||
|
||||
def _execute(self, *query):
|
||||
"""Execute a query, waiting to acquire a lock if necessary"""
|
||||
count = 0
|
||||
while True:
|
||||
try:
|
||||
return self.conn.execute(*query)
|
||||
return self.cursor.execute(*query)
|
||||
except sqlite3.OperationalError as exc:
|
||||
if 'database is locked' in str(exc) and count < 500:
|
||||
count = count + 1
|
||||
continue
|
||||
raise exc
|
||||
raise
|
||||
except sqlite3.IntegrityError as exc:
|
||||
print "Integrity error %s" % str(exc)
|
||||
break
|
||||
|
||||
def _getValueHist(self, version, pkgarch, checksum):
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
def getValue(self, version, checksum):
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND checksum=?;" % self.table,
|
||||
(version,checksum))
|
||||
row=data.fetchone()
|
||||
if row != None:
|
||||
return row[0]
|
||||
else:
|
||||
#no value found, try to insert
|
||||
try:
|
||||
self._execute("BEGIN")
|
||||
self._execute("INSERT OR ROLLBACK INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
|
||||
self._execute("INSERT INTO %s VALUES (?, ?, (select ifnull(max(value)+1,0) from %s where version=?));"
|
||||
% (self.table,self.table),
|
||||
(version,pkgarch, checksum,version, pkgarch))
|
||||
self.conn.commit()
|
||||
except sqlite3.IntegrityError as exc:
|
||||
logger.error(str(exc))
|
||||
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
(version,checksum,version))
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND checksum=?;" % self.table,
|
||||
(version,checksum))
|
||||
row=data.fetchone()
|
||||
if row != None:
|
||||
return row[0]
|
||||
else:
|
||||
raise prserv.NotFoundError
|
||||
|
||||
def _getValueNohist(self, version, pkgarch, checksum):
|
||||
data=self._execute("SELECT value FROM %s \
|
||||
WHERE version=? AND pkgarch=? AND checksum=? AND \
|
||||
value >= (select max(value) from %s where version=? AND pkgarch=?);"
|
||||
% (self.table, self.table),
|
||||
(version, pkgarch, checksum, version, pkgarch))
|
||||
row=data.fetchone()
|
||||
if row != None:
|
||||
return row[0]
|
||||
else:
|
||||
#no value found, try to insert
|
||||
try:
|
||||
self._execute("BEGIN")
|
||||
self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
|
||||
% (self.table,self.table),
|
||||
(version, pkgarch, checksum, version, pkgarch))
|
||||
self.conn.commit()
|
||||
except sqlite3.IntegrityError as exc:
|
||||
logger.error(str(exc))
|
||||
self.conn.rollback()
|
||||
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
row=data.fetchone()
|
||||
if row != None:
|
||||
return row[0]
|
||||
else:
|
||||
raise prserv.NotFoundError
|
||||
|
||||
def getValue(self, version, pkgarch, checksum):
|
||||
if self.nohist:
|
||||
return self._getValueNohist(version, pkgarch, checksum)
|
||||
else:
|
||||
return self._getValueHist(version, pkgarch, checksum)
|
||||
|
||||
def _importHist(self, version, pkgarch, checksum, value):
|
||||
val = None
|
||||
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
row = data.fetchone()
|
||||
if row != None:
|
||||
val=row[0]
|
||||
else:
|
||||
#no value found, try to insert
|
||||
try:
|
||||
self._execute("BEGIN")
|
||||
self._execute("INSERT OR ROLLBACK INTO %s VALUES (?, ?, ?, ?);" % (self.table),
|
||||
(version, pkgarch, checksum, value))
|
||||
self.conn.commit()
|
||||
except sqlite3.IntegrityError as exc:
|
||||
logger.error(str(exc))
|
||||
|
||||
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
row = data.fetchone()
|
||||
if row != None:
|
||||
val = row[0]
|
||||
return val
|
||||
|
||||
def _importNohist(self, version, pkgarch, checksum, value):
|
||||
try:
|
||||
#try to insert
|
||||
self._execute("BEGIN")
|
||||
self._execute("INSERT OR ROLLBACK INTO %s VALUES (?, ?, ?, ?);" % (self.table),
|
||||
(version, pkgarch, checksum,value))
|
||||
self.conn.commit()
|
||||
except sqlite3.IntegrityError as exc:
|
||||
#already have the record, try to update
|
||||
try:
|
||||
self._execute("BEGIN")
|
||||
self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?"
|
||||
% (self.table),
|
||||
(value,version,pkgarch,checksum,value))
|
||||
self.conn.commit()
|
||||
except sqlite3.IntegrityError as exc:
|
||||
logger.error(str(exc))
|
||||
|
||||
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
|
||||
(version,pkgarch,checksum,value))
|
||||
row=data.fetchone()
|
||||
if row != None:
|
||||
return row[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def importone(self, version, pkgarch, checksum, value):
|
||||
if self.nohist:
|
||||
return self._importNohist(version, pkgarch, checksum, value)
|
||||
else:
|
||||
return self._importHist(version, pkgarch, checksum, value)
|
||||
|
||||
def export(self, version, pkgarch, checksum, colinfo):
|
||||
metainfo = {}
|
||||
#column info
|
||||
if colinfo:
|
||||
metainfo['tbl_name'] = self.table
|
||||
metainfo['core_ver'] = prserv.__version__
|
||||
metainfo['col_info'] = []
|
||||
data = self._execute("PRAGMA table_info(%s);" % self.table)
|
||||
for row in data:
|
||||
col = {}
|
||||
col['name'] = row['name']
|
||||
col['type'] = row['type']
|
||||
col['notnull'] = row['notnull']
|
||||
col['dflt_value'] = row['dflt_value']
|
||||
col['pk'] = row['pk']
|
||||
metainfo['col_info'].append(col)
|
||||
|
||||
#data info
|
||||
datainfo = []
|
||||
|
||||
if self.nohist:
|
||||
sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \
|
||||
(SELECT version,pkgarch,max(value) as maxvalue FROM %s GROUP BY version,pkgarch) as T2 \
|
||||
WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table)
|
||||
else:
|
||||
sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table
|
||||
sqlarg = []
|
||||
where = ""
|
||||
if version:
|
||||
where += "AND T1.version=? "
|
||||
sqlarg.append(str(version))
|
||||
if pkgarch:
|
||||
where += "AND T1.pkgarch=? "
|
||||
sqlarg.append(str(pkgarch))
|
||||
if checksum:
|
||||
where += "AND T1.checksum=? "
|
||||
sqlarg.append(str(checksum))
|
||||
|
||||
sqlstmt += where + ";"
|
||||
|
||||
if len(sqlarg):
|
||||
data = self._execute(sqlstmt, tuple(sqlarg))
|
||||
else:
|
||||
data = self._execute(sqlstmt)
|
||||
for row in data:
|
||||
if row['version']:
|
||||
col = {}
|
||||
col['version'] = row['version']
|
||||
col['pkgarch'] = row['pkgarch']
|
||||
col['checksum'] = row['checksum']
|
||||
col['value'] = row['value']
|
||||
datainfo.append(col)
|
||||
return (metainfo, datainfo)
|
||||
raise NotFoundError
|
||||
|
||||
class PRData(object):
|
||||
"""Object representing the PR database"""
|
||||
def __init__(self, filename, nohist=True):
|
||||
def __init__(self, filename):
|
||||
self.filename=os.path.abspath(filename)
|
||||
self.nohist=nohist
|
||||
#build directory hierarchy
|
||||
try:
|
||||
os.makedirs(os.path.dirname(self.filename))
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
raise e
|
||||
self.connection=sqlite3.connect(self.filename, isolation_level="DEFERRED")
|
||||
self.connection.row_factory=sqlite3.Row
|
||||
self.connection=sqlite3.connect(self.filename, timeout=5,
|
||||
isolation_level=None)
|
||||
self.cursor=self.connection.cursor()
|
||||
self._tables={}
|
||||
|
||||
def __del__(self):
|
||||
print "PRData: closing DB %s" % self.filename
|
||||
self.connection.close()
|
||||
|
||||
def __getitem__(self,tblname):
|
||||
@@ -237,11 +90,11 @@ class PRData(object):
|
||||
if tblname in self._tables:
|
||||
return self._tables[tblname]
|
||||
else:
|
||||
tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist)
|
||||
tableobj = self._tables[tblname] = PRTable(self.cursor, tblname)
|
||||
return tableobj
|
||||
|
||||
def __delitem__(self, tblname):
|
||||
if tblname in self._tables:
|
||||
del self._tables[tblname]
|
||||
logger.info("drop table %s" % (tblname))
|
||||
self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname)
|
||||
logging.info("drop table %s" % (tblname))
|
||||
self.cursor.execute("DROP TABLE IF EXISTS %s;" % tblname)
|
||||
|
||||
@@ -1,19 +1,12 @@
|
||||
import os,sys,logging
|
||||
import signal, time, atexit, threading
|
||||
import signal,time, atexit
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
import xmlrpclib
|
||||
|
||||
try:
|
||||
import sqlite3
|
||||
except ImportError:
|
||||
from pysqlite2 import dbapi2 as sqlite3
|
||||
import xmlrpclib,sqlite3
|
||||
|
||||
import bb.server.xmlrpc
|
||||
import prserv
|
||||
import prserv.db
|
||||
|
||||
logger = logging.getLogger("BitBake.PRserv")
|
||||
|
||||
if sys.hexversion < 0x020600F0:
|
||||
print("Sorry, python 2.6 or later is required.")
|
||||
sys.exit(1)
|
||||
@@ -28,10 +21,8 @@ class Handler(SimpleXMLRPCRequestHandler):
|
||||
raise
|
||||
return value
|
||||
|
||||
PIDPREFIX = "/tmp/PRServer_%s_%s.pid"
|
||||
singleton = None
|
||||
|
||||
class PRServer(SimpleXMLRPCServer):
|
||||
pidfile="/tmp/PRServer.pid"
|
||||
def __init__(self, dbfile, logfile, interface, daemon=True):
|
||||
''' constructor '''
|
||||
SimpleXMLRPCServer.__init__(self, interface,
|
||||
@@ -40,88 +31,66 @@ class PRServer(SimpleXMLRPCServer):
|
||||
self.dbfile=dbfile
|
||||
self.daemon=daemon
|
||||
self.logfile=logfile
|
||||
self.working_thread=None
|
||||
self.host, self.port = self.socket.getsockname()
|
||||
self.db=prserv.db.PRData(dbfile)
|
||||
self.table=self.db["PRMAIN"]
|
||||
self.pidfile=PIDPREFIX % (self.host, self.port)
|
||||
|
||||
self.register_function(self.getPR, "getPR")
|
||||
self.register_function(self.quit, "quit")
|
||||
self.register_function(self.ping, "ping")
|
||||
self.register_function(self.export, "export")
|
||||
self.register_function(self.importone, "importone")
|
||||
self.register_introspection_functions()
|
||||
|
||||
def export(self, version=None, pkgarch=None, checksum=None, colinfo=True):
|
||||
try:
|
||||
return self.table.export(version, pkgarch, checksum, colinfo)
|
||||
except sqlite3.Error as exc:
|
||||
logger.error(str(exc))
|
||||
return None
|
||||
|
||||
def importone(self, version, pkgarch, checksum, value):
|
||||
return self.table.importone(version, pkgarch, checksum, value)
|
||||
|
||||
def ping(self):
|
||||
return not self.quit
|
||||
|
||||
def getinfo(self):
|
||||
return (self.host, self.port)
|
||||
|
||||
def getPR(self, version, pkgarch, checksum):
|
||||
|
||||
def getPR(self, version, checksum):
|
||||
try:
|
||||
return self.table.getValue(version, pkgarch, checksum)
|
||||
return self.table.getValue(version,checksum)
|
||||
except prserv.NotFoundError:
|
||||
logger.error("can not find value for (%s, %s)",version, checksum)
|
||||
logging.error("can not find value for (%s, %s)",version,checksum)
|
||||
return None
|
||||
except sqlite3.Error as exc:
|
||||
logger.error(str(exc))
|
||||
logging.error(str(exc))
|
||||
return None
|
||||
|
||||
def quit(self):
|
||||
self.quit=True
|
||||
return
|
||||
|
||||
def work_forever(self,):
|
||||
def _serve_forever(self):
|
||||
self.quit = False
|
||||
self.timeout = 0.5
|
||||
logger.info("PRServer: started! DBfile: %s, IP: %s, PORT: %s, PID: %s" %
|
||||
(self.dbfile, self.host, self.port, str(os.getpid())))
|
||||
|
||||
while not self.quit:
|
||||
self.handle_request()
|
||||
|
||||
logger.info("PRServer: stopping...")
|
||||
logging.info("PRServer: stopping...")
|
||||
self.server_close()
|
||||
return
|
||||
|
||||
def start(self):
|
||||
if self.daemon is True:
|
||||
logger.info("PRServer: try to start daemon...")
|
||||
logging.info("PRServer: starting daemon...")
|
||||
self.daemonize()
|
||||
else:
|
||||
atexit.register(self.delpid)
|
||||
pid = str(os.getpid())
|
||||
pf = file(self.pidfile, 'w+')
|
||||
pf.write("%s\n" % pid)
|
||||
pf.close()
|
||||
self.work_forever()
|
||||
logging.info("PRServer: starting...")
|
||||
self._serve_forever()
|
||||
|
||||
def delpid(self):
|
||||
os.remove(self.pidfile)
|
||||
os.remove(PRServer.pidfile)
|
||||
|
||||
def daemonize(self):
|
||||
"""
|
||||
See "Advanced Programming in the UNIX Environment", Section 13.3.
|
||||
"""
|
||||
os.umask(0)
|
||||
|
||||
try:
|
||||
pid = os.fork()
|
||||
if pid > 0:
|
||||
#parent return instead of exit to give control
|
||||
return
|
||||
if pid > 0:
|
||||
sys.exit(0)
|
||||
except OSError as e:
|
||||
raise Exception("%s [%d]" % (e.strerror, e.errno))
|
||||
sys.stderr.write("1st fork failed: %d %s\n" % (e.errno, e.strerror))
|
||||
sys.exit(1)
|
||||
|
||||
os.setsid()
|
||||
"""
|
||||
@@ -133,9 +102,9 @@ class PRServer(SimpleXMLRPCServer):
|
||||
if pid > 0: #parent
|
||||
sys.exit(0)
|
||||
except OSError as e:
|
||||
raise Exception("%s [%d]" % (e.strerror, e.errno))
|
||||
sys.stderr.write("2nd fork failed: %d %s\n" % (e.errno, e.strerror))
|
||||
sys.exit(1)
|
||||
|
||||
os.umask(0)
|
||||
os.chdir("/")
|
||||
|
||||
sys.stdout.flush()
|
||||
@@ -150,44 +119,19 @@ class PRServer(SimpleXMLRPCServer):
|
||||
# write pidfile
|
||||
atexit.register(self.delpid)
|
||||
pid = str(os.getpid())
|
||||
pf = file(self.pidfile, 'w')
|
||||
pf = file(PRServer.pidfile, 'w+')
|
||||
pf.write("%s\n" % pid)
|
||||
pf.write("%s\n" % self.host)
|
||||
pf.write("%s\n" % self.port)
|
||||
pf.close()
|
||||
|
||||
self.work_forever()
|
||||
sys.exit(0)
|
||||
|
||||
class PRServSingleton():
|
||||
def __init__(self, dbfile, logfile, interface):
|
||||
self.dbfile = dbfile
|
||||
self.logfile = logfile
|
||||
self.interface = interface
|
||||
self.host = None
|
||||
self.port = None
|
||||
self.event = threading.Event()
|
||||
|
||||
def _work(self):
|
||||
self.prserv = PRServer(self.dbfile, self.logfile, self.interface, False)
|
||||
self.host, self.port = self.prserv.getinfo()
|
||||
self.event.set()
|
||||
self.prserv.work_forever()
|
||||
del self.prserv.db
|
||||
|
||||
def start(self):
|
||||
self.working_thread = threading.Thread(target=self._work)
|
||||
self.working_thread.start()
|
||||
|
||||
def getinfo(self):
|
||||
self.event.wait()
|
||||
return (self.host, self.port)
|
||||
self._serve_forever()
|
||||
|
||||
class PRServerConnection():
|
||||
def __init__(self, host, port):
|
||||
if is_local_special(host, port):
|
||||
host, port = singleton.getinfo()
|
||||
self.connection = bb.server.xmlrpc._create_server(host, port)
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.connection = bb.server.xmlrpc._create_server(self.host, self.port)
|
||||
|
||||
def terminate(self):
|
||||
# Don't wait for server indefinitely
|
||||
@@ -195,25 +139,18 @@ class PRServerConnection():
|
||||
socket.setdefaulttimeout(2)
|
||||
try:
|
||||
self.connection.quit()
|
||||
except Exception as exc:
|
||||
sys.stderr.write("%s\n" % str(exc))
|
||||
except:
|
||||
pass
|
||||
|
||||
def getPR(self, version, pkgarch, checksum):
|
||||
return self.connection.getPR(version, pkgarch, checksum)
|
||||
def getPR(self, version, checksum):
|
||||
return self.connection.getPR(version, checksum)
|
||||
|
||||
def ping(self):
|
||||
return self.connection.ping()
|
||||
|
||||
def export(self,version=None, pkgarch=None, checksum=None, colinfo=True):
|
||||
return self.connection.export(version, pkgarch, checksum, colinfo)
|
||||
|
||||
def importone(self, version, pkgarch, checksum, value):
|
||||
return self.connection.importone(version, pkgarch, checksum, value)
|
||||
|
||||
def start_daemon(dbfile, host, port, logfile):
|
||||
pidfile = PIDPREFIX % (host, port)
|
||||
def start_daemon(options):
|
||||
try:
|
||||
pf = file(pidfile,'r')
|
||||
pf = file(PRServer.pidfile,'r')
|
||||
pid = int(pf.readline().strip())
|
||||
pf.close()
|
||||
except IOError:
|
||||
@@ -221,89 +158,41 @@ def start_daemon(dbfile, host, port, logfile):
|
||||
|
||||
if pid:
|
||||
sys.stderr.write("pidfile %s already exist. Daemon already running?\n"
|
||||
% pidfile)
|
||||
return 1
|
||||
% PRServer.pidfile)
|
||||
sys.exit(1)
|
||||
|
||||
server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (host,port))
|
||||
server = PRServer(options.dbfile, interface=(options.host, options.port),
|
||||
logfile=os.path.abspath(options.logfile))
|
||||
server.start()
|
||||
return 0
|
||||
|
||||
def stop_daemon(host, port):
|
||||
pidfile = PIDPREFIX % (host, port)
|
||||
def stop_daemon():
|
||||
try:
|
||||
pf = file(pidfile,'r')
|
||||
pf = file(PRServer.pidfile,'r')
|
||||
pid = int(pf.readline().strip())
|
||||
host = pf.readline().strip()
|
||||
port = int(pf.readline().strip())
|
||||
pf.close()
|
||||
except IOError:
|
||||
pid = None
|
||||
|
||||
if not pid:
|
||||
sys.stderr.write("pidfile %s does not exist. Daemon not running?\n"
|
||||
% pidfile)
|
||||
% PRServer.pidfile)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
PRServerConnection(host, port).terminate()
|
||||
except:
|
||||
logger.critical("Stop PRService %s:%d failed" % (host,port))
|
||||
PRServerConnection(host,port).terminate()
|
||||
time.sleep(0.5)
|
||||
|
||||
try:
|
||||
if pid:
|
||||
if os.path.exists(pidfile):
|
||||
os.remove(pidfile)
|
||||
while 1:
|
||||
os.kill(pid,signal.SIGTERM)
|
||||
time.sleep(0.1)
|
||||
except OSError as e:
|
||||
err = str(e)
|
||||
if err.find("No such process") <= 0:
|
||||
raise e
|
||||
|
||||
return 0
|
||||
|
||||
def is_local_special(host, port):
|
||||
if host.strip().upper() == 'localhost'.upper() and (not port):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def auto_start(d):
|
||||
global singleton
|
||||
if (not d.getVar('PRSERV_HOST', True)) or (not d.getVar('PRSERV_PORT', True)):
|
||||
return True
|
||||
|
||||
if is_local_special(d.getVar('PRSERV_HOST', True), int(d.getVar('PRSERV_PORT', True))) and not singleton:
|
||||
import bb.utils
|
||||
cachedir = (d.getVar("PERSISTENT_DIR", True) or d.getVar("CACHE", True))
|
||||
if not cachedir:
|
||||
logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
|
||||
except OSError as err:
|
||||
err = str(err)
|
||||
if err.find("No such process") > 0:
|
||||
if os.path.exists(PRServer.pidfile):
|
||||
os.remove(PRServer.pidfile)
|
||||
else:
|
||||
print err
|
||||
sys.exit(1)
|
||||
bb.utils.mkdirhier(cachedir)
|
||||
dbfile = os.path.join(cachedir, "prserv.sqlite3")
|
||||
logfile = os.path.join(cachedir, "prserv.log")
|
||||
singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), ("localhost",0))
|
||||
singleton.start()
|
||||
if singleton:
|
||||
host, port = singleton.getinfo()
|
||||
else:
|
||||
host = d.getVar('PRSERV_HOST', True)
|
||||
port = int(d.getVar('PRSERV_PORT', True))
|
||||
|
||||
try:
|
||||
return PRServerConnection(host,port).ping()
|
||||
except Exception:
|
||||
logger.critical("PRservice %s:%d not available" % (host, port))
|
||||
return False
|
||||
|
||||
def auto_shutdown(d=None):
|
||||
global singleton
|
||||
if singleton:
|
||||
host, port = singleton.getinfo()
|
||||
try:
|
||||
PRServerConnection(host, port).terminate()
|
||||
except:
|
||||
logger.critical("Stop PRService %s:%d failed" % (host,port))
|
||||
singleton = None
|
||||
|
||||
def ping(host, port):
|
||||
conn=PRServerConnection(host, port)
|
||||
return conn.ping()
|
||||
|
||||
@@ -1,48 +1,61 @@
|
||||
# This is a single Makefile to handle all generated Yocto Project documents.
|
||||
# The Makefile needs to live in the documents directory and all figures used
|
||||
# in any manuals must be PNG files and live in the individual book's figures
|
||||
# directory.
|
||||
# in any manuals must be .PNG files and live in the individual book's figures
|
||||
# directory. Note that the figures for the Yocto Project Development Manual
|
||||
# differ between the 'master' and 'edison' branches.
|
||||
#
|
||||
# The Makefile has these targets:
|
||||
#
|
||||
# pdf: generates a PDF version of a manual. Not valid for the Quick Start
|
||||
# html: generates an HTML version of a manual.
|
||||
# tarball: creates a tarball for the doc files.
|
||||
# pdf: generates a PDF version of a manual. Not valid for the Quick Start
|
||||
# html: generates an HTML version of a manual.
|
||||
# tarball: creates a tarball for the doc files.
|
||||
# validate: validates
|
||||
# publish: pushes generated files to the Yocto Project website
|
||||
# clean: removes files
|
||||
# publish: pushes generated files to the Yocto Project website
|
||||
# clean: removes files
|
||||
#
|
||||
# The Makefile generates an HTML and PDF version of every document except the
|
||||
# Yocto Project Quick Start. The Quick Start is in HTML form only. The variable
|
||||
# The command-line argument DOC represents the folder name in which a particular
|
||||
# document is stored. The command-line argument VER represents the distro
|
||||
# version of the Yocto Release for which the manuals are being generated.
|
||||
# DOC is used to indicate the folder name for a given manual. The variable
|
||||
# VER represents the distro version of the Yocto Release for which the manuals
|
||||
# are being generated. The variable BRANCH is used to indicate the 'edison'
|
||||
# branch and is used only when DOC=dev-manual (making the YP Development
|
||||
# Manual).
|
||||
#
|
||||
# To build the HTML and PDF versions of the manual you must invoke the Makefile
|
||||
# with the DOC argument. If you are going to publish the manual, then you
# must invoke the Makefile with both the DOC and the VER argument.
|
||||
# If you are building the 'edison' version of the YP Development Manual then
|
||||
# you must use the DOC and BRANCH arguments.
|
||||
#
|
||||
# Examples:
|
||||
#
|
||||
# make DOC=bsp-guide
|
||||
# make DOC=yocto-project-qs
|
||||
# make pdf DOC=poky-ref-manual
|
||||
# make DOC=dev-manual BRANCH=edison
|
||||
#
|
||||
# The first example generates the HTML and PDF versions of the BSP Guide.
|
||||
# The second example generates the HTML version only of the Quick Start. Note that
|
||||
# the Quick Start only has an HTML version available. The third example generates
|
||||
# both the PDF and HTML versions of the Yocto Project Reference Manual.
|
||||
# both the PDF and HTML versions of the Yocto Project Reference Manual. The
|
||||
# last example generates both the PDF and HTML 'edison' versions of the YP
|
||||
# Development Manual.
|
||||
#
|
||||
# Use the publish target to push the generated manuals to the Yocto Project
|
||||
# website. All files needed for the manual's HTML form are pushed as well as the
|
||||
# PDF version (if applicable).
|
||||
# Examples:
|
||||
#
|
||||
# make publish DOC=bsp-guide VER=1.1
|
||||
# make publish DOC=adt-manual VER=1.1
|
||||
# make publish DOC=bsp-guide VER=1.2
|
||||
# make publish DOC=adt-manual VER=1.2
|
||||
# make publish DOC=dev-manual VER=1.1.1 BRANCH=edison
|
||||
# make publish DOC=dev-manual VER=1.2
|
||||
#
|
||||
# The first example publishes the 1.1 version of both the PDF and HTML versions of
|
||||
# the BSP Guide. The second example publishes the 1.1 version of both the PDF and
|
||||
# HTML versions of the ADT Manual.
|
||||
# The first example publishes the 1.2 version of both the PDF and HTML versions of
|
||||
# the BSP Guide. The second example publishes the 1.2 version of both the PDF and
|
||||
# HTML versions of the ADT Manual. The third example publishes the PDF and HTML
|
||||
# 'edison' versions of the YP Development Manual. Finally, the last example publishes
|
||||
# the PDF and HTML 'master' versions of the YP Development Manual.
|
||||
#
|
||||
|
||||
ifeq ($(DOC),bsp-guide)
|
||||
@@ -66,11 +79,32 @@ XSLTOPTS = --stringparam html.stylesheet style.css \
|
||||
--stringparam section.label.includes.component.label 1 \
|
||||
--xinclude
|
||||
ALLPREQ = html pdf tarball
|
||||
TARFILES = style.css dev-manual.html dev-manual.pdf figures/bsp-dev-flow.png figures/dev-title.png \
|
||||
#
|
||||
# Note that the tarfile might produce the "Cannot stat: No such file or directory" error
|
||||
# message for .PNG files that are not present when building a particular branch. The
|
||||
# list of files is all-inclusive for all branches.
|
||||
#
|
||||
|
||||
ifeq ($(BRANCH),edison)
|
||||
TARFILES = style.css dev-manual.html dev-manual.pdf \
|
||||
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
|
||||
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
|
||||
figures/kernel-example-repos.png figures/kernel-overview-1.png figures/kernel-overview-2.png \
|
||||
figures/kernel-overview-3.png figures/source-repos.png figures/yp-download.png \
|
||||
figures/kernel-example-repos-edison.png \
|
||||
figures/kernel-overview-1.png figures/kernel-overview-2.png \
|
||||
figures/kernel-overview-3-edison.png \
|
||||
figures/source-repos.png figures/yp-download.png \
|
||||
figures/wip.png
|
||||
else
|
||||
TARFILES = style.css dev-manual.html dev-manual.pdf \
|
||||
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
|
||||
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
|
||||
figures/kernel-example-repos.png \
|
||||
figures/kernel-overview-1.png figures/kernel-overview-2.png \
|
||||
figures/kernel-overview-3.png \
|
||||
figures/source-repos.png figures/yp-download.png \
|
||||
figures/wip.png
|
||||
endif
|
||||
|
||||
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
|
||||
FIGURES = figures
|
||||
STYLESHEET = $(DOC)/*.css
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='using-the-command-line'>
|
||||
<title>Using the Command Line</title>
|
||||
@@ -12,9 +13,9 @@
|
||||
Toolchain Tarball)</link>".
|
||||
And, that sourcing your architecture-specific environment setup script
|
||||
initializes a suitable cross-toolchain development environment.
|
||||
This setup occurs by adding the compiler, QEMU scripts, QEMU binary,
|
||||
During the setup, locations for the compiler, QEMU scripts, QEMU binary,
|
||||
a special version of <filename>pkgconfig</filename> and other useful
|
||||
utilities to the <filename>PATH</filename> variable.
|
||||
utilities are added to the <filename>PATH</filename> variable.
|
||||
Variables to assist <filename>pkgconfig</filename> and <filename>autotools</filename>
|
||||
are also defined so that,
|
||||
for example, <filename>configure.sh</filename> can find pre-generated
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='adt-eclipse'>
|
||||
<title>Working Within Eclipse</title>
|
||||
@@ -34,6 +35,11 @@
|
||||
<listitem><para>Install the Eclipse Yocto Plug-in.</para></listitem>
|
||||
<listitem><para>Configure the Eclipse Yocto Plug-in.</para></listitem>
|
||||
</orderedlist>
|
||||
<note>
|
||||
Do not install Eclipse from your distribution's package repository.
|
||||
Be sure to install Eclipse from the official Eclipse download site as directed
|
||||
in the next section.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<section id='installing-eclipse-ide'>
|
||||
@@ -43,7 +49,7 @@
|
||||
It is recommended that you have the Indigo 3.7 version of the
|
||||
Eclipse IDE installed on your development system.
|
||||
If you don’t have this version, you can find it at
|
||||
<ulink url='http://www.eclipse.org/downloads'></ulink>.
|
||||
<ulink url='&ECLIPSE_MAIN_URL;'></ulink>.
|
||||
From that site, choose the Eclipse Classic version particular to your development
|
||||
host.
|
||||
This version contains the Eclipse Platform, the Java Development
|
||||
@@ -53,10 +59,12 @@
|
||||
<para>
|
||||
Once you have downloaded the tarball, extract it into a clean
|
||||
directory.
|
||||
For example, the following command unpacks and installs the Eclipse IDE
|
||||
For example, the following commands unpack and install the Eclipse IDE
|
||||
tarball found in the <filename>Downloads</filename> area
|
||||
into a clean directory using the default name <filename>eclipse</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ tar -xzvf ~/Downloads/Eclipse-SDK-3.7-linux-gtk-x86_64.tar.gz
|
||||
$ cd ~
|
||||
$ tar -xzvf ~/Downloads/eclipse-SDK-3.7.1-linux-gtk-x86_64.tar.gz
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
@@ -98,20 +106,22 @@
|
||||
<listitem><para>Make sure you are in your Workbench and select
|
||||
"Install New Software" from the "Help" pull-down menu.
|
||||
</para></listitem>
|
||||
<listitem><para>Select <filename>indigo - http://download.eclipse.org/releases/indigo</filename>
|
||||
<listitem><para>Select <filename>indigo - &ECLIPSE_INDIGO_URL;</filename>
|
||||
from the "Work with:" pull-down menu.</para></listitem>
|
||||
<listitem><para>Expand the box next to <filename>Programming Languages</filename>
|
||||
and select the <filename>Autotools Support for CDT (incubation)</filename>
|
||||
and <filename>C/C++ Development Tools</filename> boxes.</para></listitem>
|
||||
<listitem><para>Expand the box next to "Linux Tools" and select the
|
||||
"LTTng - Linux Tracing Toolkit(incubation)" boxes.</para></listitem>
|
||||
<listitem><para>Complete the installation and restart the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>After the Eclipse IDE restarts and from the Workbench, select
|
||||
"Install New Software" from the "Help" pull-down menu.</para></listitem>
|
||||
<listitem><para>Click the
|
||||
"Available Software Sites" link.</para></listitem>
|
||||
<listitem><para>Check the box next to
|
||||
<filename>http://download.eclipse.org/tm/updates/3.3</filename>
|
||||
<filename>&ECLIPSE_UPDATES_URL;</filename>
|
||||
and click "OK".</para></listitem>
|
||||
<listitem><para>Select <filename>http://download.eclipse.org/tm/updates/3.3</filename>
|
||||
<listitem><para>Select <filename>&ECLIPSE_UPDATES_URL;</filename>
|
||||
from the "Work with:" pull-down menu.</para></listitem>
|
||||
<listitem><para>Check the box next to <filename>TM and RSE Main Features</filename>.
|
||||
</para></listitem>
|
||||
@@ -125,7 +135,7 @@
|
||||
<listitem><para>After clicking "Available Software Sites", check the box next to
|
||||
<filename>http://download.eclipse.org/tools/cdt/releases/indigo</filename>
|
||||
and click "OK".</para></listitem>
|
||||
<listitem><para>Select <filename>http://download.eclipse.org/tools/cdt/releases/indigo</filename>
|
||||
<listitem><para>Select <filename>&ECLIPSE_INDIGO_CDT_URL;</filename>
|
||||
from the "Work with:" pull-down menu.</para></listitem>
|
||||
<listitem><para>Check the box next to <filename>CDT Main Features</filename>.
|
||||
</para></listitem>
|
||||
@@ -138,30 +148,36 @@
|
||||
</section>
|
||||
|
||||
<section id='installing-the-eclipse-yocto-plug-in'>
|
||||
<title>Installing the Eclipse Yocto Plug-in</title>
|
||||
<title>Installing or Accessing the Eclipse Yocto Plug-in</title>
|
||||
|
||||
<para>
|
||||
You can install the Eclipse Yocto Plug-in one of three methods: as new software
|
||||
from within the Eclipse IDE, from the Yocto Project source repositories, or as a built zip file.
|
||||
You can install the Eclipse Yocto Plug-in into the Eclipse application
|
||||
one of two ways: using the Eclipse IDE and installing the plug-in as new software, or
|
||||
using a built zip file.
|
||||
If you don't want to permanently install the plug-in but just want to try it out
|
||||
within the Eclipse environment, you can import the plug-in project from the
|
||||
Yocto Project source repositories.
|
||||
</para>
|
||||
|
||||
<section id='new-software'>
|
||||
<title>New Software</title>
|
||||
<title>Installing the Plug-in as New Software</title>
|
||||
|
||||
<para>
|
||||
To install the Eclipse Yocto Plug-in directly into the Eclipse IDE,
|
||||
To install the Eclipse Yocto Plug-in as new software directly into the Eclipse IDE,
|
||||
follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Start up the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>In Eclipse, select "Install New Software" from the "Help" menu.</para></listitem>
|
||||
<listitem><para>Click "Add..." in the "Work with:" area.</para></listitem>
|
||||
<listitem><para>Enter
|
||||
<filename>http://www.yoctoproject.org/downloads/eclipse-plugin/1.1</filename>
|
||||
<filename>&ECLIPSE_DL_PLUGIN_URL;</filename>
|
||||
in the URL field and provide a meaningful name in the "Name" field.</para></listitem>
|
||||
<listitem><para>Click "OK" to have the entry added to the "Work with:"
|
||||
drop-down list.</para></listitem>
|
||||
<listitem><para>Select the entry for the plug-in from the "Work with:" drop-down
|
||||
list.</para></listitem>
|
||||
<listitem><para>Check the box next to <filename>Development tools and SDKs for Yocto Linux</filename>.
|
||||
</para></listitem>
|
||||
<listitem><para>Complete the remaining software installation steps and
|
||||
then restart the Eclipse IDE to finish the installation of the plug-in.
|
||||
</para></listitem>
|
||||
@@ -169,46 +185,8 @@
|
||||
</para>
|
||||
</section>
|
||||
|
||||
|
||||
<section id='yocto-project-source'>
|
||||
<title>Yocto Project Source</title>
|
||||
|
||||
<para>
|
||||
To install the Eclipse Yocto Plug-in from the Yocto Project source repositories,
|
||||
follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Open a shell and create a Git repository with:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.yoctoproject.org/eclipse-poky yocto-eclipse
|
||||
</literallayout>
|
||||
For this example, the repository is named
|
||||
<filename>~/yocto-eclipse</filename>.</para></listitem>
|
||||
<listitem><para>In Eclipse, select "Import" from the "File" menu.</para></listitem>
|
||||
<listitem><para>Expand the "General" box and pick "existing projects into workspace".
|
||||
</para></listitem>
|
||||
<listitem><para>Select the root directory and browse to "~/yocto-eclipse/plugins".
|
||||
</para></listitem>
|
||||
<listitem><para>There will be three things there.
|
||||
Select each one and install one at a time.
|
||||
Do all three.</para></listitem>
|
||||
<listitem><para>Restart everything.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
At this point you should be able to invoke Eclipse from the shell using the following:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~/eclipse
|
||||
$ ./eclipse -vmargs -XX:PermSize=256M
|
||||
</literallayout>
|
||||
The left navigation pane shows the default projects.
|
||||
Right-click on one of these projects and run it as an Eclipse application.
|
||||
This brings up a second instance of Eclipse IDE that has the Yocto Plug-in.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='zip-file-method'>
|
||||
<title>Zip File Method</title>
|
||||
<title>Installing the Plug-in from a Zip File</title>
|
||||
<para>
|
||||
To install the Eclipse Yocto Plug-in by building and installing a plug-in
|
||||
zip file, follow these steps:
|
||||
@@ -234,9 +212,9 @@
|
||||
name of the Git branch along with the Yocto Project release you are
|
||||
using.
|
||||
Here is an example that uses the <filename>master</filename> Git repository
|
||||
and the <filename>1.1M4</filename> release:
|
||||
and the <filename>&DISTRO;</filename> release:
|
||||
<literallayout class='monospaced'>
|
||||
$ scripts/build.sh master 1.1M4
|
||||
$ scripts/build.sh master &DISTRO;
|
||||
</literallayout>
|
||||
After running the script, the file
|
||||
<filename>org.yocto.sdk-<release>-<date>-archive.zip</filename>
|
||||
@@ -247,22 +225,57 @@
|
||||
</para></listitem>
|
||||
<listitem><para>Click "Add".</para></listitem>
|
||||
<listitem><para>Provide anything you want in the "Name" field.</para></listitem>
|
||||
<listitem><para>For the "Archive" field, select the ZIP file you built in step
|
||||
4.
|
||||
<listitem><para>Click "Archive" and browse to the ZIP file you built
|
||||
in step four.
|
||||
This ZIP file should not be "unzipped", and must be the
|
||||
<filename>*archive.zip</filename> file created by running the
|
||||
<filename>build.sh</filename> script.</para></listitem>
|
||||
<listitem><para>Select the new entry in the installation window and complete
|
||||
<listitem><para>Check the box next to the new entry in the installation window and complete
|
||||
the installation.</para></listitem>
|
||||
<listitem><para>Restart the Eclipse IDE if necessary.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
At this point you should be able to configure the Eclipse Yocto Plug-in as described in
|
||||
the next section.
|
||||
At this point you should be able to configure the Eclipse Yocto Plug-in as described in the
|
||||
"<link linkend='configuring-the-eclipse-yocto-plug-in'>Configuring the Eclipse Yocto Plug-in</link>"
|
||||
section.</para>
|
||||
</section>
|
||||
|
||||
<section id='yocto-project-source'>
|
||||
<title>Importing the Plug-in Project into the Eclipse Environment</title>
|
||||
<para>
|
||||
Importing the Eclipse Yocto Plug-in project from the Yocto Project source repositories
|
||||
is useful when you want to try out the latest plug-in from the tip of the plug-in's
|
||||
development tree.
|
||||
It is important to understand when you import the plug-in you are not installing
|
||||
it into the Eclipse application.
|
||||
Rather, you are importing the project and just using it.
|
||||
To import the plug-in project, follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Open a shell and create a Git repository with:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.yoctoproject.org/eclipse-poky yocto-eclipse
|
||||
</literallayout>
|
||||
For this example, the repository is named
|
||||
<filename>~/yocto-eclipse</filename>.</para></listitem>
|
||||
<listitem><para>In Eclipse, select "Import" from the "File" menu.</para></listitem>
|
||||
<listitem><para>Expand the "General" box and select "existing projects into workspace"
|
||||
and then click "Next".</para></listitem>
|
||||
<listitem><para>Select the root directory and browse to "~/yocto-eclipse/plugins".
|
||||
</para></listitem>
|
||||
<listitem><para>There will be three things there.
|
||||
Select each one and install one at a time.
|
||||
Do all three.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The left navigation pane in the Eclipse application shows the default projects.
|
||||
Right-click on one of these projects and run it as an Eclipse application.
|
||||
This brings up a second instance of Eclipse IDE that has the Yocto Plug-in.
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='configuring-the-eclipse-yocto-plug-in'>
|
||||
@@ -317,7 +330,7 @@
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Point to the Toolchain:</emphasis>
|
||||
If you are using a stand-alone pre-built toolchain, you should be pointing to the
|
||||
<filename>/opt/poky/1.1</filename> directory.
|
||||
<filename>&YOCTO_ADTPATH_DIR;</filename> directory.
|
||||
This is the location for toolchains installed by the ADT Installer or by hand.
|
||||
Sections "<link linkend='configuring-and-running-the-adt-installer-script'>Configuring
|
||||
and Running the ADT Installer Script</link>" and
|
||||
@@ -349,9 +362,8 @@
|
||||
The pull-down menu should have the supported architectures.
|
||||
If the architecture you need is not listed in the menu, you
|
||||
will need to build the image.
|
||||
See the "<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#building-image'>Building an Image</ulink>" section of the
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html'>
|
||||
The Yocto Project Quick Start</ulink> for more information.</para></listitem>
|
||||
See the "<ulink url='&YOCTO_DOCS_QS_URL;#building-image'>Building an Image</ulink>" section
|
||||
of The Yocto Project Quick Start for more information.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
@@ -467,7 +479,9 @@
|
||||
The script also runs <filename>libtoolize</filename>, <filename>aclocal</filename>,
|
||||
<filename>autoconf</filename>, <filename>autoheader</filename>,
|
||||
<filename>automake --a</filename>, and
|
||||
<filename>./configure</filename>.</para></listitem>
|
||||
<filename>./configure</filename>.
|
||||
Click on the <filename>Console</filename> tab beneath your source code to
|
||||
see the results of reconfiguring your project.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
</section>
|
||||
@@ -490,7 +504,7 @@
|
||||
<listitem><para>Expose the <filename>Run -> External Tools</filename> menu.
|
||||
Your image should appear as a selectable menu item.
|
||||
</para></listitem>
|
||||
<listitem><para>Select your image in the navigation pane to launch the
|
||||
<listitem><para>Select your image from the menu to launch the
|
||||
emulator in a new window.</para></listitem>
|
||||
<listitem><para>If needed, enter your host root password in the shell window at the prompt.
|
||||
This sets up a <filename>Tap 0</filename> connection needed for running in user-space
|
||||
@@ -509,8 +523,8 @@
|
||||
<title>Deploying and Debugging the Application</title>
|
||||
|
||||
<para>
|
||||
Once the QEMU emulator is running the image, you can deploy your application and use the emulator
|
||||
to perform debugging.
|
||||
Once the QEMU emulator is running the image, using the Eclipse IDE
|
||||
you can deploy your application and use the emulator to perform debugging.
|
||||
Follow these steps to deploy the application.
|
||||
<orderedlist>
|
||||
<listitem><para>Select <filename>Run -> Debug Configurations...</filename></para></listitem>
|
||||
@@ -550,7 +564,7 @@
|
||||
your development experience.
|
||||
These tools are aids in developing and debugging applications and images.
|
||||
You can run these user-space tools from within the Eclipse IDE through the
|
||||
<filename>Window -> YoctoTools</filename> menu.
|
||||
<filename>YoctoTools</filename> menu.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -575,14 +589,14 @@
|
||||
host can use, you must have <filename>oprofile</filename> version 0.9.4 or
|
||||
greater installed on the host.</para>
|
||||
<para>You can locate both the viewer and server from
|
||||
<ulink url='http://git.yoctoproject.org/cgit/cgit.cgi/oprofileui/'></ulink>.
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi/oprofileui/'></ulink>.
|
||||
<note>The <filename>oprofile-server</filename> is installed by default on
|
||||
the <filename>core-image-sato-sdk</filename> image.</note></para></listitem>
|
||||
<listitem><para><emphasis><filename>Lttng-ust</filename>:</emphasis> Selecting this tool runs
|
||||
<filename>usttrace</filename> on the remote target, transfers the output data back to the
|
||||
local host machine, and uses <filename>lttv-gui</filename> to graphically display the output.
|
||||
The <filename>lttv-gui</filename> must be installed on the local host machine to use this tool.
|
||||
For information on how to use <filename>lttng</filename> to trace an application, see
|
||||
<listitem><para><emphasis><filename>Lttng-ust</filename>:</emphasis> Selecting this tool runs
|
||||
<filename>usttrace</filename> on the remote target, transfers the output data back
|
||||
to the local host machine, and uses the <filename>lttng</filename> Eclipse plug-in to
|
||||
graphically display the output.
|
||||
For information on how to use <filename>lttng</filename> to trace an application, see
|
||||
<ulink url='http://lttng.org/files/ust/manual/ust.html'></ulink>.</para>
|
||||
<para>For <filename>Application</filename>, you must supply the absolute path name of the
|
||||
application to be traced by user mode <filename>lttng</filename>.
|
||||
@@ -590,7 +604,32 @@
|
||||
<filename>usttrace /path/to/foo</filename> on the remote target to trace the
|
||||
program <filename>/path/to/foo</filename>.</para>
|
||||
<para><filename>Argument</filename> is passed to <filename>usttrace</filename>
|
||||
running on the remote target.</para></listitem>
|
||||
running on the remote target.</para>
|
||||
<para>Before you use the <filename>lttng-ust</filename> tool, you need to set up
the <filename>lttng</filename> Eclipse plug-in and create an <filename>lttng</filename>
|
||||
project.
|
||||
Do the following:
|
||||
<orderedlist>
|
||||
<listitem><para>Follow these
|
||||
<ulink url='http://wiki.eclipse.org/Linux_Tools_Project/LTTng#Downloading_and_installing_the_LTTng_parser_library'>instructions</ulink>
|
||||
to download and install the <filename>lttng</filename> parser library.
|
||||
</para></listitem>
|
||||
<listitem><para>Select <filename>Window -> Open Perspective -> Other</filename>
|
||||
and then select <filename>LTTng</filename>.</para></listitem>
|
||||
<listitem><para>Click <filename>OK</filename> to change the Eclipse perspective
|
||||
into the <filename>LTTng</filename> perspective.</para></listitem>
|
||||
<listitem><para>Create a new <filename>LTTng</filename> project by selecting
|
||||
<filename>File -> New -> Project</filename>.</para></listitem>
|
||||
<listitem><para>Choose <filename>LTTng -> LTTng Project</filename>.</para></listitem>
|
||||
<listitem><para>Click <filename>YoctoTools -> lttng-ust</filename> to start user mode
|
||||
<filename>lttng</filename> on the remote target.</para></listitem>
|
||||
</orderedlist></para>
|
||||
<para>After the output data has been transferred from the remote target back to the local
|
||||
host machine, new traces will be imported into the selected <filename>LTTng</filename> project.
|
||||
Then you can go to the <filename>LTTng</filename> project, right-click the imported
|
||||
trace, and set the trace type as the <filename>LTTng</filename> kernel trace.
|
||||
Finally, right-click the imported trace and select <filename>Open</filename>
|
||||
to display the data graphically.</para></listitem>
|
||||
<listitem><para><emphasis><filename>PowerTOP</filename>:</emphasis> Selecting this tool runs
|
||||
<filename>powertop</filename> on the remote target machine and displays the results in a
|
||||
new view called <filename>powertop</filename>.</para>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='adt-intro'>
|
||||
|
||||
@@ -106,7 +107,7 @@
|
||||
<listitem><para><emphasis>PowerTOP:</emphasis> Helps you determine what
|
||||
software is using the most power.
|
||||
You can find out more about PowerTOP at
|
||||
<ulink url='http://www.linuxpowertop.org/'></ulink>.</para></listitem>
|
||||
<ulink url='https://01.org/powertop/'></ulink>.</para></listitem>
|
||||
<listitem><para><emphasis>OProfile:</emphasis> A system-wide profiler for Linux
|
||||
systems that is capable of profiling all running code at low overhead.
|
||||
You can find out more about OProfile at
|
||||
@@ -114,7 +115,7 @@
|
||||
<listitem><para><emphasis>Perf:</emphasis> Performance counters for Linux used
|
||||
to keep track of certain types of hardware and software events.
|
||||
For more information on these types of counters see
|
||||
<ulink url='https://perf.wiki.kernel.org/index.php'></ulink> and click
|
||||
<ulink url='https://perf.wiki.kernel.org/'></ulink> and click
|
||||
on “Perf tools.”</para></listitem>
|
||||
<listitem><para><emphasis>SystemTap:</emphasis> A free software infrastructure
|
||||
that simplifies information gathering about a running Linux system.
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<book id='adt-manual' lang='en'
|
||||
xmlns:xi="http://www.w3.org/2003/XInclude"
|
||||
@@ -43,10 +44,20 @@
|
||||
<date>6 October 2011</date>
|
||||
<revremark>Released with the Yocto Project 1.1 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.1.1</revnumber>
|
||||
<date>15 March 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.1.1 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.1.2</revnumber>
|
||||
<date>July 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.1.2 Release.</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
|
||||
<copyright>
|
||||
<year>2010-2011</year>
|
||||
<year>&COPYRIGHT_YEAR;</year>
|
||||
<holder>Linux Foundation</holder>
|
||||
</copyright>
|
||||
|
||||
@@ -58,9 +69,9 @@
|
||||
<note>
|
||||
Due to production processes, there could be differences between the Yocto Project
|
||||
documentation bundled in the release tarball and the
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html'>
|
||||
<ulink url='&YOCTO_DOCS_ADT_URL;'>
|
||||
Application Developer's Toolkit (ADT) User's Guide</ulink> on
|
||||
the <ulink url='http://www.yoctoproject.org'>Yocto Project</ulink> website.
|
||||
the <ulink url='&YOCTO_HOME_URL;'>Yocto Project</ulink> website.
|
||||
For the latest version of this manual, see the manual on the website.
|
||||
</note>
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='adt-package'>
|
||||
<title>Optionally Customizing the Development Packages Installation</title>
|
||||
@@ -54,9 +55,7 @@
|
||||
|
||||
<note>
|
||||
For build performance information related to the PMS, see
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html#ref-classes-package'>Packaging - <filename>package*.bbclass</filename></ulink>
|
||||
in <ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'>
|
||||
The Yocto Project Reference Manual</ulink>.
|
||||
<ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-package'>Packaging - <filename>package*.bbclass</filename></ulink> in The Yocto Project Reference Manual.
|
||||
</note>
|
||||
|
||||
<para>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='adt-prepare'>
|
||||
|
||||
@@ -55,8 +56,10 @@
|
||||
|
||||
<para>
|
||||
The ADT Installer is contained in the ADT Installer tarball.
|
||||
You can download the tarball into any directory from
|
||||
<ulink url='http://downloads.yoctoproject.org/releases/yocto/yocto-1.1/adt_installer'></ulink>.
|
||||
You can download the tarball into any directory from the
|
||||
<ulink url='&YOCTO_DL_URL;/releases'>Index of Releases</ulink>, specifically
|
||||
at
|
||||
<ulink url='&YOCTO_ADTINSTALLER_DL_URL;'></ulink>.
|
||||
Or, you can use BitBake to generate the tarball inside the existing Yocto Project
|
||||
build tree.
|
||||
</para>
|
||||
@@ -79,9 +82,9 @@
|
||||
$ cd ~
$ mkdir yocto-project
$ cd yocto-project
$ wget http://downloads.yoctoproject.org/releases/yocto/yocto-1.1/poky-edison-6.0.tar.bz2
$ tar xjf poky-edison-6.0.tar.bz2
$ source poky-edison-6.0/oe-init-build-env
$ wget &YOCTO_RELEASE_DL_URL;/&YOCTO_POKY_TARBALL;
$ tar xjf &YOCTO_POKY_TARBALL;
$ source &OE_INIT_PATH;
$ bitbake adt-installer
</literallayout>
|
||||
</para>
|
||||
@@ -93,6 +96,14 @@
|
||||
<para>
|
||||
Before running the ADT Installer script, you need to unpack the tarball.
|
||||
You can unpack the tarball in any directory you wish.
|
||||
For example, these commands copy the ADT Installer tarball from where
it was built into the home directory and then unpack the tarball into
|
||||
a top-level directory named <filename>adt-installer</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~
$ cp ~/poky/build/tmp/deploy/sdk/adt_installer.tar.bz2 $HOME
$ tar -xjf adt_installer.tar.bz2
</literallayout>
|
||||
Unpacking it creates the directory <filename>adt-installer</filename>,
|
||||
which contains the ADT Installer script (<filename>adt_installer</filename>)
|
||||
and its configuration file (<filename>adt_installer.conf</filename>).
|
||||
@@ -155,19 +166,20 @@
|
||||
|
||||
<para>
|
||||
After you have configured the <filename>adt_installer.conf</filename> file,
|
||||
run the installer using the following command:
|
||||
run the installer using the following command.
|
||||
Be sure that you are not trying to use cross-compilation tools.
|
||||
When you run the installer, the environment must use a
|
||||
host <filename>gcc</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ adt_installer
$ ./adt_installer
</literallayout>
|
||||
</para>
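<para>
A quick way to confirm that no cross-toolchain is active in your shell (a suggested
check, not part of the original procedure) is to verify that <filename>gcc</filename>
resolves to the host compiler and that no cross-compile variables are exported:
<literallayout class='monospaced'>
$ which gcc     # should point to the host compiler, for example /usr/bin/gcc
$ echo $CC      # should print nothing in a clean shell
</literallayout>
</para>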
|
||||
|
||||
<note>
|
||||
The ADT Installer requires the <filename>libtool</filename> package to complete.
|
||||
If you install the recommended packages as described in the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#packages'>Packages</ulink>"
|
||||
section of
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html'>
|
||||
The Yocto Project Quick Start</ulink>, then you will have libtool installed.
|
||||
If you install the recommended packages as described in
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#packages'>The Packages</ulink>"
|
||||
section of The Yocto Project Quick Start, then you will have libtool installed.
|
||||
</note>
|
||||
|
||||
<para>
|
||||
@@ -181,7 +193,7 @@
|
||||
<para>
|
||||
Once the installation completes, the ADT, which includes the cross-toolchain, is installed.
|
||||
You will notice environment setup files for the cross-toolchain in
|
||||
<filename>/opt/poky/1.1</filename>,
|
||||
<filename>&YOCTO_ADTPATH_DIR;</filename>,
|
||||
and image tarballs in the <filename>adt-installer</filename>
|
||||
directory according to your installer configurations, and the target sysroot located
|
||||
according to the <filename>YOCTOADT_TARGET_SYSROOT_LOC_<arch></filename> variable
|
||||
@@ -204,17 +216,17 @@
|
||||
Follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Go to
|
||||
<ulink url='http://downloads.yoctoproject.org/releases/yocto/yocto-1.1/toolchain'></ulink>
|
||||
<ulink url='&YOCTO_TOOLCHAIN_DL_URL;'></ulink>
|
||||
and find the folder that matches your host development system
|
||||
(i.e. <filename>i686</filename> for 32-bit machines or
|
||||
<filename>x86_64</filename> for 64-bit machines).</para></listitem>
|
||||
<filename>x86-64</filename> for 64-bit machines).</para></listitem>
|
||||
<listitem><para>Go into that folder and download the toolchain tarball whose name
|
||||
includes the appropriate target architecture.
|
||||
For example, if your host development system is an Intel-based 64-bit system and
|
||||
you are going to use your cross-toolchain for an Intel-based 32-bit target, go into the
|
||||
<filename>x86_64</filename> folder and download the following tarball:
|
||||
<literallayout class='monospaced'>
|
||||
poky-eglibc-x86_64-i586-toolchain-gmae-1.1.tar.bz2
|
||||
poky-eglibc-x86_64-i586-toolchain-gmae-&DISTRO;.tar.bz2
|
||||
</literallayout>
|
||||
<note><para>As an alternative to steps one and two, you can build the toolchain tarball
|
||||
if you have a Yocto Project build tree.
|
||||
@@ -231,7 +243,7 @@
|
||||
</para></note></para></listitem>
|
||||
<listitem><para>Make sure you are in the root directory with root privileges and then expand
|
||||
the tarball.
|
||||
The tarball expands into <filename>/opt/poky/1.1</filename>.
|
||||
The tarball expands into <filename>&YOCTO_ADTPATH_DIR;</filename>.
|
||||
Once the tarball is expanded, the cross-toolchain is installed.
|
||||
You will notice environment setup files for the cross-toolchain in the directory.
|
||||
</para></listitem>
|
||||
@@ -294,7 +306,7 @@
|
||||
Before you can develop using the cross-toolchain, you need to set up the
|
||||
cross-development environment by sourcing the toolchain's environment setup script.
|
||||
If you used the ADT Installer or used an existing ADT tarball to install the ADT,
|
||||
then you can find this script in the <filename>/opt/poky/1.1</filename>
|
||||
then you can find this script in the <filename>&YOCTO_ADTPATH_DIR;</filename>
|
||||
directory.
|
||||
If you installed the toolchain in the build tree, you can find the environment setup
|
||||
script for the toolchain in the Yocto Project build tree's <filename>tmp</filename> directory.
|
||||
@@ -308,7 +320,7 @@
|
||||
For example, the toolchain environment setup script for a 64-bit IA-based architecture would
|
||||
be the following:
|
||||
<literallayout class='monospaced'>
|
||||
/opt/poky/1.1/environment-setup-x86_64-poky-linux
&YOCTO_ADTPATH_DIR;/environment-setup-x86_64-poky-linux
</literallayout>
|
||||
</para>
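<para>
For example, assuming the default installation directory used throughout this section,
you would source the script before building (the exact file name depends on your
target architecture):
<literallayout class='monospaced'>
$ source &YOCTO_ADTPATH_DIR;/environment-setup-x86_64-poky-linux
</literallayout>
</para>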
|
||||
</section>
|
||||
@@ -330,10 +342,8 @@
|
||||
To get the kernel and filesystem images, you either have to build them or download
|
||||
pre-built versions.
|
||||
You can find examples for both these situations in the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#test-run'>A
|
||||
Quick Test Run</ulink>" section of
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html'>
|
||||
The Yocto Project Quick Start</ulink>.
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#test-run'>A Quick Test Run</ulink>" section of
|
||||
The Yocto Project Quick Start.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -342,12 +352,11 @@
|
||||
<filename>mips</filename>, <filename>powerpc</filename>, and <filename>arm</filename>)
|
||||
that you can use unaltered in the QEMU emulator.
|
||||
These kernel images reside in the Yocto Project release
|
||||
area - <ulink url='http://downloads.yoctoproject.org/releases/yocto/yocto-1.1/machines/'></ulink>
|
||||
area - <ulink url='&YOCTO_MACHINES_DL_URL;'></ulink>
|
||||
and are ideal for experimentation within Yocto Project.
|
||||
For information on the image types you can build using the Yocto Project, see the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html#ref-images'>Reference: Images</ulink>" appendix in
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'>
|
||||
The Yocto Project Reference Manual</ulink>.
|
||||
"<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Reference: Images</ulink>" appendix in
|
||||
The Yocto Project Reference Manual.
|
||||
</para>
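<para>
As an illustration only - the exact file names vary by release, so check the directory
listing for your release - you could fetch a pre-built kernel and matching root
filesystem for the <filename>qemux86</filename> machine with commands such as:
<literallayout class='monospaced'>
$ wget &YOCTO_MACHINES_DL_URL;/qemux86/bzImage-qemux86.bin
$ wget &YOCTO_MACHINES_DL_URL;/qemux86/core-image-sato-qemux86.ext3
</literallayout>
</para>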
|
||||
|
||||
<para>
|
||||
@@ -363,7 +372,7 @@
|
||||
If you want to use a different image type that contains the <filename>tcf-agent</filename>,
|
||||
you can do so one of two ways:
|
||||
<itemizedlist>
|
||||
<listitem><para>Modify the <filename>conf/local.conf</filename> configuration in
|
||||
<listitem><para>Modify the <filename>conf/local.conf</filename> configuration file in
|
||||
the Yocto Project build directory and then rebuild the image.
|
||||
With this method, you need to modify the <filename>EXTRA_IMAGE_FEATURES</filename>
|
||||
variable to have the value of "tools-debug" before rebuilding the image.
|
||||
@@ -378,11 +387,11 @@
|
||||
<listitem><para>Set up the cross-development environment as described in the
|
||||
"<link linkend='setting-up-the-cross-development-environment'>Setting
|
||||
Up the Cross-Development Environment</link>" section.</para></listitem>
|
||||
<listitem><para>Get the <filename>tcf-agent</filename> source code, which is
|
||||
stored using the Subversion SCM, using the following command:
|
||||
<listitem><para>Get the <filename>tcf-agent</filename> source code using
|
||||
the following commands:
|
||||
<literallayout class='monospaced'>
|
||||
$ svn checkout svn://dev.eclipse.org/svnroot/dsdp/org.eclipse.tm.tcf/trunk/agent \
<-r #rev_number>
$ git clone http://git.eclipse.org/gitroot/tcf/org.eclipse.tcf.agent.git
$ cd agent
</literallayout></para></listitem>
|
||||
<listitem><para>Modify the <filename>Makefile.inc</filename> file
|
||||
for the cross-compilation environment by setting the
|
||||
@@ -422,13 +431,13 @@
|
||||
filesystem image.
|
||||
For example, the following commands set up the environment and then extract
|
||||
the root filesystem from a previously built filesystem image tarball named
|
||||
<filename>core-image-sato-sdk-qemux86-2011091411831.rootfs.tar.bz2</filename>.
|
||||
<filename>core-image-sato-sdk-qemux86.tar.bz2</filename>.
|
||||
The example extracts the root filesystem into the <filename>$HOME/qemux86-sato</filename>
|
||||
directory:
|
||||
<literallayout class='monospaced'>
|
||||
$ source $HOME/poky/build/tmp/environment-setup-i586-poky-linux
$ runqemu-extract-sdk \
tmp/deploy/images/core-image-sato-sdk-qemux86-2011091411831.rootfs.tar.bz2 \
tmp/deploy/images/core-image-sato-sdk-qemux86.tar.bz2 \
$HOME/qemux86-sato
</literallayout>
|
||||
In this case, you could now point to the target sysroot at
|
||||
|
||||
@@ -654,7 +654,7 @@ hr {
|
||||
|
||||
|
||||
.tip, .warning, .caution, .note {
|
||||
border-color: #aaa;
|
||||
border-color: #fff;
|
||||
}
|
||||
|
||||
|
||||
@@ -662,24 +662,24 @@ hr {
|
||||
.warning table th,
|
||||
.caution table th,
|
||||
.note table th {
|
||||
border-bottom-color: #aaa;
|
||||
border-bottom-color: #fff;
|
||||
}
|
||||
|
||||
|
||||
.warning {
|
||||
background-color: #fea;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.caution {
|
||||
background-color: #fea;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.tip {
|
||||
background-color: #eff;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.note {
|
||||
background-color: #dfc;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.glossary dl dt,
|
||||
@@ -946,8 +946,8 @@ table {
|
||||
|
||||
.tip,
|
||||
.note {
|
||||
background: #666666;
|
||||
color: #fff;
|
||||
background: #f0f0f2;
|
||||
color: #333;
|
||||
padding: 20px;
|
||||
margin: 20px;
|
||||
}
|
||||
@@ -958,12 +958,12 @@ table {
|
||||
margin: 0em;
|
||||
font-size: 2em;
|
||||
font-weight: bold;
|
||||
color: #fff;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
.tip a,
|
||||
.note a {
|
||||
color: #fff;
|
||||
color: #333;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
@@ -972,3 +972,12 @@ table {
|
||||
color: #333;
|
||||
}
|
||||
|
||||
/* Changes the announcement text */
|
||||
.tip h3,
|
||||
.warning h3,
|
||||
.caution h3,
|
||||
.note h3 {
|
||||
font-size:large;
|
||||
color: #00557D;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<book id='bsp-guide' lang='en'
|
||||
xmlns:xi="http://www.w3.org/2003/XInclude"
|
||||
@@ -55,10 +56,20 @@
|
||||
<date>6 October 2011</date>
|
||||
<revremark>Released with the Yocto Project 1.1 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.1.1</revnumber>
|
||||
<date>15 March 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.1.1 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.1.2</revnumber>
|
||||
<date>July 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.1.2 Release.</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
|
||||
<copyright>
|
||||
<year>2010-2011</year>
|
||||
<year>&COPYRIGHT_YEAR;</year>
|
||||
<holder>Linux Foundation</holder>
|
||||
</copyright>
|
||||
|
||||
@@ -70,9 +81,9 @@
|
||||
<note>
|
||||
Due to production processes, there could be differences between the Yocto Project
|
||||
documentation bundled in the release tarball and the
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/bsp-guide/bsp-guide.html'>
|
||||
<ulink url='&YOCTO_DOCS_BSP_URL;'>
|
||||
Board Support Package (BSP) Developer's Guide</ulink> on
|
||||
the <ulink url='http://www.yoctoproject.org'>Yocto Project</ulink> website.
|
||||
the <ulink url='&YOCTO_HOME_URL;'>Yocto Project</ulink> website.
|
||||
For the latest version of this manual, see the manual on the website.
|
||||
</note>
|
||||
</legalnotice>
|
||||
|
||||
@@ -654,7 +654,7 @@ hr {
|
||||
|
||||
|
||||
.tip, .warning, .caution, .note {
|
||||
border-color: #aaa;
|
||||
border-color: #fff;
|
||||
}
|
||||
|
||||
|
||||
@@ -662,24 +662,24 @@ hr {
|
||||
.warning table th,
|
||||
.caution table th,
|
||||
.note table th {
|
||||
border-bottom-color: #aaa;
|
||||
border-bottom-color: #fff;
|
||||
}
|
||||
|
||||
|
||||
.warning {
|
||||
background-color: #fea;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.caution {
|
||||
background-color: #fea;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.tip {
|
||||
background-color: #eff;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.note {
|
||||
background-color: #dfc;
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.glossary dl dt,
|
||||
@@ -771,6 +771,17 @@ h6,
|
||||
h7{
|
||||
}
|
||||
|
||||
/*
|
||||
Example of how to stick an image as part of the title.
|
||||
|
||||
div.article .titlepage .title
|
||||
{
|
||||
background-image: url("figures/white-on-black.png");
|
||||
background-position: center;
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
*/
|
||||
|
||||
div.preface .titlepage .title,
|
||||
div.colophon .title,
|
||||
div.chapter .titlepage .title {
|
||||
@@ -936,8 +947,8 @@ table {
|
||||
|
||||
.tip,
|
||||
.note {
|
||||
background: #666666;
|
||||
color: #fff;
|
||||
background: #f0f0f2;
|
||||
color: #333;
|
||||
padding: 20px;
|
||||
margin: 20px;
|
||||
}
|
||||
@@ -948,12 +959,12 @@ table {
|
||||
margin: 0em;
|
||||
font-size: 2em;
|
||||
font-weight: bold;
|
||||
color: #fff;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
.tip a,
|
||||
.note a {
|
||||
color: #fff;
|
||||
color: #333;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
@@ -962,3 +973,12 @@ table {
|
||||
color: #333;
|
||||
}
|
||||
|
||||
/* Changes the announcement text */
|
||||
.tip h3,
|
||||
.warning h3,
|
||||
.caution h3,
|
||||
.note h3 {
|
||||
font-size:large;
|
||||
color: #00557D;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE appendix PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<appendix id='dev-manual-bsp-appendix'>
|
||||
|
||||
@@ -31,47 +32,77 @@
|
||||
The following paragraphs describe both methods.
|
||||
For additional information, see the bulleted item
|
||||
"<link linkend='local-yp-release'>Yocto Project Release</link>".
|
||||
</para>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
As mentioned, one way to get the Yocto Project files is to use Git to clone the
|
||||
<filename>poky</filename> repository:
|
||||
<filename>poky</filename> repository.
|
||||
These commands create a local copy of the Git repository.
|
||||
By default, the top-level directory of the repository is named <filename>poky</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.yoctoproject.org/poky
|
||||
$ cd poky
|
||||
</literallayout>
|
||||
Alternatively, you can start with the downloaded Poky "edison" tarball:
|
||||
Alternatively, you can start with the downloaded Poky "&DISTRO_NAME;" tarball.
|
||||
These commands unpack the tarball into a Yocto Project File directory structure.
|
||||
By default, the top-level directory of the file structure is named
|
||||
<filename>&YOCTO_POKY;</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ tar xfj poky-edison-6.0.tar.bz2
$ cd poky
$ tar xfj &YOCTO_POKY_TARBALL;
$ cd &YOCTO_POKY;
</literallayout>
|
||||
<note>If you're using the tarball method, you can ignore all the following steps that
|
||||
<note><para>If you're using the tarball method, you can ignore all the following steps that
|
||||
ask you to carry out Git operations.
|
||||
You already have the results of those operations
|
||||
in the form of the edison release tarballs.
|
||||
in the form of the &DISTRO_NAME; release tarballs.
|
||||
Consequently, there is nothing left to do other than extract those tarballs into the
|
||||
proper locations.</note>
|
||||
proper locations.</para>
|
||||
|
||||
<para>Once you expand the released tarball, you have a snapshot of the Git repository
|
||||
that represents a specific release.
|
||||
Fundamentally, this is different than having a local copy of the Yocto Project
|
||||
Git repository.
|
||||
With the tarball method, the changes you make build on top of a release.
|
||||
With the Git repository method you have the ability to track development
|
||||
and keep changes in revision control.</para></note>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Once you have the local <filename>poky</filename> Git repository set up,
|
||||
you have many development branches from which you can work.
|
||||
From inside the repository you can see the branch names and the tag names used
|
||||
in the Git repository using either of the following two commands:
|
||||
With the local <filename>poky</filename> Git repository set up,
|
||||
you have all the development branches available to you from which you can work.
|
||||
Next, you need to be sure that your local repository reflects the exact
|
||||
release in which you are interested.
|
||||
From inside the repository you can see the development branches that represent
|
||||
areas of development that have diverged from the main (master) branch
|
||||
at some point, such as a branch to track a maintenance release's development.
|
||||
You can also see the tag names used to mark snapshots of stable releases or
|
||||
points in the repository.
|
||||
Use the following commands to list out the branches and the tags in the repository,
|
||||
respectively.
|
||||
<literallayout class='monospaced'>
|
||||
$ git branch -a
|
||||
$ git tag -l
|
||||
</literallayout>
|
||||
For this example we are going to use the Yocto Project 1.1 Release, which is code
|
||||
named "edison".
|
||||
These commands create a local branch named <filename>edison</filename>
|
||||
that tracks the remote branch of the same name.
|
||||
For this example, we are going to use the Yocto Project &DISTRO; Release, which is code
|
||||
named "&DISTRO_NAME;".
|
||||
To make sure we have a local area (branch in Git terms) on our machine that
|
||||
reflects the &DISTRO; release, we can use the following commands:
|
||||
<literallayout class='monospaced'>
|
||||
$ git checkout -b edison origin/edison
Switched to a new branch 'edison'
$ cd ~/poky
$ git fetch --tags
$ git checkout &DISTRO_NAME;-&POKYVERSION; -b &DISTRO_NAME;
Switched to a new branch '&DISTRO_NAME;'
</literallayout>
|
||||
The <filename>git fetch --tags</filename> is somewhat redundant since you just set
|
||||
up the repository and should have all the tags.
|
||||
The <filename>fetch</filename> command makes sure all the tags are available in your
|
||||
local repository.
|
||||
The Git <filename>checkout</filename> command with the <filename>-b</filename> option
|
||||
creates a local branch for you named <filename>&DISTRO_NAME;</filename>.
|
||||
Your local branch begins in the same state as the Yocto Project &DISTRO; released tarball
|
||||
marked with the <filename>&DISTRO_NAME;-&POKYVERSION;</filename> tag in the source repositories.
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='choosing-a-base-bsp-app'>
|
||||
<title>Choosing a Base BSP</title>
|
||||
@@ -100,7 +131,7 @@
|
||||
<para>
|
||||
You need to have the base BSP layer on your development system.
|
||||
Similar to the local Yocto Project files, you can get the BSP
|
||||
layer in a couple of different ways:
|
||||
layer in a couple of different ways:
|
||||
download the BSP tarball and extract it, or set up a local Git repository that
|
||||
has the Yocto Project BSP layers.
|
||||
You should use the same method that you used to get the local Yocto Project files earlier.
|
||||
@@ -126,15 +157,15 @@
|
||||
$ cd meta-intel
|
||||
</literallayout>
|
||||
Alternatively, you can start with the downloaded Crown Bay tarball.
|
||||
You can download the edison version of the BSP tarball from the
|
||||
<ulink url='http://www.yoctoproject.org/download'>Download</ulink> page of the
|
||||
You can download the &DISTRO_NAME; version of the BSP tarball from the
|
||||
<ulink url='&YOCTO_HOME_URL;/download'>Download</ulink> page of the
|
||||
Yocto Project website.
|
||||
Here is the specific link for the tarball needed for this example:
|
||||
<ulink url='http://downloads.yoctoproject.org/releases/yocto/yocto-1.1/machines/crownbay-noemgd/crownbay-noemgd-edison-6.0.0.tar.bz2'></ulink>.
|
||||
<ulink url='&YOCTO_DL_URL;/releases/yocto/yocto-1.1/machines/crownbay-noemgd/crownbay-noemgd-edison-6.0.0.tar.bz2'></ulink>.
|
||||
Again, be sure that you are already in the <filename>poky</filename> directory
|
||||
as described previously before installing the tarball:
|
||||
<literallayout class='monospaced'>
|
||||
$ tar xfj crownbay-noemgd-edison-6.0.0.tar.bz2
$ tar xfj crownbay-noemgd-&DISTRO_NAME;-6.0.0.tar.bz2
$ cd meta-intel
</literallayout>
|
||||
</para>
|
||||
@@ -143,15 +174,16 @@
|
||||
The <filename>meta-intel</filename> directory contains all the metadata
|
||||
that supports BSP creation.
|
||||
If you're using the Git method, the following
|
||||
step will switch to the edison metadata.
|
||||
step will switch to the &DISTRO_NAME; metadata.
|
||||
If you're using the tarball method, you already have the correct metadata and can
|
||||
skip to the next step.
|
||||
Because <filename>meta-intel</filename> is its own Git repository, you will want
|
||||
to be sure you are in the appropriate branch for your work.
|
||||
For this example we are going to use the <filename>edison</filename> branch.
|
||||
For this example we are going to use the <filename>&DISTRO_NAME;</filename> branch.
|
||||
<literallayout class='monospaced'>
|
||||
$ git checkout -b edison origin/edison
Switched to a new branch 'edison'
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
Branch &DISTRO_NAME; set up to track remote branch &DISTRO_NAME; from origin.
Switched to a new branch '&DISTRO_NAME;'
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
@@ -238,10 +270,8 @@
|
||||
<filename>meta-mymachine/conf/layer.conf</filename>.
|
||||
This file identifies build information needed for the new layer.
|
||||
You can see the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/bsp-guide/bsp-guide.html#bsp-filelayout-layer'>Layer Configuration File</ulink>" section in
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/bsp-guide/bsp-guide.html'>The Board
|
||||
Support Packages (BSP) Development Guide</ulink>
|
||||
for more information on this configuration file.
|
||||
"<ulink url='&YOCTO_DOCS_BSP_URL;#bsp-filelayout-layer'>Layer Configuration File</ulink>" section
|
||||
in The Board Support Packages (BSP) Development Guide for more information on this configuration file.
|
||||
Basically, we are changing the existing statements to work with our BSP.
|
||||
</para>
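<para>
For illustration only - the file copied from <filename>meta-crownbay</filename> is the
authoritative starting point - the statements you adjust typically follow this pattern,
with the collection name changed to match the new layer:
<literallayout class='monospaced'>
BBFILE_COLLECTIONS += "mymachine"
BBFILE_PATTERN_mymachine := "^${LAYERDIR}/"
BBFILE_PRIORITY_mymachine = "6"
</literallayout>
</para>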
|
||||
|
||||
@@ -272,7 +302,8 @@
|
||||
Now we will take a look at the recipes in your new layer.
|
||||
The standard BSP structure has areas for BSP, graphics, core, and kernel recipes.
|
||||
When you create a BSP, you use these areas for appropriate recipes and append files.
|
||||
Recipes take the form of <filename>.bb</filename> files.
|
||||
Recipes take the form of <filename>.bb</filename> files, while append files take
|
||||
the form of <filename>.bbappend</filename> files.
|
||||
If you want to leverage the existing recipes the Yocto Project build system uses
|
||||
but change those recipes, you can use <filename>.bbappend</filename> files.
|
||||
All new recipes and append files for your layer must go in the layer’s
|
||||
@@ -328,7 +359,7 @@
|
||||
<filename>recipes-core/tasks</filename> appends the similarly named recipe
|
||||
located in the local Yocto Project files at
|
||||
<filename>meta/recipes-core/tasks</filename>.
|
||||
The "append" file in our layer right now is Crown Bay-specific and supports
|
||||
The append file in our layer right now is Crown Bay-specific and supports
|
||||
EMGD and non-EMGD.
|
||||
Here are the contents of the file:
|
||||
<literallayout class='monospaced'>
|
||||
@@ -372,15 +403,18 @@
|
||||
However, in the <filename>meta-mymachine</filename> layer in
|
||||
<filename>recipes-kernel/linux</filename> resides a <filename>.bbappend</filename>
|
||||
file named <filename>linux-yocto_3.0.bbappend</filename> that
|
||||
is appended to the recipe of the same name in <filename>meta/recipes-kernel/linux</filename>.
|
||||
Thus, the <filename>SRCREV</filename> statements in the "append" file override
|
||||
appends information to the recipe of the same name in <filename>meta/recipes-kernel/linux</filename>.
|
||||
Thus, the <filename>SRCREV</filename> statements in the append file override
|
||||
the more general statements found in <filename>meta</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>SRCREV</filename> statements in the "append" file currently identify
|
||||
The <filename>SRCREV</filename> statements in the append file currently identify
|
||||
the kernel that supports the Crown Bay BSP with and without EMGD support.
|
||||
Here are the statements:
|
||||
Here are the statements:
|
||||
<note>The commit ID strings used in this manual might not match the actual commit
|
||||
ID strings found in the <filename>linux-yocto_3.0.bbappend</filename> file.
|
||||
For the example, this difference does not matter.</note>
|
||||
<literallayout class='monospaced'>
|
||||
SRCREV_machine_pn-linux-yocto_crownbay ?= \
|
||||
"2247da9131ea7e46ed4766a69bb1353dba22f873"
|
||||
@@ -412,11 +446,11 @@
|
||||
and insert the commit identifiers to identify the kernel in which we
|
||||
are interested, which will be based on the <filename>atom-pc-standard</filename>
|
||||
kernel.
|
||||
In this case, because we're working with the edison branch of everything, we
|
||||
In this case, because we're working with the &DISTRO_NAME; branch of everything, we
|
||||
need to use the <filename>SRCREV</filename> values for the atom-pc branch
|
||||
that are associated with the edison release.
|
||||
that are associated with the &DISTRO_NAME; release.
|
||||
To find those values, we need to find the <filename>SRCREV</filename>
|
||||
values that edison uses for the atom-pc branch, which we find in the
|
||||
values that &DISTRO_NAME; uses for the atom-pc branch, which we find in the
|
||||
<filename>poky/meta-yocto/recipes-kernel/linux/linux-yocto_3.0.bbappend</filename>
|
||||
file.
|
||||
</para>
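<para>
One convenient way to locate those values (a sketch - any text search works equally well)
is to grep the append file for the atom-pc entries:
<literallayout class='monospaced'>
$ grep atom-pc poky/meta-yocto/recipes-kernel/linux/linux-yocto_3.0.bbappend
</literallayout>
</para>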
|
||||
@@ -427,9 +461,7 @@
|
||||
The meta <filename>SRCREV</filename> isn't specified in this file, so it must be
|
||||
specified in the base kernel recipe in the
|
||||
<filename>poky/meta/recipes-kernel/linux/linux-yocto_3.0.bb</filename>
|
||||
file, in the <filename>SRCREV_meta variable</filename> found there.
|
||||
It happens to be the same as the value we already inherited from the
|
||||
<filename>meta-crownbay</filename> BSP.
|
||||
file, in the <filename>SRCREV_meta</filename> variable found there.
|
||||
Here are the final <filename>SRCREV</filename> statements:
|
||||
<literallayout class='monospaced'>
|
||||
SRCREV_machine_pn-linux-yocto_mymachine ?= \
|
||||
@@ -441,8 +473,8 @@
|
||||
|
||||
<para>
|
||||
In this example, we're using the <filename>SRCREV</filename> values we
|
||||
found already captured in the edison release because we're creating a BSP based on
|
||||
edison.
|
||||
found already captured in the &DISTRO_NAME; release because we're creating a BSP based on
|
||||
&DISTRO_NAME;.
|
||||
If, instead, we had based our BSP on the master branches, we would want to use
|
||||
the most recent <filename>SRCREV</filename> values taken directly from the kernel repo.
|
||||
We will not be doing that for this example.
|
||||
@@ -451,8 +483,8 @@
|
||||
exact commit strings in the Yocto Project source repositories you need to change
|
||||
the <filename>SRCREV</filename> statements.
|
||||
You can find all the <filename>machine</filename> and <filename>meta</filename>
|
||||
branch points (commits) for the <filename>linux-yocto-3.0</filename> kernel at
|
||||
<ulink url='http://git.yoctoproject.org/cgit/cgit.cgi/linux-yocto-3.0'></ulink>.
|
||||
branch points (commits) for the <filename>linux-yocto-3.0-1.1.x</filename> kernel at
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit.cgi/linux-yocto-3.0-1.1.x/'></ulink>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -481,12 +513,12 @@
|
||||
Because we are not interested in supporting EMGD those three can be deleted.
|
||||
The remaining three must be changed so that <filename>mymachine</filename> replaces
|
||||
<filename>crownbay-noemgd</filename> and <filename>crownbay</filename>.
|
||||
Because we are using the atom-pc branch for this new BSP, we can also find
|
||||
the exact branch we need for the KMACHINE variable in our new BSP from the value
|
||||
Because we are using the <filename>atom-pc</filename> branch for this new BSP, we can also find
|
||||
the exact branch we need for the <filename>KMACHINE</filename> variable in our new BSP from the value
|
||||
we find in the
|
||||
<filename>poky/meta-yocto/recipes-kernel/linux/linux-yocto_3.0.bbappend</filename>
|
||||
file we looked at in a previous step.
|
||||
In this case, the value we want is in the KMACHINE_atom-pc variable in that file.
|
||||
In this case, the value we want is in the <filename>KMACHINE_atom-pc</filename> variable in that file.
|
||||
Here is the final <filename>linux-yocto_3.0.bbappend</filename> file after all
|
||||
the edits:
|
||||
<literallayout class='monospaced'>
|
||||
@@ -568,9 +600,8 @@
|
||||
variables to twice the number of cores your system supports.</para></listitem>
|
||||
<listitem><para>Update the <filename>bblayers.conf</filename> file so that it includes
|
||||
the path to your new BSP layer.
|
||||
In this example you need to include the pathname to <filename>meta-mymachine</filename>.
|
||||
For this example the
|
||||
<filename>BBLAYERS</filename> variable in the file would need to include the following path:
|
||||
In this example, you need to include this path as part of the
|
||||
<filename>BBLAYERS</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
$HOME/poky/meta-intel/meta-mymachine
|
||||
</literallayout></para></listitem>
|
||||
@@ -579,7 +610,7 @@
|
||||
|
||||
<para>
|
||||
The appendix
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html#ref-variables-glos'>
|
||||
<ulink url='&YOCTO_DOCS_REF_URL;#ref-variables-glos'>
|
||||
Reference: Variables Glossary</ulink> in the Yocto Project Reference Manual has more information
|
||||
on configuration variables.
|
||||
</para>
|
||||
@@ -615,13 +646,13 @@
|
||||
copy the <filename>.hddimg</filename> file, located in the
|
||||
<filename>poky/build/tmp/deploy/images</filename>
|
||||
directory after a successful build to the flash drive.
|
||||
Assuming the USB flash drive takes device <filename>/dev/sdf</filename>,
|
||||
Assuming the USB flash drive takes device <filename>/dev/sdc</filename>,
|
||||
use <filename>dd</filename> to copy the live image to it.
|
||||
For example:
|
||||
<literallayout class='monospaced'>
|
||||
# dd if=core-image-sato-mymachine-20111101223904.hddimg of=/dev/sdf
# dd if=core-image-sato-mymachine-20120111232235.hddimg of=/dev/sdc
# sync
# eject /dev/sdf
# eject /dev/sdc
</literallayout>
|
||||
You should now have a bootable USB flash device.
|
||||
</para>
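<para>
Because <filename>dd</filename> overwrites the target device, it is worth double-checking
which device node the flash drive actually received before running it (a suggested
precaution; the device name used above is only an example):
<literallayout class='monospaced'>
$ dmesg | tail        # shows the device name assigned when the drive was plugged in
$ sudo fdisk -l       # lists all block devices and their sizes
</literallayout>
</para>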
|
||||
@@ -649,12 +680,12 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
For reference, the sato image produced by the previous steps for edison
|
||||
For reference, the sato image produced by the previous steps for &DISTRO_NAME;
|
||||
should look like the following in terms of size.
|
||||
If your sato image is much different from this,
|
||||
you probably made a mistake in one of the above steps:
|
||||
<literallayout class='monospaced'>
|
||||
358715392 2011-11-01 19:11 core-image-sato-mymachine-20111101223904.hddimg
|
||||
358709248 2012-01-11 20:43 core-image-sato-mymachine-20120111232235.hddimg
|
||||
</literallayout>
|
||||
<note>The previous instructions are also present in the README that was copied
|
||||
from meta-crownbay, which should also be updated to reflect the specifics of your
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='dev-manual-intro'>
|
||||
|
||||
@@ -17,7 +18,8 @@
|
||||
sources where you can find more detail.
|
||||
For example, detailed information on Git, repositories and open source in general
|
||||
can be found in many places.
|
||||
Another example is how to get set up to use the Yocto Project, which our Yocto Project Quick Start covers.
|
||||
Another example is how to get set up to use the Yocto Project, which our
|
||||
<ulink url='&YOCTO_DOCS_QS_URL;'>Yocto Project Quick Start</ulink> covers.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -38,7 +40,7 @@
|
||||
<listitem><para>Information to help developers who are new to the open source environment
|
||||
and to the distributed revision control system Git, which the Yocto Project
|
||||
uses.</para></listitem>
|
||||
<listitem><para>An understanding of common end-to-end development models.</para></listitem>
|
||||
<listitem><para>An understanding of common end-to-end development models and tasks.</para></listitem>
|
||||
<listitem><para>Development case overviews for both system development and user-space
|
||||
applications.</para></listitem>
|
||||
<listitem><para>An overview and understanding of the emulation environment used with
|
||||
@@ -63,13 +65,15 @@
|
||||
<itemizedlist>
|
||||
<listitem><para>Step-by-step instructions if those instructions exist in other Yocto
|
||||
Project documentation.
|
||||
For example, the Application Development Toolkit (ADT) User’s Guide contains detailed
|
||||
For example, the
|
||||
<ulink url='&YOCTO_DOCS_ADT_URL;'>Yocto Project Application Development Toolkit (ADT)
|
||||
User's Guide</ulink> contains detailed
|
||||
instruction on how to obtain and configure the
|
||||
<trademark class='trade'>Eclipse</trademark> Yocto Plug-in.</para></listitem>
|
||||
<listitem><para>Reference material.
|
||||
This type of material resides in an appropriate reference manual.
|
||||
For example, system variables are documented in the
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'>
|
||||
<ulink url='&YOCTO_DOCS_REF_URL;'>
|
||||
Yocto Project Reference Manual</ulink>.</para></listitem>
|
||||
<listitem><para>Detailed public information that is not specific to the Yocto Project.
|
||||
For example, exhaustive information on how to use Git is covered better through the
|
||||
@@ -86,31 +90,31 @@
|
||||
need to supplement it with other information.
|
||||
The following list presents other sources of information you might find helpful:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>The <ulink url='http://www.yoctoproject.org'>Yocto Project Website</ulink>:
|
||||
<listitem><para><emphasis>The <ulink url='&YOCTO_HOME_URL;'>Yocto Project Website</ulink>:
|
||||
</emphasis> The home page for the Yocto Project provides lots of information on the project
|
||||
as well as links to software and documentation.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html'>
|
||||
<ulink url='&YOCTO_DOCS_QS_URL;'>
|
||||
The Yocto Project Quick Start</ulink>:</emphasis> This short document lets you get started
|
||||
with the Yocto Project quickly and start building an image.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'>
|
||||
<ulink url='&YOCTO_DOCS_REF_URL;'>
|
||||
The Yocto Project Reference Manual</ulink>:</emphasis> This manual is a reference
|
||||
guide to the Yocto Project build component known as "Poky."
|
||||
The manual also contains a reference chapter on Board Support Package (BSP)
|
||||
layout.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html'>
|
||||
<ulink url='&YOCTO_DOCS_ADT_URL;'>
|
||||
The Yocto Project Application Development Toolkit (ADT) User's Guide</ulink>:</emphasis>
|
||||
This guide provides information that lets you get going with the ADT to
|
||||
develop projects using the Yocto Project.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/bsp-guide/bsp-guide.html'>
|
||||
<ulink url='&YOCTO_DOCS_BSP_URL;'>
|
||||
The Yocto Project Board Support Package (BSP) Developer's Guide</ulink>:</emphasis>
|
||||
This guide defines the structure for BSP components.
|
||||
Having a commonly understood structure encourages standardization.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/kernel-manual/kernel-manual.html'>
|
||||
<ulink url='&YOCTO_DOCS_KERNEL_URL;'>
|
||||
The Yocto Project Kernel Architecture and Use Manual</ulink>:</emphasis>
|
||||
This manual describes the architecture of the Yocto Project kernel and provides
|
||||
some work flow examples.</para></listitem>
|
||||
@@ -120,14 +124,14 @@
|
||||
demonstrates how an application developer uses Yocto Plug-in features within
|
||||
the Eclipse IDE.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://wiki.yoctoproject.org/wiki/FAQ'>FAQ</ulink>:</emphasis>
|
||||
<ulink url='&YOCTO_WIKI_URL;/wiki/FAQ'>FAQ</ulink>:</emphasis>
|
||||
A list of commonly asked questions and their answers.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.yoctoproject.org/download/yocto/yocto-project-1.1-release-notes-poky-6.0'>
|
||||
<ulink url='&YOCTO_HOME_URL;/download/yocto/yocto-project-1.1.2-release-notes-poky-&POKYVERSION;'>
|
||||
Release Notes</ulink>:</emphasis> Features, updates and known issues for the current
|
||||
release of the Yocto Project.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://bugzilla.yoctoproject.org/'>Bugzilla</ulink>:</emphasis>
|
||||
<ulink url='&YOCTO_BUGZILLA_URL;'>Bugzilla</ulink>:</emphasis>
|
||||
The bug tracking application the Yocto Project uses.
|
||||
If you find problems with the Yocto Project, you should report them using this
|
||||
application.</para></listitem>
|
||||
@@ -135,11 +139,11 @@
|
||||
Yocto Project Mailing Lists:</emphasis> To subscribe to the Yocto Project mailing
|
||||
lists, click on the following URLs and follow the instructions:
|
||||
<itemizedlist>
|
||||
<listitem><para><ulink url='http://lists.yoctoproject.org/listinfo/yocto'></ulink> for a
|
||||
<listitem><para><ulink url='&YOCTO_LISTS_URL;/listinfo/yocto'></ulink> for a
|
||||
Yocto Project Discussions mailing list.</para></listitem>
|
||||
<listitem><para><ulink url='http://lists.yoctoproject.org/listinfo/poky'></ulink> for a
|
||||
<listitem><para><ulink url='&YOCTO_LISTS_URL;/listinfo/poky'></ulink> for a
|
||||
Yocto Project Discussions mailing list about the Poky build system.</para></listitem>
|
||||
<listitem><para><ulink url='http://lists.yoctoproject.org/listinfo/yocto-announce'></ulink>
|
||||
<listitem><para><ulink url='&YOCTO_LISTS_URL;/listinfo/yocto-announce'></ulink>
|
||||
for a mailing list to receive official Yocto Project announcements for developments
|
||||
as well as Yocto Project milestones.</para></listitem>
|
||||
</itemizedlist></para></listitem>
|
||||
@@ -148,7 +152,7 @@
|
||||
for Yocto Project and Poky discussions: <filename>#yocto</filename> and
|
||||
<filename>#poky</filename>.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.openedhand.com/'>OpenedHand</ulink>:</emphasis>
|
||||
<ulink url='&OH_HOME_URL;'>OpenedHand</ulink>:</emphasis>
|
||||
The company where the Yocto Project build system Poky was first developed.
|
||||
OpenedHand has since been acquired by Intel Corporation.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
@@ -156,7 +160,7 @@
|
||||
The company that acquired OpenedHand in 2008 and continues development on the
|
||||
Yocto Project.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<ulink url='http://www.openembedded.org/'>OpenEmbedded</ulink>:</emphasis>
|
||||
<ulink url='&OE_HOME_URL;'>OpenEmbedded</ulink>:</emphasis>
|
||||
The upstream, generic, embedded distribution the Yocto Project build system (Poky) derives
|
||||
from and to which it contributes.</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE appendix PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<appendix id='dev-manual-kernel-appendix'>
|
||||
|
||||
@@ -65,7 +66,7 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
<imagedata fileref="figures/kernel-example-repos.png" width="7in" depth="5in"
|
||||
<imagedata fileref="figures/kernel-example-repos-edison.png" width="7in" depth="5in"
|
||||
align="center" scale="100" />
|
||||
</para>
|
||||
|
||||
@@ -75,9 +76,10 @@
|
||||
<listitem><para><emphasis>Local Yocto Project Files Git Repository:</emphasis>
|
||||
This area contains all the metadata that supports building images in the
|
||||
Yocto Project build environment - the local Yocto Project files.
|
||||
The local Yocto Project files Git repository also contains the build directory
|
||||
and a configuration directory that let you control the build.
|
||||
Note also that in this example, the repository also contains the
|
||||
In this example, the local Yocto Project files Git repository also
|
||||
contains the build directory, which contains the configuration directory
|
||||
that lets you control the build.
|
||||
In this example, the repository also contains the
|
||||
<filename>poky-extras</filename> Git repository.</para>
|
||||
<para>See the bulleted item
|
||||
"<link linkend='local-yp-release'>Yocto Project Release</link>"
|
||||
@@ -148,14 +150,14 @@
|
||||
$ git branch -a
|
||||
$ git tag -l
|
||||
</literallayout>
|
||||
This example uses the Yocto Project 1.1 Release code named "edison",
|
||||
which maps to the <filename>edison</filename> branch in the repository.
|
||||
The following commands create and checkout the local <filename>edison</filename>
|
||||
This example uses the Yocto Project &DISTRO; Release code named "&DISTRO_NAME;",
|
||||
which maps to the <filename>&DISTRO_NAME;</filename> branch in the repository.
|
||||
The following commands create and checkout the local <filename>&DISTRO_NAME;</filename>
|
||||
branch:
|
||||
<literallayout class='monospaced'>
|
||||
$ git checkout -b edison origin/edison
|
||||
Branch edison set up to track remote branch edison from origin.
|
||||
Switched to a new branch 'edison'
|
||||
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
|
||||
Branch &DISTRO_NAME; set up to track remote branch &DISTRO_NAME; from origin.
|
||||
Switched to a new branch '&DISTRO_NAME;'
|
||||
</literallayout>
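As a quick sanity check (not a required step in the original example), you can confirm
which branch is now checked out by running either of these standard Git commands from
inside the repository:
<literallayout class='monospaced'>
$ git branch
$ git status
</literallayout>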
|
||||
</para>
|
||||
</section>
|
||||
@@ -171,13 +173,34 @@
|
||||
<filename>poky-extras</filename> Git Repository</link>"
|
||||
for information on how to get the <filename>poky-extras</filename> repository.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Once you have the repository set up,
|
||||
you have many development branches from which you can work.
|
||||
From inside the repository you can see the branch names and the tag names used
|
||||
in the Git repository using either of the following two commands:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd poky/poky-extras
|
||||
$ git branch -a
|
||||
$ git tag -l
|
||||
</literallayout>
|
||||
This example uses the Yocto Project &DISTRO; Release code named "&DISTRO_NAME;",
|
||||
which maps to the <filename>&DISTRO_NAME;</filename> branch in the repository.
|
||||
The following commands create and checkout the local <filename>&DISTRO_NAME;</filename>
|
||||
branch:
|
||||
<literallayout class='monospaced'>
|
||||
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
|
||||
Branch &DISTRO_NAME; set up to track remote branch &DISTRO_NAME; from origin.
|
||||
Switched to a new branch '&DISTRO_NAME;'
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='setting-up-the-bare-clone-and-its-copy'>
|
||||
<title>Setting Up the Bare Clone and its Copy</title>
|
||||
|
||||
<para>
|
||||
This example modifies the <filename>linux-yocto-3.0</filename> kernel.
|
||||
This example modifies the <filename>linux-yocto-3.0-1.1.x</filename> kernel.
|
||||
Thus, you need to create a bare clone of that kernel and then make a copy of the
|
||||
bare clone.
|
||||
See the bulleted item
|
||||
@@ -189,13 +212,14 @@
|
||||
The bare clone exists for the kernel build tools and simply as the receiving end
|
||||
of <filename>git push</filename>
|
||||
commands after you make edits and commits inside the copy of the clone.
|
||||
The copy (<filename>linux-yocto-3.0</filename> in this example) has to have
|
||||
The copy (<filename>my-linux-yocto-3.0-1.1.x-work</filename> in this example) has to have
|
||||
a local branch created and checked out for your work.
|
||||
This example uses <filename>common-pc-base</filename> as the local branch.
|
||||
The following commands create and checkout the branch:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~/linux-yocto-3.0
|
||||
$ cd ~/my-linux-yocto-3.0-1.1.x-work
|
||||
$ git checkout -b common-pc-base origin/yocto/standard/common-pc/base
|
||||
Checking out files: 100% (7289/7289), done.
|
||||
Branch common-pc-base set up to track remote branch
|
||||
yocto/standard/common-pc/base from origin.
|
||||
Switched to a new branch 'common-pc-base'
|
||||
@@ -225,10 +249,8 @@
|
||||
<filename>PARALLEL_MAKE</filename> to twice the number
|
||||
of cores your machine supports.
|
||||
</note>
|
||||
</para>
|
||||
<para>
|
||||
The following two commands build the default <filename>qemux86</filename> image and
|
||||
<filename>source</filename> build environment setup script.
|
||||
The following two commands <filename>source</filename> the build environment setup script
|
||||
and build the default <filename>qemux86</filename> image.
|
||||
If necessary, the script creates the build directory:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~/poky
|
||||
@@ -291,7 +313,7 @@
|
||||
|
||||
<para>
|
||||
The file you change in this example is named <filename>calibrate.c</filename>
|
||||
and is located in the <filename>linux-yocto-3.0</filename> Git repository
|
||||
and is located in the <filename>my-linux-yocto-3.0-1.1.x-work</filename> Git repository
|
||||
(the copy of the bare clone) in <filename>init</filename>.
|
||||
This example simply inserts several <filename>printk</filename> statements
|
||||
at the beginning of the <filename>calibrate_delay</filename> function.
|
||||
@@ -415,13 +437,13 @@
|
||||
<filename>poky-extras/meta-kernel-dev/recipes-kernel/linux</filename>
|
||||
directory, you need to identify the location of the
|
||||
local source code, which in this example is the bare clone named
|
||||
<filename>linux-yocto-3.0.git</filename>.
|
||||
<filename>linux-yocto-3.0-1.1.x.git</filename>.
|
||||
To do this, set the <filename>KSRC_linux_yocto</filename> variable to point to your
|
||||
local <filename>linux-yocto-3.0.git</filename> Git repository by adding the
|
||||
local <filename>linux-yocto-3.0-1.1.x.git</filename> Git repository by adding the
|
||||
following statement.
|
||||
Be sure to substitute your user information in the statement:
|
||||
<literallayout class='monospaced'>
|
||||
KSRC_linux_yocto ?= /home/scottrif/linux-yocto-3.0.git
|
||||
KSRC_linux_yocto ?= /home/scottrif/linux-yocto-3.0-1.1.x.git
|
||||
</literallayout></para></listitem>
|
||||
<listitem><para><emphasis>Specify the Kernel Machine:</emphasis> Also in the
|
||||
<filename>linux-yocto_3.0.bbappend</filename> file, you need to specify
|
||||
@@ -433,14 +455,19 @@
|
||||
</para>
|
||||
|
||||
<note>
|
||||
Before attempting to build the modified kernel, there is one more set of changes you
|
||||
<para>Before attempting to build the modified kernel, there is one more set of changes you
|
||||
need to make in the <filename>meta-kernel-dev</filename> layer.
|
||||
Because all the kernel <filename>.bbappend</filename> files are parsed during the
|
||||
build process regardless of whether you are using them or not, you should either
|
||||
comment out the <filename>COMPATIBLE_MACHINE</filename> statements in all
|
||||
<filename>.bbappend</filename> files, or you should simply remove all the files
|
||||
unused <filename>.bbappend</filename> files, or simply remove (or rename) all the files
|
||||
except the one you are using for the build
|
||||
(i.e. <filename>linux-yocto_3.0.bbappend</filename> in this example).
|
||||
(i.e. <filename>linux-yocto_3.0.bbappend</filename> in this example).</para>
|
||||
<para>If you do not make one of these two adjustments, your machine will be compatible
|
||||
with all the kernel recipes in the <filename>meta-kernel-dev</filename> layer.
|
||||
When your machine is compatible with all the kernel recipes, the build attempts
|
||||
to build all kernels in the layer.
|
||||
You could end up with build errors blocking your work.</para>
|
||||
</note>
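<para>
As a purely hypothetical illustration (the exact variable form and machine name vary from
file to file in the <filename>meta-kernel-dev</filename> layer), commenting out such a
statement in an unused <filename>.bbappend</filename> file might look like this:
<literallayout class='monospaced'>
#COMPATIBLE_MACHINE_crownbay = "crownbay"
</literallayout>
</para>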
|
||||
</section>
|
||||
|
||||
@@ -454,18 +481,21 @@
|
||||
<listitem><para>Your environment should be set up since you previously sourced
|
||||
the <filename>oe-init-build-env</filename> script.
|
||||
If it isn't, source the script again from <filename>poky</filename>.
|
||||
</para></listitem>
|
||||
<listitem><para>Be sure old images are cleaned out by running the
|
||||
<filename>cleanall</filename> BitBake task as follows:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~/poky
|
||||
$ source oe-init-build-env
|
||||
</literallayout>
|
||||
</para></listitem>
|
||||
<listitem><para>Be sure old images are cleaned out by running the
|
||||
<filename>cleanall</filename> BitBake task as follows from your build directory:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -c cleanall linux-yocto
|
||||
</literallayout></para>
|
||||
<para><note>Never remove any files by hand from the <filename>tmp/deploy</filename>
|
||||
directory inside the local Yocto Project files build directory.
|
||||
Always use the BitBake <filename>cleanall</filename> task to clear
|
||||
out previous builds.</note></para></listitem>
|
||||
<listitem><para>Build the kernel image using this command:
|
||||
<listitem><para>Next, build the kernel image using this command:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -k core-image-minimal
|
||||
</literallayout></para></listitem>
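<listitem><para>Optionally (this step is not part of the original procedure), once the build
completes you can boot the rebuilt image in the emulator with a command like the
following and watch the console output for the <filename>printk</filename>
messages you added:
<literallayout class='monospaced'>
$ runqemu qemux86
</literallayout></para></listitem>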
|
||||
@@ -509,46 +539,94 @@
|
||||
in "<link linkend='modifying-the-kernel-source-code'>Modifying the Kernel Source
|
||||
Code</link>" you should already have the Yocto Project files set up on your
|
||||
host machine.
|
||||
If this is the case, go to the next section, which is titled
|
||||
"<link linkend='examining-the-default-config-smp-behavior'>Examining the Default
|
||||
<filename>CONFIG_SMP</filename> Behavior</link>", and continue with the
|
||||
example.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If you don't have the Yocto Project files established on your system,
|
||||
See "<link linkend='setting-up-the-local-yocto-project-files-git-repository'>Setting
|
||||
Up the Local Yocto Project Files Git Repository</link>" for
|
||||
information.
|
||||
To reconfigure the kernel, this is the only Git repository you need to have set up.
|
||||
you can get them through tarball extraction or by
|
||||
cloning the <filename>poky</filename> Git repository.
|
||||
This example uses <filename>poky</filename> as the root directory of the
|
||||
local Yocto Project files Git repository.
|
||||
See the bulleted item
|
||||
"<link linkend='local-yp-release'>Yocto Project Release</link>"
|
||||
for information on how to get these files.
|
||||
</para>
|
||||
|
||||
<!--
|
||||
<para>
|
||||
Once you have the repository set up,
|
||||
you have many development branches from which you can work.
|
||||
From inside the repository you can see the branch names and the tag names used
|
||||
in the Git repository using either of the following two commands:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd poky
|
||||
$ git branch -a
|
||||
$ git tag -l
|
||||
</literallayout>
|
||||
This example uses the Yocto Project &DISTRO; Release code named "&DISTRO_NAME;",
|
||||
which maps to the <filename>&DISTRO_NAME;</filename> branch in the repository.
|
||||
The following commands create and checkout the local <filename>&DISTRO_NAME;</filename>
|
||||
branch:
|
||||
<literallayout class='monospaced'>
|
||||
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
|
||||
Branch &DISTRO_NAME; set up to track remote branch &DISTRO_NAME; from origin.
|
||||
Switched to a new branch '&DISTRO_NAME;'
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If you took the time to work through the example that modifies the kernel source code
|
||||
in "<link linkend='modifying-the-kernel-source-code'>Modifying the Kernel Source
|
||||
Code</link>" you are already set up to quickly work through this example.
|
||||
If not, then work through the following list to prepare:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Understand the development environment:</emphasis>
|
||||
See "<link linkend='understanding-the-files-you-need'>Understanding
|
||||
the Files You Need</link>" for information.</para></listitem>
|
||||
<listitem><para><emphasis>Set up the local Yocto Project files Git
|
||||
repository:</emphasis>
|
||||
See "<link linkend='setting-up-the-local-yocto-project-files-git-repository'>Setting
|
||||
Up the Local Yocto Project Files Git Repository</link>" for
|
||||
information.</para></listitem>
|
||||
<listitem><para><emphasis>Set up the <filename>poky-extras</filename> Git
|
||||
repository:</emphasis>
|
||||
See "<link linkend='setting-up-the-poky-extras-git-repository'>Setting
|
||||
Up <filename>poky-extras</filename> Git repository</link>" for
|
||||
information.</para></listitem>
|
||||
<listitem><para><emphasis>Set up the bare clone and its copy:</emphasis>
|
||||
See "<link linkend='setting-up-the-bare-clone-and-its-copy'>Setting Up the
|
||||
Bare Clone and its Copy</link>" for information.</para></listitem>
|
||||
<listitem><para><emphasis>Build the default QEMU kernel image:</emphasis>
|
||||
See "<link linkend='building-and-booting-the-default-qemu-kernel-image'>Building
|
||||
and Booting the Default QEMU Kernel image</link>" for information.
|
||||
Do not boot the image in the QEMU emulator at this point.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para> -->
|
||||
Next, you need to build the default <filename>qemux86</filename> image that you
|
||||
can boot using QEMU.
|
||||
<note>
|
||||
Because a full build can take hours, you should check two variables in the
|
||||
<filename>build</filename> directory that is created after you source the
|
||||
<filename>oe-init-build-env</filename> script.
|
||||
You can find these variables
|
||||
<filename>BB_NUMBER_THREADS</filename> and <filename>PARALLEL_MAKE</filename>
|
||||
in the <filename>build/conf</filename> directory in the
|
||||
<filename>local.conf</filename> configuration file.
|
||||
By default, these variables are commented out.
|
||||
If your host development system supports multi-core and multi-thread capabilities,
|
||||
you can uncomment these statements and set the variables to significantly shorten
|
||||
the full build time.
|
||||
As a guideline, set <filename>BB_NUMBER_THREADS</filename> to twice the number
|
||||
of cores your machine supports and set <filename>PARALLEL_MAKE</filename> to one and
|
||||
a half times the number of cores your machine supports.
|
||||
</note>
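As an illustration of that guideline only (the values below are hypothetical and assume a
four-core host), the uncommented statements in <filename>local.conf</filename> might read:
<literallayout class='monospaced'>
BB_NUMBER_THREADS = "8"
PARALLEL_MAKE = "-j 6"
</literallayout>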
|
||||
The following two commands <filename>source</filename> the build environment setup script
|
||||
and build the default <filename>qemux86</filename> image.
|
||||
If necessary, the script creates the build directory:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~/poky
|
||||
$ source oe-init-build-env
|
||||
|
||||
### Shell environment set up for builds. ###
|
||||
|
||||
You can now run 'bitbake <target>'
|
||||
|
||||
Common targets are:
|
||||
core-image-minimal
|
||||
core-image-sato
|
||||
meta-toolchain
|
||||
meta-toolchain-sdk
|
||||
adt-installer
|
||||
meta-ide-support
|
||||
|
||||
You can also run generated qemu images with a command like 'runqemu qemux86'
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The following <filename>bitbake</filename> command starts the build:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -k core-image-minimal
|
||||
</literallayout>
|
||||
<note>Be sure to check the settings in the <filename>local.conf</filename>
|
||||
before starting the build.</note>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='examining-the-default-config-smp-behavior'>
|
||||
@@ -578,6 +656,8 @@
|
||||
</para>
|
||||
</section>
|
||||
|
||||
|
||||
|
||||
<section id='changing-the-config-smp-configuration-using-menuconfig'>
|
||||
<title>Changing the <filename>CONFIG_SMP</filename> Configuration Using <filename>menuconfig</filename></title>
|
||||
|
||||
@@ -597,15 +677,24 @@
|
||||
<para>
|
||||
After setting up the environment to run <filename>menuconfig</filename>, you are ready
|
||||
to use the tool to interactively change the kernel configuration.
|
||||
In this example, we are basing our changes on the <filename>linux-yocto-3.0</filename>
|
||||
In this example, we are basing our changes on the <filename>linux-yocto-3.0-1.1.x</filename>
|
||||
kernel.
|
||||
The Yocto Project build environment recognizes this kernel as
|
||||
<filename>linux-yocto</filename>.
|
||||
Thus, the following command from the shell in which you previously sourced the
|
||||
environment initialization script launches <filename>menuconfig</filename>:
|
||||
Thus, the following commands from the shell in which you previously sourced the
|
||||
environment initialization script clean the shared state memory and
|
||||
the <filename>WORKDIR</filename> directory and then build and
|
||||
launch <filename>menuconfig</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake linux-yocto -c cleansstate
|
||||
$ bitbake linux-yocto -c menuconfig
|
||||
</literallayout>
|
||||
<note>Due to a bug in the release, it is necessary to clean the shared state
|
||||
memory in order for configurations made using <filename>menuconfig</filename>
|
||||
to take effect.
|
||||
For information on the bug, see
|
||||
<ulink url='&YOCTO_BUGZILLA_URL;/show_bug.cgi?id=2256'></ulink>.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -623,16 +712,19 @@
|
||||
is updated.
|
||||
This is the file that the build system uses to configure the Linux Yocto kernel
|
||||
when it is built.
|
||||
You can find and examine this file in the Yocto Project files Git repository in
|
||||
You can find and examine this file in the Yocto Project Files Git repository in
|
||||
the build directory.
|
||||
This example uses the following.
|
||||
Note that this example directory is artificially split and many of the characters
|
||||
in the actually filename are omitted in order to make it more
|
||||
readable:
|
||||
This example uses the following:
|
||||
<literallayout class='monospaced'>
|
||||
~/poky/build/tmp/work/qemux86-poky-linux/linux-yocto-2.6.37+git1+84f...
|
||||
...r20/linux-qemux86-standard-build
|
||||
~/poky/build/tmp/work/qemux86-poky-linux/linux-yocto-3.0.10+git1+d38...
|
||||
...3a9ac596f7a-r3/linux-qemux86-standard-build
|
||||
</literallayout>
|
||||
<note>
|
||||
The previous example directory is artificially split and many of the characters
|
||||
in the actual filename are omitted in order to make it more readable.
|
||||
Also, depending on the kernel you are using, the exact pathname might differ
|
||||
slightly.
|
||||
</note>
|
||||
</para>
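<para>
As a quick check that is not part of the original example, once you are in that build
directory you can search the <filename>.config</filename> file directly for the setting:
<literallayout class='monospaced'>
$ grep CONFIG_SMP .config
</literallayout>
</para>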
|
||||
|
||||
<para>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='dev-manual-model'>
|
||||
|
||||
@@ -23,9 +24,8 @@
|
||||
"<link linkend='dev-manual-kernel-appendix'>Kernel Modification Example</link>" appendix.
|
||||
For a user-space application development example that uses the
|
||||
<trademark class='trade'>Eclipse</trademark> IDE,
|
||||
see the
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html'>
|
||||
The Yocto Project Application Development Toolkit (ADT) User's Guide</ulink>.
|
||||
see <ulink url='&YOCTO_DOCS_ADT_URL;'>The Yocto Project Application Development
|
||||
Toolkit (ADT) User's Guide</ulink>.
|
||||
</para>
|
||||
|
||||
<section id='system-development-model'>
|
||||
@@ -50,7 +50,7 @@
|
||||
<title>Developing a Board Support Package (BSP)</title>
|
||||
|
||||
<para>
|
||||
A BSP is a package of recipes that, when applied, during a build results in
|
||||
A BSP is a package of recipes that, when applied during a build, results in
|
||||
an image that you can run on a particular board.
|
||||
Thus, the package, when compiled into the new image, supports the operation of the board.
|
||||
</para>
|
||||
@@ -79,8 +79,9 @@
|
||||
<orderedlist>
|
||||
<listitem><para><emphasis>Set up your host development system to support
|
||||
development using the Yocto Project</emphasis>: See the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#the-linux-distro'>The Linux Distributions</ulink>" and the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#packages'>The Packages</ulink>" sections both
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#the-linux-distro'>The Linux Distributions</ulink>"
|
||||
and the
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#packages'>The Packages</ulink>" sections both
|
||||
in the Yocto Project Quick Start for requirements.</para></listitem>
|
||||
<listitem><para><emphasis>Establish a local copy of the Yocto Project files on your
|
||||
system</emphasis>: You need to have the Yocto Project files available on your host system.
|
||||
@@ -111,13 +112,15 @@
|
||||
Crown Bay that does not support the <trademark class='registered'>Intel</trademark>
|
||||
Embedded Media Graphics Driver (EMGD).
|
||||
The remainder of this example uses that base BSP.</para>
|
||||
<para>To see the supported BSPs, go to the Yocto Project
|
||||
<ulink url='http://www.yoctoproject.org/download'>download page</ulink> and click
|
||||
on “BSP Downloads.”</para></listitem>
|
||||
<para>To see the supported BSPs, go to the
|
||||
<ulink url='&YOCTO_HOME_URL;/download'>Download</ulink> page on the Yocto Project
|
||||
website and click on “BSP Downloads.”</para></listitem>
|
||||
<listitem><para><emphasis>Create your own BSP layer</emphasis>: Layers are ideal for
|
||||
isolating and storing work for a given piece of hardware.
|
||||
A layer is really just a location or area in which you place the recipes for your BSP.
|
||||
A layer is really just a location or area in which you place the recipes for your BSP.
|
||||
In fact, a BSP is, in itself, a special type of layer.
|
||||
</para>
|
||||
<para>
|
||||
Another example that illustrates a layer is an application.
|
||||
Suppose you are creating an application that has library or other dependencies in
|
||||
order for it to compile and run.
|
||||
@@ -137,16 +140,17 @@
|
||||
N450, and Sugar Bay are isolated.</note>
|
||||
<para>When you set up a layer for a new BSP, you should follow a standard layout.
|
||||
This layout is described in the section
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/bsp-guide/bsp-guide.html#bsp-filelayout'>Example Filesystem Layout</ulink>" section of the Board Support Package (BSP) Development Guide.
|
||||
"<ulink url='&YOCTO_DOCS_BSP_URL;#bsp-filelayout'>Example Filesystem Layout</ulink>"
|
||||
section of the Board Support Package (BSP) Development Guide.
|
||||
In the standard layout, you will notice a suggested structure for recipes and
|
||||
configuration information.
|
||||
You can see the standard layout for the Crown Bay BSP in this example by examining the
|
||||
directory structure of the <filename>meta-crownbay</filename> layer inside the
|
||||
local Yocto Project files.</para></listitem>
|
||||
<listitem><para><emphasis>Make configuration changes to your new BSP
|
||||
layer</emphasis>: The standard BSP layer structure organizes the files you need to edit in
|
||||
<filename>conf</filename> and several <filename>recipes-*</filename> directories within the
|
||||
BSP layer.
|
||||
layer</emphasis>: The standard BSP layer structure organizes the files you need
|
||||
to edit in <filename>conf</filename> and several <filename>recipes-*</filename>
|
||||
directories within the BSP layer.
|
||||
Configuration changes identify where your new layer is on the local system
|
||||
and identify which kernel you are going to use.
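As a purely illustrative sketch (the layer path, layer name, and kernel provider shown
here are hypothetical), those changes often amount to lines such as the following in the
build directory's <filename>conf/bblayers.conf</filename> file and in the machine
configuration file inside your new layer:
<literallayout class='monospaced'>
# conf/bblayers.conf: tell the build system where the new layer lives
BBLAYERS += "/path/to/meta-mymachine"

# conf/machine/mymachine.conf: identify the kernel to use
PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
</literallayout>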
|
||||
</para></listitem>
|
||||
@@ -160,7 +164,8 @@
|
||||
You need to get the build environment ready by sourcing an environment setup script
|
||||
and you need to be sure two key configuration files are configured appropriately.</para>
|
||||
<para>The entire process for building an image is overviewed in the section
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#building-image'>Building an Image</ulink>" section of the Yocto Project Quick Start.
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#building-image'>Building an Image</ulink>" section
|
||||
of the Yocto Project Quick Start.
|
||||
You might want to reference this information.</para></listitem>
|
||||
<listitem><para><emphasis>Build the image</emphasis>: The Yocto Project uses the BitBake
|
||||
tool to build images based on the type of image you want to create.
|
||||
@@ -168,9 +173,9 @@
|
||||
<ulink url='http://bitbake.berlios.de/manual/'>here</ulink>.</para>
|
||||
<para>The build process supports several types of images to satisfy different needs.
|
||||
See the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html#ref-images'>Reference: Images</ulink>" appendix in the
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'>
|
||||
Yocto Project Reference Manual</ulink>for information on supported images.</para></listitem>
|
||||
"<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Reference: Images</ulink>" appendix
|
||||
in The Yocto Project Reference Manual for information on
|
||||
supported images.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
@@ -178,10 +183,10 @@
|
||||
You can view a video presentation on "Building Custom Embedded Images with Yocto"
|
||||
at <ulink url='http://free-electrons.com/blog/elc-2011-videos'>Free Electrons</ulink>.
|
||||
You can also find supplemental information in
|
||||
<ulink url='http://yoctoproject.org/docs/latest/bsp-guide/bsp-guide.html'>
|
||||
<ulink url='&YOCTO_DOCS_BSP_URL;'>
|
||||
The Board Support Package (BSP) Development Guide</ulink>.
|
||||
Finally, there is a wiki page write-up of the example, also located
|
||||
<ulink url='https://wiki.yoctoproject.org/wiki/Transcript:_creating_one_generic_Atom_BSP_from_another'>
|
||||
<ulink url='&YOCTO_WIKI_URL;/wiki/Transcript:_creating_one_generic_Atom_BSP_from_another'>
|
||||
here</ulink> that you might find helpful.
|
||||
</para>
|
||||
</section>
|
||||
@@ -201,7 +206,7 @@
|
||||
The remainder of this section presents a high-level overview of the Linux Yocto
|
||||
kernel architecture and the steps to modify the Linux Yocto kernel.
|
||||
For a complete discussion of the kernel, see
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/kernel-manual/kernel-manual.html'>
|
||||
<ulink url='&YOCTO_DOCS_KERNEL_URL;'>
|
||||
The Yocto Project Kernel Architecture and Use Manual</ulink>.
|
||||
You can reference the appendix
|
||||
"<link linkend='dev-manual-kernel-appendix'>Kernel Modification Example</link>"
|
||||
@@ -221,18 +226,24 @@
|
||||
|
||||
<para>
|
||||
You can find a web interface to the Linux Yocto kernel source repositories at
|
||||
<ulink url='http://git.yoctoproject.org/'></ulink>.
|
||||
<ulink url='&YOCTO_GIT_URL;'></ulink>.
|
||||
If you look at the interface, you will see to the left a grouping of
|
||||
Git repositories titled "Yocto Linux Kernel."
|
||||
Within this group, you will find the four different kernels supported by
|
||||
Within this group, you will find several kernels supported by
|
||||
the Yocto Project:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><filename>linux-yocto-2.6.34</filename></emphasis> - The
|
||||
stable Linux Yocto kernel that is based on the Linux 2.6.34 release.</para></listitem>
|
||||
<listitem><para><emphasis><filename>linux-yocto-2.6.37</filename></emphasis> - The
|
||||
stable Linux Yocto kernel that is based on the Linux 2.6.37 release.</para></listitem>
|
||||
<listitem><para><emphasis><filename>linux-yocto-3.0</filename></emphasis> - The current
|
||||
<listitem><para><emphasis><filename>linux-yocto-3.0</filename></emphasis> - The stable
|
||||
Linux Yocto kernel that is based on the Linux 3.0 release.</para></listitem>
|
||||
<listitem><para><emphasis><filename>linux-yocto-3.0-1.1.x</filename></emphasis> - The
|
||||
stable Linux Yocto kernel to use with the Yocto Project Release 1.1.x. This kernel
|
||||
is based on the Linux 3.0 release.</para></listitem>
|
||||
<listitem><para><emphasis><filename>linux-yocto-3.2</filename></emphasis> - The
|
||||
stable Linux Yocto kernel to use with the Yocto Project Release 1.2. This kernel
|
||||
is based on the Linux 3.2 release.</para></listitem>
|
||||
<listitem><para><emphasis><filename>linux-yocto-dev</filename></emphasis> - A development
|
||||
kernel based on the latest upstream release candidate available.</para></listitem>
|
||||
</itemizedlist>
|
||||
@@ -274,7 +285,7 @@
|
||||
</para>
|
||||
|
||||
<note>
|
||||
Keep in mind the figure does not take into account all four supported Linux Yocto
|
||||
Keep in mind the figure does not take into account all the supported Linux Yocto
|
||||
kernel types, but rather shows a single generic kernel just for conceptual purposes.
|
||||
Also keep in mind that this structure represents the Yocto Project source repositories
|
||||
that are either pulled from during the build or established on the host development system
|
||||
@@ -311,7 +322,7 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
<imagedata fileref="figures/kernel-overview-3.png"
|
||||
<imagedata fileref="figures/kernel-overview-3-edison.png"
|
||||
width="6in" depth="4in" align="center" scale="100" />
|
||||
</para>
|
||||
|
||||
@@ -349,11 +360,11 @@
|
||||
<para>
|
||||
Again, for a complete discussion of the Yocto Project kernel's architecture and its
|
||||
branching strategy,
|
||||
see the <ulink url='http://www.yoctoproject.org/docs/latest/kernel-manual/kernel-manual.html'>
|
||||
see <ulink url='&YOCTO_DOCS_KERNEL_URL;'>
|
||||
The Yocto Project Kernel Architecture and Use Manual</ulink>.
|
||||
Also, you can reference
|
||||
<xref linkend='modifying-the-kernel-source-code'>Modifying the Kernel Source Code</xref>
|
||||
for a detailed example that modifies the kernel.
|
||||
You can also reference the
|
||||
"<link linkend='modifying-the-kernel-source-code'>Modifying the Kernel Source Code</link>"
|
||||
section for a detailed example that modifies the kernel.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
@@ -373,8 +384,8 @@
|
||||
<orderedlist>
|
||||
<listitem><para><emphasis>Set up your host development system to support
|
||||
development using the Yocto Project</emphasis>: See
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#the-linux-distro'>The Linux Distributions</ulink>" and
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#packages'>The Packages</ulink>" sections both
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#the-linux-distro'>The Linux Distributions</ulink>" and
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#packages'>The Packages</ulink>" sections both
|
||||
in the Yocto Project Quick Start for requirements.</para></listitem>
|
||||
<listitem><para><emphasis>Establish a local copy of the Yocto Project files on your
|
||||
system</emphasis>: Having the Yocto Project files on your system gives you access to
|
||||
@@ -431,8 +442,8 @@
|
||||
<para>Once you are satisfied with the configuration changes made using
|
||||
<filename>menuconfig</filename>, you can directly examine the
|
||||
<filename>.config</filename> file against a saved original and gather those
|
||||
changes into a config fragment to be placed inside a
|
||||
<filename>.bbappend</filename></para></listitem>
|
||||
changes into a config fragment to be referenced from within the kernel's
|
||||
<filename>.bbappend</filename> file.</para></listitem>
|
||||
<listitem><para><emphasis>Add or extend kernel recipes if applicable</emphasis>:
|
||||
The standard
|
||||
layer structure organizes recipe files inside the
|
||||
@@ -446,14 +457,15 @@
|
||||
<listitem><para><emphasis>Prepare for the build</emphasis>: Once you have made all the
|
||||
changes to your kernel (configurations, source code changes, recipe additions,
|
||||
or recipe changes), there remain a few things
|
||||
you need to do in order for the Yocto Project build system to create your image.
|
||||
you need to do in order for the Yocto Project build system (BitBake) to create your image.
|
||||
If you have not done so, you need to get the build environment ready by sourcing
|
||||
the environment setup script described earlier.
|
||||
You also need to be sure two key configuration files
|
||||
(<filename>local.conf</filename> and <filename>bblayers.conf</filename>)
|
||||
are configured appropriately.</para>
|
||||
<para>The entire process for building an image is overviewed in the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#building-image'>Building an Image</ulink>" section of the Yocto Project Quick Start.
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#building-image'>Building an Image</ulink>"
|
||||
section of the Yocto Project Quick Start.
|
||||
You might want to reference this information.
|
||||
Also, you should look at the detailed examples found in the appendices
|
||||
at the end of this manual.</para></listitem>
|
||||
@@ -464,10 +476,8 @@
|
||||
<ulink url='http://bitbake.berlios.de/manual/'>here</ulink>.</para>
|
||||
<para>The build process supports several types of images to satisfy different needs.
|
||||
See the appendix
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html#ref-images'>Reference: Images</ulink>" in the
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'>
|
||||
Yocto Project Reference Manual</ulink> for information on supported
|
||||
images.</para></listitem>
|
||||
"<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Reference: Images</ulink>" in
|
||||
The Yocto Project Reference Manual for information on supported images.</para></listitem>
|
||||
<listitem><para><emphasis>Make your configuration changes available
|
||||
in the kernel layer</emphasis>: Up to this point, all the configuration changes to the
|
||||
kernel have been done and tested iteratively.
|
||||
@@ -517,7 +527,7 @@
|
||||
provides an overview of the general development process.
|
||||
If you want to see a detailed example of the process as it is used from within the Eclipse
|
||||
IDE, see
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html'>
|
||||
<ulink url='&YOCTO_DOCS_ADT_URL;'>
|
||||
The Application Development Toolkit (ADT) User's Manual</ulink>.
|
||||
</para>
|
||||
|
||||
@@ -534,8 +544,8 @@
|
||||
<orderedlist>
|
||||
<listitem><para><emphasis>Prepare the Host System for the Yocto Project</emphasis>:
|
||||
See
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#the-linux-distro'>The Linux Distributions</ulink>" and
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#packages'>The Packages</ulink>" sections both
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#the-linux-distro'>The Linux Distributions</ulink>" and
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#packages'>The Packages</ulink>" sections both
|
||||
in the Yocto Project Quick Start for requirements.</para></listitem>
|
||||
|
||||
<!--
|
||||
@@ -560,15 +570,15 @@ WRITER NOTE: The areas to get the kernel and root filesystem are located in the
|
||||
You must have a target kernel image that has been built using the Yocto Project.</para>
|
||||
<para>Depending on whether the Yocto Project has a pre-built image that matches your target
|
||||
architecture and where you are going to run the image while you develop your application
|
||||
(QEMU or real hardware), the area you get the image from differs.
|
||||
(QEMU or real hardware), the area from which you get the image differs.
|
||||
<itemizedlist>
|
||||
<listitem><para>Download the image from
|
||||
<ulink url='http://www.yoctoproject.org/downloads/yocto-1.1/machines/'>
|
||||
<ulink url='&YOCTO_MACHINES_DL_URL;'>
|
||||
<filename>machines</filename></ulink> if your target architecture is supported
|
||||
and you are going to develop and test your application on actual hardware.
|
||||
</para></listitem>
|
||||
<listitem><para>Download the image from the
|
||||
<ulink url='http://www.yoctoproject.org/downloads/yocto-1.1/machines/qemu/'>
|
||||
<ulink url='&YOCTO_QEMU_DL_URL;'>
|
||||
<filename>machines/qemu</filename></ulink> if your target architecture is supported
|
||||
and you are going to develop and test your application using the QEMU
|
||||
emulator.</para></listitem>
|
||||
@@ -583,10 +593,8 @@ WRITER NOTE: The areas to get the kernel and root filesystem are located in the
|
||||
</itemizedlist></para>
|
||||
<para>For information on pre-built kernel image naming schemes for images
|
||||
that can run on the QEMU emulator, see the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#using-pre-built'>Using Pre-Built Binaries and QEMU</ulink>"
|
||||
section in
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html'>
|
||||
The Yocto Project Quick Start</ulink>.</para></listitem>
|
||||
"<ulink url='&YOCTO_DOCS_QS_URL;#using-pre-built'>Using Pre-Built Binaries and QEMU</ulink>"
|
||||
section in the Yocto Project Quick Start.</para></listitem>
|
||||
<listitem><para><emphasis>Install the ADT</emphasis>:
|
||||
The ADT provides a target-specific cross-development toolchain, the root filesystem,
|
||||
the QEMU emulator, and other tools that can help you develop your application.
|
||||
@@ -594,9 +602,9 @@ WRITER NOTE: The areas to get the kernel and root filesystem are located in the
|
||||
easy method.
|
||||
You can get these pieces by running an ADT installer script, which is configurable.
|
||||
For information on how to install the ADT, see the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html#using-the-adt-installer'>Using the ADT Installer</ulink>" section in
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html'>The Yocto Project
|
||||
Application Development (ADT) User's Manual</ulink>.</para></listitem>
|
||||
"<ulink url='&YOCTO_DOCS_ADT_URL;#using-the-adt-installer'>Using the ADT Installer</ulink>"
|
||||
section
|
||||
in the Yocto Project Application Development (ADT) User's Manual.</para></listitem>
|
||||
<listitem><para><emphasis>If Applicable, Secure the Target Root Filesystem</emphasis>:
|
||||
If you choose not to install the ADT using the ADT Installer,
|
||||
you need to find and download the
|
||||
@@ -640,14 +648,14 @@ WRITER NOTE: The areas to get the kernel and root filesystem are located in the
|
||||
<orderedlist>
|
||||
<listitem><para><emphasis>Install the cross-development toolchain for your target hardware:</emphasis>
|
||||
For information on how to install the toolchain, see the
|
||||
"<ulink url='http://www.yoctoproject/docs/1.1/adt-manual/adt-manual.html#using-an-existing-toolchain-tarball'>Using a Cross-Toolchain Tarball</ulink>" section in
|
||||
<ulink url='http://www.yoctoproject/docs/1.1/adt-manual/adt-manual.html'>The Yocto Project
|
||||
Application Development (ADT) User's Manual</ulink>.</para></listitem>
|
||||
"<ulink url='&YOCTO_DOCS_ADT_URL;#using-an-existing-toolchain-tarball'>Using a Cross-Toolchain Tarball</ulink>"
|
||||
section
|
||||
in the Yocto Project Application Development (ADT) User's Manual.</para></listitem>
|
||||
<listitem><para><emphasis>Download the Target Image:</emphasis> The Yocto Project supports
|
||||
several target architectures and has many pre-built kernel images and root filesystem
|
||||
images.</para>
|
||||
<para>If you are going to develop your application on hardware, go to the
|
||||
<ulink url='http://www.yoctoproject.org/downloads/yocto-1.1/machines/'>
|
||||
<ulink url='&YOCTO_MACHINES_DL_URL;'>
|
||||
<filename>machines</filename></ulink> download area and choose a target machine area
|
||||
from which to download the kernel image and root filesystem.
|
||||
This download area could have several files in it that support development using
|
||||
@@ -657,7 +665,7 @@ WRITER NOTE: The areas to get the kernel and root filesystem are located in the
|
||||
Be sure to get the files you need for your particular development process.</para>
|
||||
<para>If you are going to develop your application and then run and test it using the QEMU
|
||||
emulator, go to the
|
||||
<ulink url='http://www.yoctoproject.org/downloads/yocto-1.1/machines/qemu/'>
|
||||
<ulink url='&YOCTO_QEMU_DL_URL;'>
|
||||
<filename>machines/qemu</filename></ulink> download area.
|
||||
From this area, go down into the directory for your target architecture
|
||||
(e.g. <filename>qemux86_64</filename> for an
|
||||
@@ -665,7 +673,7 @@ WRITER NOTE: The areas to get the kernel and root filesystem are located in the
|
||||
Download kernel, root filesystem, and any other files you need for your process.
|
||||
<note>In order to use the root filesystem in QEMU, you need to extract it.
|
||||
See the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html#extracting-the-root-filesystem'>Extracting the Root Filesystem</ulink>" section for information on how to extract the
|
||||
"<ulink url='&YOCTO_DOCS_ADT_URL;#extracting-the-root-filesystem'>Extracting the Root Filesystem</ulink>" section for information on how to extract the
|
||||
root filesystem.</note></para></listitem>
|
||||
<listitem><para><emphasis>Develop and Test your Application:</emphasis> At this point,
|
||||
you have the tools to develop your application.
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='dev-manual-newbie'>
|
||||
|
||||
@@ -58,7 +59,7 @@
|
||||
|
||||
<para>
|
||||
The Yocto Project team maintains complete source repositories for all Yocto Project files
|
||||
<ulink url='http://git.yoctoproject.org/cgit/cgit.cgi'>here</ulink>.
|
||||
at <ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi'></ulink>.
|
||||
This web-based source code browser is organized into categories by function such as
|
||||
IDE Plugins, Matchbox, Poky, Yocto Linux Kernel, and so forth.
|
||||
From the interface, you can click on any particular item in the "Name" column and
|
||||
@@ -79,7 +80,7 @@
|
||||
|
||||
<para>
|
||||
For any supported release of Yocto Project, you can go to the Yocto Project website’s
|
||||
<ulink url='http://www.yoctoproject.org/download'>download page</ulink> and get a
|
||||
<ulink url='&YOCTO_HOME_URL;/download'>download page</ulink> and get a
|
||||
tarball of the release.
|
||||
You can also go to this site to download any supported BSP tarballs.
|
||||
Unpacking the tarball gives you a hierarchical directory structure of Yocto Project
|
||||
@@ -94,15 +95,15 @@
|
||||
<para>
|
||||
In summary, here is where you can get the Yocto Project files needed for development:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><ulink url='http://git.yoctoproject.org/cgit/cgit.cgi'>Source Repositories:</ulink></emphasis>
|
||||
<listitem><para><emphasis><ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi'>Source Repositories:</ulink></emphasis>
|
||||
This area contains IDE Plugins, Matchbox, Poky, Poky Support, Tools, Yocto Linux Kernel, and Yocto
|
||||
Metadata Layers.
|
||||
You can create Git repositories for each of these areas.</para>
|
||||
<para>
|
||||
<imagedata fileref="figures/source-repos.png" align="center" width="6in" depth="4in" />
|
||||
</para></listitem>
|
||||
<listitem><para><anchor id='index-downloads' /><emphasis><ulink url='http://downloads.yoctoproject.org/releases/'>Index of /releases:</ulink></emphasis>
|
||||
This area contains an index of downloads such as
|
||||
<listitem><para><anchor id='index-downloads' /><emphasis><ulink url='&YOCTO_DL_URL;/releases/'>Index of /releases:</ulink></emphasis>
|
||||
This area contains an index of releases such as
|
||||
the <trademark class='trade'>Eclipse</trademark>
|
||||
Yocto Plug-in, miscellaneous support, Poky, pseudo, cross-development toolchains,
|
||||
and all released versions of Yocto Project in the form of images or tarballs.
|
||||
@@ -111,11 +112,11 @@
|
||||
<para>
|
||||
<imagedata fileref="figures/index-downloads.png" align="center" width="6in" depth="4in" />
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><ulink url='http://www.yoctoproject.org/download'>Yocto Project Download Page</ulink></emphasis>
|
||||
<listitem><para><emphasis><ulink url='&YOCTO_HOME_URL;/download'>Yocto Project Download Page</ulink></emphasis>
|
||||
This page on the Yocto Project website allows you to download any Yocto Project
|
||||
release or Board Support Package (BSP) in tarball form.
|
||||
The tarballs are similar to those found in the
|
||||
<ulink url='http://downloads.yoctoproject.org/releases/'>Index of /releases:</ulink> area.</para>
|
||||
<ulink url='&YOCTO_DL_URL;/releases/'>Index of /releases:</ulink> area.</para>
|
||||
<para>
|
||||
<imagedata fileref="figures/yp-download.png" align="center" width="6in" depth="4in" />
|
||||
</para></listitem>
|
||||
@@ -133,7 +134,7 @@
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Append Files:</emphasis> Files that append build information to
|
||||
a recipe file.
|
||||
Information in append files overrides the information in the similarly-named recipe file.
|
||||
Information in append files overrides the information in the similarly-named recipe file.
|
||||
Append files use the <filename>.bbappend</filename> filename suffix.</para></listitem>
|
||||
<listitem><para><emphasis>BitBake:</emphasis> The task executor and scheduler used by
|
||||
the Yocto Project to build images.
|
||||
@@ -143,12 +144,12 @@
|
||||
and inheritance allowing commonly used patterns to be defined once and easily used
|
||||
in multiple recipes.
|
||||
Class files end with the <filename>.bbclass</filename> filename extension.</para></listitem>
|
||||
<listitem><para><emphasis>Configuration File:</emphasis> Configuration information in various
|
||||
<filename>.conf</filename> files provides global definitions of variables.
|
||||
The <filename>conf/local.conf</filename> configuration file in the Yocto Project
|
||||
build directory contains user-defined variables that affect each build.
|
||||
The <filename>meta-yocto/conf/distro/poky.conf</filename> configuration file
|
||||
defines Yocto ‘distro’ configuration
|
||||
<listitem><para><emphasis>Configuration File:</emphasis> Configuration information in the
|
||||
<filename>.conf</filename> files provides global definitions of variables.
|
||||
The <filename>conf/local.conf</filename> configuration file in the Yocto Project
|
||||
build directory defines user-defined variables that affect each build.
|
||||
The <filename>distro/poky.conf</filename> configuration file also in the
|
||||
build directory defines Yocto ‘distro’ configuration
|
||||
variables used only when building with this policy.
|
||||
Machine configuration files, which
|
||||
are located throughout the Yocto Project file structure, define
|
||||
@@ -159,7 +160,7 @@
|
||||
<listitem><para><emphasis>Cross-Development Toolchain:</emphasis> A collection of software development
|
||||
tools and utilities that allow you to develop software for targeted architectures.
|
||||
This toolchain contains cross-compilers, linkers, and debuggers that are specific to
|
||||
an architecture.
|
||||
an architecture.
|
||||
You can use the Yocto Project to build cross-development toolchains in tarball form that when
|
||||
unpacked contain the development tools you need to cross-compile and test your software.
|
||||
The Yocto Project ships with images that contain toolchains for supported architectures
|
||||
@@ -170,10 +171,8 @@
|
||||
Images are the binary output that runs on specific hardware and for specific
|
||||
use cases.
|
||||
For a list of the supported image types that the Yocto Project provides, see the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html#ref-images'>Reference: Images</ulink>"
|
||||
appendix in
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'>
|
||||
The Yocto Project Reference Manual</ulink>.</para></listitem>
|
||||
"<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Reference: Images</ulink>"
|
||||
appendix in the Yocto Project Reference Manual.</para></listitem>
|
||||
<listitem><para><emphasis>Layer:</emphasis> A collection of recipes representing the core,
|
||||
a BSP, or an application stack.</para></listitem>
|
||||
<listitem><para><emphasis>Metadata:</emphasis> The files that BitBake parses when building an image.
|
||||
@@ -216,14 +215,14 @@
|
||||
system in order to do any development using the Yocto Project.</para>
|
||||
<para>The name of the top-level directory of the Yocto Project file structure
|
||||
is derived from the Yocto Project release tarball.
|
||||
For example, downloading and unpacking <filename>poky-edison-6.0.tar.bz2</filename>
|
||||
For example, downloading and unpacking <filename>&YOCTO_POKY_TARBALL;</filename>
|
||||
results in a Yocto Project file structure whose Yocto Project source directory is named
|
||||
<filename>poky-edison-6.0</filename>.
|
||||
<filename>&YOCTO_POKY;</filename>.
|
||||
If you create a Git repository, then you can name the repository anything you like.</para>
|
||||
<para>You can find instructions on how to set up the Yocto Project files on your
|
||||
host development system by reading
|
||||
the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/dev-manual/dev-manual.html#getting-setup'>Getting
|
||||
"<ulink url='&YOCTO_DOCS_DEV_URL;#getting-setup'>Getting
|
||||
Setup</ulink>" section.</para></listitem>
|
||||
<listitem><para><emphasis>Yocto Project Build Directory:</emphasis>
|
||||
This term refers to the area used by the Yocto Project for builds.
|
||||
@@ -233,9 +232,9 @@
|
||||
You can create the Yocto Project build directory anywhere you want on your
|
||||
development system.
|
||||
Here is an example that creates the directory in <filename>mybuilds</filename>
|
||||
and names the Yocto Project build directory <filename>YP-6.0</filename>:
|
||||
and names the Yocto Project build directory <filename>YP-&POKYVERSION;</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ source poky-edison-6.0/oe-init-build-env $HOME/mybuilds/YP-6.0
|
||||
$ source &OE_INIT_PATH; $HOME/mybuilds/YP-&POKYVERSION;
|
||||
</literallayout>
|
||||
If you don't specifically name the directory, BitBake creates it
|
||||
in the current directory and uses the name <filename>build</filename>.
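For example (shown only as an illustration), sourcing the script without specifying a
build directory creates and uses a directory named <filename>build</filename> under your
current working directory:
<literallayout class='monospaced'>
$ source &OE_INIT_PATH;
</literallayout>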
|
||||
@@ -305,7 +304,7 @@
|
||||
|
||||
<para>
|
||||
You can find a list of the combined SPDX and OSI licenses that the Yocto Project uses
|
||||
<ulink url='http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/meta/files/common-licenses'>here</ulink>.
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi/poky/tree/meta/files/common-licenses'>here</ulink>.
|
||||
This wiki page discusses the license infrastructure used by the Yocto Project.
|
||||
</para>
|
||||
</section>
|
||||
@@ -316,7 +315,10 @@
|
||||
<para>
|
||||
The Yocto Project uses Git, which is a free, open source distributed version control system.
|
||||
Git supports distributed development, non-linear development, and can handle large projects.
|
||||
It is best that you know how to work with Git if you are going to use Yocto Project for development.
|
||||
It is best that you have some fundamental understanding of how Git tracks projects and
|
||||
how to work with Git if you are going to use Yocto Project for development.
|
||||
This section provides a quick overview of how Git works and provides you with a summary
|
||||
of some essential Git commands.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -369,7 +371,7 @@
|
||||
will allow the change, and for ultimately pushing the change from your local Git repository
|
||||
into the project’s upstream (or master) repository.</para></listitem>
|
||||
<listitem><para><emphasis><filename>git status</filename>:</emphasis> Reports any modified files that
|
||||
possibly need to be added and committed.</para></listitem>
|
||||
possibly need to be added and committed.</para></listitem>
|
||||
<listitem><para><emphasis><filename>git checkout <branch-name></filename>:</emphasis> Changes
|
||||
your working branch.
|
||||
This command is analogous to “cd”.</para></listitem>
|
||||
@@ -486,8 +488,8 @@
|
||||
While each development environment is unique, there are some best practices or methods
|
||||
that help development run smoothly.
|
||||
The following list describes some of these practices.
|
||||
For more detailed information about these strategies see
|
||||
<ulink url='http://www.kernel.org/pub/software/scm/git/docs/gitworkflows.html'>Git Workflows</ulink>.
|
||||
For more information about Git workflows, see the workflow topics in the
|
||||
<ulink url='http://book.git-scm.com'>Git Community Book</ulink>.
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Make Small Changes:</emphasis> It is best to keep the changes you commit
|
||||
small as compared to bundling many disparate changes into a single commit.
|
||||
@@ -551,7 +553,7 @@
|
||||
changes, can be used to communicate changes and problems with developers, can be used to
|
||||
submit and review patches, and can be used to manage quality assurance.
|
||||
The home page for the Yocto Project implementation of Bugzilla is
|
||||
<ulink url='http://bugzilla.yoctoproject.org'>http://bugzilla.yoctoproject.org</ulink>.
|
||||
<ulink url='&YOCTO_BUGZILLA_URL;'>&YOCTO_BUGZILLA_URL;</ulink>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -562,7 +564,7 @@
|
||||
Bugzilla.
|
||||
You can find more information on defect management, bug tracking, and feature request
|
||||
processes all accomplished through the Yocto Project Bugzilla on the wiki page
|
||||
<ulink url='https://wiki.yoctoproject.org/wiki/Bugzilla_Configuration_and_Bug_Tracking'>here</ulink>.
|
||||
<ulink url='&YOCTO_WIKI_URL;/wiki/Bugzilla_Configuration_and_Bug_Tracking'>here</ulink>.
|
||||
<orderedlist>
|
||||
<listitem><para>Always use the Yocto Project implementation of Bugzilla to submit
|
||||
a bug.</para></listitem>
|
||||
@@ -605,12 +607,13 @@
|
||||
|
||||
<para>
|
||||
Contributions to the Yocto Project are very welcome.
|
||||
Because the Yocto Project is extremely configurable and flexible, we recognize that developers
|
||||
will want to extend, configure or optimize it for their specific uses.
|
||||
You should send patches to the appropriate Yocto Project mailing list to get them
|
||||
in front of the Yocto Project Maintainer.
|
||||
For a list of the Yocto Project mailing lists, see the
|
||||
"<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html#resources-mailinglist'>Mailing lists</ulink>" section in
|
||||
<ulink url='http://www.yoctoproject.org/docs/latest/poky-ref-manual/poky-ref-manual.html'> The
|
||||
Yocto Project Reference Manual</ulink>.
|
||||
"<ulink url='&YOCTO_DOCS_REF_URL;#resources-mailinglist'>Mailing lists</ulink>" section in
|
||||
The Yocto Project Reference Manual.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -618,14 +621,14 @@
|
||||
<itemizedlist>
|
||||
<listitem><para>For defects against the Yocto Project build system Poky, send
|
||||
your patch to the
|
||||
<ulink url='http://lists.yoctoproject.org/listinfo/poky'></ulink> mailing list.
|
||||
<ulink url='&YOCTO_LISTS_URL;/listinfo/poky'></ulink> mailing list.
|
||||
This mailing list corresponds to issues that are not specific to the Yocto Project but
|
||||
are part of the OE-core.
|
||||
For example, a defect against anything in the <filename>meta</filename> layer
|
||||
or the BitBake Manual could be sent to this mailing list.</para></listitem>
|
||||
<listitem><para>For defects against Yocto-specific layers, tools, and Yocto Project
|
||||
documentation use the
|
||||
<ulink url='http://lists.yoctoproject.org/listinfo/yocto'></ulink> mailing list.
|
||||
<ulink url='&YOCTO_LISTS_URL;/listinfo/yocto'></ulink> mailing list.
|
||||
This mailing list corresponds to Yocto-specific areas such as
|
||||
<filename>meta-yocto</filename>, <filename>meta-intel</filename>,
|
||||
<filename>linux-yocto</filename>, and <filename>documentation</filename>.</para></listitem>
|
||||
@@ -675,7 +678,10 @@
<para>
In a collaborative environment, it is necessary to have some sort of standard
or method through which you submit changes.
Otherwise, things could get quite chaotic.
One general practice to follow is to make small, controlled changes to the
Yocto Project.
Keeping changes small and isolated lets you best keep pace with future Yocto Project changes.
</para>

<para>
@@ -713,7 +719,7 @@
<para>
You can find more guidance on creating well-formed commit messages at this OpenEmbedded
wiki page:
<ulink url='http://www.openembedded.org/wiki/Commit_Patch_Message_Guidelines'></ulink>.
<ulink url='&OE_HOME_URL;/wiki/Commit_Patch_Message_Guidelines'></ulink>.
</para>
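As a sketch of what a well-formed commit message can look like (the component name, summary, and author details below are made up for illustration; the wiki page above defines the actual conventions):

$ git commit -s    # -s appends your Signed-off-by line
$ # In the editor, a message of this general shape is typical:
$ #
$ #   example-recipe: fix build failure with newer gcc
$ #
$ #   One or two sentences explaining what was wrong and why this
$ #   change fixes it.
$ #
$ #   Signed-off-by: Your Name <you@example.com>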
<para>
@@ -751,9 +757,8 @@
</para>

<para>
You can find general Git information on how to push a change upstream
<ulink url='http://www.kernel.org/pub/software/scm/git/docs/user-manual.html#Developing-With-git'>
here</ulink>.
You can find general Git information on how to push a change upstream in the
<ulink url='http://book.git-scm.com/3_distributed_workflows.html'>Git Community Book</ulink>.
</para>
</section>

@@ -778,7 +783,7 @@
See the earlier section
"<link linkend='how-to-submit-a-change'>How to Submit a Change</link>"
for Yocto Project commit message standards.</para></listitem>
<listitem><para>Format the commit into an email messsage.
<listitem><para>Format the commit into an email message.
To format commits, use the <filename>git format-patch</filename> command.
When you provide the command, you must include a revision list or a number of patches
as part of the command.
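For instance (a sketch only; the revision list shown assumes your work sits on a branch ahead of origin/master), either form produces numbered .patch files ready to be mailed as described earlier:

$ git format-patch origin/master     # one .patch file per commit not yet upstream
$ git format-patch -1                # or just the most recent commit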
@@ -1,5 +1,6 @@
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >

<chapter id='dev-manual-start'>
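The internal subset added to the DOCTYPE pulls the entity definitions in poky.ent into the document, which is what makes references such as &YOCTO_HOME_URL; and &DISTRO; in the changes above resolve at build time. A fragment of that file might look roughly like this (the exact entities and values vary by release; the values shown are illustrative):

$ head poky.ent
<!ENTITY DISTRO "1.1.2">
<!ENTITY YOCTO_HOME_URL "http://www.yoctoproject.org">
<!ENTITY YOCTO_BUGZILLA_URL "http://bugzilla.yoctoproject.org">
<!ENTITY YOCTO_WIKI_URL "https://wiki.yoctoproject.org">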
@@ -9,7 +10,7 @@
This chapter introduces the Yocto Project and gives you an idea of what you need to get started.
You can find enough information to set up your development host and build or use images for
hardware supported by the Yocto Project by reading
<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html'>
<ulink url='&YOCTO_DOCS_QS_URL;'>
The Yocto Project Quick Start</ulink>.
</para>

@@ -30,7 +31,8 @@
</para>

<para>
You can use the Yocto Project, which uses the BitBake build tool, to develop complete Linux
You can use the Yocto Project build system, which uses
<ulink url='http://bitbake.berlios.de/manual/'>BitBake</ulink>, to develop complete Linux
images and associated user-space applications for architectures based on ARM, MIPS, PowerPC,
x86 and x86-64.
While the Yocto Project does not provide a strict testing framework,
@@ -57,7 +59,7 @@
</para></listitem>
<listitem><para><emphasis>Packages:</emphasis> The Yocto Project requires certain packages
exist on your development system (e.g. Python 2.6 or 2.7).
See "<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#packages'>The Packages</ulink>"
See "<ulink url='&YOCTO_DOCS_QS_URL;#packages'>The Packages</ulink>"
section in the Yocto Project Quick start for the exact package
requirements and the installation commands to install them
for the supported distributions.</para></listitem>
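As a rough illustration only (this is a partial list for a Debian/Ubuntu-style host and is not the authoritative set; the Quick Start section referenced above is), installing the host packages generally looks like:

$ sudo apt-get install gawk wget git-core diffstat unzip texinfo \
      build-essential chrpath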
@@ -73,29 +75,37 @@
<itemizedlist>
<listitem><para><emphasis>Tarball Extraction:</emphasis> If you are not going to contribute
back into the Yocto Project, you can simply download the Yocto Project release you want
from the website’s <ulink url='http://yoctoproject.org/download'>download page</ulink>.
from the website’s <ulink url='&YOCTO_HOME_URL;/download'>download page</ulink>.
Once you have the tarball, just extract it into a directory of your choice.</para>
<para>For example, the following command extracts the Yocto Project 1.1 release tarball
<para>For example, the following command extracts the Yocto Project &DISTRO;
release tarball
into the current working directory and sets up the Yocto Project file structure
with a top-level directory named <filename>poky-edison-6.0</filename>:
with a top-level directory named <filename>&YOCTO_POKY;</filename>:
<literallayout class='monospaced'>
$ tar xfj poky-edison-6.0.tar.bz2
$ tar xfj &YOCTO_POKY_TARBALL;
</literallayout></para>
<para>This method does not produce a Git repository.
Instead, you simply end up with a local snapshot of the
Yocto Project files that are based on the particular release in the
tarball.</para></listitem>
<listitem><para><emphasis>Git Repository Method:</emphasis> If you are going to be contributing
back into the Yocto Project, you should use Git commands to set up a local
Git repository of the Yocto Project files.
back into the Yocto Project or you simply want to keep up
with the latest developments, you should use Git commands to set up a local
Git repository of the Yocto Project files.
Doing so creates a Git repository with a complete history of changes and allows
you to easily submit your changes upstream to the project.</para>
<para>The following transcript shows how to clone the Yocto Project files'
Git repository into the current working directory.
The command creates the repository in a directory named <filename>poky</filename>.
For information on the Yocto Project and Git, see the
"<link linkend='git'>Git</link>" section.
<literallayout class='monospaced'>
you to easily submit your changes upstream to the project.
Because you cloned the repository, you have access to all the Yocto Project development
branches and tag names used in the upstream repository.</para>
<para>The following transcript shows how to clone the Yocto Project Files'
Git repository into the current working directory.
<note>The name of the Yocto Project Files Git repository in the Yocto Project Files
Source Repositories is <filename>poky</filename>.
You can view the Yocto Project Source Repositories at
<ulink url='&YOCTO_GIT_URL;/cgit.cgi'></ulink></note>
The command creates the local repository in a directory named <filename>poky</filename>.
For information on Git used within the Yocto Project, see the
"<link linkend='git'>Git</link>" section.
<literallayout class='monospaced'>
$ git clone git://git.yoctoproject.org/poky
Initialized empty Git repository in /home/scottrif/poky/.git/
remote: Counting objects: 116882, done.
@@ -104,68 +114,78 @@
Receiving objects: 100% (116882/116882), 72.13 MiB | 2.68 MiB/s, done.
Resolving deltas: 100% (80651/80651), done. </literallayout></para>
<para>For another example of how to set up your own local Git repositories, see this
<ulink url='https://wiki.yoctoproject.org/wiki/Transcript:_from_git_checkout_to_meta-intel_BSP'>
<ulink url='&YOCTO_WIKI_URL;/wiki/Transcript:_from_git_checkout_to_meta-intel_BSP'>
wiki page</ulink>, which describes how to create both <filename>poky</filename>
and <filename>meta-intel</filename> Git repositories.</para></listitem>
</itemizedlist></para></listitem>
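Because the clone carries the full history, you can immediately inspect and track the upstream branches and tags. A short sketch (the branch name shown assumes you want the edison release branch):

$ cd poky
$ git branch -a                          # list local and remote branches
$ git tag -l                             # list release tags
$ git checkout -b edison origin/edison   # create a local branch tracking the release branch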
<listitem id='local-kernel-files'><para><emphasis>Linux Yocto Kernel:</emphasis>
If you are going to be making modifications to a supported Linux Yocto kernel, you
need to establish local copies of the source.
This setup involves creating a bare clone of the Linux Yocto kernel and then cloning
that repository.
You can find Git repositories of supported Linux Yocto Kernels organized under
"Yocto Linux Kernel" in the Yocto Project Source Repositories at
<ulink url='&YOCTO_GIT_URL;/cgit.cgi'></ulink>.</para>
<para>This setup involves creating a bare clone of the Linux Yocto kernel and then
copying that cloned repository.
You can create the bare clone and the copy of the bare clone anywhere you like.
For simplicity, it is recommended that you create these structures outside of the
Yocto Project files' Git repository.</para>
<para>As an example, the following transcript shows how to create the bare clone
of the <filename>linux-yocto-3.0</filename> kernel and then create a copy of
of the <filename>linux-yocto-3.0-1.1.x</filename> kernel and then create a copy of
that clone.
<note>When you have a local Linux Yocto kernel Git repository, you can
reference that repository rather than the upstream Git repository as
part of the <filename>clone</filename> command.
Doing so can speed up the process.</note></para>
<para>In the following example, the bare clone is named
<filename>linux-yocto-3.0.git</filename>, while the
copy is named <filename>linux-yocto-3.0</filename>:
<filename>linux-yocto-3.0-1.1.x.git</filename>, while the
copy is named <filename>my-linux-yocto-3.0-1.1.x-work</filename>:
<literallayout class='monospaced'>
$ git clone --bare git://git.yoctoproject.org/linux-yocto-3.0 linux-yocto-3.0.git
Initialized empty Git repository in /home/scottrif/linux-yocto-3.0.git/
remote: Counting objects: 2123870, done.
remote: Compressing objects: 100% (341338/341338), done.
remote: Total 2123870 (delta 1778780), reused 2107534 (delta 1762583)
Receiving objects: 100% (2123870/2123870), 445.72 MiB | 2.06 MiB/s, done.
Resolving deltas: 100% (1778780/1778780), done. </literallayout></para>
$ git clone --bare git://git.yoctoproject.org/linux-yocto-3.0-1.1.x linux-yocto-3.0-1.1.x.git
Initialized empty Git repository in /home/scottrif/linux-yocto-3.0-1.1.x.git/
remote: Counting objects: 2259181, done.
remote: Compressing objects: 100% (373259/373259), done.
remote: Total 2259181 (delta 1892638), reused 2231556 (delta 1865300)
Receiving objects: 100% (2259181/2259181), 482.44 MiB | 580 KiB/s, done.
Resolving deltas: 100% (1892638/1892638), done.
</literallayout></para>
<para>Now create a clone of the bare clone just created:
<literallayout class='monospaced'>
$ git clone linux-yocto-3.0.git linux-yocto-3.0
Initialized empty Git repository in /home/scottrif/linux-yocto-3.0/.git/
Checking out files: 100% (36898/36898), done. </literallayout></para></listitem>
$ git clone linux-yocto-3.0-1.1.x.git my-linux-yocto-3.0-1.1.x-work
Initialized empty Git repository in /home/scottrif/my-linux-yocto-3.0-1.1.x/.git/
Checking out files: 100% (36898/36898), done.
</literallayout></para></listitem>
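Kernel development then happens inside the copy, typically on a branch created from one of the upstream kernel branches. A minimal sketch (the branch names are placeholders, not taken from this change):

$ cd my-linux-yocto-3.0-1.1.x-work
$ git branch -a                                        # see which upstream kernel branches exist
$ git checkout -b my-kernel-work origin/<some-upstream-branch>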
<listitem id='poky-extras-repo'><para><emphasis>
The <filename>poky-extras</filename> Git Repository</emphasis>:
The <filename>poky-extras</filename> Git repository contains metadata needed
only if you are modifying and building the kernel image.
In particular, it contains the kernel <filename>.bbappend</filename> files that you
In particular, it contains the kernel BitBake append (<filename>.bbappend</filename>)
files that you
edit to point to your locally modified kernel source files and to build the kernel
image.
Pointing to these local files is much more efficient than requiring a download of the
source files from upstream each time you make changes to the kernel.</para>
<para>It is good practice to create this Git repository inside the Yocto Project
files Git repository.
Following is an example that creates the <filename>poky-extras</filename> Git
<para>You can find the <filename>poky-extras</filename> Git Repository in the
"Yocto Metadata Layers" area of the Yocto Project Source Repositories at
<ulink url='&YOCTO_GIT_URL;/cgit.cgi'></ulink>.
It is good practice to create this Git repository inside the Yocto Project
files Git repository.</para>
<para>Following is an example that creates the <filename>poky-extras</filename> Git
repository inside the Yocto Project files Git repository, which is named
<filename>poky</filename> in this case:
<literallayout class='monospaced'>
$ cd ~/poky
$ git clone git://git.yoctoproject.org/poky-extras poky-extras
Initialized empty Git repository in /home/scottrif/poky/poky-extras/.git/
remote: Counting objects: 543, done.
remote: Compressing objects: 100% (483/483), done.
remote: Total 543 (delta 144), reused 307 (delta 39)
Receiving objects: 100% (543/543), 520.55 KiB, done.
Resolving deltas: 100% (144/144), done. </literallayout></para></listitem>
remote: Counting objects: 561, done.
remote: Compressing objects: 100% (501/501), done.
remote: Total 561 (delta 159), reused 306 (delta 39)
Receiving objects: 100% (561/561), 519.96 KiB | 479 KiB/s, done.
Resolving deltas: 100% (159/159), done.
</literallayout></para></listitem>
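Once cloned, the append files to edit can be located directly; for example (a sketch, assuming the layout just created):

$ cd ~/poky
$ find poky-extras -name "*.bbappend"    # lists the kernel .bbappend files you can edit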
<listitem><para><emphasis>Supported Board Support Packages (BSPs):</emphasis>
Similar considerations exist for BSPs.
You can get set up for BSP development one of two ways: tarball extraction or
with a local Git repository.
It is a good idea to use the same method used to set up the Yocto Project Files.
Regardless of the method you use, the Yocto Project uses the following BSP layer
naming scheme:
<literallayout class='monospaced'>
@@ -181,31 +201,33 @@
<itemizedlist>
<listitem><para><emphasis>Tarball Extraction:</emphasis> You can download any released
BSP tarball from the same
<ulink url='http://yoctoproject.org/download'>download site</ulink> used
<ulink url='&YOCTO_HOME_URL;/download'>download site</ulink> used
to get the Yocto Project release.
Once you have the tarball, just extract it into a directory of your choice.
Again, this method just produces a snapshot of the BSP layer in the form
of a hierarchical directory structure.</para></listitem>
<listitem><para><emphasis>Git Repository Method:</emphasis> If you are working
with a Yocto Project files Git repository, you should also set up a
<filename>meta-intel</filename> Git repository.
Typically, you set up the <filename>meta-intel</filename> Git repository inside
the Yocto Project files Git repository.</para>
<para>For example, the following transcript shows the steps to clone the
with a Yocto Project Files Git repository, you should also use this method
to set up the <filename>meta-intel</filename> Git repository.
You can locate the <filename>meta-intel</filename> Git repository in the
"Yocto Metadata Layers" area of the Yocto Project Source Repositories at
<ulink url='&YOCTO_GIT_URL;/cgit.cgi'></ulink>.</para>
<para>Typically, you set up the <filename>meta-intel</filename> Git repository inside
the Yocto Project Files Git repository.
For example, the following transcript shows the steps to clone the
<filename>meta-intel</filename>
Git repository inside the <filename>poky</filename> Git repository.
<literallayout class='monospaced'>
$ cd poky
$ git clone git://git.yoctoproject.org/meta-intel.git
Initialized empty Git repository in /home/scottrif/poky/meta-intel/.git/
remote: Counting objects: 1325, done.
remote: Compressing objects: 100% (1078/1078), done.
remote: Total 1325 (delta 546), reused 85 (delta 27)
Receiving objects: 100% (1325/1325), 1.56 MiB | 330 KiB/s, done.
Resolving deltas: 100% (546/546), done.
remote: Counting objects: 3279, done.
remote: Compressing objects: 100% (2708/2708), done.
remote: Total 3279 (delta 1761), reused 194 (delta 105)
Receiving objects: 100% (3279/3279), 1.75 MiB | 377 KiB/s, done.
Resolving deltas: 100% (1761/1761), done.
</literallayout></para>
<para>The same
<ulink url='https://wiki.yoctoproject.org/wiki/Transcript:_from_git_checkout_to_meta-intel_BSP'>
<ulink url='&YOCTO_WIKI_URL;/wiki/Transcript:_from_git_checkout_to_meta-intel_BSP'>
wiki page</ulink> referenced earlier covers how to
set up the <filename>meta-intel</filename> Git repository.</para></listitem>
</itemizedlist></para></listitem>
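For a cloned BSP layer to take effect in a build, it normally also has to be listed in the BBLAYERS variable of the build directory's conf/bblayers.conf. A hypothetical excerpt (the crownbay sub-layer is only an example of a BSP under meta-intel, and the paths follow the transcripts above):

$ cat conf/bblayers.conf      # excerpt, illustrative
BBLAYERS ?= " \
  /home/scottrif/poky/meta \
  /home/scottrif/poky/meta-yocto \
  /home/scottrif/poky/meta-intel/meta-crownbay \
  "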
@@ -213,7 +235,7 @@
applications using the Eclipse Integrated Development Environment (IDE),
you will need this plug-in.
See the
"<ulink url='http://www.yoctoproject.org/docs/latest/adt-manual/adt-manual.html#setting-up-the-eclipse-ide'>Setting up the Eclipse IDE</ulink>"
"<ulink url='&YOCTO_DOCS_ADT_URL;#setting-up-the-eclipse-ide'>Setting up the Eclipse IDE</ulink>"
section in the Yocto Application Development Toolkit (ADT)
User’s Guide for more information.</para></listitem>
</itemizedlist>
@@ -226,7 +248,7 @@
<para>
The build process creates an entire Linux distribution, including the toolchain, from source.
For more information on this topic, see the
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#building-image'>Building an Image</ulink>"
"<ulink url='&YOCTO_DOCS_QS_URL;#building-image'>Building an Image</ulink>"
section in the Yocto Project Quick Start.
</para>

@@ -237,12 +259,20 @@
previous section.</para></listitem>
<listitem><para>Initialize the build environment by sourcing a build environment
script.</para></listitem>
<listitem><para>Optionally ensure the <filename>conf/local.conf</filename> configuration file is set
up how you want it.
This file defines the target machine architecture and other build options.</para></listitem>
<listitem><para>Build the image using the <command>bitbake</command> command.
<listitem><para>Optionally ensure the <filename>/conf/local.conf</filename> configuration file,
which is found in the Yocto Project build directory,
is set up how you want it.
This file defines many aspects of the build environment including
the target machine architecture through the
<filename><ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE'>MACHINE</ulink></filename> variable,
the development machine's processor use through the
<filename><ulink url='&YOCTO_DOCS_REF_URL;#var-BB_NUMBER_THREADS'>BB_NUMBER_THREADS</ulink></filename> and
<filename><ulink url='&YOCTO_DOCS_REF_URL;#var-PARALLEL_MAKE'>PARALLEL_MAKE</ulink></filename> variables, and
a centralized tarball download directory through the
<filename><ulink url='&YOCTO_DOCS_REF_URL;#var-DL_DIR'>DL_DIR</ulink></filename> variable.</para></listitem>
<listitem><para>Build the image using the <filename>bitbake</filename> command.
If you want information on BitBake, see the user manual at
<ulink url='http://docs.openembedded.org/bitbake/html'></ulink>.</para></listitem>
<ulink url='&OE_DOCS_URL;/bitbake/html'></ulink>.</para></listitem>
<listitem><para>Run the image either on the actual hardware or using the QEMU
emulator.</para></listitem>
</orderedlist>
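Put together, a minimal build session might look like the sketch below (the machine name, thread counts, download directory, and image name are illustrative choices, and the variable settings belong in conf/local.conf rather than on the command line):

$ cd ~/poky
$ source oe-init-build-env build         # creates and enters the build directory
$ # Typical conf/local.conf settings (illustrative values):
$ #   MACHINE ?= "qemux86"
$ #   BB_NUMBER_THREADS = "4"
$ #   PARALLEL_MAKE = "-j 4"
$ #   DL_DIR ?= "${TOPDIR}/downloads"
$ bitbake core-image-minimal             # build an image (image name is an example)
$ runqemu qemux86                        # boot the result in the QEMU emulator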
@@ -253,18 +283,37 @@
<title>Using Pre-Built Binaries and QEMU</title>

<para>
Another option you have to get started is to use pre-built binaries.
This scenario is ideal for developing software applications to run on your target hardware.
To do this, you need to install the stand-alone Yocto Project cross-toolchain tarball and
then download the pre-built kernel that you will boot in the QEMU emulator.
Next, you must download and extract the target root filesystem for your target
machine’s architecture.
Finally, you set up the environment to emulate the hardware and then start the QEMU emulator.
Another option you have to get started is to use pre-built binaries.
The Yocto Project provides many types of binaries with each release.
See the <ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Reference: Images</ulink>
section for descriptions of the types of binaries that ship with a Yocto Project
release.
</para>

<para>
Using a pre-built binary is ideal for developing software applications to run on your
target hardware.
To do this, you need to be able to access the appropriate cross-toolchain tarball for
the architecture on which you are developing.
If you are using an SDK type image, the image ships with the complete toolchain native to
the architecture.
If you are not using an SDK type image, you need to separately download and
install the stand-alone Yocto Project cross-toolchain tarball.
</para>

<para>
Regardless of the type of image you are using, you need to download the pre-built kernel
that you will boot in the QEMU emulator and then download and extract the target root
filesystem for your target machine’s architecture.
You can get architecture-specific binaries and filesystem from
<ulink url='&YOCTO_MACHINES_DL_URL;'>machines</ulink>.
You can get stand-alone toolchains from
<ulink url='&YOCTO_TOOLCHAIN_DL_URL;'>toolchains</ulink>.
Once you have all your files, you set up the environment to emulate the hardware
by sourcing an environment setup script.
Finally, you start the QEMU emulator.
You can find details on all these steps in the
"<ulink url='http://www.yoctoproject.org/docs/latest/yocto-project-qs/yocto-project-qs.html#using-pre-built'>Using Pre-Built Binaries and QEMU</ulink>"
"<ulink url='&YOCTO_DOCS_QS_URL;#using-pre-built'>Using Pre-Built Binaries and QEMU</ulink>"
section of the Yocto Project Quick Start.
</para>
</section>
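A condensed sketch of that flow follows; every file name, install path, and argument here is a placeholder chosen for illustration (the real names come from the toolchains and machines download areas and from the Quick Start section referenced above):

$ # 1. Install the stand-alone cross-toolchain tarball (name and install path are placeholders)
$ sudo tar xjf poky-eglibc-x86_64-i586-toolchain-gmae-1.1.tar.bz2 -C /
$ # 2. Download a pre-built kernel and root filesystem for your machine from the
$ #    "machines" area, e.g. bzImage-qemux86.bin and core-image-sato-qemux86.ext3
$ # 3. Source the environment setup script installed with the toolchain
$ source /opt/poky/1.1/environment-setup-i586-poky-linux
$ # 4. Boot the pre-built kernel and root filesystem in QEMU
$ runqemu qemux86 bzImage-qemux86.bin core-image-sato-qemux86.ext3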
@@ -1,5 +1,6 @@
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >

<book id='dev-manual' lang='en'
xmlns:xi="http://www.w3.org/2003/XInclude"
@@ -33,10 +34,20 @@
<date>6 October 2011</date>
<revremark>The initial document released with the Yocto Project 1.1 Release.</revremark>
</revision>
<revision>
<revnumber>1.1.1</revnumber>
<date>15 March 2012</date>
<revremark>Released with the Yocto Project 1.1.1 Release.</revremark>
</revision>
<revision>
<revnumber>1.1.2</revnumber>
<date>July 2012</date>
<revremark>Released with the Yocto Project 1.1.2 Release.</revremark>
</revision>
</revhistory>

<copyright>
<year>2010-2011</year>
<year>&COPYRIGHT_YEAR;</year>
<holder>Linux Foundation</holder>
</copyright>

@@ -51,9 +62,9 @@
<note>
Due to production processes, there could be differences between the Yocto Project
documentation bundled in the release tarball and
<ulink url='http://www.yoctoproject.org/docs/latest/dev-manual/dev-manual.html'>
<ulink url='&YOCTO_DOCS_DEV_URL;'>
The Yocto Project Development Manual</ulink> on
the <ulink url='http://www.yoctoproject.org'>Yocto Project</ulink> website.
the <ulink url='&YOCTO_HOME_URL;'>Yocto Project</ulink> website.
For the latest version of this manual, see the manual on the website.
</note>
</legalnotice>
(An image file was also updated in this changeset: 26 KiB before and after.)