Compare commits
569 Commits
| SHA1 |
|---|
| 8886aee5b9 |
| 2e99dcc03a |
| 1f53e5a96a |
| bf46fa7d50 |
| 35d25f853c |
| ccc2918ef7 |
| c47f892526 |
| 3635f0ce53 |
| 678f6ee437 |
| 7ce46066a5 |
| d3e51cea0f |
| fb2d503992 |
| ad050e2912 |
| ec385088c4 |
| aee274f5fa |
| 74b0cafd65 |
| 9622701c00 |
| 7e3332c3cc |
| 99e39f6c12 |
| c0433b3b94 |
| 28989f2d04 |
| d088fac872 |
| 9f915d46be |
| a6f25334ec |
| ed26c02ac6 |
| 860acfbdaa |
| 470fd8de07 |
| 7082a56c95 |
| 4919821572 |
| a8289a9943 |
| 8b72a3c863 |
| 7315ac6e8f |
| 522b5864e9 |
| c11f65a290 |
| b2ec918b67 |
| 4cd83a91fb |
| 953ad50a27 |
| fcf7db01c0 |
| c8fc8aa4ae |
| e50f5673e2 |
| 0cb1be9fae |
| 565d5abd87 |
| fee19b3dda |
| eb1ed1f02a |
| 664c12885b |
| 22fa92fa14 |
| a717c33e5d |
| d39f8db69e |
| efd4cb0ddf |
| 61e550b462 |
| 4b72728755 |
| 1cc3056b3a |
| 27e5be1709 |
| 6c9c0f1f45 |
| 8650364e50 |
| 450913f388 |
| de01c5c217 |
| d5a5f63867 |
| 06072024be |
| 107ec95659 |
| af3e5039e8 |
| 08d70734d5 |
| 164a4d1bac |
| 7e0dd59e30 |
| aca161f8a0 |
| 94c8d01eba |
| e1e0dd932b |
| 6c335846d9 |
| 774f93e8d3 |
| 535cfa538b |
| ac63b3f8ef |
| 4039b5b97c |
| 67334bfb26 |
| 36e13dd42f |
| de485f4973 |
| 56310cbc4c |
| 68cd8deadc |
| b2a0243f05 |
| a99b7d39dc |
| 755508c423 |
| adbf38414e |
| c3be61e204 |
| 490753f440 |
| f3fc5e1e3f |
| e2c5e5a513 |
| c5a9efca96 |
| 15905aec48 |
| 2b92d9f6d3 |
| bfa48c3c09 |
| ef6062981b |
| f57eca6f28 |
| 885ebdae10 |
| 33561a5417 |
| bb31c819be |
| d1c5de9ccb |
| 4274ebdd00 |
| 0fbd6a1615 |
| bda8a084f5 |
| 939ec1ca1e |
| 69b307523c |
| 6482c0e20d |
| ac9c62c907 |
| 806c23ef2e |
| 4c496a970f |
| fa6eb32a5a |
| eaec7e9624 |
| e6ea83fece |
| 613e985811 |
| b900d54f57 |
| 83e5279d62 |
| b36cde2308 |
| c1a2249c96 |
| e3afb1ebc8 |
| 686345f1d0 |
| 7b15a9372c |
| b52792d84d |
| a684aa1df4 |
| 69a3fba2aa |
| c6ec5a0d9e |
| de68393270 |
| 12e5797e51 |
| 6c65263f8d |
| 32f0a45c33 |
| 726f3bce5a |
| 75f253d7d2 |
| a5c04850e6 |
| cef4500611 |
| df2da07184 |
| 77912d65c7 |
| 878425f147 |
| fa9ad15e41 |
| 961f75d11b |
| 60c5ef3508 |
| e5e7a913c2 |
| 165e39a0bb |
| 09d5966e46 |
| 0bd433ebf5 |
| c2e003ecd5 |
| 9f51e226dc |
| 681499ebfe |
| 5d96094939 |
| 10d9e0805e |
| c1c6613ddd |
| 5db4eaac2d |
| f99f36f637 |
| 7705d9a8cc |
| bcec98bf1c |
| 0d9809c4ec |
| dcf64630f8 |
| fa610f7f20 |
| d97ad36d90 |
| de7377a170 |
| c471ec56b4 |
| 9d55534cc7 |
| 54f4e9b66c |
| 3e783002b3 |
| 935678cbe1 |
| ee75b5020b |
| 5f21a24580 |
| 100002e4c6 |
| 71824019fb |
| 3400b3d2df |
| 745d83f968 |
| 340a680de2 |
| 3048bd79b3 |
| ef37926f31 |
| 4c30bcfbfe |
| 709ad80662 |
| 08a834a08a |
| 6c3dd24e59 |
| 66d6c031b0 |
| 957882caef |
| 8781450256 |
| 9d23f215a0 |
| 2c1a0b7d32 |
| 9e52c53a5d |
| f812a2c912 |
| d3c848094f |
| 37d694ae80 |
| e391e1a200 |
| 199f985754 |
| 395ffa8930 |
| 90920546e4 |
| 1d9ec42166 |
| 57481984c9 |
| 05051d864d |
| 48ee7e9b3a |
| 1278cee687 |
| b5195d2739 |
| 7561770d43 |
| 03fbfe7cf1 |
| 9faa58ecdc |
| cc19812fb4 |
| 110d499544 |
| 7fe64f43f4 |
| 47007075d4 |
| 9dc2193d31 |
| 403d5e0b7d |
| 5d9dfed5c4 |
| 9924a6c72d |
| ccf6077d4e |
| 38978dc0b8 |
| 9152ef8b1d |
| 4e4521b5bf |
| 501211d4d5 |
| 155aad308c |
| 11e383d24c |
| aff0c68b0f |
| 3b75e27536 |
| 5f3b7a7616 |
| fb8d219960 |
| ae88920dec |
| 57c6f14828 |
| bd9a5e1b88 |
| 8614fcf709 |
| 2202d845ab |
| a55d8c6aa4 |
| b6312e2d51 |
| 5a41a612c9 |
| aaea770f1f |
| 6e4607f23a |
| 5a192f85d9 |
| 9ce56ec4ca |
| e47cfd447c |
| fc2433de1d |
| 8cf7c76ce1 |
| 5d8269d28a |
| 9f542cf856 |
| 398a0159a6 |
| 8ce627f9b1 |
| a7e5ad1268 |
| 8223a46ca0 |
| 1890a0f3b2 |
| 2dbcd4154c |
| 0524f419cf |
| a90c197e94 |
| 21458bd419 |
| 7e96247751 |
| 07e2aa9b80 |
| 5c37b7ea47 |
| 79081f46ec |
| 4f9e333b05 |
| dc09c258f0 |
| 5de0f305f9 |
| 52dc5edde3 |
| 9d086cd151 |
| 2edde1021f |
| afc60481c7 |
| eb94ba9052 |
| 6fa445d50e |
| 795843df09 |
| 8a20492e8a |
| 70ff3b6d98 |
| ba79e6f631 |
| 071d5de3f3 |
| 1fa324c533 |
| 958c7f773f |
| 141240c409 |
| 89e945be6a |
| 7d30c2df87 |
| 90a4f95d3d |
| 53db004d24 |
| f7d5b31d6c |
| 35d3782099 |
| 1080ef1105 |
| 7bd151a4f3 |
| e1f53370ed |
| e708d0ab68 |
| e6e867558b |
| ab81049f37 |
| 0b2f036a81 |
| 6ed9f0763b |
| 1e225af16e |
| 385365f689 |
| dec4fb1bee |
| ef1a8f21e0 |
| 0c1b16db4c |
| 02c530f442 |
| df2fddf9cb |
| 6f0c0167c6 |
| 47c5f1c3bc |
| 29a5cc693c |
| c3a1b97511 |
| f1369ae9fe |
| 8620d997d4 |
| 7d06a71c02 |
| 7c5028614b |
| 6844fac9d5 |
| 41b5ca8582 |
| 7a1504dfe8 |
| 062623f6ef |
| 62ad5b81cb |
| ada8ebb116 |
| f1f2cbbc0d |
| c434795edf |
| 25330d9f38 |
| c1c5eb6866 |
| 51e089403a |
| 058ef489a0 |
| 3eb7e626d0 |
| 386e75b7f0 |
| f72a801d51 |
| 4ffc32566a |
| 3f692305dc |
| 4ff17dc89d |
| 1a506c5dfd |
| 84865e45ea |
| ae97dbe1db |
| edb2641243 |
| c8635bab0b |
| cd50451812 |
| 877979c8b5 |
| f69eca96d1 |
| c0a8c9b985 |
| 09ab224a2f |
| 0e9001afd5 |
| d63678cdfa |
| 1af2581f0b |
| 9dcb176dc8 |
| 64ba74deff |
| ff047d3a77 |
| b137421cfc |
| 3a8590f105 |
| 3571525ab8 |
| 204762c531 |
| 394d340ab1 |
| 4bf5435d95 |
| 05dba88379 |
| 1ab5a6851d |
| 781866f64e |
| 702c428804 |
| 5882121a94 |
| 6c2f754a0a |
| acd0bedbce |
| 90f8d53800 |
| 23c6b49566 |
| f204d16012 |
| 3796541746 |
| 0e676f74c5 |
| 26666187e3 |
| c270f92b08 |
| 1670051a79 |
| c61f04c34e |
| 2b26745c70 |
| 28ca6cc34b |
| ada59bde67 |
| 9a68fb1364 |
| f87c92143e |
| f38e44bbb2 |
| 4d7f50382e |
| 6803d97bdb |
| 81ed10442b |
| 1a46002fad |
| 2747b2003e |
| 375297ea28 |
| c2662a5095 |
| da56e3df88 |
| 388dbe4928 |
| dbcce81f66 |
| 46ac868403 |
| 6e1105e1e8 |
| 4494f59a26 |
| 13590b23c6 |
| 2c3861ee68 |
| 9cf7aabecf |
| 81d1a4aadf |
| 6578845f69 |
| b5a4e78df5 |
| 68b55c1e85 |
| 4234beb034 |
| ddb5143d9d |
| 25dcd673f5 |
| 1ad7977742 |
| fe40f117c1 |
| 0550d8c73e |
| bc821a2ab5 |
| aa72ed0b23 |
| e0a2bbd2a4 |
| 1a2454fcba |
| 92675a93ba |
| 1bafc89431 |
| dc785b64c1 |
| 257dbe8d39 |
| c81c4cb0c7 |
| da3edbd85b |
| 44aa4f320a |
| 38dbccd997 |
| 6c27a7b50e |
| 7ef3bc97b7 |
| 807b96f882 |
| 2add98ffc8 |
| ed7fe93178 |
| 397081ef41 |
| 570eeea297 |
| b9232eb2b4 |
| 405578286d |
| 1c937b6359 |
| 567200dcf2 |
| b4f5708c05 |
| b8cb28fc2f |
| 495d37ab0b |
| 9d60cb9450 |
| 5592e80877 |
| 979ecf3eea |
| 770f5bb229 |
| 44211ed500 |
| ac715efc14 |
| b256ae8f80 |
| fa969ffb59 |
| e67311606e |
| b17aecd70a |
| 1cb265f575 |
| 31b7cac818 |
| 05738313c3 |
| 25cf1a65ec |
| 442730168e |
| 2ca5c8c03e |
| fa056279ea |
| 7ec098bedc |
| 68d048abfd |
| d5848aa719 |
| 9edf601d2d |
| 155d0deae8 |
| 85408dfd36 |
| 43fb63af31 |
| 8add7fccde |
| 01f5e6778c |
| 1ff81200ac |
| d2f1ca8cba |
| caab52f6cc |
| b4eb195b34 |
| 3dbabb693d |
| 5dd34a717e |
| e7cfb3b469 |
| c2494d3014 |
| 9d87cd9952 |
| 1851a96b47 |
| efd2d7ee05 |
| b16bc3d277 |
| adcf8bf7b5 |
| fda17235fd |
| c5bdef5617 |
| 77640e96dd |
| 98d9b82759 |
| 1aac5c310f |
| 1924f52cc8 |
| 6535ba6077 |
| eae4945a9d |
| 5ec43fdbb8 |
| 5ed59ae0f2 |
| e02d553b45 |
| 720446629b |
| db9d36f196 |
| 4cca048ab8 |
| 51b3d9dd53 |
| bc885cd8d3 |
| c657668a07 |
| 02e3d4dc70 |
| 57746012d0 |
| 8a48ec4297 |
| 61637a5241 |
| 8a475908b5 |
| b7d2cf0525 |
| 4d7fbeda35 |
| baf536c62c |
| 0c48a6805e |
| 1ea2c63bf5 |
| f33f49a348 |
| cc6819ede7 |
| 2a68be025b |
| f05471dcf8 |
| 5fe2c53493 |
| 6b2ae5fd17 |
| 4eeeded4a7 |
| 931db10bd0 |
| 522268be49 |
| 8e17bffa42 |
| cc004358f1 |
| 0e623482d5 |
| ec31ee62d5 |
| a59ca8316b |
| 4423b5b024 |
| b47f39dbc3 |
| 9d72b706fa |
| 5d4888723b |
| 49e3171850 |
| 66ddb69916 |
| de1dcde413 |
| 9f36b1fe16 |
| 38c7a8a069 |
| 23bac7cb0e |
| 0021456aad |
| a568995f40 |
| f82ac840aa |
| cd2c80dedc |
| ed93525e65 |
| 4025831e90 |
| 94c381f71b |
| 588e21b339 |
| 3429095e86 |
| feb11f1079 |
| fbec475275 |
| 317fc4fbd0 |
| 909dd5b306 |
| 24623d149d |
| 5687f68f3e |
| f282b7a027 |
| 32b1c9150f |
| 7a541d69dd |
| aa1cb68ce2 |
| dc1f3a3bd0 |
| 5fbb040355 |
| 9886c510f9 |
| 49de6096b1 |
| 7eb193fc49 |
| 4aa6a8e9a6 |
| a1f3aff110 |
| bb351c2f41 |
| bed552f8d0 |
| 41c564fe60 |
| cae817e833 |
| 7bb8b8f438 |
| c32652716d |
| 748fd4543b |
| 56f7ed979c |
| 3a15c9f8d0 |
| fc7ceaead0 |
| a626a5c208 |
| cb333ad6f3 |
| 5b58674c6b |
| 1017d2aec8 |
| 9786db045f |
| 158b84844e |
| 421c22d32c |
| 90ccadecc3 |
| 07638448b0 |
| f343aa4cc6 |
| 5cd07954ea |
| e0338b844f |
| cde57ddf84 |
| bee5046908 |
| 4e6b4c09a5 |
| 89496194ba |
| 19f9b25947 |
| f97e445fc6 |
| 2766a88a3b |
| b57c529115 |
| 319f4ee481 |
| 2cf26ef150 |
| 2c1b5b1054 |
| b8be92c34d |
| 6b4133b08f |
| 96d43c2410 |
| cde2aa61cf |
| 1d18aeafa6 |
| b8a67d3000 |
| c3c8084855 |
| cd0ef4d7c1 |
| c9e35a126a |
| 4456226e45 |
| bf8f071c5b |
| 3b1e8a214e |
| 1015dfce8d |
| a0b1c14587 |
| 66934fc311 |
| 80de0f946b |
| 5e65389335 |
| d513e5f92c |
| e9f8b99215 |
.gitignore (vendored): 10 changes

@@ -1,9 +1,11 @@
bitbake
*.pyc
*.pyo
/*.patch
build*/
pyshtables.py
build*/conf/local.conf
build*/conf/bblayers.conf
build*/downloads
build*/tmp/
build*/sstate-cache
build*/pyshtables.py
pstage/
scripts/oe-git-proxy-socks
sources/
README: 8 changes

@@ -18,12 +18,8 @@ e.g. for the hardware support. Poky is in turn a component of the Yocto Project.

The Yocto Project has extensive documentation about the system including a
reference manual which can be found at:
http://yoctoproject.org/documentation
http://yoctoproject.org/community/documentation

OpenEmbedded-Core is a layer containing the core metadata for current versions
of OpenEmbedded. It is distro-less (can build a functional image with
DISTRO = "") and contains only emulated machine support.

For information about OpenEmbedded, see the OpenEmbedded website:
For information about OpenEmbedded see their website:
http://www.openembedded.org/

@@ -96,9 +96,8 @@ USB Device:

    # dd if=core-image-minimal-atom-pc.hddimg of=/dev/sdb

If the device fails to boot with "Boot error" displayed, or apparently
stops just after the SYSLINUX version banner, it is likely the BIOS cannot
understand the physical layout of the disk (or rather it expects a
If the device fails to boot with "Boot error" displayed, it is likely the BIOS
cannot understand the physical layout of the disk (or rather it expects a
particular layout and cannot handle anything else). There are two possible
solutions to this problem:

@@ -107,47 +106,26 @@ USB Device:
   geometry from the device.

2. Without such an option, the BIOS generally boots the device in USB-ZIP
   mode. To write an image to a USB device that will be bootable in
   USB-ZIP mode, carry out the following actions:
   mode.

   a. Determine the geometry of your USB device using fdisk:

      # fdisk /dev/sdb
      Command (m for help): p

      Disk /dev/sdb: 4011 MB, 4011491328 bytes
      124 heads, 62 sectors/track, 1019 cylinders, total 7834944 sectors
      ...

      Command (m for help): q

   b. Configure the USB device for USB-ZIP mode:
   a. Configure the USB device for USB-ZIP mode:

      # mkdiskimage -4 /dev/sdb 1019 124 62
      # mkdiskimage -4 /dev/sdb 0 63 62

      Where 1019, 124 and 62 are the cylinder, head and sectors/track counts
      as reported by fdisk (substitute the values reported for your device).
      When the operation has finished and the access LED (if any) on the
      device stops flashing, remove and reinsert the device to allow the
      kernel to detect the new partition layout.
      Where 63 and 62 are the head and sector count as reported by fdisk.
      Remove and reinsert the device to allow the kernel to detect the new
      partition layout.

   c. Copy the contents of the poky image to the USB-ZIP mode device:
   b. Copy the contents of the poky image to the USB-ZIP mode device:

      # mkdir /tmp/image
      # mkdir /tmp/usbkey
      # mount -o loop core-image-minimal-atom-pc.hddimg /tmp/image
      # mount /dev/sdb4 /tmp/usbkey
      # cp -rf /tmp/image/* /tmp/usbkey

   d. Install the syslinux boot loader:
   c. Install the syslinux boot loader:

      # syslinux /dev/sdb4

   e. Unmount everything:

      # umount /tmp/image
      # umount /tmp/usbkey

Install the boot device in the target board and configure the BIOS to boot
from it.
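The mkdiskimage arguments above follow directly from the fdisk figures; a minimal sketch of that arithmetic in Python (the byte count, head count, and sectors/track values are taken from the example fdisk transcript, and 512-byte sectors are assumed):

```python
# Sketch: derive the "cylinders heads sectors" arguments for mkdiskimage
# from fdisk's reported figures.
SECTOR_SIZE = 512

def chs_for_mkdiskimage(total_bytes, heads, sectors_per_track):
    total_sectors = total_bytes // SECTOR_SIZE          # 7834944 for this device
    cylinders = total_sectors // (heads * sectors_per_track)
    return cylinders, heads, sectors_per_track

# Disk /dev/sdb: 4011 MB, 4011491328 bytes; 124 heads, 62 sectors/track
print(chs_for_mkdiskimage(4011491328, 124, 62))         # -> (1019, 124, 62)
```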
@@ -263,8 +241,8 @@ Setup instructions

You will need the following:
* NFS root setup on your workstation
* TFTP server installed on your workstation
* Straight-thru 9-conductor serial cable (DB9, M/F) connected from your
  PC to UART1
* Null modem cable connected from your workstation to the first serial port
  on the board
* Ethernet connected to the first ethernet port on the board

--- Preparation ---
@@ -40,7 +40,7 @@ from bb import cooker
from bb import ui
from bb import server

__version__ = "1.15.2"
__version__ = "1.13.3"
logger = logging.getLogger("BitBake")

@@ -56,11 +56,10 @@ class BBConfiguration(object):

def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui
    if config.ui:
        interface = config.ui
    else:
        interface = 'knotty'

    try:
        # Dynamically load the UI based on the ui name. Although we
@@ -70,7 +69,7 @@ def get_ui(config):
        return getattr(module, interface).main
    except AttributeError:
        sys.exit("FATAL: Invalid user interface '%s' specified.\n"
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default], knotty2." % interface)
                 "Valid interfaces: depexp, goggle, ncurses, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
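The get_ui() hunks above center on resolving a UI front-end by name at runtime. A minimal, self-contained sketch of that dynamic-dispatch idiom follows; the myapp.ui package name is an illustrative placeholder, not BitBake's real layout:

```python
import importlib
import os
import sys

def get_ui_main(name=None):
    # Fall back to an environment override, then to a default,
    # mirroring the BITBAKE_UI / 'knotty' logic in the diff above.
    interface = name or os.environ.get('BITBAKE_UI', 'knotty')
    try:
        # Load the UI module dynamically from its name.
        module = importlib.import_module('myapp.ui.%s' % interface)  # hypothetical package
        return module.main
    except (ImportError, AttributeError):
        sys.exit("FATAL: Invalid user interface '%s' specified." % interface)
```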
@@ -118,9 +117,6 @@ Default BBFILES are the .bb files in the current directory.""")
    parser.add_option("-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtasks tasks is defined and will show available tasks",
               action = "store", dest = "cmd")

    parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified cmd such as 'compile' and run the default task for the specified target(s)",
               action = "store", dest = "invalidate_stamp")

    parser.add_option("-r", "--read", help = "read the specified file before bitbake.conf",
               action = "append", dest = "prefile", default = [])

@@ -148,7 +144,7 @@ Default BBFILES are the .bb files in the current directory.""")
    parser.add_option("-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
               action = "store_true", dest = "show_environment", default = False)

    parser.add_option("-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax, and the pn-buildlist to show the build list",
    parser.add_option("-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
               action = "store_true", dest = "dot_graph", default = False)

    parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",

@@ -169,11 +165,6 @@ Default BBFILES are the .bb files in the current directory.""")
    parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
               action = "store_true", dest = "revisions_changed", default = False)

    parser.add_option("", "--server-only", help = "Run bitbake without UI, the frontend can connect with bitbake server itself",
               action = "store_true", dest = "server_only", default = False)

    parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to",
               action = "store", dest = "bind", default = False)

    options, args = parser.parse_args(sys.argv)

    configuration = BBConfiguration(options)

@@ -195,15 +186,6 @@ Default BBFILES are the .bb files in the current directory.""")
        sys.exit("FATAL: Invalid server type '%s' specified.\n"
                 "Valid interfaces: xmlrpc, process [default], none." % servertype)

    if configuration.server_only:
        if configuration.servertype != "xmlrpc":
            sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n")
        if not configuration.bind:
            sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n")

    if configuration.bind and configuration.servertype != "xmlrpc":
        sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n")

    # Save a logfile for cooker into the current working directory. When the
    # server is daemonized this logfile will be truncated.
    cooker_logfile = os.path.join(os.getcwd(), "cooker.log")

@@ -224,11 +206,8 @@ Default BBFILES are the .bb files in the current directory.""")
    bb.utils.clean_environment()

    server = server.BitBakeServer()
    if configuration.bind:
        server.initServer((configuration.bind, 0))
    else:
        server.initServer()

    server.initServer()
    idle = server.getServerIdleCB()

    cooker = bb.cooker.BBCooker(configuration, idle, initialenv)

@@ -243,17 +222,14 @@ Default BBFILES are the .bb files in the current directory.""")

    logger.removeHandler(handler)

    if not configuration.server_only:
        # Setup a connection to the server (cooker)
        server_connection = server.establishConnection()
    # Setup a connection to the server (cooker)
    server_connection = server.establishConnection()

        try:
            return server.launchUI(ui_main, server_connection.connection, server_connection.events)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("server address: %s, server port: %s" % (server.serverinfo.host, server.serverinfo.port))
    try:
        return server.launchUI(ui_main, server_connection.connection, server_connection.events)
    finally:
        bb.event.ui_queue = []
        server_connection.terminate()

    return 1
@@ -1,9 +0,0 @@
#!/usr/bin/env python
import os
import sys
import warnings
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))

import bb.siggen

bb.siggen.dump_sigfile(sys.argv[1])
@@ -4,16 +4,10 @@
# displaying useful information, or acting against them.
# See the help output for details on available commands.

# Copyright (C) 2011 Mentor Graphics Corporation
# Copyright (C) 2012 Intel Corporation

import cmd
import logging
import warnings
import os
import sys
import fnmatch
from collections import defaultdict

bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)

@@ -24,12 +18,10 @@ import bb.cooker
import bb.providers
import bb.utils
from bb.cooker import state
import bb.fetch2

logger = logging.getLogger('BitBake')

warnings.filterwarnings("ignore", category=DeprecationWarning)

def main(args):
    # Set up logging

@@ -44,8 +36,6 @@ def main(args):

    cmds = Commands(initialenv)
    if args:
        # Allow user to specify e.g. show-layers instead of show_layers
        args = [args[0].replace('-', '_')] + args[1:]
        cmds.onecmd(' '.join(args))
    else:
        cmds.do_help('')

@@ -63,7 +53,6 @@ class Commands(cmd.Cmd):
        self.config_data = self.cooker.configuration.data
        bb.providers.logger.setLevel(logging.ERROR)
        self.cooker_data = None
        self.bblayers = (self.config_data.getVar('BBLAYERS', True) or "").split()

    def register_idle_function(self, function, data):
        pass

@@ -99,14 +88,14 @@ class Commands(cmd.Cmd):
        """display general help or help on a specified command"""
        if topic:
            sys.stdout.write('%s: ' % topic)
            cmd.Cmd.do_help(self, topic.replace('-', '_'))
            cmd.Cmd.do_help(self,topic)
        else:
            sys.stdout.write("usage: bitbake-layers <command> [arguments]\n\n")
            sys.stdout.write("Available commands:\n")
            procnames = self.get_names()
            for procname in procnames:
                if procname[:3] == 'do_':
                    sys.stdout.write("  %s\n" % procname[3:].replace('_', '-'))
                    sys.stdout.write("  %s\n" % procname[3:])
                    doc = getattr(self, procname).__doc__
                    if doc:
                        sys.stdout.write("    %s\n" % doc.splitlines()[0])

@@ -117,227 +106,41 @@ class Commands(cmd.Cmd):
        logger.plain('')
        logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
        logger.plain('=' * 74)
        for layerdir in self.bblayers:
            layername = self.get_layer_name(layerdir)
        layerdirs = str(self.config_data.getVar('BBLAYERS', True)).split()
        for layerdir in layerdirs:
            layername = '?'
            layerpri = 0
            for layer, _, regex, pri in self.cooker.status.bbfile_config_priorities:
                if regex.match(os.path.join(layerdir, 'test')):
                    layername = layer
                    layerpri = pri
                    break

            logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri))


    def version_str(self, pe, pv, pr = None):
        verstr = "%s" % pv
        if pr:
            verstr = "%s-%s" % (verstr, pr)
        if pe:
            verstr = "%s:%s" % (pe, verstr)
        return verstr
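version_str() above encodes the PE:PV-PR versioning convention (epoch, version, revision). A standalone sketch of the same logic with example outputs:

```python
def version_str(pe, pv, pr=None):
    # PV is the base version; PR (revision) is appended with '-',
    # and PE (epoch) is prefixed with ':' when present.
    verstr = "%s" % pv
    if pr:
        verstr = "%s-%s" % (verstr, pr)
    if pe:
        verstr = "%s:%s" % (pe, verstr)
    return verstr

print(version_str(None, "1.0"))         # 1.0
print(version_str(None, "1.0", "r3"))   # 1.0-r3
print(version_str("2", "1.0", "r3"))    # 2:1.0-r3
```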
    def do_show_overlayed(self, args):
        """list overlayed recipes (where the same recipe exists in another layer that has a higher layer priority)
        """list overlayed recipes (where there is a recipe in another layer that has a higher layer priority)

usage: show-overlayed [-f] [-s]
usage: show_overlayed

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Note that skipped recipes that
are overlayed will also be listed, with a " (skipped)" suffix.

Options:
  -f  instead of the default formatting, list filenames of higher priority
      recipes with the ones they overlay indented underneath
  -s  only list overlayed recipes where the version is the same
Highest priority recipes are listed with the recipes they overlay as subitems.
"""
        self.check_prepare_cooker()

        show_filenames = False
        show_same_ver_only = False
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-s':
                show_same_ver_only = True
            else:
                sys.stderr.write("show-overlayed: invalid option %s\n" % arg)
                self.do_help('')
                return

        items_listed = self.list_recipes('Overlayed recipes', None, True, show_same_ver_only, show_filenames, True)

        # Check for overlayed .bbclass files
        classes = defaultdict(list)
        for layerdir in self.bblayers:
            classdir = os.path.join(layerdir, 'classes')
            if os.path.exists(classdir):
                for classfile in os.listdir(classdir):
                    if os.path.splitext(classfile)[1] == '.bbclass':
                        classes[classfile].append(classdir)

        # Locating classes and other files is a bit more complicated than recipes -
        # layer priority is not a factor; instead BitBake uses the first matching
        # file in BBPATH, which is manipulated directly by each layer's
        # conf/layer.conf in turn, thus the order of layers in bblayers.conf is a
        # factor - however, each layer.conf is free to either prepend or append to
        # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might
        # not be exactly the order present in bblayers.conf either.
        bbpath = str(self.config_data.getVar('BBPATH', True))
        overlayed_class_found = False
        for (classfile, classdirs) in classes.items():
            if len(classdirs) > 1:
                if not overlayed_class_found:
                    logger.plain('=== Overlayed classes ===')
                    overlayed_class_found = True

                mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile))
                if show_filenames:
                    logger.plain('%s' % mainfile)
                else:
                    # We effectively have to guess the layer here
                    logger.plain('%s:' % classfile)
                    mainlayername = '?'
                    for layerdir in self.bblayers:
                        classdir = os.path.join(layerdir, 'classes')
                        if mainfile.startswith(classdir):
                            mainlayername = self.get_layer_name(layerdir)
                    logger.plain('  %s' % mainlayername)
                for classdir in classdirs:
                    fullpath = os.path.join(classdir, classfile)
                    if fullpath != mainfile:
                        if show_filenames:
                            print('  %s' % fullpath)
                        else:
                            print('  %s' % self.get_layer_name(os.path.dirname(classdir)))

        if overlayed_class_found:
            items_listed = True;

        if not items_listed:
            logger.plain('No overlayed files found.')
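The comment block in do_show_overlayed() describes class lookup as "the first matching file in BBPATH". A simplified stand-in for bb.utils.which() showing that search order (the layer paths below are illustrative):

```python
import os

def which(path_var, relpath):
    # Return the first existing match, scanning directories in the
    # order they appear in the colon-separated path variable.
    for directory in path_var.split(':'):
        candidate = os.path.join(directory, relpath)
        if os.path.exists(candidate):
            return candidate
    return ''

# Earlier BBPATH entries win, so the order each layer.conf prepends or
# appends to BBPATH decides which classes/foo.bbclass gets picked up.
bbpath = "/layers/meta-custom:/layers/meta"   # illustrative paths
print(which(bbpath, os.path.join('classes', 'foo.bbclass')))
```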
    def do_show_recipes(self, args):
        """list available recipes, showing the layer they are provided by

usage: show-recipes [-f] [-m] [pnspec]

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Optionally you may specify
pnspec to match a specified recipe name (supports wildcards). Note that
skipped recipes will also be listed, with a " (skipped)" suffix.

Options:
  -f  instead of the default formatting, list filenames of higher priority
      recipes with other available recipes indented underneath
  -m  only list where multiple recipes (in the same layer or different
      layers) exist for the same recipe name
"""
        self.check_prepare_cooker()

        show_filenames = False
        show_multi_provider_only = False
        pnspec = None
        title = 'Available recipes:'
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-m':
                show_multi_provider_only = True
            elif not arg.startswith('-'):
                pnspec = arg
                title = 'Available recipes matching %s:' % pnspec
            else:
                sys.stderr.write("show-recipes: invalid option %s\n" % arg)
                self.do_help('')
                return
        self.list_recipes(title, pnspec, False, False, show_filenames, show_multi_provider_only)


    def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only):
        pkg_pn = self.cooker.status.pkg_pn
        (latest_versions, preferred_versions) = bb.providers.findProviders(self.cooker.configuration.data, self.cooker.status, pkg_pn)
        allproviders = bb.providers.allProviders(self.cooker.status)

        # Ensure we list skipped recipes
        # We are largely guessing about PN, PV and the preferred version here,
        # but we have no choice since skipped recipes are not fully parsed
        skiplist = self.cooker.skiplist.keys()
        skiplist.sort( key=lambda fileitem: self.cooker.calc_bbfile_priority(fileitem) )
        skiplist.reverse()
        for fn in skiplist:
            recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_')
            p = recipe_parts[0]
            if len(recipe_parts) > 1:
                ver = (None, recipe_parts[1], None)
            else:
                ver = (None, 'unknown', None)
            allproviders[p].append((ver, fn))
            if not p in pkg_pn:
                pkg_pn[p] = 'dummy'
                preferred_versions[p] = (ver, fn)

        def print_item(f, pn, ver, layer, ispref):
            if f in skiplist:
                skipped = ' (skipped)'
            else:
                skipped = ''
            if show_filenames:
                if ispref:
                    logger.plain("%s%s", f, skipped)
                else:
                    logger.plain("  %s%s", f, skipped)
            else:
                if ispref:
                    logger.plain("%s:", pn)
                logger.plain("  %s %s%s", layer.ljust(20), ver, skipped)

        preffiles = []
        items_listed = False
        for p in sorted(pkg_pn):
            if pnspec:
                if not fnmatch.fnmatch(p, pnspec):
                    continue

            if len(allproviders[p]) > 1 or not show_multi_provider_only:
                pref = preferred_versions[p]
                preffile = bb.cache.Cache.virtualfn2realfn(pref[1])[0]
                if preffile not in preffiles:
                    preflayer = self.get_file_layer(preffile)
                    multilayer = False
                    same_ver = True
                    provs = []
                    for prov in allproviders[p]:
                        provfile = bb.cache.Cache.virtualfn2realfn(prov[1])[0]
                        provlayer = self.get_file_layer(provfile)
                        provs.append((provfile, provlayer, prov[0]))
                        if provlayer != preflayer:
                            multilayer = True
                        if prov[0] != pref[0]:
                            same_ver = False

                    if (multilayer or not show_overlayed_only) and (same_ver or not show_same_ver_only):
                        if not items_listed:
                            logger.plain('=== %s ===' % title)
                            items_listed = True
                        print_item(preffile, p, self.version_str(pref[0][0], pref[0][1]), preflayer, True)
                        for (provfile, provlayer, provver) in provs:
                            if provfile != preffile:
                                print_item(provfile, p, self.version_str(provver[0], provver[1]), provlayer, False)
                        # Ensure we don't show two entries for BBCLASSEXTENDed recipes
                        preffiles.append(preffile)

        return items_listed

        if self.cooker.overlayed:
            logger.plain('Overlayed recipes:')
            for f in self.cooker.overlayed.iterkeys():
                logger.plain('%s' % f)
                for of in self.cooker.overlayed[f]:
                    logger.plain('  %s' % of)
        else:
            logger.plain('No overlayed recipes found')

    def do_flatten(self, args):
        """flattens layer configuration into a separate output directory.

usage: flatten [layer1 layer2 [layer3]...] <outputdir>
usage: flatten <outputdir>

Takes the specified layers (or all layers in the current layer
configuration if none are specified) and builds a "flattened" directory
Takes the current layer configuration and builds a "flattened" directory
containing the contents of all layers, with any overlayed recipes removed
and bbappends appended to the corresponding recipes. Note that some manual
cleanup may still be necessary afterwards, in particular:

@@ -345,61 +148,21 @@ cleanup may still be necessary afterwards, in particular:
* where non-recipe files (such as patches) are overwritten (the flatten
  command will show a warning for these)
* where anything beyond the normal layer setup has been added to
  layer.conf (only the lowest priority number layer's layer.conf is used)
  layer.conf (only the lowest priority layer's layer.conf is used)
* overridden/appended items from bbappends will need to be tidied up
* when the flattened layers do not have the same directory structure (the
  flatten command should show a warning when this will cause a problem)

Warning: if you flatten several layers where another layer is intended to
be used "inbetween" them (in layer priority order) such that recipes /
bbappends in the layers interact, and then attempt to use the new output
layer together with that other layer, you may no longer get the same
build results (as the layer priority order has effectively changed).
"""
        arglist = args.split()
        if len(arglist) < 1:
        if len(arglist) != 1:
            logger.error('Please specify an output directory')
            self.do_help('flatten')
            return

        if len(arglist) == 2:
            logger.error('If you specify layers to flatten you must specify at least two')
            self.do_help('flatten')
            return

        outputdir = arglist[-1]
        if os.path.exists(outputdir) and os.listdir(outputdir):
            logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
        if os.path.exists(arglist[0]) and os.listdir(arglist[0]):
            logger.error('Directory %s exists and is non-empty, please clear it out first' % arglist[0])
            return

        self.check_prepare_cooker()
        layers = self.bblayers
        if len(arglist) > 2:
            layernames = arglist[:-1]
            found_layernames = []
            found_layerdirs = []
            for layerdir in layers:
                layername = self.get_layer_name(layerdir)
                if layername in layernames:
                    found_layerdirs.append(layerdir)
                    found_layernames.append(layername)

            for layername in layernames:
                if not layername in found_layernames:
                    logger.error('Unable to find layer %s in current configuration, please run "%s show-layers" to list configured layers' % (layername, os.path.basename(sys.argv[0])))
                    return
            layers = found_layerdirs
        else:
            layernames = []

        # Ensure a specified path matches our list of layers
        def layer_path_match(path):
            for layerdir in layers:
                if path.startswith(os.path.join(layerdir, '')):
                    return layerdir
            return None

        appended_recipes = []
        layers = (self.config_data.getVar('BBLAYERS', True) or "").split()
        for layer in layers:
            overlayed = []
            for f in self.cooker.overlayed.iterkeys():

@@ -417,7 +180,7 @@ build results (as the layer priority order has effectively changed).
                    ext = os.path.splitext(f1)[1]
                    if ext != '.bbappend':
                        fdest = f1full[len(layer):]
                        fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
                        fdest = os.path.normpath(os.sep.join([arglist[0],fdest]))
                        bb.utils.mkdirhier(os.path.dirname(fdest))
                        if os.path.exists(fdest):
                            if f1 == 'layer.conf' and root.endswith('/conf'):

@@ -432,84 +195,25 @@ build results (as the layer priority order has effectively changed).
                            if appends:
                                logger.plain('  Applying appends to %s' % fdest )
                                for appendname in appends:
                                    if layer_path_match(appendname):
                                        self.apply_append(appendname, fdest)
                                        appended_recipes.append(f1)
                                    self.apply_append(appendname, fdest)

        # Take care of when some layers are excluded and yet we have included bbappends for those recipes
        for recipename in self.cooker_data.appends.iterkeys():
            if recipename not in appended_recipes:
                appends = self.cooker_data.appends[recipename]
                first_append = None
                for appendname in appends:
                    layer = layer_path_match(appendname)
                    if layer:
                        if first_append:
                            self.apply_append(appendname, first_append)
                        else:
                            fdest = appendname[len(layer):]
                            fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
                            bb.utils.mkdirhier(os.path.dirname(fdest))
                            bb.utils.copyfile(appendname, fdest)
                            first_append = fdest

        # Get the regex for the first layer in our list (which is where the conf/layer.conf file will
        # have come from)
        first_regex = None
        layerdir = layers[0]
        for layername, pattern, regex, _ in self.cooker.status.bbfile_config_priorities:
            if regex.match(os.path.join(layerdir, 'test')):
                first_regex = regex
                break

        if first_regex:
            # Find the BBFILES entries that match (which will have come from this conf/layer.conf file)
            bbfiles = str(self.config_data.getVar('BBFILES', True)).split()
            bbfiles_layer = []
            for item in bbfiles:
                if first_regex.match(item):
                    newpath = os.path.join(outputdir, item[len(layerdir)+1:])
                    bbfiles_layer.append(newpath)

            if bbfiles_layer:
                # Check that all important layer files match BBFILES
                for root, dirs, files in os.walk(outputdir):
                    for f1 in files:
                        ext = os.path.splitext(f1)[1]
                        if ext in ['.bb', '.bbappend']:
                            f1full = os.sep.join([root, f1])
                            entry_found = False
                            for item in bbfiles_layer:
                                if fnmatch.fnmatch(f1full, item):
                                    entry_found = True
                                    break
                            if not entry_found:
                                logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)

    def get_file_layer(self, filename):
    def get_append_layer(self, appendname):
        for layer, _, regex, _ in self.cooker.status.bbfile_config_priorities:
            if regex.match(filename):
                for layerdir in self.bblayers:
                    if regex.match(os.path.join(layerdir, 'test')):
                        return self.get_layer_name(layerdir)
            if regex.match(appendname):
                return layer
        return "?"

    def get_layer_name(self, layerdir):
        return os.path.basename(layerdir.rstrip(os.sep))

    def apply_append(self, appendname, recipename):
        appendfile = open(appendname, 'r')
        recipefile = open(recipename, 'a')
        recipefile.write('\n')
        recipefile.write('##### bbappended from %s #####\n' % self.get_file_layer(appendname))
        recipefile.write('##### bbappended from %s #####\n' % self.get_append_layer(appendname))
        recipefile.writelines(appendfile.readlines())
        recipefile.close()
        appendfile.close()

    def do_show_appends(self, args):
        """list bbappend files and recipe files they apply to

usage: show-appends
usage: show_appends

Recipes are listed with the bbappends that apply to them as subitems.
"""
bitbake/bin/bitbake-prserv: 26 changes (executable file → normal file)

@@ -10,39 +10,37 @@ import prserv.serv

__version__="1.0.0"

PRHOST_DEFAULT='0.0.0.0'
PRHOST_DEFAULT=''
PRPORT_DEFAULT=8585

def main():
    parser = optparse.OptionParser(
        version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
        usage = "%prog < --start | --stop > [options]")
        usage = "%prog [options]")

    parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
                      dest="dbfile", type="string", default="prserv.sqlite3")
    parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
    parser.add_option("-f", "--file", help="database filename(default prserv.db)", action="store",
                      dest="dbfile", type="string", default="prserv.db")
    parser.add_option("-l", "--log", help="log filename(default prserv.log)", action="store",
                      dest="logfile", type="string", default="prserv.log")
    parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
                      action = "store", type="string", dest="loglevel", default = "INFO")
                      action = "store", type="string", dest="loglevel", default = "WARNING")
    parser.add_option("--start", help="start daemon",
                      action="store_true", dest="start")
                      action="store_true", dest="start", default="True")
    parser.add_option("--stop", help="stop daemon",
                      action="store_true", dest="stop")
                      action="store_false", dest="start")
    parser.add_option("--host", help="ip address to bind", action="store",
                      dest="host", type="string", default=PRHOST_DEFAULT)
    parser.add_option("--port", help="port number(default: 8585)", action="store",
    parser.add_option("--port", help="port number(default 8585)", action="store",
                      dest="port", type="int", default=PRPORT_DEFAULT)

    options, args = parser.parse_args(sys.argv)

    prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)

    if options.start:
        ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
    elif options.stop:
        ret=prserv.serv.stop_daemon(options.host, options.port)
        prserv.serv.start_daemon(options)
    else:
        ret=parser.print_help()
    return ret
        prserv.serv.stop_daemon()

if __name__ == "__main__":
    try:
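The PRHOST_DEFAULT change swaps '0.0.0.0' for the empty string; for IPv4 server sockets the two are equivalent, both meaning INADDR_ANY (accept connections on every interface), as this small sketch shows:

```python
import socket

for host in ('', '0.0.0.0'):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((host, 0))   # port 0 asks the OS for any free port
    print(repr(host), '->', s.getsockname())
    s.close()
```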
@@ -91,7 +91,7 @@ def register_idle_function(self, function, data):
cooker = bb.cooker.BBCooker(config, register_idle_function, initialenv)
config_data = cooker.configuration.data
cooker.status = config_data
cooker.handleCollections(config_data.getVar("BBFILE_COLLECTIONS", 1))
cooker.handleCollections(bb.data.getVar("BBFILE_COLLECTIONS", config_data, 1))

fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
buildfile = cooker.matchFile(fn)

@@ -108,9 +108,9 @@ if taskname.endswith("_setscene"):
if hashdata:
    bb.parse.siggen.set_taskdata(hashdata["hashes"], hashdata["deps"])
    for h in hashdata["hashes"]:
        the_data.setVar("BBHASH_%s" % h, hashdata["hashes"][h])
        bb.data.setVar("BBHASH_%s" % h, hashdata["hashes"][h], the_data)
    for h in hashdata["deps"]:
        the_data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h])
        bb.data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h], the_data)

ret = 0
if dryrun != "True":
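These hunks (and the one further down touching getVarFlag) trade between BitBake's two datastore idioms: the d.getVar(name, expand) method on one side and the module-level bb.data.getVar(name, d, expand) form on the other. A hedged sketch of the two call shapes, using a stand-in datastore rather than BitBake's real implementation:

```python
# Stand-in datastore illustrating the two accessor styles in the diff.
class DataSmart:
    def __init__(self):
        self._vars = {}
    def setVar(self, var, value):
        self._vars[var] = value
    def getVar(self, var, expand=False):
        return self._vars.get(var)

def getVar(var, d, expand=False):
    # The module-level style simply delegates to the datastore method.
    return d.getVar(var, expand)

d = DataSmart()
d.setVar("BBFILE_COLLECTIONS", "core")
print(d.getVar("BBFILE_COLLECTIONS", 1))     # method style
print(getVar("BBFILE_COLLECTIONS", d, 1))    # module-level style
```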
@@ -1,38 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))

import unittest
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

tests = ["bb.tests.codeparser",
         "bb.tests.cow",
         "bb.tests.data",
         "bb.tests.fetch",
         "bb.tests.utils"]

for t in tests:
    __import__(t)

unittest.main(argv=["bitbake-selftest"] + tests)
@@ -462,7 +462,7 @@ def main():
    state_group = 2

    for key in bb.data.keys(documentation):
        data = documentation.getVarFlag(key, "doc")
        data = bb.data.getVarFlag(key, "doc", documentation)
        if not data:
            continue
@@ -1,120 +0,0 @@
#!/usr/bin/env python

# Copyright (c) 2012 Wind River Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname( \
    os.path.abspath(__file__))), 'lib'))
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

import gtk
import optparse
import pygtk

from bb.ui.crumbs.hig import DeployImageDialog, ImageSelectionDialog, CrumbsMessageDialog
from bb.ui.crumbs.hobwidget import HobAltButton, HobButton

# I put all the fs bitbake supported here. Need more test.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):
    def __init__(self, image_path=''):
        super(DeployWindow, self).__init__()

        if len(image_path) > 0:
            valid = True
            if not os.path.exists(image_path):
                valid = False
                lbl = "<b>Invalid image file path: %s.</b>\nPress <b>Select Image</b> to select an image." % image_path
            else:
                image_path = os.path.abspath(image_path)
                extend_name = os.path.splitext(image_path)[1][1:]
                if extend_name not in DEPLOYABLE_IMAGE_TYPES:
                    valid = False
                    lbl = "<b>Undeployable imge type: %s</b>\nPress <b>Select Image</b> to select an image." % extend_name

            if not valid:
                image_path = ''
                crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
                button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
                HobButton.style_button(button)
                crumbs_dialog.run()
                crumbs_dialog.destroy()

        self.deploy_dialog = DeployImageDialog(Title, image_path, self,
                                               gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT
                                               | gtk.DIALOG_NO_SEPARATOR, None, standalone=True)
        close_button = self.deploy_dialog.add_button("Close", gtk.RESPONSE_NO)
        HobAltButton.style_button(close_button)
        close_button.connect('clicked', gtk.main_quit)

        write_button = self.deploy_dialog.add_button("Write USB image", gtk.RESPONSE_YES)
        HobAltButton.style_button(write_button)

        self.deploy_dialog.connect('select_image_clicked', self.select_image_clicked_cb)
        self.deploy_dialog.connect('destroy', gtk.main_quit)
        response = self.deploy_dialog.show()

    def select_image_clicked_cb(self, dialog):
        cwd = os.getcwd()
        dialog = ImageSelectionDialog(cwd, DEPLOYABLE_IMAGE_TYPES, Title, self, gtk.FILE_CHOOSER_ACTION_SAVE )
        button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
        HobAltButton.style_button(button)
        button = dialog.add_button("Open", gtk.RESPONSE_YES)
        HobAltButton.style_button(button)
        response = dialog.run()

        if response == gtk.RESPONSE_YES:
            if not dialog.image_names:
                lbl = "<b>No selections made</b>\nClicked the radio button to select a image."
                crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
                button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
                HobButton.style_button(button)
                crumbs_dialog.run()
                crumbs_dialog.destroy()
                dialog.destroy()
                return

            # get the full path of image
            image_path = os.path.join(dialog.image_folder, dialog.image_names[0])
            self.deploy_dialog.set_image_text_buffer(image_path)
            self.deploy_dialog.set_image_path(image_path)

        dialog.destroy()

def main():
    parser = optparse.OptionParser(
        usage = """%prog [-h] [image_file]

%prog writes bootable images to USB devices. You can
provide the image file on the command line or select it using the GUI.""")

    options, args = parser.parse_args(sys.argv)
    image_file = args[1] if len(args) > 1 else ''
    dw = DeployWindow(image_file)

if __name__ == '__main__':
    try:
        main()
        gtk.main()
    except Exception:
        import traceback
        traceback.print_exc(3)
@@ -52,8 +52,8 @@ syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue
syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
syn region bbVarPyValue start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python
@@ -103,13 +103,7 @@ Show debug logging for the specified logging domains
.TP
.B \-P, \-\-profile
profile the command and print a report

.SH ENVIRONMENT VARIABLES
bitbake uses the following environment variables to control its
operation:
.TP
.B BITBAKE_UI
The bitbake user interface; overridden by the \fB-u\fP commandline option.

.SH AUTHORS
BitBake was written by
@@ -186,7 +186,7 @@ include</literal> directive.</para>
|
||||
<title>Defining Python functions into the global Python namespace</title>
|
||||
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
|
||||
<para><screen>def get_depends(bb, d):
|
||||
if d.getVar('SOMECONDITION', True):
|
||||
if bb.data.getVar('SOMECONDITION', d, True):
|
||||
return "dependencywithcond"
|
||||
else:
|
||||
return "dependency"
|
||||
@@ -204,7 +204,7 @@ include</literal> directive.</para>
|
||||
<section>
|
||||
<title>Inheritance</title>
|
||||
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
|
||||
<para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.bbclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
|
||||
<para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.oeclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Tasks</title>
|
||||
@@ -228,7 +228,7 @@ addtask printdate before do_build</screen></para>
|
||||
<para>'nostamp' - don't generate a stamp file for a task. This means the task is always rexecuted.</para>
|
||||
<para>'fakeroot' - this task needs to be run in a fakeroot environment, obtained by adding the variables in FAKEROOTENV to the environment.</para>
|
||||
<para>'umask' - the umask to run the task under.</para>
|
||||
<para> For the 'deptask', 'rdeptask', 'depends', 'rdepends' and 'recrdeptask' flags please see the dependencies section.</para>
|
||||
<para> For the 'deptask', 'rdeptask', 'recdeptask' and 'recrdeptask' flags please see the dependencies section.</para>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
@@ -251,7 +251,7 @@ of the event and the content of the <varname>FILE</varname> variable.</para>
|
||||
<section>
|
||||
<title>Variants</title>
|
||||
<para>Two BitBake features exist to facilitate the creation of multiple buildable incarnations from a single recipe file.</para>
|
||||
<para>The first is <varname>BBCLASSEXTEND</varname>. This variable is a space separated list of classes used to "extend" the recipe for each variant. As an example, setting <screen>BBCLASSEXTEND = "native"</screen> results in a second incarnation of the current recipe being available. This second incarnation will have the "native" class inherited.</para>
|
||||
<para>The first is <varname>BBCLASSEXTEND</varname>. This variable is a space separated list of classes used to "extend" the recipe for each variant. As an example, setting <screen>BBCLASSEXTEND = "native"</screen> results in a second incarnation of the current recipe being available. This second incarantion will have the "native" class inherited.</para>
<para>The second feature is <varname>BBVERSIONS</varname>. This variable allows a single recipe to build multiple versions of a project from a single recipe file, and allows you to specify conditional metadata (using the <varname>OVERRIDES</varname> mechanism) for a single version, or an optionally named range of versions:</para>
<para><screen>BBVERSIONS = "1.0 2.0 git"
SRC_URI_git = "git://someurl/somepath.git"</screen></para>
@@ -308,35 +308,37 @@ SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;pat
</section>
<section>
<title>Dependency handling</title>
<para>BitBake handles dependencies at the task level to allow for efficient operation with multiple processes executing in parallel. A robust method of specifying task dependencies is therefore needed. </para>
<para>BitBake 1.7.x onwards works with the metadata at the task level since this is optimal when dealing with multiple threads of execution. A robust method of specifying task dependencies is therefore needed. </para>
<section>
<title>Dependencies internal to the .bb file</title>
<para>Where the dependencies are internal to a given .bb file, the dependencies are handled by the previously detailed addtask directive.</para>
</section>

<section>
<title>Build Dependencies</title>
<title>DEPENDS</title>
<para>DEPENDS lists build time dependencies. The 'deptask' flag for tasks is used to signify the task of each item listed in DEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_configure[deptask] = "do_populate_staging"</screen></para>
<para>means the do_populate_staging task of each item in DEPENDS must have completed before do_configure can execute.</para>
</section>
<section>
<title>Runtime Dependencies</title>
<para>The PACKAGES variable lists runtime packages and each of these can have RDEPENDS and RRECOMMENDS runtime dependencies. The 'rdeptask' flag for tasks is used to signify the task of each runtime dependency which must have completed before that task can be executed.</para>
<title>RDEPENDS</title>
<para>RDEPENDS lists runtime dependencies. The 'rdeptask' flag for tasks is used to signify the task of each item listed in RDEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_package_write[rdeptask] = "do_package"</screen></para>
<para>means the do_package task of each item in RDEPENDS must have completed before do_package_write can execute.</para>
</section>
<section>
<title>Recursive Dependencies</title>
<para>These are specified with the 'recrdeptask' flag which is used to signify the task(s) of dependencies which must have completed before that task can be executed. It works by looking through the build and runtime dependencies of the current recipe as well as any inter-task dependencies the task has, then adding a dependency on the listed task. It will then recurse through the dependencies of those tasks and so on.</para>
<para>It may be desirable to recurse not just through the dependencies of those tasks but through the build and runtime dependencies of dependent tasks too. If that is the case, the taskname itself should be referenced in the task list, e.g. do_a[recrdeptask] = "do_a do_b".</para>
<title>Recursive DEPENDS</title>
<para>These are specified with the 'recdeptask' flag and is used to signify the task(s) of each DEPENDS which must have completed before that task can be executed. It applies recursively so the DEPENDS of each item in the original DEPENDS must be met and so on.</para>
</section>
<section>
<title>Recursive RDEPENDS</title>
<para>These are specified with the 'recrdeptask' flag and is used to signify the task(s) of each RDEPENDS which must have completed before that task can be executed. It applies recursively so the RDEPENDS of each item in the original RDEPENDS must be met and so on. It also runs all DEPENDS first.</para>
</section>
<section>
<title>Inter task</title>
<para>The 'depends' flag for tasks is a more generic form which allows an interdependency on specific tasks rather than specifying the data in DEPENDS.</para>
<para>The 'depends' flag for tasks is a more generic form which allows an interdependency on specific tasks rather than specifying the data in DEPENDS or RDEPENDS.</para>
<para><screen>do_patch[depends] = "quilt-native:do_populate_staging"</screen></para>
<para>means the do_populate_staging task of the target quilt-native must have completed before do_patch can execute.</para>
<para>The 'rdepends' flag works in a similar way but takes targets in the runtime namespace instead of the build time dependency namespace.</para>
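<para>A sketch following the same pattern in the runtime namespace (the target and task names here are illustrative assumptions, not taken from the text above):</para>
<para><screen>do_package_write[rdepends] = "somepackage:do_package"</screen></para>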
</section>
</section>

@@ -386,7 +388,7 @@ ftp://.*/.* http://somemirror.org/sources/ \n \
http://.*/.* http://somemirror.org/sources/ \n \
https://.*/.* http://somemirror.org/sources/ \n"</screen></para>

<para>Non-local downloaded output is placed into the directory specified by the <varname>DL_DIR</varname>. For non local archive downloads the code can verify sha256 and md5 checksums for the download to ensure the file has been downloaded correctly. These may be specified either in the form <varname>SRC_URI[md5sum]</varname> for the md5 checksum and <varname>SRC_URI[sha256sum]</varname> for the sha256 checksum or as parameters on the SRC_URI such as SRC_URI="http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d". If <varname>BB_STRICT_CHECKSUM</varname> is set, any download without a checksum will trigger an error message. In cases where multiple files are listed in SRC_URI, the name parameter is used to assign names to the urls and these are then specified in the checksums in the form SRC_URI[name.sha256sum].</para>
<para>Non-local downloaded output is placed into the directory specified by the <varname>DL_DIR</varname>. For non local downloads the code can check checksums for the download to ensure the file has been downloaded correctly. These are specified in the form <varname>SRC_URI[md5sum]</varname> for the md5 checksum and <varname>SRC_URI[sha256sum]</varname> for the sha256 checksum. If <varname>BB_STRICT_CHECKSUM</varname> is set, any download without a checksum will trigger an error message. In cases where multiple files are listed in SRC_URI, the name parameter is used to assign names to the urls and these are then specified in the checksums in the form SRC_URI[name.sha256sum].</para>
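<para>A short sketch of the multiple-file case described above (the URLs are illustrative and the checksum values are placeholders, not real digests):</para>
<para><screen>SRC_URI = "http://example.com/foo.tar.gz;name=foo \
           http://example.com/bar.tar.gz;name=bar"
SRC_URI[foo.sha256sum] = "0000000000000000000000000000000000000000000000000000000000000000"
SRC_URI[bar.sha256sum] = "1111111111111111111111111111111111111111111111111111111111111111"</screen></para>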

</section>


@@ -21,24 +21,12 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.15.2"
__version__ = "1.13.3"

import sys
if sys.version_info < (2, 6, 0):
raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")


class BBHandledException(Exception):
"""
The big dilemma for generic bitbake code is what information to give the user
when an exception occurs. Any exception inheriting this base exception class
has already provided information to the user via some 'fired' message type such as
an explicitly fired event using bb.fire, or a bb.error message. If bitbake
encounters an exception derived from this class, no backtrace or other information
will be given to the user, its assumed the earlier event provided the relevant information.
"""
pass

import os
import logging

@@ -79,8 +67,9 @@ if "BBDEBUG" in os.environ:
if level:
bb.msg.set_debug_level(level)

from bb import fetch2 as fetch
sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
if True or os.environ.get("BBFETCH2"):
from bb import fetch2 as fetch
sys.modules['bb.fetch'] = sys.modules['bb.fetch2']

# Messaging convenience functions
def plain(*args):

@@ -53,7 +53,7 @@ class FuncFailed(Exception):
self.logfile = logfile
self.name = name
if name:
self.msg = 'Function failed: %s' % name
self.msg = "Function '%s' failed" % name
else:
self.msg = "Function failed"

@@ -70,9 +70,9 @@ class TaskBase(event.Event):

def __init__(self, t, d ):
self._task = t
self._package = d.getVar("PF", True)
self._package = bb.data.getVar("PF", d, 1)
event.Event.__init__(self)
self._message = "package %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])

def getTask(self):
return self._task
@@ -80,9 +80,6 @@ class TaskBase(event.Event):
def setTask(self, task):
self._task = task

def getDisplayName(self):
return bb.event.getName(self)[4:]

task = property(getTask, setTask, None, "task property")

class TaskStarted(TaskBase):
@@ -94,20 +91,9 @@ class TaskSucceeded(TaskBase):
class TaskFailed(TaskBase):
"""Task execution failed"""

def __init__(self, task, logfile, metadata, errprinted = False):
self.logfile = logfile
self.errprinted = errprinted
super(TaskFailed, self).__init__(task, metadata)

class TaskFailedSilent(TaskBase):
"""Task execution failed (silently)"""
def __init__(self, task, logfile, metadata):
self.logfile = logfile
super(TaskFailedSilent, self).__init__(task, metadata)

def getDisplayName(self):
# Don't need to tell the user it was silent
return "Failed"
super(TaskFailed, self).__init__(task, metadata)

class TaskInvalid(TaskBase):

@@ -135,8 +121,7 @@ class LogTee(object):

def __repr__(self):
return '<LogTee {0}>'.format(self.name)
def flush(self):
self.outfile.flush()


def exec_func(func, d, dirs = None):
"""Execute an BB 'function'"""
@@ -167,6 +152,8 @@ def exec_func(func, d, dirs = None):
bb.utils.mkdirhier(adir)

ispython = flags.get('python')
if flags.get('fakeroot') and not flags.get('task'):
bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)

lockflag = flags.get('lockfiles')
if lockflag:
@@ -175,19 +162,8 @@ def exec_func(func, d, dirs = None):
lockfiles = None

tempdir = data.getVar('T', d, 1)

# or func allows items to be executed outside of the normal
# task set, such as buildhistory
task = data.getVar('BB_RUNTASK', d, 1) or func
if task == func:
taskfunc = task
else:
taskfunc = "%s.%s" % (task, func)

runfmt = data.getVar('BB_RUNFMT', d, 1) or "run.{func}.{pid}"
runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
runfile = os.path.join(tempdir, runfn)
bb.utils.mkdirhier(os.path.dirname(runfile))
bb.utils.mkdirhier(tempdir)
runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))

with bb.utils.fileslocked(lockfiles):
if ispython:
@@ -218,8 +194,6 @@ def exec_func_python(func, d, runfile, cwd=None):
olddir = None
os.chdir(cwd)

bb.debug(2, "Executing python function %s" % func)

try:
comp = utils.better_compile(code, func, bbfile)
utils.better_exec(comp, {"d": d}, code, bbfile)
@@ -229,15 +203,13 @@ def exec_func_python(func, d, runfile, cwd=None):

raise FuncFailed(func, None)
finally:
bb.debug(2, "Python function %s finished" % func)

if cwd and olddir:
try:
os.chdir(olddir)
except OSError:
pass

def exec_func_shell(func, d, runfile, cwd=None):
def exec_func_shell(function, d, runfile, cwd=None):
"""Execute a shell function from the metadata

Note on directory behavior. The 'dirs' varflag should contain a list
@@ -250,36 +222,27 @@ def exec_func_shell(func, d, runfile, cwd=None):

with open(runfile, 'w') as script:
script.write('#!/bin/sh -e\n')
data.emit_func(func, script, d)

if bb.msg.loggerVerboseLogs:
if bb.msg.loggerDefaultVerbose:
script.write("set -x\n")
data.emit_func(function, script, d)
if cwd:
script.write("cd %s\n" % cwd)
script.write("%s\n" % func)
script.write("%s\n" % function)

os.chmod(runfile, 0775)

cmd = runfile
if d.getVarFlag(func, 'fakeroot'):
fakerootcmd = d.getVar('FAKEROOT', True)
if fakerootcmd:
cmd = [fakerootcmd, runfile]

if bb.msg.loggerDefaultVerbose:
logfile = LogTee(logger, sys.stdout)
else:
logfile = sys.stdout

bb.debug(2, "Executing shell function %s" % func)

try:
bb.process.run(cmd, shell=False, stdin=NULL, log=logfile)
except bb.process.CmdError:
logfn = d.getVar('BB_LOGFILE', True)
raise FuncFailed(func, logfn)

bb.debug(2, "Shell function %s finished" % func)
raise FuncFailed(function, logfn)

def _task_data(fn, task, d):
localdata = data.createCopy(d)
@@ -310,46 +273,22 @@ def _exec_task(fn, task, d, quieterr):
bb.fatal("T variable not set, unable to build")

bb.utils.mkdirhier(tempdir)

# Determine the logfile to generate
logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
logbase = logfmt.format(task=task, pid=os.getpid())

# Document the order of the tasks...
logorder = os.path.join(tempdir, 'log.task_order')
try:
logorderfile = file(logorder, 'a')
except OSError:
logger.exception("Opening log file '%s'", logorder)
pass
logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
logorderfile.close()

# Setup the courtesy link to the logfn
loglink = os.path.join(tempdir, 'log.{0}'.format(task))
logfn = os.path.join(tempdir, logbase)
logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid()))
if loglink:
bb.utils.remove(loglink)

try:
os.symlink(logbase, loglink)
os.symlink(logfn, loglink)
except OSError:
pass

prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)

class ErrorCheckHandler(logging.Handler):
def __init__(self):
self.triggered = False
logging.Handler.__init__(self, logging.ERROR)
def emit(self, record):
self.triggered = True

# Handle logfiles
si = file('/dev/null', 'r')
try:
bb.utils.mkdirhier(os.path.dirname(logfn))
logfile = file(logfn, 'w')
except OSError:
logger.exception("Opening log file '%s'", logfn)
@@ -372,11 +311,7 @@ def _exec_task(fn, task, d, quieterr):
handler.setLevel(logging.DEBUG - 2)
bblogger.addHandler(handler)

errchk = ErrorCheckHandler()
bblogger.addHandler(errchk)

localdata.setVar('BB_LOGFILE', logfn)
localdata.setVar('BB_RUNTASK', task)

event.fire(TaskStarted(task, localdata), localdata)
try:
@@ -386,12 +321,9 @@ def _exec_task(fn, task, d, quieterr):
for func in (postfuncs or '').split():
exec_func(func, localdata)
except FuncFailed as exc:
if quieterr:
event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
else:
errprinted = errchk.triggered
if not quieterr:
logger.error(str(exc))
event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
event.fire(TaskFailed(task, logfn, localdata), localdata)
return 1
finally:
sys.stdout.flush()
@@ -434,7 +366,7 @@ def exec_task(fn, task, d):
if not quieterr:
logger.error("Build of %s failed" % (task))
logger.error(format_exc())
failedevent = TaskFailed(task, None, d, True)
failedevent = TaskFailed(task, None, d)
event.fire(failedevent, d)
return 1

@@ -495,24 +427,6 @@ def del_stamp(task, d, file_name = None):
stamp = stamp_internal(task, d, file_name)
bb.utils.remove(stamp)

def write_taint(task, d, file_name = None):
"""
Creates a "taint" file which will force the specified task and its
dependents to be re-run the next time by influencing the value of its
taskhash.
(d can be a data dict or dataCache)
"""
import uuid
if file_name:
taintfn = d.stamp[file_name] + '.' + task + '.taint'
else:
taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
bb.utils.mkdirhier(os.path.dirname(taintfn))
# The specific content of the taint file is not really important,
# we just need it to be random, so a random UUID is used
with open(taintfn, 'w') as taintf:
taintf.write(str(uuid.uuid4()))

def stampfile(taskname, d, file_name = None):
"""
Return the stamp for a given task
@@ -544,7 +458,6 @@ def add_tasks(tasklist, d):
deptask = data.expand(flags[name], d)
task_deps[name][task] = deptask
getTask('depends')
getTask('rdepends')
getTask('deptask')
getTask('rdeptask')
getTask('recrdeptask')

@@ -1,12 +1,11 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
# BitBake 'Event' implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
@@ -32,6 +31,7 @@
import os
import logging
from collections import defaultdict
import bb.data
import bb.utils

logger = logging.getLogger("BitBake.Cache")
@@ -43,10 +43,10 @@ except ImportError:
logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.")

__cache_version__ = "144"
__cache_version__ = "142"

def getCacheFile(path, filename, data_hash):
return os.path.join(path, filename + "." + data_hash)
def getCacheFile(path, filename):
return os.path.join(path, filename)

# RecipeInfoCommon defines common data retrieving methods
# from meta data for caches. CoreRecipeInfo as well as other
@@ -76,13 +76,9 @@ class RecipeInfoCommon(object):
for task in tasks)

@classmethod
def flaglist(cls, flag, varlist, metadata, squash=False):
out_dict = dict((var, metadata.getVarFlag(var, flag, True))
def flaglist(cls, flag, varlist, metadata):
return dict((var, metadata.getVarFlag(var, flag, True))
for var in varlist)
if squash:
return dict((k,v) for (k,v) in out_dict.iteritems() if v)
else:
return out_dict

@classmethod
def getvar(cls, var, metadata):
@@ -132,7 +128,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.stamp = self.getvar('STAMP', metadata)
self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
self.depends = self.depvar('DEPENDS', metadata)
self.provides = self.depvar('PROVIDES', metadata)
@@ -143,9 +138,11 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
self.inherits = self.getvar('__inherit_cache', metadata)
self.summary = self.getvar('SUMMARY', metadata)
self.license = self.getvar('LICENSE', metadata)
self.section = self.getvar('SECTION', metadata)
self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)

@classmethod
def init_cacheData(cls, cachedata):
@@ -159,7 +156,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.stamp = {}
cachedata.stamp_base = {}
cachedata.stamp_extrainfo = {}
cachedata.file_checksums = {}
cachedata.fn_provides = {}
cachedata.pn_provides = defaultdict(list)
cachedata.all_depends = []
@@ -178,8 +174,10 @@ class CoreRecipeInfo(RecipeInfoCommon):

cachedata.basetaskhash = {}
cachedata.inherits = {}
cachedata.summary = {}
cachedata.license = {}
cachedata.section = {}
cachedata.fakerootenv = {}
cachedata.fakerootnoenv = {}
cachedata.fakerootdirs = {}

def add_cacheData(self, cachedata, fn):
@@ -191,7 +189,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.stamp[fn] = self.stamp
cachedata.stamp_base[fn] = self.stamp_base
cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
cachedata.file_checksums[fn] = self.file_checksums

provides = [self.pn]
for provide in self.provides:
@@ -242,8 +239,10 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.basetaskhash[identifier] = taskhash

cachedata.inherits[fn] = self.inherits
cachedata.summary[fn] = self.summary
cachedata.license[fn] = self.license
cachedata.section[fn] = self.section
cachedata.fakerootenv[fn] = self.fakerootenv
cachedata.fakerootnoenv[fn] = self.fakerootnoenv
cachedata.fakerootdirs[fn] = self.fakerootdirs


@@ -253,19 +252,18 @@ class Cache(object):
BitBake Cache implementation
"""

def __init__(self, data, data_hash, caches_array):
def __init__(self, data, caches_array):
# Pass caches_array information into Cache Constructor
# It will be used in later for deciding whether we
# need extra cache file dump/load support
self.caches_array = caches_array
self.cachedir = data.getVar("CACHE", True)
self.cachedir = bb.data.getVar("CACHE", data, True)
self.clean = set()
self.checked = set()
self.depends_cache = {}
self.data = None
self.data_fn = None
self.cacheclean = True
self.data_hash = data_hash

if self.cachedir in [None, '']:
self.has_cache = False
@@ -274,17 +272,26 @@ class Cache(object):
return

self.has_cache = True
self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat")

logger.debug(1, "Using cache in '%s'", self.cachedir)
bb.utils.mkdirhier(self.cachedir)

# If any of configuration.data's dependencies are newer than the
# cache there isn't even any point in loading it...
newest_mtime = 0
deps = bb.data.getVar("__base_depends", data)

old_mtimes = [old_mtime for _, old_mtime in deps]
old_mtimes.append(newest_mtime)
newest_mtime = max(old_mtimes)

cache_ok = True
if self.caches_array:
for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cache_ok = cache_ok and os.path.exists(cachefile)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
cache_ok = cache_ok and (bb.parse.cached_mtime_noerror(cachefile) >= newest_mtime)
cache_class.init_cacheData(self)
if cache_ok:
self.load_cachefile()
@@ -318,7 +325,7 @@ class Cache(object):
# Calculate the correct cachesize of all those cache files
for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
with open(cachefile, "rb") as cachefile:
cachesize += os.fstat(cachefile.fileno()).st_size

@@ -326,7 +333,7 @@ class Cache(object):

for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
with open(cachefile, "rb") as cachefile:
pickled = pickle.Unpickler(cachefile)
while cachefile:
@@ -344,7 +351,7 @@ class Cache(object):
current_percent = 100 * current_progress / cachesize
if current_percent > previous_percent:
previous_percent = current_percent
bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
bb.event.fire(bb.event.CacheLoadProgress(current_progress),
self.data)

previous_progress += current_progress
@@ -579,7 +586,7 @@ class Cache(object):
for cache_class in self.caches_array:
if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
cache_class_name = cache_class.__name__
cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
file_dict[cache_class_name] = open(cachefile, "wb")
pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

@@ -684,7 +691,7 @@ def init(cooker):
Files causing parsing errors are evicted from the cache.

"""
return Cache(cooker.configuration.data, cooker.configuration.data_hash)
return Cache(cooker.configuration.data)


class CacheData(object):
@@ -711,115 +718,4 @@ class CacheData(object):
for info in info_array:
info.add_cacheData(self, fn)


class MultiProcessCache(object):
"""
BitBake multi-process cache implementation

Used by the codeparser & file checksum caches
"""

def __init__(self):
self.cachefile = None
self.cachedata = self.create_cachedata()
self.cachedata_extras = self.create_cachedata()

def init_cache(self, d):
cachedir = (d.getVar("PERSISTENT_DIR", True) or
d.getVar("CACHE", True))
if cachedir in [None, '']:
return
bb.utils.mkdirhier(cachedir)
self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
logger.debug(1, "Using cache in '%s'", self.cachefile)

try:
p = pickle.Unpickler(file(self.cachefile, "rb"))
data, version = p.load()
except:
return

if version != self.__class__.CACHE_VERSION:
return

self.cachedata = data

def internSet(self, items):
new = set()
for i in items:
new.add(intern(i))
return new

def compress_keys(self, data):
# Override in subclasses if desired
return

def create_cachedata(self):
data = [{}]
return data

def save_extras(self, d):
if not self.cachefile:
return

glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

i = os.getpid()
lf = None
while not lf:
lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
if not lf or os.path.exists(self.cachefile + "-" + str(i)):
if lf:
bb.utils.unlockfile(lf)
lf = None
i = i + 1
continue

p = pickle.Pickler(file(self.cachefile + "-" + str(i), "wb"), -1)
p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

bb.utils.unlockfile(lf)
bb.utils.unlockfile(glf)

def merge_data(self, source, dest):
for j in range(0,len(dest)):
for h in source[j]:
if h not in dest[j]:
dest[j][h] = source[j][h]

def save_merge(self, d):
if not self.cachefile:
return

glf = bb.utils.lockfile(self.cachefile + ".lock")

try:
p = pickle.Unpickler(file(self.cachefile, "rb"))
data, version = p.load()
except (IOError, EOFError):
data, version = None, None

if version != self.__class__.CACHE_VERSION:
data = self.create_cachedata()

for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
f = os.path.join(os.path.dirname(self.cachefile), f)
try:
p = pickle.Unpickler(file(f, "rb"))
extradata, version = p.load()
except (IOError, EOFError):
extradata, version = self.create_cachedata(), None

if version != self.__class__.CACHE_VERSION:
continue

self.merge_data(extradata, data)
os.unlink(f)

self.compress_keys(data)

p = pickle.Pickler(file(self.cachefile, "wb"), -1)
p.dump([data, self.__class__.CACHE_VERSION])

bb.utils.unlockfile(glf)



@@ -40,7 +40,6 @@ class HobRecipeInfo(RecipeInfoCommon):
self.summary = self.getvar('SUMMARY', metadata)
self.license = self.getvar('LICENSE', metadata)
self.section = self.getvar('SECTION', metadata)
self.description = self.getvar('DESCRIPTION', metadata)

@classmethod
def init_cacheData(cls, cachedata):
@@ -48,10 +47,8 @@ class HobRecipeInfo(RecipeInfoCommon):
cachedata.summary = {}
cachedata.license = {}
cachedata.section = {}
cachedata.description = {}

def add_cacheData(self, cachedata, fn):
cachedata.summary[fn] = self.summary
cachedata.license[fn] = self.license
cachedata.section[fn] = self.section
cachedata.description[fn] = self.description

@@ -1,90 +0,0 @@
# Local file checksum cache implementation
#
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import stat
import bb.utils
import logging
from bb.cache import MultiProcessCache

logger = logging.getLogger("BitBake.Cache")

try:
import cPickle as pickle
except ImportError:
import pickle
logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.")


# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
cache = {}

def cached_mtime(self, f):
if f not in self.cache:
self.cache[f] = os.stat(f)[stat.ST_MTIME]
return self.cache[f]

def cached_mtime_noerror(self, f):
if f not in self.cache:
try:
self.cache[f] = os.stat(f)[stat.ST_MTIME]
except OSError:
return 0
return self.cache[f]

def update_mtime(self, f):
self.cache[f] = os.stat(f)[stat.ST_MTIME]
return self.cache[f]

def clear(self):
self.cache.clear()

# Checksum + mtime cache (persistent)
class FileChecksumCache(MultiProcessCache):
cache_file_name = "local_file_checksum_cache.dat"
CACHE_VERSION = 1

def __init__(self):
self.mtime_cache = FileMtimeCache()
MultiProcessCache.__init__(self)

def get_checksum(self, f):
entry = self.cachedata[0].get(f)
cmtime = self.mtime_cache.cached_mtime(f)
if entry:
(mtime, hashval) = entry
if cmtime == mtime:
return hashval
else:
bb.debug(2, "file %s changed mtime, recompute checksum" % f)

hashval = bb.utils.md5_file(f)
self.cachedata_extras[0][f] = (cmtime, hashval)
return hashval

def merge_data(self, source, dest):
for h in source[0]:
if h in dest:
(smtime, _) = source[0][h]
(dmtime, _) = dest[0][h]
if smtime > dmtime:
dest[0][h] = source[0][h]
else:
dest[0][h] = source[0][h]
@@ -5,10 +5,10 @@ import os.path
import bb.utils, bb.data
from itertools import chain
from pysh import pyshyacc, pyshlex, sherrors
from bb.cache import MultiProcessCache


logger = logging.getLogger('BitBake.CodeParser')
PARSERCACHE_VERSION = 2

try:
import cPickle as pickle
@@ -32,56 +32,123 @@ def check_indent(codestr):

return codestr

pythonparsecache = {}
shellparsecache = {}

class CodeParserCache(MultiProcessCache):
cache_file_name = "bb_codeparser.dat"
CACHE_VERSION = 2

def __init__(self):
MultiProcessCache.__init__(self)
self.pythoncache = self.cachedata[0]
self.shellcache = self.cachedata[1]
self.pythoncacheextras = self.cachedata_extras[0]
self.shellcacheextras = self.cachedata_extras[1]

def init_cache(self, d):
MultiProcessCache.init_cache(self, d)

# cachedata gets re-assigned in the parent
self.pythoncache = self.cachedata[0]
self.shellcache = self.cachedata[1]

def compress_keys(self, data):
# When the dicts are originally created, python calls intern() on the set keys
# which significantly improves memory usage. Sadly the pickle/unpickle process
# doesn't call intern() on the keys and results in the same strings being duplicated
# in memory. This also means pickle will save the same string multiple times in
# the cache file. By interning the data here, the cache file shrinks dramatically
# meaning faster load times and the reloaded cache files also consume much less
# memory. This is worth any performance hit from this loops and the use of the
# intern() data storage.
# Python 3.x may behave better in this area
for h in data[0]:
data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
for h in data[1]:
data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
return

def create_cachedata(self):
data = [{}, {}]
return data

codeparsercache = CodeParserCache()
def parser_cachefile(d):
cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
bb.data.getVar("CACHE", d, True))
if cachedir in [None, '']:
return None
bb.utils.mkdirhier(cachedir)
cachefile = os.path.join(cachedir, "bb_codeparser.dat")
logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
return cachefile

def parser_cache_init(d):
codeparsercache.init_cache(d)
global pythonparsecache
global shellparsecache

cachefile = parser_cachefile(d)
if not cachefile:
return

try:
p = pickle.Unpickler(file(cachefile, "rb"))
data, version = p.load()
except:
return

if version != PARSERCACHE_VERSION:
return

pythonparsecache = data[0]
shellparsecache = data[1]

def parser_cache_save(d):
codeparsercache.save_extras(d)
cachefile = parser_cachefile(d)
if not cachefile:
return

glf = bb.utils.lockfile(cachefile + ".lock", shared=True)

i = os.getpid()
lf = None
while not lf:
shellcache = {}
pythoncache = {}

lf = bb.utils.lockfile(cachefile + ".lock." + str(i), retry=False)
if not lf or os.path.exists(cachefile + "-" + str(i)):
if lf:
bb.utils.unlockfile(lf)
lf = None
i = i + 1
continue

try:
p = pickle.Unpickler(file(cachefile, "rb"))
data, version = p.load()
except (IOError, EOFError, ValueError):
data, version = None, None

if version != PARSERCACHE_VERSION:
shellcache = shellparsecache
pythoncache = pythonparsecache
else:
for h in pythonparsecache:
if h not in data[0]:
pythoncache[h] = pythonparsecache[h]
for h in shellparsecache:
if h not in data[1]:
shellcache[h] = shellparsecache[h]

p = pickle.Pickler(file(cachefile + "-" + str(i), "wb"), -1)
p.dump([[pythoncache, shellcache], PARSERCACHE_VERSION])

bb.utils.unlockfile(lf)
bb.utils.unlockfile(glf)

def parser_cache_savemerge(d):
codeparsercache.save_merge(d)
cachefile = parser_cachefile(d)
if not cachefile:
return

glf = bb.utils.lockfile(cachefile + ".lock")

try:
p = pickle.Unpickler(file(cachefile, "rb"))
data, version = p.load()
except (IOError, EOFError):
data, version = None, None

if version != PARSERCACHE_VERSION:
data = [{}, {}]

for f in [y for y in os.listdir(os.path.dirname(cachefile)) if y.startswith(os.path.basename(cachefile) + '-')]:
f = os.path.join(os.path.dirname(cachefile), f)
try:
p = pickle.Unpickler(file(f, "rb"))
extradata, version = p.load()
except (IOError, EOFError):
extradata, version = [{}, {}], None

if version != PARSERCACHE_VERSION:
continue

for h in extradata[0]:
if h not in data[0]:
data[0][h] = extradata[0][h]
for h in extradata[1]:
if h not in data[1]:
data[1][h] = extradata[1][h]
os.unlink(f)

p = pickle.Pickler(file(cachefile, "wb"), -1)
p.dump([data, PARSERCACHE_VERSION])

bb.utils.unlockfile(glf)


Logger = logging.getLoggerClass()
class BufferedLogger(Logger):
@@ -158,17 +225,11 @@ class PythonParser():
def parse_python(self, node):
h = hash(str(node))

if h in codeparsercache.pythoncache:
self.references = codeparsercache.pythoncache[h]["refs"]
self.execs = codeparsercache.pythoncache[h]["execs"]
if h in pythonparsecache:
self.references = pythonparsecache[h]["refs"]
self.execs = pythonparsecache[h]["execs"]
return

if h in codeparsercache.pythoncacheextras:
self.references = codeparsercache.pythoncacheextras[h]["refs"]
self.execs = codeparsercache.pythoncacheextras[h]["execs"]
return


code = compile(check_indent(str(node)), "<string>", "exec",
ast.PyCF_ONLY_AST)

@@ -179,9 +240,9 @@ class PythonParser():
self.references.update(self.var_references)
self.references.update(self.var_execs)

codeparsercache.pythoncacheextras[h] = {}
codeparsercache.pythoncacheextras[h]["refs"] = self.references
codeparsercache.pythoncacheextras[h]["execs"] = self.execs
pythonparsecache[h] = {}
pythonparsecache[h]["refs"] = self.references
pythonparsecache[h]["execs"] = self.execs

class ShellParser():
def __init__(self, name, log):
@@ -199,12 +260,8 @@ class ShellParser():

h = hash(str(value))

if h in codeparsercache.shellcache:
self.execs = codeparsercache.shellcache[h]["execs"]
return self.execs

if h in codeparsercache.shellcacheextras:
self.execs = codeparsercache.shellcacheextras[h]["execs"]
if h in shellparsecache:
self.execs = shellparsecache[h]["execs"]
return self.execs

try:
@@ -216,8 +273,8 @@ class ShellParser():
self.process_tokens(token)
self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

codeparsercache.shellcacheextras[h] = {}
codeparsercache.shellcacheextras[h]["execs"] = self.execs
shellparsecache[h] = {}
shellparsecache[h]["execs"] = self.execs

return self.execs


@@ -30,6 +30,11 @@ Commands are queued in a CommandQueue

import bb.event
import bb.cooker
import bb.data

async_cmds = {}
sync_cmds = {}


class CommandCompleted(bb.event.Event):
pass
@@ -56,6 +61,16 @@ class Command:
# FIXME Add lock for this
self.currentAsyncCommand = None

for attr in CommandsSync.__dict__:
command = attr[:].lower()
method = getattr(CommandsSync, attr)
sync_cmds[command] = (method)

for attr in CommandsAsync.__dict__:
command = attr[:].lower()
method = getattr(CommandsAsync, attr)
async_cmds[command] = (method)

def runCommand(self, commandline):
try:
command = commandline.pop(0)
@@ -98,12 +113,9 @@ class Command:
else:
self.finishAsyncCommand("Exited with %s" % arg)
return False
except Exception as exc:
except Exception:
import traceback
if isinstance(exc, bb.BBHandledException):
self.finishAsyncCommand("")
else:
self.finishAsyncCommand(traceback.format_exc())
self.finishAsyncCommand(traceback.format_exc())
return False

def finishAsyncCommand(self, msg=None, code=None):
@@ -150,7 +162,7 @@ class CommandsSync:
if len(params) > 1:
expand = params[1]

return command.cooker.configuration.data.getVar(varname, expand)
return bb.data.getVar(varname, command.cooker.configuration.data, expand)

def setVariable(self, command, params):
"""
@@ -158,13 +170,7 @@
"""
varname = params[0]
value = params[1]
command.cooker.configuration.data.setVar(varname, value)

def initCooker(self, command, params):
"""
Init the cooker to initial state with nothing parsed
"""
command.cooker.initialize()
bb.data.setVar(varname, value, command.cooker.configuration.data)

def resetCooker(self, command, params):
"""
@@ -173,18 +179,6 @@
"""
command.cooker.reset()

def getCpuCount(self, command, params):
"""
Get the CPU count on the bitbake server
"""
return bb.utils.cpu_count()

def setConfFilter(self, command, params):
"""
Set the configuration file parsing filter
"""
filterfunc = params[0]
bb.parse.parse_py.ConfHandler.confFilters.append(filterfunc)

class CommandsAsync:
"""
@@ -244,23 +238,15 @@ class CommandsAsync:
klass) rather than generating a tree for all packages.
"""
klass = params[0]
pkg_list = params[1]
if len(params) > 1:
pkg_list = params[1]
else:
pkg_list = []

command.cooker.generateTargetsTree(klass, pkg_list)
command.finishAsyncCommand()
generateTargetsTree.needcache = True

def findCoreBaseFiles(self, command, params):
"""
Find certain files in COREBASE directory. i.e. Layers
"""
subdir = params[0]
filename = params[1]

command.cooker.findCoreBaseFiles(subdir, filename)
command.finishAsyncCommand()
findCoreBaseFiles.needcache = False

def findConfigFiles(self, command, params):
"""
Find config files which provide appropriate values
@@ -270,7 +256,7 @@ class CommandsAsync:

command.cooker.findConfigFiles(varname)
command.finishAsyncCommand()
findConfigFiles.needcache = False
findConfigFiles.needcache = True

def findFilesMatchingInDir(self, command, params):
"""
@@ -282,7 +268,7 @@ class CommandsAsync:

command.cooker.findFilesMatchingInDir(pattern, directory)
command.finishAsyncCommand()
findFilesMatchingInDir.needcache = False
findFilesMatchingInDir.needcache = True

def findConfigFilePath(self, command, params):
"""
@@ -349,23 +335,3 @@ class CommandsAsync:
else:
command.finishAsyncCommand()
compareRevisions.needcache = True

def parseConfigurationFiles(self, command, params):
"""
Parse the configuration files
"""
prefiles = params[0]
postfiles = params[1]
command.cooker.parseConfigurationFiles(prefiles, postfiles)
command.finishAsyncCommand()
parseConfigurationFiles.needcache = False

def triggerEvent(self, command, params):
"""
Trigger a certain event
"""
event = params[0]
bb.event.fire(eval(event), command.cooker.configuration.data)
command.currentAsyncCommand = None
triggerEvent.needcache = False


@@ -34,10 +34,8 @@ from cStringIO import StringIO
|
||||
from contextlib import closing
|
||||
from functools import wraps
|
||||
from collections import defaultdict
|
||||
import bb, bb.exceptions, bb.command
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
|
||||
import Queue
|
||||
import prserv.serv
|
||||
import bb, bb.exceptions
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
|
||||
|
||||
logger = logging.getLogger("BitBake")
|
||||
collectlog = logging.getLogger("BitBake.Collection")
|
||||
@@ -45,9 +43,9 @@ buildlog = logging.getLogger("BitBake.Build")
|
||||
parselog = logging.getLogger("BitBake.Parsing")
|
||||
providerlog = logging.getLogger("BitBake.Provider")
|
||||
|
||||
class NoSpecificMatch(bb.BBHandledException):
|
||||
class MultipleMatches(Exception):
|
||||
"""
|
||||
Exception raised when no or multiple file matches are found
|
||||
Exception raised when multiple file matches are found
|
||||
"""
|
||||
|
||||
class NothingToBuild(Exception):
|
||||
@@ -55,11 +53,6 @@ class NothingToBuild(Exception):
|
||||
Exception raised when there is nothing to build
|
||||
"""
|
||||
|
||||
class CollectionError(bb.BBHandledException):
|
||||
"""
|
||||
Exception raised when layer configuration is incorrect
|
||||
"""
|
||||
|
||||
class state:
|
||||
initial, parsing, running, shutdown, stop = range(5)
|
||||
|
||||
@@ -142,14 +135,10 @@ class BBCooker:
|
||||
self.configuration.data = None
|
||||
self.loadConfigurationData()
|
||||
|
||||
# Take a lock so only one copy of bitbake can run against a given build
|
||||
# directory at a time
|
||||
lockfile = self.configuration.data.expand("${TOPDIR}/bitbake.lock")
|
||||
self.lock = bb.utils.lockfile(lockfile, False, False)
|
||||
if not self.lock:
|
||||
bb.fatal("Only one copy of bitbake should be run against a build directory")
|
||||
if not self.configuration.cmd:
|
||||
self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
|
||||
|
||||
bbpkgs = self.configuration.data.getVar('BBPKGS', True)
|
||||
bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
|
||||
if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
|
||||
self.configuration.pkgs_to_build.extend(bbpkgs.split())
|
||||
|
||||
@@ -174,20 +163,11 @@ class BBCooker:
|
||||
|
||||
self.parser = None
|
||||
|
||||
def initConfigurationData(self):
|
||||
self.configuration.data = bb.data.init()
|
||||
|
||||
if not self.server_registration_cb:
|
||||
self.configuration.data.setVar("BB_WORKERCONTEXT", "1")
|
||||
|
||||
filtered_keys = bb.utils.approved_variables()
|
||||
bb.data.inheritFromOS(self.configuration.data, self.savedenv, filtered_keys)
|
||||
|
||||
def loadConfigurationData(self):
|
||||
self.configuration.data = bb.data.init()
|
||||
|
||||
if not self.server_registration_cb:
|
||||
self.configuration.data.setVar("BB_WORKERCONTEXT", "1")
|
||||
bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)
|
||||
|
||||
filtered_keys = bb.utils.approved_variables()
|
||||
bb.data.inheritFromOS(self.configuration.data, self.savedenv, filtered_keys)
|
||||
@@ -202,17 +182,13 @@ class BBCooker:
|
||||
sys.exit(1)
|
||||
|
||||
if not self.configuration.cmd:
|
||||
self.configuration.cmd = self.configuration.data.getVar("BB_DEFAULT_TASK", True) or "build"
|
||||
self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
|
||||
|
||||
def parseConfiguration(self):
|
||||
|
||||
# Set log file verbosity
|
||||
verboselogs = bb.utils.to_boolean(self.configuration.data.getVar("BB_VERBOSE_LOGS", "0"))
|
||||
if verboselogs:
|
||||
bb.msg.loggerVerboseLogs = True
|
||||
|
||||
# Change nice level if we're asked to
|
||||
nice = self.configuration.data.getVar("BB_NICE_LEVEL", True)
|
||||
nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
|
||||
if nice:
|
||||
curnice = os.nice(0)
|
||||
nice = int(nice) - curnice
|
||||
@@ -268,8 +244,20 @@ class BBCooker:
|
||||
# Need files parsed
|
||||
self.updateCache()
|
||||
|
||||
# Need to ensure data store is expanded
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
bb.data.expandKeys(localdata)
|
||||
|
||||
pkg_pn = self.status.pkg_pn
|
||||
(latest_versions, preferred_versions) = bb.providers.findProviders(self.configuration.data, self.status, pkg_pn)
|
||||
preferred_versions = {}
|
||||
latest_versions = {}
|
||||
|
||||
# Sort by priority
|
||||
for pn in pkg_pn:
|
||||
(last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, localdata, self.status)
|
||||
preferred_versions[pn] = (pref_ver, pref_file)
|
||||
latest_versions[pn] = (last_ver, last_file)
|
||||
|
||||
logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
|
||||
logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")
|
||||
@@ -298,7 +286,7 @@ class BBCooker:
|
||||
# this showEnvironment() code path doesn't use the cache
|
||||
self.parseConfiguration()
|
||||
self.status = bb.cache.CacheData(self.caches_array)
|
||||
self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", True) )
|
||||
self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
|
||||
|
||||
fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
|
||||
fn = self.matchFile(fn)
|
||||
@@ -343,7 +331,6 @@ class BBCooker:
|
||||
"""
|
||||
Prepare a runqueue and taskdata object for iteration over pkgs_to_build
|
||||
"""
|
||||
bb.event.fire(bb.event.TreeDataPreparationStarted(), self.configuration.data)
|
||||
# Need files parsed
|
||||
self.updateCache()
|
||||
# If we are told to do the None task then query the default task
|
||||
@@ -360,14 +347,11 @@ class BBCooker:
|
||||
taskdata = bb.taskdata.TaskData(False, skiplist=self.skiplist)
|
||||
|
||||
runlist = []
|
||||
current = 0
|
||||
for k in pkgs_to_build:
|
||||
taskdata.add_provider(localdata, self.status, k)
|
||||
runlist.append([k, "do_%s" % task])
|
||||
current += 1
|
||||
bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(pkgs_to_build)), self.configuration.data)
|
||||
taskdata.add_unresolved(localdata, self.status)
|
||||
bb.event.fire(bb.event.TreeDataPreparationCompleted(len(pkgs_to_build)), self.configuration.data)
|
||||
|
||||
return runlist, taskdata
|
||||
|
||||
def generateTaskDepTreeData(self, pkgs_to_build, task):
|
||||
@@ -457,8 +441,8 @@ class BBCooker:
|
||||
depend_tree["depends"] = {}
|
||||
depend_tree["pn"] = {}
|
||||
depend_tree["rdepends-pn"] = {}
|
||||
depend_tree["packages"] = {}
|
||||
depend_tree["rdepends-pkg"] = {}
|
||||
depend_tree["rrecs-pkg"] = {}
|
||||
|
||||
for task in xrange(len(tasks_fnid)):
|
||||
fnid = tasks_fnid[task]
|
||||
@@ -468,10 +452,6 @@ class BBCooker:
|
||||
summary = self.status.summary[fn]
|
||||
lic = self.status.license[fn]
|
||||
section = self.status.section[fn]
|
||||
description = self.status.description[fn]
|
||||
rdepends = self.status.rundeps[fn]
|
||||
rrecs = self.status.runrecs[fn]
|
||||
inherits = self.status.inherits.get(fn, None)
|
||||
if pn not in depend_tree["pn"]:
|
||||
depend_tree["pn"][pn] = {}
|
||||
depend_tree["pn"][pn]["filename"] = fn
|
||||
@@ -479,40 +459,32 @@ class BBCooker:
|
||||
depend_tree["pn"][pn]["summary"] = summary
|
||||
depend_tree["pn"][pn]["license"] = lic
|
||||
depend_tree["pn"][pn]["section"] = section
|
||||
depend_tree["pn"][pn]["description"] = description
|
||||
depend_tree["pn"][pn]["inherits"] = inherits
|
||||
|
||||
if fnid not in seen_fnids:
|
||||
seen_fnids.append(fnid)
|
||||
packages = []
|
||||
|
||||
depend_tree["depends"][pn] = []
|
||||
for dep in taskdata.depids[fnid]:
|
||||
item = taskdata.build_names_index[dep]
|
||||
pn_provider = ""
|
||||
targetid = taskdata.getbuild_id(item)
|
||||
if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
|
||||
id = taskdata.build_targets[targetid][0]
|
||||
fn_provider = taskdata.fn_index[id]
|
||||
pn_provider = self.status.pkg_fn[fn_provider]
|
||||
else:
|
||||
pn_provider = item
|
||||
depend_tree["depends"][pn].append(pn_provider)
|
||||
depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
|
||||
|
||||
depend_tree["rdepends-pn"][pn] = []
|
||||
for rdep in taskdata.rdepids[fnid]:
|
||||
item = taskdata.run_names_index[rdep]
|
||||
pn_rprovider = ""
|
||||
targetid = taskdata.getrun_id(item)
|
||||
if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
|
||||
id = taskdata.run_targets[targetid][0]
|
||||
fn_rprovider = taskdata.fn_index[id]
|
||||
pn_rprovider = self.status.pkg_fn[fn_rprovider]
|
||||
else:
|
||||
pn_rprovider = item
|
||||
depend_tree["rdepends-pn"][pn].append(pn_rprovider)
|
||||
depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
|
||||
|
||||
depend_tree["rdepends-pkg"].update(rdepends)
|
||||
depend_tree["rrecs-pkg"].update(rrecs)
|
||||
rdepends = self.status.rundeps[fn]
|
||||
for package in rdepends:
|
||||
depend_tree["rdepends-pkg"][package] = []
|
||||
for rdepend in rdepends[package]:
|
||||
depend_tree["rdepends-pkg"][package].append(rdepend)
|
||||
packages.append(package)
|
||||
|
||||
for package in packages:
|
||||
if package not in depend_tree["packages"]:
|
||||
depend_tree["packages"][package] = {}
|
||||
depend_tree["packages"][package]["pn"] = pn
|
||||
depend_tree["packages"][package]["filename"] = fn
|
||||
depend_tree["packages"][package]["version"] = version
|
||||
|
||||
return depend_tree

@@ -534,15 +506,11 @@ class BBCooker:

        # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
        depends_file = file('pn-depends.dot', 'w' )
        buildlist_file = file('pn-buildlist', 'w' )
        print("digraph depends {", file=depends_file)
        for pn in depgraph["pn"]:
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            print("%s" % pn, file=buildlist_file)
        buildlist_file.close()
        logger.info("PN build list saved to 'pn-buildlist'")
        for pn in depgraph["depends"]:
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (pn, depend), file=depends_file)
@@ -624,7 +592,7 @@ class BBCooker:
        bb.data.expandKeys(localdata)

        # Handle PREFERRED_PROVIDERS
        for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
        for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, True) or "").split():
            try:
                (providee, provider) = p.split(':')
            except:
@@ -660,18 +628,6 @@ class BBCooker:
            if regex in unmatched:
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
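
The providee:provider split above consumes space-separated pairs; a worked example of the same parse, with hypothetical values:

    # Hypothetical PREFERRED_PROVIDERS value; the split mirrors the loop above.
    for p in "virtual/kernel:linux-yocto virtual/libc:eglibc".split():
        providee, provider = p.split(':')
        print("preferring %s as the provider of %s" % (provider, providee))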

    def findCoreBaseFiles(self, subdir, configfile):
        corebase = self.configuration.data.getVar('COREBASE', True) or ""
        paths = []
        for root, dirs, files in os.walk(corebase + '/' + subdir):
            for d in dirs:
                configfilepath = os.path.join(root, d, configfile)
                if os.path.exists(configfilepath):
                    paths.append(os.path.join(root, d))

        if paths:
            bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.configuration.data)

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
@@ -684,8 +640,8 @@ class BBCooker:
        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.configuration.data.getVar('__depends') or set()
        dep_files.union(self.configuration.data.getVar('__base_depends') or set())
        dep_files = bb.data.getVar('__depends', self.configuration.data) or set()
        dep_files.union(bb.data.getVar('__base_depends', self.configuration.data) or set())

        for f in dep_files:
            if f[0].endswith(".conf"):
@@ -713,7 +669,7 @@ class BBCooker:

        matches = []
        p = re.compile(re.escape(filepattern))
        bbpaths = self.configuration.data.getVar('BBPATH', True).split(':')
        bbpaths = bb.data.getVar('BBPATH', self.configuration.data, True).split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
@@ -735,7 +691,7 @@ class BBCooker:

        data = self.configuration.data
        # iterate configs
        bbpaths = data.getVar('BBPATH', True).split(':')
        bbpaths = bb.data.getVar('BBPATH', data, True).split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
@@ -840,16 +796,16 @@ class BBCooker:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            data = _parse(layerconf, data)

            layers = (data.getVar('BBLAYERS', True) or "").split()
            layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()

            data = bb.data.createCopy(data)
            for layer in layers:
                parselog.debug(2, "Adding layer %s", layer)
                data.setVar('LAYERDIR', layer)
                bb.data.setVar('LAYERDIR', layer, data)
                data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')

            data.delVar('LAYERDIR')
            bb.data.delVar('LAYERDIR', data)

        if not data.getVar("BBPATH", True):
            raise SystemExit("The BBPATH variable is not set")
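
The loop above parses each layer's conf/layer.conf with LAYERDIR pointing at that layer, so a layer can register itself relative to its own directory. A hypothetical layer.conf showing the variables this machinery reads (all values are examples only):

    BBPATH .= ":${LAYERDIR}"
    BBFILES += "${LAYERDIR}/recipes-*/*/*.bb"
    BBFILE_COLLECTIONS += "mylayer"
    BBFILE_PATTERN_mylayer = "^${LAYERDIR}/"
    BBFILE_PRIORITY_mylayer = "6"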
@@ -867,21 +823,18 @@ class BBCooker:

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS') or []:
            bb.event.register(var, data.getVar(var))
        for var in bb.data.getVar('__BBHANDLERS', data) or []:
            bb.event.register(var, bb.data.getVar(var, data))

        if data.getVar("BB_WORKERCONTEXT", False) is None:
            bb.fetch.fetcher_init(data)
        bb.codeparser.parser_cache_init(data)
        bb.event.fire(bb.event.ConfigParsed(), data)
        bb.parse.init_parser(data)
        data.setVar('BBINCLUDED', bb.parse.get_file_depends(data))
        bb.event.fire(bb.event.ConfigParsed(), data)
        self.configuration.data = data
        self.configuration.data_hash = data.get_hash()

    def handleCollections( self, collections ):
        """Handle collections"""
        errors = False
        self.status.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
@@ -890,13 +843,12 @@ class BBCooker:
            min_prio = 0
            for c in collection_list:
                # Get collection priority if defined explicitly
                priority = self.configuration.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
@@ -904,7 +856,7 @@ class BBCooker:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.configuration.data.getVar("LAYERDEPENDS_%s" % c, True)
                deps = bb.data.getVar("LAYERDEPENDS_%s" % c, self.configuration.data, 1)
                if deps:
                    depnamelist = []
                    deplist = deps.split()
@@ -915,7 +867,6 @@ class BBCooker:
                                depver = int(depsplit[1])
                            except ValueError:
                                parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
                                errors = True
                                continue
                        else:
                            depver = None
@@ -924,23 +875,19 @@ class BBCooker:

                        if dep in collection_list:
                            if depver:
                                layerver = self.configuration.data.getVar("LAYERVERSION_%s" % dep, True)
                                layerver = bb.data.getVar("LAYERVERSION_%s" % dep, self.configuration.data, 1)
                                if layerver:
                                    try:
                                        lver = int(layerver)
                                    except ValueError:
                                        parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
                                        errors = True
                                        continue
                                    if lver <> depver:
                                        parselog.error("Layer dependency %s of layer %s is at version %d, expected %d", dep, c, lver, depver)
                                        errors = True
                                else:
                                    parselog.error("Layer dependency %s of layer %s has no version, expected %d", dep, c, depver)
                                    errors = True
                        else:
                            parselog.error("Layer dependency %s of layer %s not found", dep, c)
                            errors = True
                    collection_depends[c] = depnamelist
                else:
                    collection_depends[c] = []
@@ -961,40 +908,35 @@ class BBCooker:
            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.configuration.data.getVar("BBFILE_PATTERN_%s" % c, True)
                regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.status.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
            if errors:
                # We've already printed the actual error(s)
                raise CollectionError("Errors during parsing layer configuration")
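
The dependency check above accepts LAYERDEPENDS entries of the form name or name:version, comparing any version against the dependency's LAYERVERSION. A hypothetical pair of layer.conf fragments that satisfies it (names and versions are examples):

    # In the depended-upon layer:
    LAYERVERSION_core = "2"
    # In the depending layer:
    LAYERDEPENDS_mylayer = "core:2 otherlayer"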

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        if not self.configuration.data.getVar("BUILDNAME"):
            self.configuration.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
        self.configuration.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
        if not bb.data.getVar("BUILDNAME", self.configuration.data):
            bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
        bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)

    def matchFiles(self, bf):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """

        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)
        filelist, masked = self.collect_bbfiles()
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
            return [bf]
        except OSError:
            regexp = re.compile(bf)
@@ -1011,15 +953,10 @@ class BBCooker:
        """
        matches = self.matchFiles(buildfile)
        if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                if matches:
                    for f in matches:
                        msg += "\n    %s" % f
                parselog.error(msg)
            else:
                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
            raise NoSpecificMatch
            parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
            for f in matches:
                parselog.error("    %s" % f)
            raise MultipleMatches
        return matches[0]

    def buildFile(self, buildfile, task):
@@ -1035,7 +972,7 @@ class BBCooker:
        # buildFile() doesn't use the cache
        self.parseConfiguration()
        self.status = bb.cache.CacheData(self.caches_array)
        self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", True) )
        self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )

        # If we are told to do the None task then query the default task
        if (task == None):
@@ -1070,16 +1007,16 @@ class BBCooker:
        self.status.rundeps[fn] = []
        self.status.runrecs[fn] = []

        # Invalidate task for target if force mode active
        # Remove stamp for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task('do_%s' % task, self.status, fn)
            logger.verbose("Remove stamp %s, %s", task, fn)
            bb.build.del_stamp('do_%s' % task, self.status, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.configuration.data, self.status, item)

        buildname = self.configuration.data.getVar("BUILDNAME")
        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)

        # Execute the runqueue
@@ -1097,6 +1034,8 @@ class BBCooker:
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                for fnid in exc.args:
                    buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
@@ -1104,7 +1043,7 @@ class BBCooker:
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.configuration.event_data)
                bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
@@ -1125,7 +1064,6 @@ class BBCooker:
        if (task == None):
            task = self.configuration.cmd

        universe = ('universe' in targets)
        targets = self.checkPackages(targets)

        def buildTargetsIdle(server, rq, abort):
@@ -1137,6 +1075,8 @@ class BBCooker:
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                for fnid in exc.args:
                    buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
@@ -1144,7 +1084,7 @@ class BBCooker:
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.configuration.data)
                bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
@@ -1153,8 +1093,8 @@ class BBCooker:

        self.buildSetVars()

        buildname = self.configuration.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.data)
        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
@@ -1169,8 +1109,6 @@ class BBCooker:
        taskdata.add_unresolved(localdata, self.status)

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
        if universe:
            rq.rqdata.warn_multi_bb = True

        self.server_registration_cb(buildTargetsIdle, rq)

@@ -1179,7 +1117,7 @@ class BBCooker:
            return

        if self.state in (state.shutdown, state.stop):
            self.parser.shutdown(clean=False, force = True)
            self.parser.shutdown(clean=False)
            sys.exit(1)

        if self.state != state.parsing:
@@ -1189,16 +1127,16 @@ class BBCooker:
        del self.status
        self.status = bb.cache.CacheData(self.caches_array)

        ignore = self.configuration.data.getVar("ASSUME_PROVIDED", True) or ""
        ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
        self.status.ignored_dependencies = set(ignore.split())

        for dep in self.configuration.extra_assume_provided:
            self.status.ignored_dependencies.add(dep)

        self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", True) )
        self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )

        (filelist, masked) = self.collect_bbfiles()
        self.configuration.data.renameVar("__depends", "__base_depends")
        bb.data.renameVar("__depends", "__base_depends", self.configuration.data)

        self.parser = CookerParser(self, filelist, masked)
        self.state = state.parsing
@@ -1224,7 +1162,6 @@ class BBCooker:
                pkgs_to_build.append(t)

        if 'universe' in pkgs_to_build:
            parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for t in self.status.universe_target:
@@ -1290,7 +1227,7 @@ class BBCooker:
                if g not in newfiles:
                    newfiles.append(g)

        bbmask = self.configuration.data.getVar('BBMASK', True)
        bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)

        if bbmask:
            try:
@@ -1349,11 +1286,9 @@ class BBCooker:
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        prserv.serv.auto_start(self.configuration.data)
        return

    def post_serve(self):
        prserv.serv.auto_shutdown(self.configuration.data)
        bb.event.fire(CookerExit(), self.configuration.event_data)

    def shutdown(self):
@@ -1365,10 +1300,6 @@ class BBCooker:
    def reparseFiles(self):
        return

    def initialize(self):
        self.state = state.initial
        self.initConfigurationData()

    def reset(self):
        self.state = state.initial
        self.loadConfigurationData()
@@ -1439,7 +1370,7 @@ def _parse(fn, data, include=True):

@catch_parse_error
def _inherit(bbclass, data):
    bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
    bb.parse.BBHandler.inherit([bbclass], data)
    return data

class ParsingFailure(Exception):
@@ -1448,94 +1379,26 @@ class ParsingFailure(Exception):
        self.recipe = recipe
        Exception.__init__(self, realexception, recipe)

class Feeder(multiprocessing.Process):
    def __init__(self, jobs, to_parsers, quit):
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            try:
                quit = self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                if quit == 'cancel':
                    self.to_parsers.cancel_join_thread()
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                self.jobs.insert(0, job)
                continue

class Parser(multiprocessing.Process):
    def __init__(self, jobs, results, quit, init):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)

    def run(self):
        if self.init:
            self.init()

        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        try:
            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
        # and so a worker thread doesn't just exit on its own in response to
        # a SystemExit event, for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
def parse_file(task):
    filename, appends, caches_array = task
    try:
        return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg, caches_array)
    except Exception as exc:
        tb = sys.exc_info()[2]
        exc.recipe = filename
        exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
        raise exc
    # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
    # and so a worker thread doesn't just exit on its own in response to
    # a SystemExit event, for example.
    except BaseException as exc:
        raise ParsingFailure(exc, filename)
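
Both sides of this hunk implement the same worker contract: a callable that receives a parse job and either returns a result or converts failures into something picklable. A minimal, self-contained sketch of the Pool-based variant, with parse_one standing in for bb.cache.Cache.parse (both it and the filenames below are hypothetical):

    import multiprocessing

    def parse_one(filename, cfg):
        # Hypothetical stand-in for bb.cache.Cache.parse.
        return (filename, cfg)

    def init(cfg):
        # Pool initializer: stash shared state on the worker function,
        # mirroring how parse_file.cfg is assigned in the diff above.
        parse_file.cfg = cfg

    def parse_file(filename):
        try:
            return True, parse_one(filename, parse_file.cfg)
        except BaseException as exc:
            # Convert BaseExceptions so a worker never dies silently.
            raise Exception(exc)

    if __name__ == "__main__":
        pool = multiprocessing.Pool(2, init, ["cfgdata"])
        for ok, info in pool.imap(parse_file, ["a.bb", "b.bb"]):
            print(ok, info)
        pool.close()
        pool.join()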

class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.configuration.data
        self.cfghash = cooker.configuration.data_hash

        # Accounting statistics
        self.parsed = 0
@@ -1551,7 +1414,7 @@ class CookerParser(object):
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())

        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.bb_cache = bb.cache.Cache(self.cfgdata, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
@@ -1564,37 +1427,26 @@ class CookerParser(object):
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()
        self.haveshutdown = False

    def start(self):
        def init(cfg):
            parse_file.cfg = cfg
            multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cooker.configuration.data, ), exitpriority=1)

        self.results = self.load_cached()
        self.processes = []

        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            def init():
                Parser.cfg = self.cfgdata
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init)
                parser.start()
                self.processes.append(parser)
            self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata])
            parsed = self.pool.imap(parse_file, self.willparse)
            self.pool.close()

            self.results = itertools.chain(self.results, self.parse_generator())
            self.results = itertools.chain(self.results, parsed)
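
Because Pool.imap returns a lazy iterator, chaining it after load_cached() means cached entries are drained first and fresh parsing proceeds only as results are pulled. A small illustration of that chaining (the tuples are made up):

    import itertools

    cached = iter([("a.bb", "from-cache")])
    parsed = iter([("b.bb", "freshly-parsed")])  # stands in for pool.imap(...)
    results = itertools.chain(cached, parsed)
    print(next(results))  # ('a.bb', 'from-cache') comes out before any parse result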

    def shutdown(self, clean=True, force=False):
    def shutdown(self, clean=True):
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
@@ -1602,87 +1454,48 @@ class CookerParser(object):
                                            self.virtuals, self.error,
                                            self.total)
            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            for process in self.processes:
                self.jobs.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()
        self.pool.terminate()
        self.pool.join()

        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.configuration.data)
        bb.fetch.fetcher_parse_done(self.cooker.configuration.data)

    def load_cached(self):
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_generator(self):
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except Queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except ParsingFailure as exc:
            logger.error('Unable to parse %s: %s' %
            self.shutdown(clean=False)
            bb.fatal('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
        except bb.parse.ParseError as exc:
            logger.error(str(exc))
            self.shutdown(clean=False)
        except bb.data_smart.ExpansionError as exc:
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
        except (bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
            bb.fatal(str(exc))
        except SyntaxError as exc:
            logger.error('Unable to parse %s', exc.recipe)
            self.shutdown(clean=False)
            sys.exit(1)
        except Exception as exc:
            etype, value, tb = sys.exc_info()
            logger.error('Unable to parse %s', value.recipe,
                         exc_info=(etype, value, exc.traceback))
            self.shutdown(clean=False)
            sys.exit(1)

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                bb.event.fire(bb.event.ParseProgress(self.parsed),
                              self.cfgdata)
        else:
            self.cached += 1

@@ -266,7 +266,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
        seen |= deps
        newdeps = set()
        for dep in deps:
            if d.getVarFlag(dep, "func"):
            if bb.data.getVarFlag(dep, "func", d):
                emit_var(dep, o, d, False) and o.write('\n')
                newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
        newdeps -= seen
@@ -279,12 +279,7 @@ def build_dependencies(key, keys, shelldeps, vardepvals, d):
    deps = set()
    vardeps = d.getVarFlag(key, "vardeps", True)
    try:
        if key[-1] == ']':
            vf = key[:-1].split('[')
            value = d.getVarFlag(vf[0], vf[1], False)
        else:
            value = d.getVar(key, False)

        value = d.getVar(key, False)
        if key in vardepvals:
            value = d.getVarFlag(key, "vardepvalue", True)
        elif d.getVarFlag(key, "func"):
@@ -306,19 +301,6 @@ def build_dependencies(key, keys, shelldeps, vardepvals, d):
            parser = d.expandWithRefs(value, key)
            deps |= parser.references
            deps = deps | (keys & parser.execs)

        # Add varflags, assuming an exclusion list is set
        varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
        if varflagsexcl:
            varfdeps = []
            varflags = d.getVarFlags(key)
            if varflags:
                for f in varflags:
                    if f not in varflagsexcl:
                        varfdeps.append('%s[%s]' % (key, f))
            if varfdeps:
                deps |= set(varfdeps)

        deps |= set((vardeps or "").split())
        deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
    except:
@@ -337,7 +319,7 @@ def generate_dependencies(d):
    deps = {}
    values = {}

    tasklist = d.getVar('__BBTASKS') or []
    tasklist = bb.data.getVar('__BBTASKS', d) or []
    for task in tasklist:
        deps[task], values[task] = build_dependencies(task, keys, shelldeps, vardepvals, d)
        newdeps = deps[task]
@@ -351,7 +333,7 @@ def generate_dependencies(d):
            deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, vardepvals, d)
            newdeps |= deps[dep]
            newdeps -= seen
        #print "For %s: %s" % (task, str(deps[task]))
        #print "For %s: %s" % (task, str(taskdeps[task]))
    return tasklist, deps, values
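
The vardeps/vardepsexclude flags consumed in build_dependencies() let metadata add names to, or drop names from, a variable's computed dependency set. A hypothetical recipe fragment (task and variable names are examples only):

    do_deploy[vardeps] += "EXTRA_IMAGE_FEATURES"
    do_compile[vardepsexclude] = "DATETIME"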

def inherits_class(klass, d):

@@ -31,7 +31,6 @@ BitBake build tools.
import copy, re
from collections import MutableMapping
import logging
import hashlib
import bb, bb.codeparser
from bb import utils
from bb.COW import COWDictBase
@@ -58,7 +57,7 @@ class VariableParse:
        if self.varname and key:
            if self.varname == key:
                raise Exception("variable %s references itself!" % self.varname)
        var = self.d.getVar(key, True)
        var = self.d.getVar(key, 1)
        if var is not None:
            self.references.add(key)
            return var
@@ -102,10 +101,7 @@ class ExpansionError(Exception):
        self.expression = expression
        self.variablename = varname
        self.exception = exception
        if varname:
            self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
        else:
            self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
        self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
        Exception.__init__(self, self.msg)
        self.args = (varname, expression, exception)
    def __str__(self):
@@ -150,7 +146,7 @@ class DataSmart(MutableMapping):

        return varparse

    def expand(self, s, varname = None):
    def expand(self, s, varname):
        return self.expandWithRefs(s, varname).value


@@ -198,12 +194,7 @@ class DataSmart(MutableMapping):
        for append in appends:
            keep = []
            for (a, o) in self.getVarFlag(append, op) or []:
                match = True
                if o:
                    for o2 in o.split("_"):
                        if not o2 in overrides:
                            match = False
                if not match:
                if o and not o in overrides:
                    keep.append((a, o))
                    continue

@@ -313,14 +304,6 @@ class DataSmart(MutableMapping):

        self.delVar(key)

    def appendVar(self, key, value):
        value = (self.getVar(key, False) or "") + value
        self.setVar(key, value)

    def prependVar(self, key, value):
        value = value + (self.getVar(key, False) or "")
        self.setVar(key, value)

    def delVar(self, var):
        self.expand_cache = {}
        self.dict[var] = {}
@@ -356,14 +339,6 @@ class DataSmart(MutableMapping):
        if var in self.dict and flag in self.dict[var]:
            del self.dict[var][flag]

    def appendVarFlag(self, key, flag, value):
        value = (self.getVarFlag(key, flag, False) or "") + value
        self.setVarFlag(key, flag, value)

    def prependVarFlag(self, key, flag, value):
        value = value + (self.getVarFlag(key, flag, False) or "")
        self.setVarFlag(key, flag, value)

    def setVarFlags(self, var, flags):
        if not var in self.dict:
            self._makeShadowCopy(var)
@@ -468,16 +443,3 @@ class DataSmart(MutableMapping):

    def __delitem__(self, var):
        self.delVar(var)

    def get_hash(self):
        data = {}
        config_whitelist = set((self.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
        keys = set(key for key in iter(self) if not key.startswith("__"))
        for key in keys:
            if key in config_whitelist:
                continue
            value = self.getVar(key, False) or ""
            data.update({key:value})

        data_str = str([(k, data[k]) for k in sorted(data.keys())])
        return hashlib.md5(data_str).hexdigest()

@@ -204,27 +204,6 @@ def getName(e):
    else:
        return e.__name__

class OperationStarted(Event):
    """An operation has begun"""
    def __init__(self, msg = "Operation Started"):
        Event.__init__(self)
        self.msg = msg

class OperationCompleted(Event):
    """An operation has completed"""
    def __init__(self, total, msg = "Operation Completed"):
        Event.__init__(self)
        self.total = total
        self.msg = msg

class OperationProgress(Event):
    """An operation is in progress"""
    def __init__(self, current, total, msg = "Operation in Progress"):
        Event.__init__(self)
        self.current = current
        self.total = total
        self.msg = msg + ": %s/%s" % (current, total);

class ConfigParsed(Event):
    """Configuration Parsing Complete"""

@@ -297,20 +276,14 @@ class BuildBase(Event):


class BuildStarted(BuildBase, OperationStarted):
class BuildStarted(BuildBase):
    """bbmake build run started"""
    def __init__(self, n, p, failures = 0):
        OperationStarted.__init__(self, "Building Started")
        BuildBase.__init__(self, n, p, failures)

class BuildCompleted(BuildBase, OperationCompleted):

class BuildCompleted(BuildBase):
    """bbmake build run completed"""
    def __init__(self, total, n, p, failures = 0):
        if not failures:
            OperationCompleted.__init__(self, total, "Building Succeeded")
        else:
            OperationCompleted.__init__(self, total, "Building Failed")
        BuildBase.__init__(self, n, p, failures)
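
The OperationStarted/Completed/Progress mixins removed in this diff exist so a UI can handle many event types with one generic handler keyed on their common attributes. A minimal sketch of such a consumer (not part of the diff):

    def handle_event(event):
        # Fall back gracefully for events that carry none of the mixin fields.
        msg = getattr(event, "msg", event.__class__.__name__)
        current = getattr(event, "current", None)
        total = getattr(event, "total", None)
        if current is not None and total is not None:
            print("%s (%s/%s)" % (msg, current, total))
        else:
            print(msg)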


class NoProvider(Event):
@@ -356,16 +329,17 @@ class MultipleProviders(Event):
        """
        return self._candidates

class ParseStarted(OperationStarted):
class ParseStarted(Event):
    """Recipe parsing for the runqueue has begun"""
    def __init__(self, total):
        OperationStarted.__init__(self, "Recipe parsing Started")
        Event.__init__(self)
        self.total = total

class ParseCompleted(OperationCompleted):
class ParseCompleted(Event):
    """Recipe parsing for the runqueue has completed"""

    def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
        OperationCompleted.__init__(self, total, "Recipe parsing Completed")
        Event.__init__(self)
        self.cached = cached
        self.parsed = parsed
        self.skipped = skipped
@@ -373,44 +347,33 @@ class ParseCompleted(OperationCompleted):
        self.masked = masked
        self.errors = errors
        self.sofar = cached + parsed

class ParseProgress(OperationProgress):
    """Recipe parsing progress"""
    def __init__(self, current, total):
        OperationProgress.__init__(self, current, total, "Recipe parsing")


class CacheLoadStarted(OperationStarted):
    """Loading of the dependency cache has begun"""
    def __init__(self, total):
        OperationStarted.__init__(self, "Loading cache Started")
        self.total = total

class CacheLoadProgress(OperationProgress):
    """Cache loading progress"""
    def __init__(self, current, total):
        OperationProgress.__init__(self, current, total, "Loading cache")
class ParseProgress(Event):
    """Recipe parsing progress"""

class CacheLoadCompleted(OperationCompleted):
    def __init__(self, current):
        self.current = current

class CacheLoadStarted(Event):
    """Loading of the dependency cache has begun"""
    def __init__(self, total):
        Event.__init__(self)
        self.total = total

class CacheLoadProgress(Event):
    """Cache loading progress"""
    def __init__(self, current):
        Event.__init__(self)
        self.current = current

class CacheLoadCompleted(Event):
    """Cache loading is complete"""
    def __init__(self, total, num_entries):
        OperationCompleted.__init__(self, total, "Loading cache Completed")
        Event.__init__(self)
        self.total = total
        self.num_entries = num_entries

class TreeDataPreparationStarted(OperationStarted):
    """Tree data preparation started"""
    def __init__(self):
        OperationStarted.__init__(self, "Preparing tree data Started")

class TreeDataPreparationProgress(OperationProgress):
    """Tree data preparation is in progress"""
    def __init__(self, current, total):
        OperationProgress.__init__(self, current, total, "Preparing tree data")

class TreeDataPreparationCompleted(OperationCompleted):
    """Tree data preparation completed"""
    def __init__(self, total):
        OperationCompleted.__init__(self, total, "Preparing tree data Completed")

class DepTreeGenerated(Event):
    """
@@ -439,14 +402,6 @@ class FilesMatchingFound(Event):
        self._pattern = pattern
        self._matches = matches

class CoreBaseFilesFound(Event):
    """
    Event when a list of appropriate config files has been generated
    """
    def __init__(self, paths):
        Event.__init__(self)
        self._paths = paths

class ConfigFilesFound(Event):
    """
    Event when a list of appropriate config files has been generated
@@ -504,34 +459,3 @@ class LogHandler(logging.Handler):
    def filter(self, record):
        record.taskpid = worker_pid
        return True

class RequestPackageInfo(Event):
    """
    Event to request package information
    """

class PackageInfo(Event):
    """
    Package information for GUI
    """
    def __init__(self, pkginfolist):
        Event.__init__(self)
        self._pkginfolist = pkginfolist

class SanityCheck(Event):
    """
    Event to issue sanity check
    """

class SanityCheckPassed(Event):
    """
    Event to indicate sanity check is passed
    """

class SanityCheckFailed(Event):
    """
    Event to indicate sanity check has failed
    """
    def __init__(self, msg):
        Event.__init__(self)
        self._msg = msg

@@ -32,14 +32,7 @@ class TracebackEntry(namedtuple.abc):
def _get_frame_args(frame):
    """Get the formatted arguments and class (if available) for a frame"""
    arginfo = inspect.getargvalues(frame)

    try:
        if not arginfo.args:
            return '', None
    # There have been reports from the field of python 2.6 which doesn't
    # return a namedtuple here but simply a tuple so fallback gracefully if
    # args isn't present.
    except AttributeError:
        if not arginfo.args:
            return '', None

    firstarg = arginfo.args[0]

bitbake/lib/bb/fetch/__init__.py (new file, 832 lines)
@@ -0,0 +1,832 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004  Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

from __future__ import absolute_import
from __future__ import print_function
import os, re
import logging
import bb
from bb import data
from bb import persist_data
from bb import utils

__version__ = "1"

logger = logging.getLogger("BitBake.Fetch")

class MalformedUrl(Exception):
    """Exception raised when encountering an invalid url"""

class FetchError(Exception):
    """Exception raised when a download fails"""

class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""

class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""

class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""

class MD5SumError(Exception):
    """Exception raised when a MD5SUM of a file does not match the expected one"""

class InvalidSRCREV(Exception):
    """Exception raised when an invalid SRCREV is encountered"""

def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            s1, s2 = s.split('=')
            p[s1] = s2

    return (type, host, path, user, pswd, p)

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    (type, host, path, user, pswd, p) = decoded

    if not type or not path:
        raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
    url = '%s://' % type
    if user:
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host:
        url += "%s" % host
    url += "%s" % path
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url
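
A worked round trip through the two helpers above (the URL is illustrative):

    decoded = decodeurl("git://git.example.com/repo.git;protocol=git;tag=v1.0")
    # -> ('git', 'git.example.com', '/repo.git', '', '', {'protocol': 'git', 'tag': 'v1.0'})
    print(encodeurl(decoded))
    # -> git://git.example.com/repo.git;protocol=git;tag=v1.0
    # (parameter order follows dict iteration order)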

def uri_replace(uri, uri_find, uri_replace, d):
    if not uri or not uri_find or not uri_replace:
        logger.debug(1, "uri_replace: passed an undefined value, not replacing")
    uri_decoded = list(decodeurl(uri))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        if isinstance(i, basestring):
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
            else:
                return uri
    return encodeurl(result_decoded)

methods = []
urldata_cache = {}
saved_headrevs = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = persist_data.persist('BB_URI_HEADREVS', d)
        try:
            bb.fetch.saved_headrevs = revs.items()
        except:
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with current values and
    return true/false on whether they've changed.
    """

    data = persist_data.persist('BB_URI_HEADREVS', d).items()
    data2 = bb.fetch.saved_headrevs

    changed = False
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            changed = True
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False

# Function call order is usually:
#   1. init
#   2. go
#   3. localpaths
# localpath can be called at any time

def init(urls, d, setup = True):
    urldata = {}

    fn = bb.data.getVar('FILE', d, 1)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata

def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
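
A worked example of the helper above: PREMIRRORS-style values are regex/replacement pairs separated by literal \n sequences (the mirror host is hypothetical):

    mirrors = mirror_from_string("ftp://.*/.* http://mirror.example.com/sources/ \\n git://.*/.* http://mirror.example.com/git/")
    # -> [['ftp://.*/.*', 'http://mirror.example.com/sources/'],
    #     ['git://.*/.*', 'http://mirror.example.com/git/']]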

def verify_checksum(u, ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    return value:
        - True: checksum matched
        - False: checksum unmatched

    if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decides the return value.
    if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
    matched
    """

    if not ud.type in ["http", "https", "ftp", "ftps"]:
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if (ud.md5_expected == None or ud.sha256_expected == None):
        logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
                    'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
                    ud.localpath, ud.md5_name, md5data,
                    ud.sha256_name, sha256data)
        if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
            raise FetchError("No checksum specified for %s." % u)
        return

    if (ud.md5_expected != md5data or ud.sha256_expected != sha256data):
        logger.error('The checksums for "%s" did not match.\n'
                     '  MD5: expected "%s", got "%s"\n'
                     '  SHA256: expected "%s", got "%s"\n',
                     ud.localpath, ud.md5_expected, md5data,
                     ud.sha256_expected, sha256data)
        raise FetchError("%s checksum mismatch." % u)

def go(d, urls = None):
    """
    Fetch all urls
    init must have previously been called
    """
    if not urls:
        urls = d.getVar("SRC_URI", 1).split()
    urldata = init(urls, d, True)

    for u in urls:
        ud = urldata[u]
        m = ud.method
        localpath = ""

        if not ud.localfile:
            continue

        lf = bb.utils.lockfile(ud.lockfile)

        if m.try_premirror(u, ud, d):
            # First try fetching uri, u, from PREMIRRORS
            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
            localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
        elif os.path.exists(ud.localfile):
            localpath = ud.localfile

        # Need to re-test forcefetch() which will return true if our copy is too old
        if m.forcefetch(u, ud, d) or not localpath:
            # Next try fetching from the original uri, u
            try:
                m.go(u, ud, d)
                localpath = ud.localpath
            except FetchError:
                # Remove any incomplete file
                bb.utils.remove(ud.localpath)
                # Finally, try fetching uri, u, from MIRRORS
                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                localpath = try_mirrors (d, u, mirrors)
            if not localpath or not os.path.exists(localpath):
                raise FetchError("Unable to fetch URL %s from any source." % u)

        ud.localpath = localpath

        if os.path.exists(ud.md5):
            # Touch the md5 file to show active use of the download
            try:
                os.utime(ud.md5, None)
            except:
                # Errors aren't fatal here
                pass
        else:
            # Only check the checksums if we've not seen this item before
            verify_checksum(u, ud, d)
            Fetch.write_md5sum(u, ud, d)

        bb.utils.unlockfile(lf)

def checkstatus(d, urls = None):
    """
    Check all urls exist upstream
    init must have previously been called
    """
    urldata = init([], d, True)

    if not urls:
        urls = urldata

    for u in urls:
        ud = urldata[u]
        m = ud.method
        logger.debug(1, "Testing URL %s", u)
        # First try checking uri, u, from PREMIRRORS
        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
        ret = try_mirrors(d, u, mirrors, True)
        if not ret:
            # Next try checking from the original uri, u
            try:
                ret = m.checkstatus(u, ud, d)
            except:
                # Finally, try checking uri, u, from MIRRORS
                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                ret = try_mirrors (d, u, mirrors, True)

        if not ret:
            raise FetchError("URL %s doesn't work" % u)

def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    local = []
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        local.append(ud.localpath)

    return local

srcrev_internal_call = False

def get_autorev(d):
    return get_srcrev(d)

def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    #
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    #
    # Neater solutions welcome!
    #
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    scms = []

    # Only call setup_localpath on URIs which supports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.supports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
        bb.data.setVar('__BB_DONT_CACHE', '1', d)

    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format:
        logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    for scm in scms:
        if 'name' in urldata[scm].parm:
            name = urldata[scm].parm["name"]
            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
            format = format.replace(name, rev)

    return format
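
A worked example of the substitution loop above, using hypothetical SCM names and revisions: with two SRC_URI entries named machine and meta, and SRCREV_FORMAT = "machine_meta":

    format = "machine_meta"
    revs = {"machine": "1021", "meta": "788"}  # hypothetical sortable revisions
    for name, rev in revs.items():
        format = format.replace(name, rev)
    print(format)  # -> 1021_788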

def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classes in OE e.g. patch.bbclass
    """
    ud = init([url], d)
    if ud[url].method:
        return ud[url].localpath
    return url

def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
                  'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
                  'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
                  'KRB5CCNAME', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print(line, end=' ')
        output += line

    status = stdout_handle.close() or 0
    signal = status >> 8
    exitstatus = status & 0xff

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif status != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))

    return output
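
A hypothetical use of the helper above from a fetcher implementation; the command and URL are illustrative, and d is the datastore seen throughout this file:

    output = runfetchcmd("git ls-remote git://git.example.com/repo.git refs/heads/master", d, quiet=True)
    rev = output.split()[0]  # first field of the first line is the remote revision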
|
||||
|
||||
def try_mirrors(d, uri, mirrors, check = False, force = False):
|
||||
"""
|
||||
Try to use a mirrored version of the sources.
|
||||
This method will be automatically called before the fetchers go.
|
||||
|
||||
d Is a bb.data instance
|
||||
uri is the original uri we're trying to download
|
||||
mirrors is the list of mirrors we're going to try
|
||||
"""
|
||||
fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
|
||||
if not check and os.access(fpath, os.R_OK) and not force:
|
||||
logger.debug(1, "%s already exists, skipping checkout.", fpath)
|
||||
return fpath
|
||||
|
||||
ld = d.createCopy()
|
||||
for (find, replace) in mirrors:
|
||||
newuri = uri_replace(uri, find, replace, ld)
|
||||
if newuri != uri:
|
||||
try:
|
||||
ud = FetchData(newuri, ld)
|
||||
except bb.fetch.NoMethodError:
|
||||
logger.debug(1, "No method for %s", uri)
|
||||
continue
|
||||
|
||||
ud.setup_localpath(ld)
|
||||
|
||||
try:
|
||||
if check:
|
||||
found = ud.method.checkstatus(newuri, ud, ld)
|
||||
if found:
|
||||
return found
|
||||
else:
|
||||
ud.method.go(newuri, ud, ld)
|
||||
return ud.localpath
|
||||
except (bb.fetch.MissingParameterError,
|
||||
bb.fetch.FetchError,
|
||||
bb.fetch.MD5SumError):
|
||||
import sys
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
logger.debug(2, "Mirror fetch failure: %s", value)
|
||||
bb.utils.remove(ud.localpath)
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
        self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)

        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        else:
            premirrors = bb.data.getVar('PREMIRRORS', d, True)
            local = ""
            if premirrors and self.url:
                aurl = self.url.split(";")[0]
                mirrors = mirror_from_string(premirrors)
                for (find, replace) in mirrors:
                    if replace.startswith("file://"):
                        path = aurl.split("://")[1]
                        path = path.split(";")[0]
                        local = replace.split("://")[1] + os.path.basename(path)
                        if local == aurl or not os.path.exists(local) or os.path.isdir(local):
                            local = ""
                self.localpath = local
            if not local:
                try:
                    bb.fetch.srcrev_internal_call = True
                    self.localpath = self.method.localpath(self.url, self, d)
                finally:
                    bb.fetch.srcrev_internal_call = False
                # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
                # Horrible...
                bb.data.delVar("ISHOULDNEVEREXIST", d)

        if self.localpath is not None:
            # Note: These files should always be in DL_DIR whereas localpath may not be.
            basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
            self.md5 = basepath + '.md5'
            self.lockfile = basepath + '.lock'
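# A minimal sketch (hypothetical URL) of the six components decodeurl() hands
# back to FetchData.__init__ above:
example_url = "cvs://anonymous@cvs.example.org/cvsroot;module=proj;tag=REL_1"
# decodeurl(example_url) ->
#   ('cvs', 'cvs.example.org', '/cvsroot', 'anonymous', '',
#    {'module': 'proj', 'tag': 'REL_1'})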
class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls=None):
        # Keep the urls we were handed (and avoid a shared mutable default argument)
        self.urls = urls or []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also set up variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        """
        if urldata.method.forcefetch(url, urldata, d):
            return True
        elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
            return False
        else:
            return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d is a bb.data instance
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
        a) a source revision if specified
        b) True if auto srcrev is in action
        c) False otherwise
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        if rev == "SRCREVINACTION":
            return True
        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)

    def localcount_internal_helper(ud, d):
        """
        Return:
        a) a locked localcount if specified
        b) None otherwise
        """

        localcount = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, 1)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted against the one we got
        """
        wanted_sum = ud.parm.get('md5sum')
        if not wanted_sum:
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        md5out = file(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        revs = persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(url, ud, d)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(url, ud, d)
            return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a sortable revision string of the form "<count>+<revision>".
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        localcounts = persist_data.persist('BB_URI_LOCALCOUNT', d)
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = localcounts.get(key + '_rev')
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = localcounts.get(key + '_count')

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)

    def generate_revision_key(self, url, ud, d):
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
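# A minimal sketch of the SRCREV lookup order srcrev_internal_helper() walks
# for a named URI, assuming hypothetical values PN = "mypkg" and name = "rt":
#
#   SRCREV_rt_pn-mypkg -> SRCREV_pn-mypkg_rt -> SRCREV_rt -> SRCREV
#
# ("SRCREVINACTION" then means "use the latest upstream revision"), and of the
# "<count>+<revision>" string sortable_revision() builds:
count, latest_rev = "4", "a1b2c3d"  # hypothetical values
sortable = count + "+" + latest_rev  # -> "4+a1b2c3d"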
from . import cvs
from . import git
from . import local
from . import svn
from . import wget
from . import svk
from . import ssh
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
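# Note from the code above: FetchData.__init__ walks this list in order and
# picks the first method whose supports() accepts the URL, so Local is
# consulted before any of the network fetchers.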
bitbake/lib/bb/fetch/bzr.py (new file, 148 lines)
@@ -0,0 +1,148 @@
"""
BitBake 'Fetch' implementation for bzr.

"""

# Copyright (C) 2007 Ross Burton
# Copyright (C) 2007 Richard Purdie
#
# Classes for obtaining upstream sources for the
# BitBake build tools.
# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import bb
from bb import data
from bb.fetch import Fetch, FetchError, runfetchcmd, logger

class Bzr(Fetch):
    def supports(self, url, ud, d):
        return ud.type in ['bzr']

    def localpath(self, url, ud, d):

        # Create paths to bzr checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

        revision = Fetch.srcrev_internal_helper(ud, d)
        if revision is True:
            ud.revision = self.latest_revision(url, ud, d)
        elif revision:
            ud.revision = revision

        if not ud.revision:
            ud.revision = self.latest_revision(url, ud, d)

        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _buildbzrcommand(self, ud, d, command):
        """
        Build up a bzr command line based on ud
        command is "fetch", "update" or "revno"
        """

        basecmd = data.expand('${FETCHCMD_bzr}', d)

        proto = ud.parm.get('proto', 'http')

        bzrroot = ud.host + ud.path

        options = []

        if command == "revno":
            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
        else:
            if ud.revision:
                options.append("-r %s" % ud.revision)

            if command == "fetch":
                bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
            elif command == "update":
                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
            else:
                raise FetchError("Invalid bzr command %s" % command)

        return bzrcmd

    def go(self, loc, ud, d):
        """Fetch url"""

        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            logger.debug(1, "BZR Update %s", loc)
            os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            logger.debug(1, "BZR Checkout %s", loc)
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", bzrcmd)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

        # tar them up to a defined filename
        try:
            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
        except:
            # Clean up any partial tarball before re-raising (Python 2 re-raise syntax)
            t, v, tb = sys.exc_info()
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise t, v, tb

    def supports_srcrev(self):
        return True

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "bzr:" + ud.pkgdir

    def _latest_revision(self, url, ud, d):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "BZR fetcher hitting network for %s", url)

        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

        return output.strip()

    def _sortable_revision(self, url, ud, d):
        """
        Return a sortable revision number, which in our case is the revision number itself
        """

        return self._build_revision(url, ud, d)

    def _build_revision(self, url, ud, d):
        return ud.revision
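# A minimal sketch of the command line _buildbzrcommand() produces, assuming
# FETCHCMD_bzr = "bzr" and a hypothetical http://bzr.example.org/trunk at r42:
basecmd, proto, bzrroot, rev = "bzr", "http", "bzr.example.org/trunk", "42"
fetch_cmd = "%s co -r %s %s://%s" % (basecmd, rev, proto, bzrroot)
# -> "bzr co -r 42 http://bzr.example.org/trunk"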
bitbake/lib/bb/fetch/cvs.py (new file, 172 lines)
@@ -0,0 +1,172 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
#

import os
import logging
import bb
from bb import data
from bb.fetch import Fetch, FetchError, MissingParameterError, logger

class Cvs(Fetch):
    """
    Class to fetch a module or modules from cvs repositories
    """
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
        return ud.type in ['cvs']

    def localpath(self, url, ud, d):
        if not "module" in ud.parm:
            raise MissingParameterError("cvs method needs a 'module' parameter")
        ud.module = ud.parm["module"]

        ud.tag = ud.parm.get('tag', "")

        # Override the default date in certain cases
        if 'date' in ud.parm:
            ud.date = ud.parm['date']
        elif ud.tag:
            ud.date = ""

        norecurse = ''
        if 'norecurse' in ud.parm:
            norecurse = '_norecurse'

        fullpath = ''
        if 'fullpath' in ud.parm:
            fullpath = '_fullpath'

        ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        if ud.date == "now":
            return True
        return False

    def go(self, loc, ud, d):

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
        cvs_port = ud.parm.get('port', '')

        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        if method == "dir":
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', ud.module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            logger.info("Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            myret = os.system(cvscmd)

        if myret != 0 or not os.access(moddir, os.R_OK):
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(ud.module)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            os.chdir(pkgdir)
            myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
        else:
            os.chdir(moddir)
            os.chdir('..')
            myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))

        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
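# A minimal sketch of how go() above turns a 12-character date ("YYYYMMDDHHMM")
# into a CVS -D option (the date value is hypothetical):
date = "200504141205"
opt = "-D \"%s %s:%s UTC\"" % (date[0:8], date[8:10], date[10:12])
# -> -D "20050414 12:05 UTC"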
bitbake/lib/bb/fetch/git.py (new file, 339 lines)
@@ -0,0 +1,339 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git implementation

"""

# Copyright (C) 2005 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import bb
import bb.persist_data
from bb import data
from bb.fetch import Fetch
from bb.fetch import runfetchcmd
from bb.fetch import logger

class Git(Fetch):
    """Class to fetch a module or modules from git repositories"""
    def init(self, d):
        #
        # Only enable _sortable revision if the key is set
        #
        if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
            self._sortable_buildindex = self._sortable_buildindex_disabled

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def localpath(self, url, ud, d):

        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "rsync"

        ud.branch = ud.parm.get("branch", "master")

        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
        ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        tag = Fetch.srcrev_internal_helper(ud, d)
        if tag is True:
            ud.tag = self.latest_revision(url, ud, d)
        elif tag:
            ud.tag = tag

        if not ud.tag or ud.tag == "master":
            ud.tag = self.latest_revision(url, ud, d)

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            if subdir.endswith("/"):
                subdir = subdir[:-1]
            subdirpath = os.path.join(ud.path, subdir)
        else:
            subdirpath = ud.path

        if 'fullclone' in ud.parm:
            ud.localfile = ud.mirrortarball
        else:
            ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        if 'noclone' in ud.parm:
            ud.localfile = None
            return None

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        if 'fullclone' in ud.parm:
            return True
        if 'noclone' in ud.parm:
            return False
        if os.path.exists(ud.localpath):
            return False
        if not self._contains_ref(ud.tag, d):
            return True
        return False

    def try_premirror(self, u, ud, d):
        if 'noclone' in ud.parm:
            return False
        if os.path.exists(ud.clonedir):
            return False
        if os.path.exists(ud.localpath):
            return False

        return True

    def go(self, loc, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)

        coname = '%s' % (ud.tag)
        codir = os.path.join(ud.clonedir, coname)

        # If we have no existing clone and no mirror tarball, try and obtain one
        if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
            try:
                Fetch.try_mirrors(ud.mirrortarball)
            except:
                pass

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (repofile), d)

        # If the repo still doesn't exist, fall back to cloning it
        if not os.path.exists(ud.clonedir):
            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
            # Remove all but the .git directory
            runfetchcmd("rm * -Rf", d)
            if 'fullclone' in ud.parm:
                runfetchcmd("%s fetch --all" % (ud.basecmd), d)
            else:
                runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
            runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

        # Generate a mirror tarball if needed
        os.chdir(ud.clonedir)
        mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
        if mirror_tarballs != "0" or 'fullclone' in ud.parm:
            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

        if 'fullclone' in ud.parm:
            return

        if os.path.exists(codir):
            bb.utils.prunedir(codir)

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            if subdir.endswith("/"):
                subdirbase = os.path.basename(subdir[:-1])
            else:
                subdirbase = os.path.basename(subdir)
        else:
            subdirbase = ""

        if subdir != "":
            readpathspec = ":%s" % (subdir)
            codir = os.path.join(codir, "git")
            coprefix = os.path.join(codir, subdirbase, "")
        else:
            readpathspec = ""
            coprefix = os.path.join(codir, "git", "")

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
            os.chdir(coprefix)
            runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
        else:
            bb.utils.mkdirhier(codir)
            os.chdir(ud.clonedir)
            runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
            runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

        os.chdir(codir)
        logger.info("Creating tarball of git checkout")
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)

        os.chdir(ud.clonedir)
        bb.utils.prunedir(codir)

    def supports_srcrev(self):
        return True

    def _contains_ref(self, tag, d):
        basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
        output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
        return output.split()[0] != "0"

    def _revision_key(self, url, ud, d, branch=False):
        """
        Return a unique key for the url
        """
        key = 'git:' + ud.host + ud.path.replace('/', '.')
        if branch:
            return key + ud.branch
        else:
            return key

    def generate_revision_key(self, url, ud, d, branch=False):
        key = self._revision_key(url, ud, d, branch)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")

    def _latest_revision(self, url, ud, d):
        """
        Compute the HEAD revision for the url
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
        cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
        output = runfetchcmd(cmd, d, True)
        if not output:
            raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
        return output.split()[0]

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)

        key = self.generate_revision_key(url, ud, d, branch=True)

        try:
            return revs[key]
        except KeyError:
            # Compatibility with old key format, no branch included
            oldkey = self.generate_revision_key(url, ud, d, branch=False)
            try:
                rev = revs[oldkey]
            except KeyError:
                rev = self._latest_revision(url, ud, d)
            else:
                del revs[oldkey]
            revs[key] = rev
            return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a sortable revision string of the form "<count>+<revision>".
        """
        localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d)
        key = self.generate_revision_key(url, ud, d, branch=True)
        oldkey = self.generate_revision_key(url, ud, d, branch=False)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = localcounts.get(key + '_rev')
        if last_rev is None:
            last_rev = localcounts.get(oldkey + '_rev')
            if last_rev is not None:
                del localcounts[oldkey + '_rev']
                localcounts[key + '_rev'] = last_rev

        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = localcounts.get(key + '_count')
        if count is None:
            count = localcounts.get(oldkey + '_count')
            if count is not None:
                del localcounts[oldkey + '_count']
                localcounts[key + '_count'] = count

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)
        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)

    def _build_revision(self, url, ud, d):
        return ud.tag

    def _sortable_buildindex_disabled(self, url, ud, d, rev):
        """
        Return a suitable buildindex for the revision specified. This is done by counting revisions
        using "git rev-list", which may or may not work in different circumstances.
        """

        cwd = os.getcwd()

        # Check if we have the rev already
        if not os.path.exists(ud.clonedir):
            print("no repo")
            self.go(None, ud, d)
            if not os.path.exists(ud.clonedir):
                logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
                return None

        os.chdir(ud.clonedir)
        if not self._contains_ref(rev, d):
            self.go(None, ud, d)

        output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
        os.chdir(cwd)

        buildindex = "%s" % output.split()[0]
        logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
        return buildindex
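# A minimal sketch of the clone-directory and mirror-tarball names localpath()
# above derives, for a hypothetical git://git.example.com/repo.git URL:
gitsrcname = '%s%s' % ('git.example.com', '/repo.git'.replace('/', '.'))
# -> 'git.example.com.repo.git'
mirrortarball = 'git_%s.tar.gz' % gitsrcname
# -> 'git_git.example.com.repo.git.tar.gz'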
bitbake/lib/bb/fetch/hg.py (new file, 180 lines)
@@ -0,0 +1,180 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementation for mercurial DRCS (hg).

"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2004 Marcin Juszkiewicz
# Copyright (C) 2007 Robert Schuster
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import sys
import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
from bb.fetch import runfetchcmd
from bb.fetch import logger

class Hg(Fetch):
    """Class to fetch from mercurial repositories"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with mercurial.
        """
        return ud.type in ['hg']

    def forcefetch(self, url, ud, d):
        revTag = ud.parm.get('rev', 'tip')
        return revTag == "tip"

    def localpath(self, url, ud, d):
        if not "module" in ud.parm:
            raise MissingParameterError("hg method needs a 'module' parameter")

        ud.module = ud.parm["module"]

        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            tag = Fetch.srcrev_internal_helper(ud, d)
            if tag is True:
                ud.revision = self.latest_revision(url, ud, d)
            elif tag:
                ud.revision = tag
            else:
                ud.revision = self.latest_revision(url, ud, d)

        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg command line based on ud
        command is "fetch", "update" or "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('proto', 'http')

        host = ud.host
        if proto == "file":
            host = "/"
            ud.host = "localhost"

        if not ud.user:
            hgroot = host + ud.path
        else:
            hgroot = ud.user + "@" + host + ud.path

        # Compare command names with ==, not "is" (string identity is not guaranteed)
        if command == "info":
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = []
        if ud.revision:
            options.append("-r %s" % ud.revision)

        if command == "fetch":
            cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass the options list; limiting pull to rev causes the local
            # repo not to contain it, and the immediately following "update"
            # command would crash
            cmd = "%s pull" % (basecmd)
        elif command == "update":
            cmd = "%s update -C %s" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command)

        return cmd

    def go(self, loc, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + loc)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            runfetchcmd(updatecmd, d)

        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update, as hg's clone
        # won't check out the specified revision if it's on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        os.chdir(ud.pkgdir)
        try:
            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
        except:
            # Clean up any partial tarball before re-raising (Python 2 re-raise syntax)
            t, v, tb = sys.exc_info()
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise t, v, tb

    def supports_srcrev(self):
        return True

    def _latest_revision(self, url, ud, d):
        """
        Compute tip revision for the url
        """
        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
        return output.strip()

    def _build_revision(self, url, ud, d):
        return ud.revision

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "hg:" + ud.moddir
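# A minimal sketch of the command line _buildhgcommand() produces for "info",
# assuming FETCHCMD_hg = "hg" and a hypothetical repository:
basecmd, proto, hgroot, module = "hg", "http", "hg.example.org/repos", "proj"
info_cmd = "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, module)
# -> "hg identify -i http://hg.example.org/repos/proj"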
bitbake/lib/bb/fetch/local.py (new file, 73 lines)
@@ -0,0 +1,73 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import bb
import bb.utils
from bb import data
# logger is needed by checkstatus() below
from bb.fetch import Fetch, logger

class Local(Fetch):
    def supports(self, url, urldata, d):
        """
        Check to see if a given url represents a local fetch.
        """
        return urldata.type in ['file']

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        path = url.split("://")[1]
        path = path.split(";")[0]
        newpath = path
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, 1)
            if filespath:
                newpath = bb.utils.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, 1)
                if filesdir:
                    newpath = os.path.join(filesdir, path)
        # We don't set localfile as for this fetcher the file is already local!
        return newpath

    def go(self, url, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        return 1

    def checkstatus(self, url, urldata, d):
        """
        Check the status of the url
        """
        if urldata.localpath.find("*") != -1:
            logger.info("URL %s looks like a glob and was therefore not checked.", url)
            return True
        if os.path.exists(urldata.localpath):
            return True
        return False
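# A minimal sketch of how a relative file:// URL is resolved by localpath()
# above, assuming a hypothetical FILESPATH value:
filespath = "/layer/recipe/files:/layer/files"  # hypothetical
# bb.utils.which(filespath, "defconfig") returns the first existing match,
# e.g. "/layer/recipe/files/defconfig"; failing that, FILESDIR is tried.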
bitbake/lib/bb/fetch/osc.py (new file, 143 lines)
@@ -0,0 +1,143 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
Bitbake "Fetch" implementation for osc (Opensuse build service client).
Based on the svn "Fetch" implementation.

"""

import os
import sys
import logging
import bb
from bb import data
from bb import utils
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
from bb.fetch import runfetchcmd
# logger is needed by go() below
from bb.fetch import logger

class Osc(Fetch):
    """Class to fetch a module or modules from Opensuse build server
    repositories."""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with osc.
        """
        return ud.type in ['osc']

    def localpath(self, url, ud, d):
        if not "module" in ud.parm:
            raise MissingParameterError("osc method needs a 'module' parameter.")

        ud.module = ud.parm["module"]

        # Create paths to osc checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            pv = data.getVar("PV", d, 0)
            rev = Fetch.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
            else:
                ud.revision = ""

        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _buildosccommand(self, ud, d, command):
        """
        Build up an osc command line based on ud
        command is "fetch" or "update"
        """

        basecmd = data.expand('${FETCHCMD_osc}', d)

        proto = ud.parm.get('proto', 'ocs')

        options = []

        config = "-c %s" % self.generate_config(ud, d)

        if ud.revision:
            options.append("-r %s" % ud.revision)

        coroot = self._strip_leading_slashes(ud.path)

        # Compare command names with ==, not "is"
        if command == "fetch":
            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
        elif command == "update":
            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
        else:
            raise FetchError("Invalid osc command %s" % command)

        return osccmd

    def go(self, loc, ud, d):
        """
        Fetch url
        """

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
            oscupdatecmd = self._buildosccommand(ud, d, "update")
            logger.info("Update " + loc)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", oscupdatecmd)
            runfetchcmd(oscupdatecmd, d)
        else:
            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", oscfetchcmd)
            runfetchcmd(oscfetchcmd, d)

        os.chdir(os.path.join(ud.pkgdir + ud.path))
        # tar them up to a defined filename
        try:
            runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
        except:
            # Clean up any partial tarball before re-raising (Python 2 re-raise syntax)
            t, v, tb = sys.exc_info()
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise t, v, tb

    def supports_srcrev(self):
        return False

    def generate_config(self, ud, d):
        """
        Generate a .oscrc to be used for this run.
        """

        config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
        bb.utils.remove(config_path)

        f = open(config_path, 'w')
        f.write("[general]\n")
        f.write("apisrv = %s\n" % ud.host)
        f.write("scheme = http\n")
        f.write("su-wrapper = su -c\n")
        f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
        f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
        f.write("extra-pkgs = gzip\n")
        f.write("\n")
        f.write("[%s]\n" % ud.host)
        f.write("user = %s\n" % ud.parm["user"])
        f.write("pass = %s\n" % ud.parm["pswd"])
        f.close()

        return config_path
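# A minimal sketch of the checkout command _buildosccommand() produces, given
# hypothetical values (FETCHCMD_osc = "osc", OSCDIR = "/downloads/osc"):
basecmd, config, coroot, module, rev = "osc", "-c /downloads/osc/oscrc", "project/path", "mod", "12"
fetch_cmd = "%s %s co %s/%s -r %s" % (basecmd, config, coroot, module, rev)
# -> "osc -c /downloads/osc/oscrc co project/path/mod -r 12"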
bitbake/lib/bb/fetch/perforce.py (new file, 206 lines)
@@ -0,0 +1,206 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

from future_builtins import zip
import os
import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import logger

class Perforce(Fetch):
    def supports(self, url, ud, d):
        return ud.type in ['p4']

    def doparse(url, d):
        parm = {}
        path = url.split("://")[1]
        delim = path.find("@")
        if delim != -1:
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = data.getVar('P4PORT', d).split(':')
            user = ""
            pswd = ""

        if path.find(";") != -1:
            keys = []
            values = []
            plist = path.split(';')
            for item in plist:
                if item.count('='):
                    (key, value) = item.split('=')
                    keys.append(key)
                    values.append(value)

            parm = dict(zip(keys, values))
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        return host, path, user, pswd, parm
    doparse = staticmethod(doparse)

    def getcset(d, depot, host, user, pswd, parm):
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        p4date = data.getVar("P4DATE", d, 1)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.readline().strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        return cset.split(' ')[1]
    getcset = staticmethod(getcset)

    def localpath(self, url, ud, d):

        (host, path, user, pswd, parm) = Perforce.doparse(url, d)

        # If a label is specified, we use that as our filename
        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)

        base = path
        which = path.find('/...')
        if which != -1:
            base = path[:which]

        base = self._strip_leading_slashes(base)

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)

        return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)

    def go(self, loc, ud, d):
        """
        Fetch urls
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

        if not p4file:
            logger.error("Fetch: unable to get the P4 files from %s", depot)
            raise FetchError(module)

        count = 0

        for line in p4file:
            # Avoid shadowing the 'file' and 'list' builtins
            fields = line.split()

            if fields[2] == "delete":
                continue

            dest = fields[0][len(path) + 1:]
            where = dest.find("#")

            os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]))
            count = count + 1

        if count == 0:
            logger.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError(module)

        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(module)
        # cleanup
        bb.utils.prunedir(tmpfile)
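# A minimal sketch of the p4 URL form doparse() above expects (all values
# hypothetical):
#
#   p4://me:secret:p4.example.com:1666@depot/proj/...;label=REL1
#
# which it splits into:
#   user = 'me', pswd = 'secret', host = 'p4.example.com:1666',
#   path = '//depot/proj/...', parm = {'label': 'REL1', 'cset': ...}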
bitbake/lib/bb/fetch/repo.py (new file, 98 lines)
@@ -0,0 +1,98 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake "Fetch" repo (git) implementation

"""

# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
#
# Based on git.py which is:
# Copyright (C) 2005 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import runfetchcmd
# logger is needed by go() below
from bb.fetch import logger

class Repo(Fetch):
    """Class to fetch a module or modules from repo (git) repositories"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with repo.
        """
        return ud.type in ["repo"]

    def localpath(self, url, ud, d):
        """
        We don't care about the git rev of the manifests repository, but
        we do care about the manifest to use. The default is "default".
        We also care about the branch or tag to be used. The default is
        "master".
        """

        ud.proto = ud.parm.get('protocol', 'git')
        ud.branch = ud.parm.get('branch', 'master')
        ud.manifest = ud.parm.get('manifest', 'default.xml')
        if not ud.manifest.endswith('.xml'):
            ud.manifest += '.xml'

        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, loc, ud, d):
        """Fetch url"""

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.utils.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        runfetchcmd("repo sync", d)
        os.chdir(codir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.repo' --exclude '.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*")), d)

    def supports_srcrev(self):
        return False

    def _build_revision(self, url, ud, d):
        return ud.manifest

    def _want_sortable_revision(self, url, ud, d):
        return False
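# A minimal sketch of the tarball name localpath() above builds, for a
# hypothetical manifest URL:
host, path, manifest, branch = "android.example.com", "/platform/manifest", "default.xml", "froyo"
localfile = "repo_%s%s_%s_%s.tar.gz" % (host, path.replace("/", "."), manifest, branch)
# -> "repo_android.example.com.platform.manifest_default.xml_froyo.tar.gz"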
bitbake/lib/bb/fetch/ssh.py (new file, 118 lines)
@@ -0,0 +1,118 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
'''
BitBake 'Fetch' implementations

This implementation is for Secure Shell (SSH), and attempts to comply with the
IETF secsh internet draft:
    http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/

Currently does not support the sftp parameters, as this uses scp
Also does not support the 'fingerprint' connection parameter.

'''

# Copyright (C) 2006  OpenedHand Ltd.
#
#
# Based in part on svk.py:
#    Copyright (C) 2006 Holger Hans Peter Freyther
#    Based on svn.py:
#        Copyright (C) 2003, 2004 Chris Larson
#    Based on functions from the base bb module:
#        Copyright 2003 Holger Schurig
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import re, os
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError


__pattern__ = re.compile(r'''
 \s*                     # Skip leading whitespace
 ssh://                  # scheme
 (                       # Optional username/password block
  (?P<user>\S+)          # username
  (:(?P<pass>\S+))?      # colon followed by the password (optional)
 )?
 (?P<cparam>(;[^;]+)*)?  # connection parameters block (optional)
 @
 (?P<host>\S+?)          # non-greedy match of the host
 (:(?P<port>[0-9]+))?    # colon followed by the port (optional)
 /
 (?P<path>[^;]+)         # path on the remote system, may be absolute or relative,
                         # and may include the use of '~' to reference the remote home
                         # directory
 (?P<sparam>(;[^;]+)*)?  # parameters block (optional)
 $
''', re.VERBOSE)
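# Illustrative decomposition (hypothetical URL), e.g. from a REPL:
#   >>> m = __pattern__.match("ssh://anoncvs@example.org:2222/path/to/repo")
#   >>> m.group('user'), m.group('host'), m.group('port'), m.group('path')
#   ('anoncvs', 'example.org', '2222', 'path/to/repo')
# Note the literal '/' before the path group is consumed by the pattern
# itself, so 'path' carries no leading slash here.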
class SSH(Fetch):
    '''Class to fetch a module or modules via Secure Shell'''

    def supports(self, url, urldata, d):
        return __pattern__.match(url) is not None

    def localpath(self, url, urldata, d):
        m = __pattern__.match(url)
        path = m.group('path')
        host = m.group('host')
        lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
        return lpath

    def go(self, url, urldata, d):
        dldir = data.getVar('DL_DIR', d, 1)

        m = __pattern__.match(url)
        path = m.group('path')
        host = m.group('host')
        port = m.group('port')
        user = m.group('user')
        password = m.group('pass')

        ldir = os.path.join(dldir, host)
        lpath = os.path.join(ldir, os.path.basename(path))

        if not os.path.exists(ldir):
            os.makedirs(ldir)

        if port:
            port = '-P %s' % port
        else:
            port = ''

        if user:
            fr = user
            if password:
                fr += ':%s' % password
            fr += '@%s' % host
        else:
            fr = host
        fr += ':%s' % path

        # The Python 2 'commands' module handles shell quoting and
        # execution here; mkarg() shell-quotes a single argument.
        import commands
        cmd = 'scp -B -r %s %s %s/' % (
            port,
            commands.mkarg(fr),
            commands.mkarg(ldir)
        )

        (exitstatus, output) = commands.getstatusoutput(cmd)
        if exitstatus != 0:
            print(output)
            raise FetchError('Unable to fetch %s' % url)
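# Note: the 'commands' module used above is Python 2-only; mkarg() returns
# its argument shell-quoted with a leading space. A rough equivalent on
# later Pythons (reference only, not part of this file):
#   import pipes                      # shlex.quote on Python 3
#   def mkarg_compat(arg):
#       return ' ' + pipes.quote(arg)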
104
bitbake/lib/bb/fetch/svk.py
Normal file
@@ -0,0 +1,104 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

This implementation is for svk. It is based on the svn implementation

"""

# Copyright (C) 2006 Holger Hans Peter Freyther
# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
from bb.fetch import logger

class Svk(Fetch):
    """Class to fetch a module or modules from svk repositories"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svk.
        """
        return ud.type in ['svk']

    def localpath(self, url, ud, d):
        if not "module" in ud.parm:
            raise MissingParameterError("svk method needs a 'module' parameter")
        else:
            ud.module = ud.parm["module"]

        ud.revision = ud.parm.get('rev', "")

        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        return ud.date == "now"

    def go(self, loc, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        bb.utils.prunedir(tmpfile)
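# MKTEMPDIRCMD above is supplied by the metadata; the only contract is that
# it prints the directory it created on stdout. A typical definition
# (illustrative; this value ships in distro configuration, not this commit):
#   MKTEMPDIRCMD = "mktemp -d -q ${TMPBASE}"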
204
bitbake/lib/bb/fetch/svn.py
Normal file
@@ -0,0 +1,204 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementation for svn.

"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2004 Marcin Juszkiewicz
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import sys
import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
from bb.fetch import runfetchcmd
from bb.fetch import logger

class Svn(Fetch):
    """Class to fetch a module or modules from svn repositories"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svn.
        """
        return ud.type in ['svn']

    def localpath(self, url, ud, d):
        if not "module" in ud.parm:
            raise MissingParameterError("svn method needs a 'module' parameter")

        ud.module = ud.parm["module"]

        # Create paths to svn checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        if 'rev' in ud.parm:
            ud.date = ""
            ud.revision = ud.parm['rev']
        elif 'date' in ud.parm:
            ud.date = ud.parm['date']
            ud.revision = ""
        else:
            #
            # ***Nasty hack***
            # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
            # Should warn people to switch to SRCREV here
            #
            pv = data.getVar("PV", d, 0)
            if "DATE" in pv:
                ud.revision = ""
            else:
                rev = Fetch.srcrev_internal_helper(ud, d)
                if rev is True:
                    ud.revision = self.latest_revision(url, ud, d)
                    ud.date = ""
                elif rev:
                    ud.revision = rev
                    ud.date = ""
                else:
                    ud.revision = ""

        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _buildsvncommand(self, ud, d, command):
        """
        Build up an svn commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_svn}', d)

        proto = ud.parm.get('proto', 'svn')

        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in ud.parm:
            svn_rsh = ud.parm["rsh"]

        svnroot = ud.host + ud.path

        # either use the revision, or SRCDATE in braces,
        options = []

        if ud.user:
            options.append("--username %s" % ud.user)

        if ud.pswd:
            options.append("--password %s" % ud.pswd)

        if command == "info":
            svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
        else:
            suffix = ""
            if ud.revision:
                options.append("-r %s" % ud.revision)
                suffix = "@%s" % (ud.revision)
            elif ud.date:
                options.append("-r {%s}" % ud.date)

            if command == "fetch":
                svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
            elif command == "update":
                svncmd = "%s update %s" % (basecmd, " ".join(options))
            else:
                raise FetchError("Invalid svn command %s" % command)

        if svn_rsh:
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

        return svncmd
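    # Worked example (hypothetical URL): for
    #   svn://svn.example.org/trunk;module=foo;rev=1234
    # with FETCHCMD_svn = "/usr/bin/env svn", the "fetch" branch above builds
    #   /usr/bin/env svn co -r 1234 svn://svn.example.org/trunk/foo@1234 foo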

    def go(self, loc, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + loc)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", svnupdatecmd)
            runfetchcmd(svnupdatecmd, d)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            runfetchcmd(svnfetchcmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.svn'"

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        try:
            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
        except:
            t, v, tb = sys.exc_info()
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise t, v, tb

    def supports_srcrev(self):
        return True

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "svn:" + ud.moddir

    def _latest_revision(self, url, ud, d):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "SVN fetcher hitting network for %s", url)

        output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)

        revision = None
        for line in output.splitlines():
            if "Last Changed Rev" in line:
                revision = line.split(":")[1].strip()

        return revision

    def _sortable_revision(self, url, ud, d):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return self._build_revision(url, ud, d)

    def _build_revision(self, url, ud, d):
        return ud.revision
93
bitbake/lib/bb/fetch/wget.py
Normal file
@@ -0,0 +1,93 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import logging
import bb
import urllib
from bb import data
from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd

class Wget(Fetch):
    """Class to fetch urls via 'wget'"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def localpath(self, url, ud, d):

        url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
        ud.basename = os.path.basename(ud.path)
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            uri = uri.split(";")[0]
            uri_decoded = list(decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
            runfetchcmd(fetchcmd, d)

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        if fetch_uri(uri, ud, localdata):
            return True

        raise FetchError(uri)


    def checkstatus(self, uri, ud, d):
        return self.go(uri, ud, d, True)
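# The commands substituted above come from the metadata with ${URI} and
# ${FILE} placeholders. Roughly what a wget-based configuration looks like
# (illustrative values, defined in distro configuration, not this changeset):
#   FETCHCOMMAND_wget = "/usr/bin/env wget -t 5 --passive-ftp -P ${DL_DIR} '${URI}'"
#   RESUMECOMMAND_wget = "/usr/bin/env wget -c -t 5 --passive-ftp -P ${DL_DIR} '${URI}'"
#   CHECKCOMMAND_wget = "/usr/bin/env wget --spider -t 5 --passive-ftp '${URI}'"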
@@ -8,7 +8,6 @@ BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -29,13 +28,10 @@ from __future__ import absolute_import
from __future__ import print_function
import os, re
import logging
import urllib
import bb.persist_data, bb.utils
import bb.checksum
import bb.data, bb.persist_data, bb.utils
from bb import data

__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

@@ -59,17 +55,11 @@ class MalformedUrl(BBFetchException):
class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        if url:
            msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            msg = "Fetcher failure: %s" % message
        msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""

class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
@@ -103,18 +93,37 @@ class ParameterError(BBFetchException):
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class MD5SumError(BBFetchException):
    """Exception raised when a MD5 checksum of a file does not match for a downloaded file"""
    def __init__(self, path, wanted, got, url):
        msg = "File: '%s' has md5 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
        self.url = url
        self.path = path
        self.wanted = wanted
        self.got = got
        BBFetchException.__init__(self, msg)
        self.args = (path, wanted, got, url)

class SHA256SumError(MD5SumError):
    """Exception raised when a SHA256 checksum of a file does not match for a downloaded file"""
    def __init__(self, path, wanted, got, url):
        msg = "File: '%s' has sha256 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
        self.url = url
        self.path = path
        self.wanted = wanted
        self.got = got
        BBFetchException.__init__(self, msg)
        self.args = (path, wanted, got, url)

class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        msg = "Network access disabled through BB_NO_NETWORK but access requested with command %s (for url %s)" % (cmd, url)
        msg = "Network access disabled through BB_NO_NETWORK but access rquested with command %s (for url %s)" % (cmd, url)
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(self, msg)
        self.args = (url, cmd)

class NonLocalMethod(Exception):
    def __init__(self):
        Exception.__init__(self)

def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
@@ -154,14 +163,14 @@ def decodeurl(url):
            s1, s2 = s.split('=')
            p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p
    return (type, host, path, user, pswd, p)

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    type, host, path, user, pswd, p = decoded
    (type, host, path, user, pswd, p) = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
@@ -175,64 +184,42 @@ def encodeurl(decoded):
        url += "@"
    if host and type != "file":
        url += "%s" % host
    url += "%s" % urllib.quote(path)
    url += "%s" % path
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url
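# Illustrative round trip (hypothetical URL):
#   decodeurl("http://example.org/pub/pkg.tar.gz;name=default")
#     -> ('http', 'example.org', '/pub/pkg.tar.gz', '', '', {'name': 'default'})
# and encodeurl() on that tuple rebuilds the same string. The two variants
# shown above differ on whether the path is URL-unquoted/quoted in transit.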

def uri_replace(ud, uri_find, uri_replace, replacements, d):
def uri_replace(ud, uri_find, uri_replace, d):
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
        logger.debug(1, "uri_replace: passed an undefined value, not replacing")
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
        if isinstance(i, basestring):
            if (re.match(i, uri_decoded[loc])):
                if not uri_replace_decoded[loc]:
                    result_decoded[loc] = ""
                else:
                    result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    if ud.mirrortarball:
                        if result_decoded[loc].endswith("/"):
                            result_decoded[loc] = os.path.dirname(result_decoded[loc])
                        result_decoded[loc] = os.path.join(result_decoded[loc], os.path.basename(ud.mirrortarball))
                    elif ud.localpath:
                        if result_decoded[loc].endswith("/"):
                            result_decoded[loc] = os.path.dirname(result_decoded[loc])
                        result_decoded[loc] = os.path.join(result_decoded[loc], os.path.basename(ud.localpath))
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
        if loc == 2:
            # Handle path manipulations
            basename = None
            if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                # If the source and destination url types differ, must be a mirrortarball mapping
                basename = os.path.basename(ud.mirrortarball)
                # Kill parameters, they make no sense for mirror tarballs
                uri_decoded[5] = {}
            elif ud.localpath and ud.method.supports_checksum(ud):
                basename = os.path.basename(ud.localpath)
            if basename and not result_decoded[loc].endswith(basename):
                result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    return ud.url
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
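# How a mirror entry drives uri_replace (hypothetical values): with
#   find    = "http://.*/.*"
#   replace = "file:///local/mirror/PATH"
#   ud.url  = "http://example.org/pub/pkg.tar.gz"
# each decoded field of ud.url is regexp-matched against find and rewritten
# from replace, the PATH placeholder coming from the replacements dict,
# yielding roughly "file:///local/mirror/pub/pkg.tar.gz".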
@@ -246,7 +233,7 @@ def fetcher_init(d):
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
@@ -260,18 +247,10 @@ def fetcher_init(d):
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_parse_save(d):
    _checksum_cache.save_extras(d)

def fetcher_parse_done(d):
    _checksum_cache.save_merge(d)

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistant cache with current values and
@@ -298,59 +277,35 @@ def verify_checksum(u, ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.
    return value:
        - True: checksum matched
        - False: checksum unmatched

    if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
    if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
    matched
    """

    if not ud.method.supports_checksum(ud):
    if not ud.type in ["http", "https", "ftp", "ftps"]:
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
        if (strict and ud.md5_expected == None and ud.sha256_expected == None):
            raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n'
                             'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                             (ud.localpath, ud.md5_name, md5data,
                              ud.sha256_name, sha256data), u)

        # Log missing sums so user can more easily add them
        if ud.md5_expected == None:
            logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.md5_name, md5data)

        if ud.sha256_expected == None:
            logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.sha256_name, sha256data)

    md5mismatch = False
    sha256mismatch = False
    if (ud.md5_expected == None or ud.sha256_expected == None):
        logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
                    'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
                    ud.localpath, ud.md5_name, md5data,
                    ud.sha256_name, sha256data)
        if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
            raise FetchError("No checksum specified for %s." % u, u)
        return

    if ud.md5_expected != md5data:
        md5mismatch = True
        raise MD5SumError(ud.localpath, ud.md5_expected, md5data, u)

    if ud.sha256_expected != sha256data:
        sha256mismatch = True

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    if md5mismatch and ud.md5_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)

    if sha256mismatch and ud.sha256_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, u)

    raise SHA256SumError(ud.localpath, ud.sha256_expected, sha256data, u)
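# How the expectations verified above are declared in a recipe (hypothetical
# digests; these happen to be the md5/sha256 of an empty file):
#   SRC_URI = "http://example.org/pkg-1.0.tar.gz"
#   SRC_URI[md5sum] = "d41d8cd98f00b204e9800998ecf8427e"
#   SRC_URI[sha256sum] = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"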
def update_stamp(u, ud, d):
    """
@@ -377,8 +332,8 @@ def subprocess_setup():

def get_autorev(d):
    # only not cache src rev in autorev case
    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
        d.setVar('__BB_DONT_CACHE', '1')
    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
        bb.data.setVar('__BB_DONT_CACHE', '1', d)
    return "AUTOINC"

def get_srcrev(d):
@@ -391,7 +346,7 @@ def get_srcrev(d):
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
    fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
@@ -406,7 +361,7 @@ def get_srcrev(d):
    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT', True)
    format = bb.data.getVar('SRCREV_FORMAT', d, True)
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

@@ -430,9 +385,6 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
    Optionally remove the files/directories listed in cleanup upon failure
    """

    import bb.process
    import subprocess

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
@@ -444,32 +396,39 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
                  'GIT_PROXY_IGNORE', 'SOCKS5_USER', 'SOCKS5_PASSWD']

    for var in exportvars:
        val = d.getVar(var, True)
        val = bb.data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""
    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    try:
        (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        error_message = "Fetch command %s failed with exit code %s, output:\nSTDOUT: %s\nSTDERR: %s" % (e.command, e.exitcode, e.stdout, e.stderr)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print(line, end=' ')
        output += line

    status = stdout_handle.close() or 0
    signal = status >> 8
    exitstatus = status & 0xff

    if (signal or status != 0):
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)
    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif status != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))

    return output
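# Sketch of the wrapping performed above (hypothetical values): with
# http_proxy set in the datastore, a cmd of
#   wget http://example.org/pkg.tar.gz
# is executed as
#   export http_proxy="http://proxy.example.org:3128"; wget http://example.org/pkg.tar.gz
# so proxy and PATH settings from the metadata reach the child shell.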
@@ -477,106 +436,11 @@ def check_network_access(d, info = "", url = None):
    """
    log remote network access, and error if BB_NO_NETWORK is set
    """
    if d.getVar("BB_NO_NETWORK", True) == "1":
    if bb.data.getVar("BB_NO_NETWORK", d, True) == "1":
        raise NetworkAccess(url, info)
    else:
        logger.debug(1, "Fetcher accessed the network with the command %s" % info)

def build_mirroruris(origud, mirrors, ld):
    uris = []
    uds = []

    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(uri, ud, uris, uds):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                continue
            newuri = uri_replace(ud, find, replace, replacements, ld)
            if not newuri or newuri in uris or newuri == origud.url:
                continue
            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
            except bb.fetch2.BBFetchException as e:
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    ud.method.clean(ud, ld)
                except UnboundLocalError:
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            adduri(newuri, newud, uris, uds)

    adduri(None, origud, uris, uds)

    return uris, uds

def try_mirror_url(newuri, origud, ud, ld, check = False):
    # Return of None or a value means we're finished
    # False means try another url
    try:
        if check:
            found = ud.method.checkstatus(newuri, ud, ld)
            if found:
                return found
            return False

        os.chdir(ld.getVar("DL_DIR", True))

        if not os.path.exists(ud.donestamp) or ud.method.need_update(newuri, ud, ld):
            ud.method.download(newuri, ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(newuri, ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR", True)
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                os.symlink(ud.localpath, dest)
            return None
        # Otherwise the result is a local file:// and we symlink to it
        if not os.path.exists(origud.localpath):
            os.symlink(ud.localpath, origud.localpath)
        update_stamp(newuri, origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (newuri, origud.url))
            logger.warn(str(e))
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
            logger.debug(1, str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False

def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
@@ -587,13 +451,60 @@ def try_mirrors(d, origud, mirrors, check = False):
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()
    for line in mirrors:
        try:
            (find, replace) = line
        except ValueError:
            continue
        newuri = uri_replace(origud, find, replace, ld)
        if newuri == origud.url:
            continue
        try:
            ud = FetchData(newuri, ld)
            ud.setup_localpath(ld)

    uris, uds = build_mirroruris(origud, mirrors, ld)
            if check:
                found = ud.method.checkstatus(newuri, ud, ld)
                if found:
                    return found
                continue

    for index, uri in enumerate(uris):
        ret = try_mirror_url(uri, origud, uds[index], ld, check)
        if ret != False:
            return ret
            if ud.method.need_update(newuri, ud, ld):
                ud.method.download(newuri, ud, ld)
                if hasattr(ud.method,"build_mirror_data"):
                    ud.method.build_mirror_data(newuri, ud, ld)

            if not ud.localpath or not os.path.exists(ud.localpath):
                continue

            if ud.localpath == origud.localpath:
                return ud.localpath

            # We may be obtaining a mirror tarball which needs further processing by the real fetcher
            # If that tarball is a local file:// we need to provide a symlink to it
            dldir = ld.getVar("DL_DIR", True)
            if os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
                dest = os.path.join(dldir, os.path.basename(ud.localpath))
                if not os.path.exists(dest):
                    os.symlink(ud.localpath, dest)
                return None
            # Otherwise the result is a local file:// and we symlink to it
            if not os.path.exists(origud.localpath):
                os.symlink(ud.localpath, origud.localpath)
            return ud.localpath

        except bb.fetch2.NetworkAccess:
            raise

        except bb.fetch2.BBFetchException as e:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
            logger.debug(1, str(e))
            try:
                if os.path.isfile(ud.localpath):
                    bb.utils.remove(ud.localpath)
            except UnboundLocalError:
                pass
            continue
    return None

def srcrev_internal_helper(ud, d, name):
@@ -611,15 +522,15 @@ def srcrev_internal_helper(ud, d, name):
        return ud.parm['tag']

    rev = None
    pn = d.getVar("PN", True)
    pn = bb.data.getVar("PN", d, True)
    if name != '':
        rev = d.getVar("SRCREV_%s_pn-%s" % (name, pn), True)
        rev = bb.data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
    if not rev:
        rev = d.getVar("SRCREV_%s" % name, True)
        rev = bb.data.getVar("SRCREV_%s" % name, d, True)
    if not rev:
        rev = d.getVar("SRCREV_pn-%s" % pn, True)
        rev = bb.data.getVar("SRCREV_pn-%s" % pn, d, True)
    if not rev:
        rev = d.getVar("SRCREV", True)
        rev = bb.data.getVar("SRCREV", d, True)
    if rev == "INVALID":
        raise FetchError("Please set SRCREV to a valid value", ud.url)
    if rev == "AUTOINC":
@@ -627,85 +538,11 @@ def srcrev_internal_helper(ud, d, name):

    return rev

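# Resolution order implemented above, for a recipe "foo" and a SRC_URI
# entry named "meta" (hypothetical names):
#   SRCREV_meta_pn-foo -> SRCREV_meta -> SRCREV_pn-foo -> SRCREV
# The first variable that is set wins; "INVALID" raises a FetchError, and
# "AUTOINC" is resolved later to the latest upstream revision.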
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR', True)
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            ud.setup_localpath(d)
            f = ud.localpath
            if f.startswith(dl_dir):
                # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                if os.path.exists(f):
                    bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                else:
                    bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                    continue
            filelist.append(f)

    return " ".join(filelist)


def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds

    """

    def checksum_file(f):
        try:
            checksum = _checksum_cache.get_checksum(f)
        except OSError as e:
            import traceback
            bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
            return None
        return checksum

    checksums = []
    for pth in filelist.split():
        checksum = None
        if '*' in pth:
            # Handle globs
            import glob
            for f in glob.glob(pth):
                checksum = checksum_file(f)
                if checksum:
                    checksums.append((f, checksum))
        elif os.path.isdir(pth):
            # Handle directories
            for root, dirs, files in os.walk(pth):
                for name in files:
                    fullpth = os.path.join(root, name)
                    checksum = checksum_file(fullpth)
                    if checksum:
                        checksums.append((fullpth, checksum))
        else:
            checksum = checksum_file(pth)

        if checksum:
            checksums.append((pth, checksum))

    checksums.sort()
    return checksums

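# Illustrative call (hypothetical paths); plain files, globs and
# directories are all accepted:
#   get_file_checksums("/srv/files/defconfig /srv/files/patches/*", "foo")
#     -> [('/srv/files/defconfig', '<checksum>'), ('/srv/files/patches/fix.patch', '<checksum>')]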
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d, localonly = False):
    def __init__(self, url, d):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.localfile = ""
@@ -728,18 +565,8 @@ class FetchData(object):
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
        self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)

        self.names = self.parm.get("name",'default').split(',')

@@ -752,13 +579,6 @@ class FetchData(object):
        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

@@ -770,7 +590,7 @@ class FetchData(object):
            self.localpath = self.method.localpath(self.url, self, d)

        # Note: These files should always be in DL_DIR whereas localpath may not be.
        basepath = d.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename))
        basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename), d)
        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

@@ -796,12 +616,12 @@ class FetchData(object):
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN", True)
        pn = bb.data.getVar("PN", d, True)

        if pn:
            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
            return bb.data.getVar("SRCDATE_%s" % pn, d, True) or bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True)

        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
        return bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True)

class FetchMethod(object):
    """Base class for 'fetch'ing data"""
@@ -823,26 +643,6 @@ class FetchMethod(object):
        """
        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath) == True:
            return False
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is the backend on where checksumming is recommended (should warnings
        by displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
@@ -893,7 +693,7 @@ class FetchMethod(object):

        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
            efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None
@@ -937,7 +737,7 @@ class FetchMethod(object):
            dest = os.path.join(rootdir, os.path.basename(file))
            if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                if os.path.isdir(file):
                    filesdir = os.path.realpath(data.getVar("FILESDIR", True))
                    filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
                    destdir = "."
                    if file[0:len(filesdir)] == filesdir:
                        destdir = file[len(filesdir):file.rfind('/')]
@@ -969,9 +769,7 @@ class FetchMethod(object):
            bb.utils.mkdirhier(newdir)
            os.chdir(newdir)

        path = data.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

@@ -1016,10 +814,10 @@ class FetchMethod(object):

        localcount = None
        if name != '':
            pn = d.getVar("PN", True)
            localcount = d.getVar("LOCALCOUNT_" + name, True)
            pn = bb.data.getVar("PN", d, True)
            localcount = bb.data.getVar("LOCALCOUNT_" + name, d, True)
        if not localcount:
            localcount = d.getVar("LOCALCOUNT", True)
            localcount = bb.data.getVar("LOCALCOUNT", d, True)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

@@ -1051,7 +849,7 @@ class FetchMethod(object):

        latest_rev = self._build_revision(url, ud, d, name)
        last_rev = localcounts.get(key + '_rev')
        uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = FetchMethod.localcount_internal_helper(ud, d, name)
@@ -1079,33 +877,25 @@ class FetchMethod(object):

    def generate_revision_key(self, url, ud, d, name):
        key = self._revision_key(url, ud, d, name)
        return "%s-%s" % (key, d.getVar("PN", True) or "")
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")

class Fetch(object):
    def __init__(self, urls, d, cache = True, localonly = False):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

    def __init__(self, urls, d, cache = True):
        if len(urls) == 0:
            urls = d.getVar("SRC_URI", True).split()
        self.urls = urls
        self.d = d
        self.ud = {}

        fn = d.getVar('FILE', True)
        if cache and fn and fn in urldata_cache:
        fn = bb.data.getVar('FILE', d, True)
        if cache and fn in urldata_cache:
            self.ud = urldata_cache[fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    if localonly:
                        self.ud[url] = None
                        pass
                self.ud[url] = FetchData(url, d)

        if fn and cache:
        if cache:
            urldata_cache[fn] = self.ud

    def localpath(self, url):
@@ -1113,7 +903,7 @@ class Fetch(object):
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)
        return bb.data.expand(self.ud[url].localpath, self.d)

    def localpaths(self):
        """
@@ -1135,8 +925,8 @@ class Fetch(object):
        if len(urls) == 0:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK", True)
        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
        network = bb.data.getVar("BB_NO_NETWORK", self.d, True)
        premirroronly = (bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) == "1")

        for u in urls:
            ud = self.ud[u]
@@ -1147,22 +937,19 @@ class Fetch(object):
            lf = bb.utils.lockfile(ud.lockfile)

            try:
                self.d.setVar("BB_NO_NETWORK", network)
                bb.data.setVar("BB_NO_NETWORK", network, self.d)

                if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d):
                if not m.need_update(u, ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(u, ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")
                    bb.data.setVar("BB_NO_NETWORK", "1", self.d)

                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)):
                if not localpath and m.need_update(u, ud, self.d):
                    try:
                        logger.debug(1, "Trying Upstream")
                        m.download(u, ud, self.d)
@@ -1177,22 +964,16 @@ class Fetch(object):
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warn("Checksum error encountered with download (will attempt other sources): %s" % str(e))
                        else:
                            logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        firsterr = e
                        logger.warn(str(e))
                        # Remove any incomplete fetch
                        m.clean(ud, self.d)
                        if os.path.isfile(ud.localpath):
                            bb.utils.remove(ud.localpath)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
                        localpath = try_mirrors (self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)
                    raise FetchError("Unable to fetch URL %s from any source." % u, u)

                update_stamp(u, ud, self.d)
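                # Typical driver for the loop above (sketch, assuming a
                # populated datastore d and a hypothetical URL):
                #   fetcher = Fetch(["http://example.org/pkg-1.0.tar.gz"], d)
                #   fetcher.download()
                #   tarball = fetcher.localpath("http://example.org/pkg-1.0.tar.gz")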
|
||||
@@ -1213,7 +994,7 @@ class Fetch(object):
|
||||
m = ud.method
|
||||
logger.debug(1, "Testing URL %s", u)
|
||||
# First try checking uri, u, from PREMIRRORS
|
||||
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
|
||||
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
|
||||
ret = try_mirrors(self.d, ud, mirrors, True)
|
||||
if not ret:
|
||||
# Next try checking from the original uri, u
|
||||
@@ -1221,8 +1002,8 @@ class Fetch(object):
|
||||
ret = m.checkstatus(u, ud, self.d)
|
||||
except:
|
||||
# Finally, try checking uri, u, from MIRRORS
|
||||
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
|
||||
ret = try_mirrors(self.d, ud, mirrors, True)
|
||||
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
|
||||
ret = try_mirrors (self.d, ud, mirrors, True)
|
||||
|
||||
if not ret:
|
||||
raise FetchError("URL %s doesn't work" % u, u)
|
||||
@@ -1239,7 +1020,7 @@ class Fetch(object):
|
||||
ud = self.ud[u]
|
||||
ud.setup_localpath(self.d)
|
||||
|
||||
if self.d.expand(self.localpath) is None:
|
||||
if bb.data.expand(self.localpath, self.d) is None:
|
||||
continue
|
||||
|
||||
if ud.lockfile:
|
||||
|
||||
@@ -60,7 +60,7 @@ class Bzr(FetchMethod):
|
||||
|
||||
basecmd = data.expand('${FETCHCMD_bzr}', d)
|
||||
|
||||
proto = ud.parm.get('protocol', 'http')
|
||||
proto = ud.parm.get('proto', 'http')
|
||||
|
||||
bzrroot = ud.host + ud.path
|
||||
|
||||
|
||||
@@ -111,9 +111,15 @@ class Cvs(FetchMethod):
|
||||
if ud.tag:
|
||||
options.append("-r %s" % ud.tag)
|
||||
|
||||
cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
|
||||
cvscmd = cvsbasecmd + "'-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
|
||||
cvsupdatecmd = cvsbasecmd + "'-d" + cvsroot + "' update -d -P " + " ".join(options)
|
||||
localdata = data.createCopy(d)
|
||||
data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
|
||||
data.update_data(localdata)
|
||||
|
||||
data.setVar('CVSROOT', cvsroot, localdata)
|
||||
data.setVar('CVSCOOPTS', " ".join(options), localdata)
|
||||
data.setVar('CVSMODULE', ud.module, localdata)
|
||||
cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
|
||||
cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)
|
||||
|
||||
if cvs_rsh:
|
||||
cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
|
||||
|
||||
@@ -38,12 +38,6 @@ Supported SRC_URI options are:
|
||||
who has its own routine to checkout code.
|
||||
The default is "0", set nocheckout=1 if needed.
|
||||
|
||||
- bareclone
|
||||
Create a bare clone of the source code and don't checkout the source code
|
||||
when unpacking. Set this option for the recipe who has its own routine to
|
||||
checkout code and tracking branch requirements.
|
||||
The default is "0", set bareclone=1 if needed.
|
||||
|
||||
"""
|
||||
|
||||
#Copyright (C) 2005 Richard Purdie
|
||||
@@ -74,7 +68,7 @@ class Git(FetchMethod):
#
# Only enable _sortable revision if the key is set
#
if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True):
if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
self._sortable_buildindex = self._sortable_buildindex_disabled
def supports(self, url, ud, d):
"""
@@ -82,9 +76,6 @@ class Git(FetchMethod):
"""
return ud.type in ['git']

def supports_checksum(self, urldata):
return False

def urldata_init(self, ud, d):
"""
init git specific variable within url data
@@ -104,11 +95,6 @@ class Git(FetchMethod):

ud.rebaseable = ud.parm.get("rebaseable","0") == "1"

# bareclone implies nocheckout
ud.bareclone = ud.parm.get("bareclone","0") == "1"
if ud.bareclone:
ud.nocheckout = 1

branches = ud.parm.get("branch", "master").split(',')
if len(branches) != len(ud.names):
raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
@@ -126,11 +112,10 @@ class Git(FetchMethod):
for name in ud.names:
# Ensure anything that doesn't look like a sha256 checksum/revision is translated into one
if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
if ud.revisions[name]:
ud.branches[name] = ud.revisions[name]
ud.branches[name] = ud.revisions[name]
ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)

gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
# for rebaseable git repo, it is necessary to keep mirror tar ball
# per revision, so that even the revision disappears from the
# upstream repo in the future, the mirror will remain intact and still
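The revision check in the hunk above (despite the comment saying "sha256") accepts only a full 40-character lowercase hex object name; anything else, a branch name or an abbreviated hash, is resolved via latest_revision(). The same predicate, restated:

```python
def looks_like_full_rev(rev):
    # Equivalent to the list-comprehension test above: non-empty,
    # exactly 40 characters, all lowercase hex.
    return (bool(rev) and len(rev) == 40
            and all(c in "abcdef0123456789" for c in rev))

assert looks_like_full_rev("a" * 40)
assert not looks_like_full_rev("master")    # branch name, must be resolved
assert not looks_like_full_rev("deadbeef")  # abbreviated, must be resolved
```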
@@ -139,9 +124,8 @@ class Git(FetchMethod):
for name in ud.names:
gitsrcname = gitsrcname + '_' + ud.revisions[name]
ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
ud.clonedir = os.path.join(gitdir, gitsrcname)
ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

ud.localfile = ud.clonedir

@@ -162,7 +146,7 @@ class Git(FetchMethod):
def try_premirror(self, u, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
return True
if os.path.exists(ud.clonedir):
return False
@@ -188,12 +172,8 @@ class Git(FetchMethod):

# If the repo still doesn't exist, fallback to cloning it
if not os.path.exists(ud.clonedir):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd)
bb.fetch2.check_network_access(d, clone_cmd)
runfetchcmd(clone_cmd, d)

os.chdir(ud.clonedir)
@@ -204,14 +184,14 @@ class Git(FetchMethod):
needupdate = True
if needupdate:
try:
runfetchcmd("%s remote prune origin" % ud.basecmd, d)
runfetchcmd("%s remote rm origin" % ud.basecmd, d)
except bb.fetch2.FetchError:
logger.debug(1, "No Origin")

runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
runfetchcmd(fetch_cmd, d)
runfetchcmd("%s prune-packed" % ud.basecmd, d)
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
@@ -223,7 +203,6 @@ class Git(FetchMethod):
os.chdir(ud.clonedir)
logger.info("Creating tarball of git repository")
runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
runfetchcmd("touch %s.done" % (ud.fullmirror), d)
def unpack(self, ud, destdir, d):
""" unpack the downloaded src to destdir"""
@@ -241,11 +220,7 @@ class Git(FetchMethod):
if os.path.exists(destdir):
bb.utils.prunedir(destdir)

cloneflags = "-s -n"
if ud.bareclone:
cloneflags += " --mirror"

runfetchcmd("git clone %s %s/ %s" % (cloneflags, ud.clonedir, destdir), d)
runfetchcmd("git clone -s -n %s/ %s" % (ud.clonedir, destdir), d)
if not ud.nocheckout:
os.chdir(destdir)
if subdir != "":
@@ -290,8 +265,7 @@ class Git(FetchMethod):
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
cmd = "%s ls-remote %s://%s%s%s %s" % \
(basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, cmd)
bb.fetch2.check_network_access(d, cmd)
output = runfetchcmd(cmd, d, True)
if not output:
raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
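The unpack() change above swaps a hardcoded "git clone -s -n" for a flag string that grows "--mirror" when bareclone is set; a small sketch of just that selection:

```python
def unpack_clone_flags(bareclone):
    # Shared (-s), no-checkout (-n) clone by default; bare mirror clone
    # for recipes that manage their own checkout.
    cloneflags = "-s -n"
    if bareclone:
        cloneflags += " --mirror"
    return cloneflags

assert unpack_clone_flags(False) == "-s -n"
assert unpack_clone_flags(True) == "-s -n --mirror"
```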
@@ -82,7 +82,7 @@ class Hg(FetchMethod):

basecmd = data.expand('${FETCHCMD_hg}', d)

proto = ud.parm.get('protocol', 'http')
proto = ud.parm.get('proto', 'http')

host = ud.host
if proto == "file":
@@ -26,7 +26,6 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import urllib
import bb
import bb.utils
from bb import data
@@ -41,7 +40,7 @@ class Local(FetchMethod):

def urldata_init(self, ud, d):
# We don't set localfile as for this fetcher the file is already local!
ud.basename = os.path.basename(urllib.unquote(ud.url.split("://")[1].split(";")[0]))
ud.basename = os.path.basename(ud.url.split("://")[1].split(";")[0])
return

def localpath(self, url, urldata, d):
@@ -50,7 +49,6 @@ class Local(FetchMethod):
"""
path = url.split("://")[1]
path = path.split(";")[0]
path = urllib.unquote(path)
newpath = path
if path[0] != "/":
filespath = data.getVar('FILESPATH', d, True)
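The Local fetcher hunks above add urllib.unquote() so percent-encoded file URLs resolve to real paths. A self-contained illustration (the URL is made up):

```python
import os

try:
    from urllib import unquote          # Python 2, as in the module above
except ImportError:
    from urllib.parse import unquote    # Python 3 equivalent

url = "file:///srv/files/hello%20world.patch;md5sum=abc"
path = unquote(url.split("://")[1].split(";")[0])
assert os.path.basename(path) == "hello world.patch"
```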
@@ -57,7 +57,7 @@ class Osc(FetchMethod):

basecmd = data.expand('${FETCHCMD_osc}', d)

proto = ud.parm.get('protocol', 'ocs')
proto = ud.parm.get('proto', 'ocs')

options = []
@@ -27,7 +27,6 @@ BitBake build tools.

from future_builtins import zip
import os
import subprocess
import logging
import bb
from bb import data
@@ -91,8 +90,8 @@ class Perforce(FetchMethod):

p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
cset = p4file.strip()
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
cset = p4file.readline().strip()
logger.debug(1, "READ %s", cset)
if not cset:
return -1
@@ -155,8 +154,8 @@ class Perforce(FetchMethod):
logger.debug(2, "Fetch: creating temporary directory")
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmpfile.strip()
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

@@ -169,8 +168,7 @@ class Perforce(FetchMethod):
os.chdir(tmpfile)
logger.info("Fetch " + loc)
logger.info("%s%s files %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
p4file = p4file.strip()
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

if not p4file:
raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)
@@ -186,7 +184,7 @@ class Perforce(FetchMethod):
dest = list[0][len(path)+1:]
where = dest.find("#")

subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
count = count + 1

if count == 0:
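The Perforce hunks above replace os.popen() plus readline() with bb.process.run(), which captures the whole of stdout and stderr and surfaces failures. A minimal stand-in built on subprocess (this run() is illustrative, not the bb.process signature):

```python
import subprocess

def run(cmd):
    # Capture both streams in full instead of reading a single line
    # from an os.popen() pipe.
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out.decode(), err.decode()

out, err = run("echo 123456")
cset = out.strip()   # whole captured output, not one readline()
assert cset == "123456"
```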
@@ -69,9 +69,6 @@ class SSH(FetchMethod):
def supports(self, url, urldata, d):
return __pattern__.match(url) != None

def supports_checksum(self, urldata):
return False

def localpath(self, url, urldata, d):
m = __pattern__.match(urldata.url)
path = m.group('path')
@@ -77,8 +77,8 @@ class Svk(FetchMethod):
logger.debug(2, "Fetch: creating temporary directory")
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmpfile.strip()
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
logger.error()
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
@@ -49,8 +49,6 @@ class Svn(FetchMethod):
if not "module" in ud.parm:
raise MissingParameterError('module', ud.url)

ud.basecmd = d.getVar('FETCHCMD_svn', True)

ud.module = ud.parm["module"]

# Create paths to svn checkouts
@@ -71,6 +69,8 @@ class Svn(FetchMethod):
command is "fetch", "update", "info"
"""

basecmd = data.expand('${FETCHCMD_svn}', d)

proto = ud.parm.get('proto', 'svn')

svn_rsh = None
@@ -88,7 +88,7 @@ class Svn(FetchMethod):
options.append("--password %s" % ud.pswd)

if command == "info":
svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
else:
suffix = ""
if ud.revision:
@@ -96,9 +96,9 @@ class Svn(FetchMethod):
suffix = "@%s" % (ud.revision)

if command == "fetch":
svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
elif command == "update":
svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
svncmd = "%s update %s" % (basecmd, " ".join(options))
else:
raise FetchError("Invalid svn command %s" % command, ud.url)

@@ -117,11 +117,6 @@ class Svn(FetchMethod):
logger.info("Update " + loc)
# update sources there
os.chdir(ud.moddir)
# We need to attempt to run svn upgrade first in case its an older working format
try:
runfetchcmd(ud.basecmd + " upgrade", d)
except FetchError:
pass
logger.debug(1, "Running %s", svnupdatecmd)
bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
runfetchcmd(svnupdatecmd, d)
@@ -45,9 +45,6 @@ class Wget(FetchMethod):
"""
return ud.type in ['http', 'https', 'ftp']

def recommends_checksum(self, urldata):
return True

def urldata_init(self, ud, d):

ud.basename = os.path.basename(ud.path)
@@ -56,34 +53,39 @@ class Wget(FetchMethod):
def download(self, uri, ud, d, checkonly = False):
"""Fetch urls"""

basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
def fetch_uri(uri, ud, d):
if checkonly:
fetchcmd = data.getVar("CHECKCOMMAND", d, True)
elif os.path.exists(ud.localpath):
# file exists, but we didn't complete it.. trying again..
fetchcmd = data.getVar("RESUMECOMMAND", d, True)
else:
fetchcmd = data.getVar("FETCHCOMMAND", d, True)

if checkonly:
fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider -P ${DL_DIR} '${URI}'")
elif os.path.exists(ud.localpath):
# file exists, but we didn't complete it.. trying again..
fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
else:
fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")
uri = uri.split(";")[0]
uri_decoded = list(decodeurl(uri))
uri_type = uri_decoded[0]
uri_host = uri_decoded[1]

uri = uri.split(";")[0]
uri_decoded = list(decodeurl(uri))
uri_type = uri_decoded[0]
uri_host = uri_decoded[1]
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
if not checkonly:
logger.info("fetch " + uri)
logger.debug(2, "executing " + fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd)
runfetchcmd(fetchcmd, d, quiet=checkonly)

fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
if not checkonly:
logger.info("fetch " + uri)
logger.debug(2, "executing " + fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd)
runfetchcmd(fetchcmd, d, quiet=checkonly)
# Sanity check since wget can pretend it succeeded when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath) and not checkonly:
raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

# Sanity check since wget can pretend it succeeded when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath) and not checkonly:
raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)

fetch_uri(uri, ud, localdata)

return True

def checkstatus(self, uri, ud, d):
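The wget download() refactor above keeps the command templates as plain strings and substitutes ${URI} and ${FILE} literally; a compact sketch of that substitution (template and URL invented for the example):

```python
def build_fetchcmd(template, uri, basename):
    # Same literal placeholder replacement the hunk performs on
    # FETCHCOMMAND/RESUMECOMMAND/CHECKCOMMAND strings.
    cmd = template.replace("${URI}", uri.split(";")[0])
    return cmd.replace("${FILE}", basename)

cmd = build_fetchcmd("wget -t 2 -T 30 -P /dl '${URI}'",
                     "http://example.com/foo.tar.gz;name=foo",
                     "foo.tar.gz")
assert cmd == "wget -t 2 -T 30 -P /dl 'http://example.com/foo.tar.gz'"
```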
@@ -52,7 +52,7 @@ def insert_method(modulename, code, fn):
if name in ['None', 'False']:
continue
elif name in _parsed_fns and not _parsed_fns[name] == modulename:
error("The function %s defined in %s was already declared in %s. BitBake has a global python function namespace so shared functions should be declared in a common include file rather than being duplicated, or if the functions are different, please use different function names." % (name, modulename, _parsed_fns[name]))
error( "Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename))
else:
_parsed_fns[name] = modulename

@@ -1,244 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2012 Robert Yang
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os, logging, re, sys
import bb
logger = logging.getLogger("BitBake.Monitor")

def printErr(info):
logger.error("%s\n Disk space monitor will NOT be enabled" % info)

def convertGMK(unit):

""" Convert the space unit G, M, K, the unit is case-insensitive """

unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
if unitG:
return int(unitG.group(1)) * (1024 ** 3)
unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
if unitM:
return int(unitM.group(1)) * (1024 ** 2)
unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
if unitK:
return int(unitK.group(1)) * 1024
unitN = re.match('([1-9][0-9]*)\s?$', unit)
if unitN:
return int(unitN.group(1))
else:
return None
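Assuming the convertGMK() shown above is in scope, its contract is: an integer with an optional case-insensitive G/M/K suffix converts to bytes, anything else yields None:

```python
assert convertGMK("1G") == 1024 ** 3
assert convertGMK("512k") == 512 * 1024
assert convertGMK("4096") == 4096
assert convertGMK("1.5G") is None   # fractions are rejected by the regexes
```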
def getMountedDev(path):

""" Get the device mounted at the path, uses /proc/mounts """

# Get the mount point of the filesystem containing path
# st_dev is the ID of device containing file
parentDev = os.stat(path).st_dev
currentDev = parentDev
# When the current directory's device is different from the
# parent's, then the current directory is a mount point
while parentDev == currentDev:
mountPoint = path
# Use dirname to get the parent's directory
path = os.path.dirname(path)
# Reach the "/"
if path == mountPoint:
break
parentDev= os.stat(path).st_dev

try:
with open("/proc/mounts", "r") as ifp:
for line in ifp:
procLines = line.rstrip('\n').split()
if procLines[1] == mountPoint:
return procLines[0]
except EnvironmentError:
pass
return None
def getDiskData(BBDirs, configuration):

"""Prepare disk data for disk space monitor"""

# Save the device IDs, need the ID to be unique (the dictionary's key is
# unique), so that when more than one directories are located in the same
# device, we just monitor it once
devDict = {}
for pathSpaceInode in BBDirs.split():
# The input format is: "dir,space,inode", dir is a must, space
# and inode are optional
pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
if not pathSpaceInodeRe:
printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
return None

action = pathSpaceInodeRe.group(1)
if action not in ("ABORT", "STOPTASKS", "WARN"):
printErr("Unknown disk space monitor action: %s" % action)
return None

path = os.path.realpath(pathSpaceInodeRe.group(2))
if not path:
printErr("Invalid path value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
return None

# The disk space or inode is optional, but it should have a correct
# value once it is specified
minSpace = pathSpaceInodeRe.group(3)
if minSpace:
minSpace = convertGMK(minSpace)
if not minSpace:
printErr("Invalid disk space value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(3))
return None
else:
# 0 means that it is not specified
minSpace = None

minInode = pathSpaceInodeRe.group(4)
if minInode:
minInode = convertGMK(minInode)
if not minInode:
printErr("Invalid inode value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(4))
return None
else:
# 0 means that it is not specified
minInode = None

if minSpace is None and minInode is None:
printErr("No disk space or inode value found in BB_DISKMON_DIRS: %s" % pathSpaceInode)
return None
# mkdir for the directory since it may not exist, for example the
# DL_DIR may not exist at the very beginning
if not os.path.exists(path):
bb.utils.mkdirhier(path)
mountedDev = getMountedDev(path)
devDict[mountedDev] = action, path, minSpace, minInode

return devDict
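getDiskData() expects each BB_DISKMON_DIRS entry as "action,dir,space,inode" (the inline comment above lists only the last three fields, but the regex captures the action first). A hypothetical value, parsed with the same expression:

```python
import re

bbdirs = "STOPTASKS,${TMPDIR},1G,100K WARN,${DL_DIR},,100K"

for entry in bbdirs.split():
    m = re.match('([^,]*),([^,]*),([^,]*),?(.*)', entry)
    action, path, min_space, min_inode = m.groups()
    # Empty space/inode fields mean "not monitored" for that resource.
    print(action, path, min_space or None, min_inode or None)
```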
def getInterval(configuration):

""" Get the disk space interval """

# The default value is 50M and 5K.
spaceDefault = 50 * 1024 * 1024
inodeDefault = 5 * 1024

interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True)
if not interval:
return spaceDefault, inodeDefault
else:
# The disk space or inode interval is optional, but it should
# have a correct value once it is specified
intervalRe = re.match('([^,]*),?\s*(.*)', interval)
if intervalRe:
intervalSpace = intervalRe.group(1)
if intervalSpace:
intervalSpace = convertGMK(intervalSpace)
if not intervalSpace:
printErr("Invalid disk space interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(1))
return None, None
else:
intervalSpace = spaceDefault
intervalInode = intervalRe.group(2)
if intervalInode:
intervalInode = convertGMK(intervalInode)
if not intervalInode:
printErr("Invalid disk inode interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(2))
return None, None
else:
intervalInode = inodeDefault
return intervalSpace, intervalInode
else:
printErr("Invalid interval value in BB_DISKMON_WARNINTERVAL: %s" % interval)
return None, None
class diskMonitor:

"""Prepare the disk space monitor data"""

def __init__(self, configuration):

self.enableMonitor = False

BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None
if BBDirs:
self.devDict = getDiskData(BBDirs, configuration)
if self.devDict:
self.spaceInterval, self.inodeInterval = getInterval(configuration)
if self.spaceInterval and self.inodeInterval:
self.enableMonitor = True
# These are for saving the previous disk free space and inode, we
# use them to avoid printing too many warning messages
self.preFreeS = {}
self.preFreeI = {}
# This is for STOPTASKS and ABORT, to avoid printing the message repeatedly
# while waiting for the tasks to finish
self.checked = {}
for dev in self.devDict:
self.preFreeS[dev] = 0
self.preFreeI[dev] = 0
self.checked[dev] = False
if self.spaceInterval is None and self.inodeInterval is None:
self.enableMonitor = False

def check(self, rq):

""" Take action for the monitor """

if self.enableMonitor:
for dev in self.devDict:
st = os.statvfs(self.devDict[dev][1])

# The free space, floating point number
freeSpace = st.f_bavail * st.f_frsize

if self.devDict[dev][2] and freeSpace < self.devDict[dev][2]:
# Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeS[dev] == 0 or self.preFreeS[dev] - freeSpace > self.spaceInterval and not self.checked[dev]:
logger.warn("The free space of %s is running low (%.3fGB left)" % (dev, freeSpace / 1024 / 1024 / 1024.0))
self.preFreeS[dev] = freeSpace

if self.devDict[dev][0] == "STOPTASKS" and not self.checked[dev]:
logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
self.checked[dev] = True
rq.finish_runqueue(False)
elif self.devDict[dev][0] == "ABORT" and not self.checked[dev]:
logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
self.checked[dev] = True
rq.finish_runqueue(True)

# The free inodes, floating point number
freeInode = st.f_favail

if self.devDict[dev][3] and freeInode < self.devDict[dev][3]:
# Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeI[dev] == 0 or self.preFreeI[dev] - freeInode > self.inodeInterval and not self.checked[dev]:
logger.warn("The free inode of %s is running low (%.3fK left)" % (dev, freeInode / 1024.0))
self.preFreeI[dev] = freeInode

if self.devDict[dev][0] == "STOPTASKS" and not self.checked[dev]:
logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
self.checked[dev] = True
rq.finish_runqueue(False)
elif self.devDict[dev][0] == "ABORT" and not self.checked[dev]:
logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
self.checked[dev] = True
rq.finish_runqueue(True)
return
@@ -100,7 +100,6 @@ class BBLogFilter(object):

loggerDefaultDebugLevel = 0
loggerDefaultVerbose = False
loggerVerboseLogs = False
loggerDefaultDomains = []

def init_msgconfig(verbose, debug, debug_domains = []):
@@ -109,8 +108,6 @@ def init_msgconfig(verbose, debug, debug_domains = []):
"""
bb.msg.loggerDefaultDebugLevel = debug
bb.msg.loggerDefaultVerbose = verbose
if verbose:
bb.msg.loggerVerboseLogs = True
bb.msg.loggerDefaultDomains = debug_domains

def addDefaultlogFilter(handler):
@@ -37,17 +37,6 @@ logger = logging.getLogger("BitBake.Parsing")

class ParseError(Exception):
"""Exception raised when parsing fails"""
def __init__(self, msg, filename, lineno=0):
self.msg = msg
self.filename = filename
self.lineno = lineno
Exception.__init__(self, msg, filename, lineno)

def __str__(self):
if self.lineno:
return "ParseError at %s:%d: %s" % (self.filename, self.lineno, self.msg)
else:
return "ParseError in %s: %s" % (self.filename, self.msg)

class SkipPackage(Exception):
"""Exception raised to skip this package"""
@@ -73,9 +62,9 @@ def update_mtime(f):
def mark_dependency(d, f):
if f.startswith('./'):
f = "%s/%s" % (os.getcwd(), f[2:])
deps = d.getVar('__depends') or set()
deps = bb.data.getVar('__depends', d) or set()
deps.update([(f, cached_mtime(f))])
d.setVar('__depends', deps)
bb.data.setVar('__depends', deps, d)

def supports(fn, data):
"""Returns true if we have a handler for this file, false otherwise"""
@@ -89,7 +78,7 @@ def handle(fn, data, include = 0):
for h in handlers:
if h['supports'](fn, data):
return h['handle'](fn, data, include)
raise ParseError("not a BitBake file", fn)
raise ParseError("%s is not a BitBake file" % fn)

def init(fn, data):
for h in handlers:
@@ -101,7 +90,7 @@ def init_parser(d):

def resolve_file(fn, d):
if not os.path.isabs(fn):
bbpath = d.getVar("BBPATH", True)
bbpath = bb.data.getVar("BBPATH", d, True)
newfn = bb.utils.which(bbpath, fn)
if not newfn:
raise IOError("file %s not found in %s" % (fn, bbpath))
@@ -122,7 +111,7 @@ def vars_from_file(mypkg, d):
parts = myfile[0].split('_')
__pkgsplit_cache__[mypkg] = parts
if len(parts) > 3:
raise ParseError("Unable to generate default variables from filename (too many underscores)", mypkg)
raise ParseError("Unable to generate default variables from the filename: %s (too many underscores)" % mypkg)
exp = 3 - len(parts)
tmplist = []
while exp != 0:
@@ -131,13 +120,4 @@ def vars_from_file(mypkg, d):
parts.extend(tmplist)
return parts

def get_file_depends(d):
'''Return the dependent files'''
dep_files = []
depends = d.getVar('__depends', True) or set()
depends = depends.union(d.getVar('__base_depends', True) or set())
for (fn, _) in depends:
dep_files.append(os.path.abspath(fn))
return " ".join(dep_files)

from bb.parse.parse_py import __version__, ConfHandler, BBHandler
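vars_from_file() above derives default PN/PV/PR from a recipe filename by splitting on underscores and padding with None; restated as a standalone helper:

```python
import os

def split_pkg_filename(fn):
    # "nano_2.2.5_r0.bb" -> ("nano", "2.2.5", "r0"); missing fields
    # become None, more than three fields is an error.
    parts = os.path.splitext(os.path.basename(fn))[0].split('_')
    if len(parts) > 3:
        raise ValueError("too many underscores: %s" % fn)
    return tuple(parts + [None] * (3 - len(parts)))

assert split_pkg_filename("nano_2.2.5_r0.bb") == ("nano", "2.2.5", "r0")
assert split_pkg_filename("nano.bb") == ("nano", None, None)
```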
@@ -54,14 +54,14 @@ class IncludeNode(AstNode):
"""
Include the file and evaluate the statements
"""
s = data.expand(self.what_file)
s = bb.data.expand(self.what_file, data)
logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)

# TODO: Cache those includes... maybe not here though
if self.force:
bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, "include required")
bb.parse.ConfHandler.include(self.filename, s, data, "include required")
else:
bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
bb.parse.ConfHandler.include(self.filename, s, data, False)

class ExportNode(AstNode):
def __init__(self, filename, lineno, var):
@@ -69,7 +69,7 @@ class ExportNode(AstNode):
self.var = var

def eval(self, data):
data.setVarFlag(self.var, "export", 1)
bb.data.setVarFlag(self.var, "export", 1, data)

class DataNode(AstNode):
"""
@@ -92,7 +92,7 @@ class DataNode(AstNode):
groupd = self.groupd
key = groupd["var"]
if "exp" in groupd and groupd["exp"] != None:
data.setVarFlag(key, "export", 1)
bb.data.setVarFlag(key, "export", 1, data)
if "ques" in groupd and groupd["ques"] != None:
val = self.getFunc(key, data)
if val == None:
@@ -100,7 +100,7 @@ class DataNode(AstNode):
elif "colon" in groupd and groupd["colon"] != None:
e = data.createCopy()
bb.data.update_data(e)
val = e.expand(groupd["value"], key + "[:=]")
val = bb.data.expand(groupd["value"], e, key + "[:=]")
elif "append" in groupd and groupd["append"] != None:
val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
elif "prepend" in groupd and groupd["prepend"] != None:
@@ -113,11 +113,11 @@ class DataNode(AstNode):
val = groupd["value"]

if 'flag' in groupd and groupd['flag'] != None:
data.setVarFlag(key, groupd['flag'], val)
bb.data.setVarFlag(key, groupd['flag'], val, data)
elif groupd["lazyques"]:
data.setVarFlag(key, "defaultval", val)
bb.data.setVarFlag(key, "defaultval", val, data)
else:
data.setVar(key, val)
bb.data.setVar(key, val, data)

class MethodNode(AstNode):
def __init__(self, filename, lineno, func_name, body):
@@ -131,12 +131,12 @@ class MethodNode(AstNode):
if not funcname in bb.methodpool._parsed_fns:
text = "def %s(d):\n" % (funcname) + '\n'.join(self.body)
bb.methodpool.insert_method(funcname, text, self.filename)
anonfuncs = data.getVar('__BBANONFUNCS') or []
anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or []
anonfuncs.append(funcname)
data.setVar('__BBANONFUNCS', anonfuncs)
bb.data.setVar('__BBANONFUNCS', anonfuncs, data)
else:
data.setVarFlag(self.func_name, "func", 1)
data.setVar(self.func_name, '\n'.join(self.body))
bb.data.setVarFlag(self.func_name, "func", 1, data)
bb.data.setVar(self.func_name, '\n'.join(self.body), data)

class PythonMethodNode(AstNode):
def __init__(self, filename, lineno, function, define, body):
@@ -152,9 +152,9 @@ class PythonMethodNode(AstNode):
text = '\n'.join(self.body)
if not bb.methodpool.parsed_module(self.define):
bb.methodpool.insert_method(self.define, text, self.filename)
data.setVarFlag(self.function, "func", 1)
data.setVarFlag(self.function, "python", 1)
data.setVar(self.function, text)
bb.data.setVarFlag(self.function, "func", 1, data)
bb.data.setVarFlag(self.function, "python", 1, data)
bb.data.setVar(self.function, text, data)

class MethodFlagsNode(AstNode):
def __init__(self, filename, lineno, key, m):
@@ -163,19 +163,19 @@ class MethodFlagsNode(AstNode):
self.m = m

def eval(self, data):
if data.getVar(self.key):
if bb.data.getVar(self.key, data):
# clean up old version of this piece of metadata, as its
# flags could cause problems
data.setVarFlag(self.key, 'python', None)
data.setVarFlag(self.key, 'fakeroot', None)
bb.data.setVarFlag(self.key, 'python', None, data)
bb.data.setVarFlag(self.key, 'fakeroot', None, data)
if self.m.group("py") is not None:
data.setVarFlag(self.key, "python", "1")
bb.data.setVarFlag(self.key, "python", "1", data)
else:
data.delVarFlag(self.key, "python")
bb.data.delVarFlag(self.key, "python", data)
if self.m.group("fr") is not None:
data.setVarFlag(self.key, "fakeroot", "1")
bb.data.setVarFlag(self.key, "fakeroot", "1", data)
else:
data.delVarFlag(self.key, "fakeroot")
bb.data.delVarFlag(self.key, "fakeroot", data)

class ExportFuncsNode(AstNode):
def __init__(self, filename, lineno, fns, classes):
@@ -197,25 +197,25 @@ class ExportFuncsNode(AstNode):
vars.append([allvars[0], allvars[2]])

for (var, calledvar) in vars:
if data.getVar(var) and not data.getVarFlag(var, 'export_func'):
if bb.data.getVar(var, data) and not bb.data.getVarFlag(var, 'export_func', data):
continue

if data.getVar(var):
data.setVarFlag(var, 'python', None)
data.setVarFlag(var, 'func', None)
if bb.data.getVar(var, data):
bb.data.setVarFlag(var, 'python', None, data)
bb.data.setVarFlag(var, 'func', None, data)

for flag in [ "func", "python" ]:
if data.getVarFlag(calledvar, flag):
data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag))
if bb.data.getVarFlag(calledvar, flag, data):
bb.data.setVarFlag(var, flag, bb.data.getVarFlag(calledvar, flag, data), data)
for flag in [ "dirs" ]:
if data.getVarFlag(var, flag):
data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag))
if bb.data.getVarFlag(var, flag, data):
bb.data.setVarFlag(calledvar, flag, bb.data.getVarFlag(var, flag, data), data)

if data.getVarFlag(calledvar, "python"):
data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n")
if bb.data.getVarFlag(calledvar, "python", data):
bb.data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", data)
else:
data.setVar(var, "\t" + calledvar + "\n")
data.setVarFlag(var, 'export_func', '1')
bb.data.setVar(var, "\t" + calledvar + "\n", data)
bb.data.setVarFlag(var, 'export_func', '1', data)

class AddTaskNode(AstNode):
def __init__(self, filename, lineno, func, before, after):
@@ -229,25 +229,25 @@ class AddTaskNode(AstNode):
if self.func[:3] != "do_":
var = "do_" + self.func

data.setVarFlag(var, "task", 1)
bbtasks = data.getVar('__BBTASKS') or []
bb.data.setVarFlag(var, "task", 1, data)
bbtasks = bb.data.getVar('__BBTASKS', data) or []
if not var in bbtasks:
bbtasks.append(var)
data.setVar('__BBTASKS', bbtasks)
bb.data.setVar('__BBTASKS', bbtasks, data)

existing = data.getVarFlag(var, "deps") or []
existing = bb.data.getVarFlag(var, "deps", data) or []
if self.after is not None:
# set up deps for function
for entry in self.after.split():
if entry not in existing:
existing.append(entry)
data.setVarFlag(var, "deps", existing)
bb.data.setVarFlag(var, "deps", existing, data)
if self.before is not None:
# set up things that depend on this func
for entry in self.before.split():
existing = data.getVarFlag(entry, "deps") or []
existing = bb.data.getVarFlag(entry, "deps", data) or []
if var not in existing:
data.setVarFlag(entry, "deps", [var] + existing)
bb.data.setVarFlag(entry, "deps", [var] + existing, data)

class BBHandlerNode(AstNode):
def __init__(self, filename, lineno, fns):
@@ -255,11 +255,11 @@ class BBHandlerNode(AstNode):
self.hs = fns.split()

def eval(self, data):
bbhands = data.getVar('__BBHANDLERS') or []
bbhands = bb.data.getVar('__BBHANDLERS', data) or []
for h in self.hs:
bbhands.append(h)
data.setVarFlag(h, "handler", 1)
data.setVar('__BBHANDLERS', bbhands)
bb.data.setVarFlag(h, "handler", 1, data)
bb.data.setVar('__BBHANDLERS', bbhands, data)

class InheritNode(AstNode):
def __init__(self, filename, lineno, classes):
@@ -267,7 +267,7 @@ class InheritNode(AstNode):
self.classes = classes

def eval(self, data):
bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
bb.parse.BBHandler.inherit(self.classes, data)
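AddTaskNode.eval() above wires "addtask X before B after A" into per-task "deps" flags: A becomes a dependency of X, and X a dependency of every B. A toy model of that wiring, with a plain dict standing in for the datastore flags:

```python
def addtask(deps, func, before=None, after=None):
    # deps maps task name -> list of tasks it depends on.
    mine = deps.setdefault(func, [])
    for entry in (after or "").split():
        if entry not in mine:
            mine.append(entry)
    for entry in (before or "").split():
        theirs = deps.setdefault(entry, [])
        if func not in theirs:
            theirs.insert(0, func)

deps = {}
addtask(deps, "do_patch", before="do_configure", after="do_unpack")
assert deps["do_patch"] == ["do_unpack"]
assert deps["do_configure"] == ["do_patch"]
```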
def handleInclude(statements, filename, lineno, m, force):
statements.append(IncludeNode(filename, lineno, m.group(1), force))
@@ -304,13 +304,13 @@ def handleBBHandlers(statements, filename, lineno, m):

def handleInherit(statements, filename, lineno, m):
classes = m.group(1)
statements.append(InheritNode(filename, lineno, classes))
statements.append(InheritNode(filename, lineno, classes.split()))

def finalize(fn, d, variant = None):
all_handlers = {}
for var in d.getVar('__BBHANDLERS') or []:
for var in bb.data.getVar('__BBHANDLERS', d) or []:
# try to add the handler
handler = d.getVar(var)
handler = bb.data.getVar(var, d)
bb.event.register(var, handler)

bb.event.fire(bb.event.RecipePreFinalise(fn), d)
@@ -318,18 +318,16 @@ def finalize(fn, d, variant = None):
bb.data.expandKeys(d)
bb.data.update_data(d)
code = []
for funcname in d.getVar("__BBANONFUNCS") or []:
for funcname in bb.data.getVar("__BBANONFUNCS", d) or []:
code.append("%s(d)" % funcname)
bb.utils.simple_exec("\n".join(code), {"d": d})
bb.data.update_data(d)

tasklist = d.getVar('__BBTASKS') or []
tasklist = bb.data.getVar('__BBTASKS', d) or []
bb.build.add_tasks(tasklist, d)

bb.parse.siggen.finalise(fn, d, variant)

d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))

bb.event.fire(bb.event.RecipeParsed(fn), d)

def _create_variants(datastores, names, function):
@@ -380,7 +378,7 @@ def multi_finalize(fn, d):
try:
finalize(fn, d)
except bb.parse.SkipPackage as e:
d.setVar("__SKIPPED", e.args[0])
bb.data.setVar("__SKIPPED", e.args[0], d)
datastores = {"": safe_d}

versions = (d.getVar("BBVERSIONS", True) or "").split()
@@ -423,7 +421,7 @@ def multi_finalize(fn, d):
try:
finalize(fn, d)
except bb.parse.SkipPackage as e:
d.setVar("__SKIPPED", e.args[0])
bb.data.setVar("__SKIPPED", e.args[0], d)

_create_variants(datastores, versions, verfunc)

@@ -452,7 +450,7 @@ def multi_finalize(fn, d):
d.setVar("BBEXTENDVARIANT", variantmap[name])
else:
d.setVar("PN", "%s-%s" % (pn, name))
bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)
bb.parse.BBHandler.inherit([extendedmap[name]], d)

safe_d.setVar("BBCLASSEXTEND", extended)
_create_variants(datastores, extendedmap.keys(), extendfunc)
@@ -463,7 +461,7 @@ def multi_finalize(fn, d):
if not onlyfinalise or variant in onlyfinalise:
finalize(fn, variant_d, variant)
except bb.parse.SkipPackage as e:
variant_d.setVar("__SKIPPED", e.args[0])
bb.data.setVar("__SKIPPED", e.args[0], variant_d)

if len(datastores) > 1:
variants = filter(None, datastores.iterkeys())
@@ -68,10 +68,12 @@ def supports(fn, d):
"""Return True if fn has a supported extension"""
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]

def inherit(files, fn, lineno, d):
__inherit_cache = d.getVar('__inherit_cache') or []
files = d.expand(files).split()
def inherit(files, d):
__inherit_cache = data.getVar('__inherit_cache', d) or []
fn = ""
lineno = 0
for file in files:
file = data.expand(file, d)
if not os.path.isabs(file) and not file.endswith(".bbclass"):
file = os.path.join('classes', '%s.bbclass' % file)

@@ -79,8 +81,8 @@ def inherit(files, fn, lineno, d):
logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
__inherit_cache.append( file )
data.setVar('__inherit_cache', __inherit_cache, d)
include(fn, file, lineno, d, "inherit")
__inherit_cache = d.getVar('__inherit_cache') or []
include(fn, file, d, "inherit")
__inherit_cache = data.getVar('__inherit_cache', d) or []

def get_statements(filename, absolute_filename, base_name):
global cached_statements
@@ -126,13 +128,13 @@ def handle(fn, d, include):
if ext == ".bbclass":
__classname__ = root
classes.append(__classname__)
__inherit_cache = d.getVar('__inherit_cache') or []
__inherit_cache = data.getVar('__inherit_cache', d) or []
if not fn in __inherit_cache:
__inherit_cache.append(fn)
data.setVar('__inherit_cache', __inherit_cache, d)

if include != 0:
oldfile = d.getVar('FILE')
oldfile = data.getVar('FILE', d)
else:
oldfile = None

@@ -157,7 +159,7 @@ def handle(fn, d, include):
return ast.multi_finalize(fn, d)

if oldfile:
d.setVar("FILE", oldfile)
bb.data.setVar("FILE", oldfile, d)

# we have parsed the bb class now
if ext == ".bbclass" or ext == ".inc":
@@ -191,6 +193,7 @@ def feeder(lineno, s, fn, root, statements):
if lineno == IN_PYTHON_EOF:
return


if s and s[0] == '#':
if len(__residue__) != 0 and __residue__[0][0] != "#":
bb.error("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
@@ -24,41 +24,41 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import re, os
import re, bb.data, os
import logging
import bb.utils
from bb.parse import ParseError, resolve_file, ast, logger

__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"])(?P<value>.*)(?P=apo)$")
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
__include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
__export_regexp__ = re.compile( r"export\s+(.+)" )

def init(data):
topdir = data.getVar('TOPDIR')
topdir = bb.data.getVar('TOPDIR', data)
if not topdir:
data.setVar('TOPDIR', os.getcwd())
bb.data.setVar('TOPDIR', os.getcwd(), data)


def supports(fn, d):
return fn[-5:] == ".conf"

def include(oldfn, fn, lineno, data, error_out):
def include(oldfn, fn, data, error_out):
"""
error_out: A string indicating the verb (e.g. "include", "inherit") to be
used in a ParseError that will be raised if the file to be included could
not be included. Specify False to avoid raising an error in this case.
error_out If True a ParseError will be raised if the to be included
config-files could not be included.
"""
if oldfn == fn: # prevent infinite recursion
return None

import bb
fn = data.expand(fn)
oldfn = data.expand(oldfn)
fn = bb.data.expand(fn, data)
oldfn = bb.data.expand(oldfn, data)

if not os.path.isabs(fn):
dname = os.path.dirname(oldfn)
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
abs_fn = bb.utils.which(bbpath, fn)
if abs_fn:
fn = abs_fn
@@ -68,24 +68,16 @@ def include(oldfn, fn, lineno, data, error_out):
ret = handle(fn, data, True)
except IOError:
if error_out:
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
logger.debug(2, "CONF file '%s' not found", fn)

# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
# a weak default, it turns out to be very difficult to filter out these changes,
# particularly when the weak default might appear half way though parsing a chain
# of configuration files. We therefore let the UIs hook into configuration file
# parsing. This turns out to be a hard problem to solve any other way.
confFilters = []

def handle(fn, data, include):
init(data)

if include == 0:
oldfile = None
else:
oldfile = data.getVar('FILE')
oldfile = bb.data.getVar('FILE', data)

abs_fn = resolve_file(fn, data)
f = open(abs_fn, 'r')
@@ -110,13 +102,10 @@ def handle(fn, data, include):
feeder(lineno, s, fn, statements)

# DONE WITH PARSING... time to evaluate
data.setVar('FILE', abs_fn)
bb.data.setVar('FILE', abs_fn, data)
statements.eval(data)
if oldfile:
data.setVar('FILE', oldfile)

for f in confFilters:
f(fn, data)
bb.data.setVar('FILE', oldfile, data)

return data

@@ -142,7 +131,7 @@ def feeder(lineno, s, fn, statements):
ast.handleExport(statements, fn, lineno, m)
return

raise ParseError("unparsed line: '%s'" % s, fn, lineno);
raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));

# Add us to the handlers list
from bb.parse import handlers
@@ -41,10 +41,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):

logger = logging.getLogger("BitBake.PersistData")
if hasattr(sqlite3, 'enable_shared_cache'):
try:
sqlite3.enable_shared_cache(True)
except sqlite3.OperationalError:
pass
sqlite3.enable_shared_cache(True)


@total_ordering
@@ -198,9 +195,9 @@ def connect(database):

def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata"""
import bb.utils
cachedir = (d.getVar("PERSISTENT_DIR", True) or
d.getVar("CACHE", True))
import bb.data, bb.utils
cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
bb.data.getVar("CACHE", d, True))
if not cachedir:
logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
sys.exit(1)
@@ -1,8 +1,6 @@
import logging
import signal
import subprocess
import errno
import select

logger = logging.getLogger('BitBake.Process')

@@ -70,38 +68,20 @@ def _logged_communicate(pipe, log, input):
pipe.stdin.write(input)
pipe.stdin.close()

bufsize = 512
outdata, errdata = [], []
rin = []
while pipe.poll() is None:
if pipe.stdout is not None:
data = pipe.stdout.read(bufsize)
if data is not None:
outdata.append(data)
log.write(data)

if pipe.stdout is not None:
bb.utils.nonblockingfd(pipe.stdout.fileno())
rin.append(pipe.stdout)
if pipe.stderr is not None:
bb.utils.nonblockingfd(pipe.stderr.fileno())
rin.append(pipe.stderr)

try:
while pipe.poll() is None:
rlist = rin
try:
r,w,e = select.select (rlist, [], [])
except OSError, e:
if e.errno != errno.EINTR:
raise

if pipe.stdout in r:
data = pipe.stdout.read()
if data is not None:
outdata.append(data)
log.write(data)

if pipe.stderr in r:
data = pipe.stderr.read()
if data is not None:
errdata.append(data)
log.write(data)
finally:
log.flush()
if pipe.stderr is not None:
data = pipe.stderr.read(bufsize)
if data is not None:
errdata.append(data)
log.write(data)
return ''.join(outdata), ''.join(errdata)

def run(cmd, input=None, log=None, **options):
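The newer branch of _logged_communicate() above blocks in select() on non-blocking pipes and retries when the call is interrupted by a signal, instead of spinning on fixed-size blocking reads. A self-contained sketch of that loop (simplified: one buffer for both streams, and a bounded os.read() in place of the non-blocking file reads):

```python
import errno
import os
import select
import subprocess
import sys

def logged_communicate(cmd, log):
    # Tee everything the child writes into `log` while waiting in
    # select() rather than busy-polling the pipes.
    pipe = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    rin = [pipe.stdout, pipe.stderr]
    collected = []
    while pipe.poll() is None:
        try:
            r, _, _ = select.select(rin, [], [])
        except select.error as e:
            if e.args[0] != errno.EINTR:   # retry only on signal interruption
                raise
            continue
        for f in r:
            data = os.read(f.fileno(), 4096)  # bounded read, never blocks here
            if data:
                collected.append(data)
                log.write(data)
    return b"".join(collected)

logged_communicate("echo hello", sys.stdout.buffer)
```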
@@ -24,54 +24,16 @@
import re
import logging
from bb import data, utils
from collections import defaultdict
import bb

logger = logging.getLogger("BitBake.Provider")

class NoProvider(bb.BBHandledException):
class NoProvider(Exception):
"""Exception raised when no provider of a build dependency can be found"""

class NoRProvider(bb.BBHandledException):
class NoRProvider(Exception):
"""Exception raised when no provider of a runtime dependency can be found"""

class MultipleRProvider(bb.BBHandledException):
"""Exception raised when multiple providers of a runtime dependency can be found"""

def findProviders(cfgData, dataCache, pkg_pn = None):
"""
Convenience function to get latest and preferred providers in pkg_pn
"""

if not pkg_pn:
pkg_pn = dataCache.pkg_pn

# Need to ensure data store is expanded
localdata = data.createCopy(cfgData)
bb.data.update_data(localdata)
bb.data.expandKeys(localdata)

preferred_versions = {}
latest_versions = {}

for pn in pkg_pn:
(last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn)
preferred_versions[pn] = (pref_ver, pref_file)
latest_versions[pn] = (last_ver, last_file)

return (latest_versions, preferred_versions)


def allProviders(dataCache):
"""
Find all providers for each pn
"""
all_providers = defaultdict(list)
for (fn, pn) in dataCache.pkg_fn.items():
ver = dataCache.pkg_pepvpr[fn]
all_providers[pn].append((ver, fn))
return all_providers


def sortPriorities(pn, dataCache, pkg_pn = None):
"""
@@ -122,10 +84,10 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
preferred_ver = None

localdata = data.createCopy(cfgData)
localdata.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn))
bb.data.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn), localdata)
bb.data.update_data(localdata)

preferred_v = localdata.getVar('PREFERRED_VERSION', True)
preferred_v = bb.data.getVar('PREFERRED_VERSION', localdata, True)
if preferred_v:
m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
if m:
@@ -286,7 +248,7 @@ def filterProviders(providers, item, cfgData, dataCache):

eligible = _filterProviders(providers, item, cfgData, dataCache)

prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True)
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1)
if prefervar:
dataCache.preferred[item] = prefervar

@@ -324,7 +286,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
pn = dataCache.pkg_fn[p]
provides = dataCache.pn_provides[pn]
for provide in provides:
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
@@ -31,7 +31,6 @@ import fcntl
|
||||
import logging
|
||||
import bb
|
||||
from bb import msg, data, event
|
||||
from bb import monitordisk
|
||||
|
||||
bblogger = logging.getLogger("BitBake")
|
||||
logger = logging.getLogger("BitBake.RunQueue")
|
||||
@@ -188,10 +187,9 @@ class RunQueueData:
|
||||
self.taskData = taskData
|
||||
self.targets = targets
|
||||
self.rq = rq
|
||||
self.warn_multi_bb = False
|
||||
|
||||
self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or ""
|
||||
self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
|
||||
self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or ""
|
||||
self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split()
|
||||
|
||||
self.reset()
|
||||
|
||||
@@ -375,8 +373,9 @@ class RunQueueData:
|
||||
"""
|
||||
|
||||
runq_build = []
|
||||
recursivetasks = {}
|
||||
recursivetasksselfref = set()
|
||||
recursive_tdepends = {}
|
||||
runq_recrdepends = []
|
||||
tdepends_fnid = {}
|
||||
|
||||
taskData = self.taskData
|
||||
|
||||
@@ -405,10 +404,11 @@ class RunQueueData:
|
||||
depdata = taskData.build_targets[depid][0]
|
||||
if depdata is None:
|
||||
continue
|
||||
dep = taskData.fn_index[depdata]
|
||||
for taskname in tasknames:
|
||||
taskid = taskData.gettask_id_fromfnid(depdata, taskname)
|
||||
taskid = taskData.gettask_id(dep, taskname, False)
|
||||
if taskid is not None:
|
||||
depends.add(taskid)
|
||||
depends.append(taskid)
|
||||
|
||||
def add_runtime_dependencies(depids, tasknames, depends):
|
||||
for depid in depids:
|
||||
@@ -417,20 +417,15 @@ class RunQueueData:
|
||||
depdata = taskData.run_targets[depid][0]
|
||||
if depdata is None:
|
||||
continue
|
||||
dep = taskData.fn_index[depdata]
|
||||
for taskname in tasknames:
|
||||
taskid = taskData.gettask_id_fromfnid(depdata, taskname)
|
||||
taskid = taskData.gettask_id(dep, taskname, False)
|
||||
if taskid is not None:
|
||||
depends.add(taskid)
|
||||
|
||||
def add_resolved_dependencies(depids, tasknames, depends):
|
||||
for depid in depids:
|
||||
for taskname in tasknames:
|
||||
taskid = taskData.gettask_id_fromfnid(depid, taskname)
|
||||
if taskid is not None:
|
||||
depends.add(taskid)
|
||||
depends.append(taskid)

for task in xrange(len(taskData.tasks_name)):
depends = set()
depends = []
recrdepends = []
fnid = taskData.tasks_fnid[task]
fn = taskData.fn_index[fnid]
task_deps = self.dataCache.task_deps[fn]
@@ -442,7 +437,7 @@ class RunQueueData:
# Resolve task internal dependencies
#
# e.g. addtask before X after Y
depends = set(taskData.tasks_tdepends[task])
depends = taskData.tasks_tdepends[task]

# Resolve 'deptask' dependencies
#
@@ -457,91 +452,99 @@ class RunQueueData:
# e.g. do_sometask[rdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all RDEPENDS)
if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
tasknames = task_deps['rdeptask'][taskData.tasks_name[task]].split()
add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
taskname = task_deps['rdeptask'][taskData.tasks_name[task]]
add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)

# Resolve inter-task dependencies
#
# e.g. do_sometask[depends] = "targetname:do_someothertask"
# (makes sure sometask runs after targetname's someothertask)
if fnid not in tdepends_fnid:
tdepends_fnid[fnid] = set()
idepends = taskData.tasks_idepends[task]
for (depid, idependtask) in idepends:
if depid in taskData.build_targets:
# Won't be in build_targets if ASSUME_PROVIDED
depdata = taskData.build_targets[depid][0]
if depdata is not None:
taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
dep = taskData.fn_index[depdata]
taskid = taskData.gettask_id(dep, idependtask, False)
if taskid is None:
bb.msg.fatal("RunQueue", "Task %s in %s depends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
depends.add(taskid)
irdepends = taskData.tasks_irdepends[task]
for (depid, idependtask) in irdepends:
if depid in taskData.run_targets:
# Won't be in run_targets if ASSUME_PROVIDED
depdata = taskData.run_targets[depid][0]
if depdata is not None:
taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
if taskid is None:
bb.msg.fatal("RunQueue", "Task %s in %s rdepends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
depends.add(taskid)
bb.msg.fatal("RunQueue", "Task %s in %s depends upon nonexistant task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
depends.append(taskid)
if depdata != fnid:
tdepends_fnid[fnid].add(taskid)

# Resolve recursive 'recrdeptask' dependencies (Part A)

# Resolve recursive 'recrdeptask' dependencies (A)
#
# e.g. do_sometask[recrdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
# We cover the recursive part of the dependencies below
if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
tasknames = task_deps['recrdeptask'][taskData.tasks_name[task]].split()
recursivetasks[task] = tasknames
add_build_dependencies(taskData.depids[fnid], tasknames, depends)
add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
if taskData.tasks_name[task] in tasknames:
recursivetasksselfref.add(task)
for taskname in task_deps['recrdeptask'][taskData.tasks_name[task]].split():
recrdepends.append(taskname)
add_build_dependencies(taskData.depids[fnid], [taskname], depends)
add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)

# Remove all self references
if task in depends:
newdep = []
logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)
for dep in depends:
if task != dep:
newdep.append(dep)
depends = newdep

self.runq_fnid.append(taskData.tasks_fnid[task])
self.runq_task.append(taskData.tasks_name[task])
self.runq_depends.append(depends)
self.runq_depends.append(set(depends))
self.runq_revdeps.append(set())
self.runq_hash.append("")

runq_build.append(0)
runq_recrdepends.append(recrdepends)

# Resolve recursive 'recrdeptask' dependencies (Part B)
#
# Build a list of recursive cumulative dependencies for each fnid
# We do this by fnid, since if A depends on some task in B
# we're interested in later tasks B's fnid might have but B itself
# doesn't depend on
#
# Algorithm is O(tasks) + O(tasks)*O(fnids)
#
reccumdepends = {}
for task in xrange(len(self.runq_fnid)):
fnid = self.runq_fnid[task]
if fnid not in reccumdepends:
if fnid in tdepends_fnid:
reccumdepends[fnid] = tdepends_fnid[fnid]
else:
reccumdepends[fnid] = set()
reccumdepends[fnid].update(self.runq_depends[task])
for task in xrange(len(self.runq_fnid)):
taskfnid = self.runq_fnid[task]
for fnid in reccumdepends:
if task in reccumdepends[fnid]:
reccumdepends[fnid].add(task)
if taskfnid in reccumdepends:
reccumdepends[fnid].update(reccumdepends[taskfnid])
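This two-pass loop is what the `O(tasks) + O(tasks)*O(fnids)` comment describes: the first pass seeds each fnid with the direct dependencies of its tasks, and the second folds a task's own fnid-level set into every fnid whose cumulative set already reaches that task. A toy rendering of the same propagation (all data invented for illustration):

```python
# Invented data: which recipe (fnid) each task belongs to, and direct deps.
runq_fnid = {0: "A", 1: "A", 2: "B"}
runq_depends = {0: {1}, 1: {2}, 2: set()}

# Pass 1: seed each fnid with the direct dependencies of its tasks.
reccumdepends = {}
for task, fnid in runq_fnid.items():
    reccumdepends.setdefault(fnid, set()).update(runq_depends[task])

# Pass 2: any fnid that reaches a task also records the task itself and
# inherits that task's fnid-level cumulative dependencies.
for task, taskfnid in runq_fnid.items():
    for fnid in reccumdepends:
        if task in reccumdepends[fnid]:
            reccumdepends[fnid].add(task)
            reccumdepends[fnid].update(reccumdepends.get(taskfnid, set()))

print(reccumdepends)  # {'A': {1, 2}, 'B': set()}
```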

# Resolve recursive 'recrdeptask' dependencies (B)
#
# e.g. do_sometask[recrdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
# We need to do this separately since we need all of self.runq_depends to be complete before this is processed
extradeps = {}
for task in recursivetasks:
extradeps[task] = set(self.runq_depends[task])
tasknames = recursivetasks[task]
seendeps = set()
seenfnid = []

def generate_recdeps(t):
newdeps = set()
add_resolved_dependencies([taskData.tasks_fnid[t]], tasknames, newdeps)
extradeps[task].update(newdeps)
seendeps.add(t)
newdeps.add(t)
for i in newdeps:
for n in self.runq_depends[i]:
if n not in seendeps:
generate_recdeps(n)
generate_recdeps(task)

# Remove circular references so that do_a[recrdeptask] = "do_a do_b" can work
for task in recursivetasks:
extradeps[task].difference_update(recursivetasksselfref)
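`generate_recdeps` is a depth-first walk with a `seendeps` guard: resolve the recrdeptask names against the task it reaches, then recurse into that task's direct dependencies. The closure itself, isolated on a toy graph:

```python
# Toy graph: task -> set of tasks it directly depends on.
runq_depends = {0: {1, 2}, 1: {3}, 2: set(), 3: set()}

def transitive_deps(start):
    # Depth-first closure with the same seen-set guard as generate_recdeps.
    seen = set()
    def walk(t):
        seen.add(t)
        for n in runq_depends[t]:
            if n not in seen:
                walk(n)
    walk(start)
    return seen - {start}

print(transitive_deps(0))  # {1, 2, 3}
```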

for task in xrange(len(taskData.tasks_name)):
# Add in extra dependencies
if task in extradeps:
self.runq_depends[task] = extradeps[task]
# Remove all self references
if task in self.runq_depends[task]:
logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], self.runq_depends[task])
self.runq_depends[task].remove(task)
for task in xrange(len(self.runq_fnid)):
if len(runq_recrdepends[task]) > 0:
taskfnid = self.runq_fnid[task]
for dep in reccumdepends[taskfnid]:
# Ignore self references
if dep == task:
continue
for taskname in runq_recrdepends[task]:
if taskData.tasks_name[dep] == taskname:
self.runq_depends[task].add(dep)

# Step B - Mark all active tasks
#
@@ -671,14 +674,11 @@ class RunQueueData:
prov_list[prov] = [fn]
elif fn not in prov_list[prov]:
prov_list[prov].append(fn)
error = False
for prov in prov_list:
if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
msg = "Multiple .bb files are due to be built which each provide %s (%s)." % (prov, " ".join(prov_list[prov]))
if self.warn_multi_bb:
logger.warn(msg)
else:
msg += "\n This usually means one provides something the other doesn't and should."
logger.error(msg)
error = True
logger.error("Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should.", prov, " ".join(prov_list[prov]))

# Create a whitelist usable by the stamp checks
@@ -700,28 +700,6 @@ class RunQueueData:
continue
self.runq_setscene.append(task)

def invalidate_task(fn, taskname, error_nostamp):
taskdep = self.dataCache.task_deps[fn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
if error_nostamp:
bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
logger.verbose("Invalidate task %s, %s", taskname, fn)
bb.parse.siggen.invalidate_task(taskname, self.dataCache, fn)

# Invalidate task if force mode active
if self.cooker.configuration.force:
for (fn, target) in self.target_pairs:
invalidate_task(fn, target, False)

# Invalidate task if invalidate mode active
if self.cooker.configuration.invalidate_stamp:
for (fn, target) in self.target_pairs:
for st in self.cooker.configuration.invalidate_stamp.split(','):
invalidate_task(fn, "do_%s" % st, True)

# Iterate over the task list and call into the siggen code
dealtwith = set()
todeal = set(range(len(self.runq_fnid)))
@@ -748,6 +726,12 @@ class RunQueueData:
deps.append(depidentifier)
self.hash_deps[identifier] = deps

# Remove stamps for targets if force mode active
if self.cooker.configuration.force:
for (fn, target) in self.target_pairs:
logger.verbose("Remove stamp %s, %s", target, fn)
bb.build.del_stamp(target, self.dataCache, fn)

return len(self.runq_fnid)

def dump_data(self, taskQueue):
@@ -781,18 +765,106 @@ class RunQueue:
self.cfgData = cfgData
self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)

self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile"
self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None
self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION", True) or None
self.stamppolicy = bb.data.getVar("BB_STAMP_POLICY", cfgData, True) or "perfile"
self.hashvalidate = bb.data.getVar("BB_HASHCHECK_FUNCTION", cfgData, True) or None

self.state = runQueuePrepare

# For disk space monitor
self.dm = monitordisk.diskMonitor(cfgData)
def check_stamps(self):
unchecked = {}
current = []
notcurrent = []
buildable = []

self.rqexe = None
if self.stamppolicy == "perfile":
fulldeptree = False
else:
fulldeptree = True
stampwhitelist = []
if self.stamppolicy == "whitelist":
stampwhitelist = self.rqdata.stampfnwhitelist

def check_stamp_task(self, task, taskname = None, recurse = False, cache = None):
for task in xrange(len(self.rqdata.runq_fnid)):
unchecked[task] = ""
if len(self.rqdata.runq_depends[task]) == 0:
buildable.append(task)

def check_buildable(self, task, buildable):
for revdep in self.rqdata.runq_revdeps[task]:
alldeps = 1
for dep in self.rqdata.runq_depends[revdep]:
if dep in unchecked:
alldeps = 0
if alldeps == 1:
if revdep in unchecked:
buildable.append(revdep)

for task in xrange(len(self.rqdata.runq_fnid)):
if task not in unchecked:
continue
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
# If the stamp is missing, it's not current
if not os.access(stampfile, os.F_OK):
del unchecked[task]
notcurrent.append(task)
check_buildable(self, task, buildable)
continue
# If it's a 'nostamp' task, it's not current
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
del unchecked[task]
notcurrent.append(task)
check_buildable(self, task, buildable)
continue

while (len(buildable) > 0):
nextbuildable = []
for task in buildable:
if task in unchecked:
fn = self.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
iscurrent = True

t1 = os.stat(stampfile)[stat.ST_MTIME]
for dep in self.rqdata.runq_depends[task]:
if iscurrent:
fn2 = self.taskData.fn_index[self.rqdata.runq_fnid[dep]]
taskname2 = self.rqdata.runq_task[dep]
stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
if dep in notcurrent:
iscurrent = False
else:
t2 = os.stat(stampfile2)[stat.ST_MTIME]
if t1 < t2:
iscurrent = False
del unchecked[task]
if iscurrent:
current.append(task)
else:
notcurrent.append(task)

check_buildable(self, task, nextbuildable)

buildable = nextbuildable

#for task in range(len(self.runq_fnid)):
# fn = self.taskData.fn_index[self.runq_fnid[task]]
# taskname = self.runq_task[task]
# print "%s %s.%s" % (task, taskname, fn)

#print "Unchecked: %s" % unchecked
#print "Current: %s" % current
#print "Not current: %s" % notcurrent

if len(unchecked) > 0:
bb.msg.fatal("RunQueue", "check_stamps fatal internal error")
return current
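Stripped of the queue bookkeeping, the currency test in check_stamps is an mtime comparison: a task is current only if its stamp exists and is no older than every dependency's stamp. A self-contained sketch of that core (stamp paths hypothetical):

```python
import os

def stamp_is_current(stampfile, dep_stampfiles):
    # Mirrors the t1 < t2 test above: a missing stamp, or any newer
    # dependency stamp, means the task must rerun.
    if not os.access(stampfile, os.F_OK):
        return False
    t1 = os.stat(stampfile).st_mtime
    return all(os.access(dep, os.F_OK) and os.stat(dep).st_mtime <= t1
               for dep in dep_stampfiles)

# Hypothetical stamp files:
# stamp_is_current("stamps/foo.do_compile", ["stamps/foo.do_configure"])
```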

def check_stamp_task(self, task, taskname = None):
def get_timestamp(f):
try:
if not os.access(f, os.F_OK):
@@ -828,16 +900,10 @@ class RunQueue:
if taskname != "do_setscene" and taskname.endswith("_setscene"):
return True

if cache is None:
cache = {}

iscurrent = True
t1 = get_timestamp(stampfile)
for dep in self.rqdata.runq_depends[task]:
if iscurrent:
if dep in cache:
iscurrent = cache[dep]
continue
fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
taskname2 = self.rqdata.runq_task[dep]
stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
@@ -853,10 +919,7 @@ class RunQueue:
if t1 < t2:
logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
iscurrent = False
if recurse and iscurrent:
iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
cache[dep] = iscurrent
cache[task] = iscurrent

return iscurrent

def execute_runqueue(self):
@@ -881,9 +944,6 @@ class RunQueue:
else:
self.rqexe = RunQueueExecuteScenequeue(self)

if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]:
self.dm.check(self)

if self.state is runQueueSceneRun:
retval = self.rqexe.execute()

@@ -898,13 +958,6 @@ class RunQueue:
if self.state is runQueueCleanUp:
self.rqexe.finish()

if self.state is runQueueComplete or self.state is runQueueFailed:
if self.rqexe.stats.failed:
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
else:
# Let's avoid the word "failed" if nothing actually did
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped)

if self.state is runQueueFailed:
if not self.rqdata.taskData.tryaltconfigs:
raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
@@ -914,6 +967,7 @@ class RunQueue:

if self.state is runQueueComplete:
# All done
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
return False

if self.state is runQueueChildProcess:
@@ -924,9 +978,6 @@ class RunQueue:
return retval

def finish_runqueue(self, now = False):
if not self.rqexe:
return

if now:
self.rqexe.finish_now()
else:
@@ -955,8 +1006,8 @@ class RunQueueExecute:
self.cfgData = rq.cfgData
self.rqdata = rq.rqdata

self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed"
self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", self.cfgData, 1) or 1)
self.scheduler = bb.data.getVar("BB_SCHEDULER", self.cfgData, 1) or "speed"

self.runq_buildable = []
self.runq_running = []
@@ -966,36 +1017,23 @@ class RunQueueExecute:
self.build_stamps = {}
self.failed_fnids = []

self.stampcache = {}

def runqueue_process_waitpid(self):
"""
Return None if there are no processes awaiting result collection, otherwise
collect the process exit codes and close the information pipe.
"""
pid, status = os.waitpid(-1, os.WNOHANG)
if pid == 0 or os.WIFSTOPPED(status):
result = os.waitpid(-1, os.WNOHANG)
if result[0] == 0 and result[1] == 0:
return None

if os.WIFEXITED(status):
status = os.WEXITSTATUS(status)
elif os.WIFSIGNALED(status):
# Per shell conventions for $?, when a process exits due to
# a signal, we return an exit code of 128 + SIGNUM
status = 128 + os.WTERMSIG(status)

task = self.build_pids[pid]
del self.build_pids[pid]

self.build_pipes[pid].close()
del self.build_pipes[pid]

# self.build_stamps[pid] may not exist when using a shared work directory.
if pid in self.build_stamps:
del self.build_stamps[pid]

if status != 0:
self.task_fail(task, status)
task = self.build_pids[result[0]]
del self.build_pids[result[0]]
self.build_pipes[result[0]].close()
del self.build_pipes[result[0]]
# self.build_stamps[result[0]] may not exist when using a shared work directory.
if result[0] in self.build_stamps.keys():
del self.build_stamps[result[0]]
if result[1] != 0:
self.task_fail(task, result[1]>>8)
else:
self.task_complete(task)
return True
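The rewritten method decodes the raw status word from os.waitpid with the os.WIF* macros instead of the old result[1]>>8 shift, and maps signal deaths to the shell's 128 + signal-number convention cited in the comment. The decoding on its own:

```python
import os
import subprocess

child = subprocess.Popen(["/bin/sh", "-c", "exit 3"])
pid, status = os.waitpid(child.pid, 0)

if os.WIFEXITED(status):
    code = os.WEXITSTATUS(status)     # normal exit: here 3
elif os.WIFSIGNALED(status):
    code = 128 + os.WTERMSIG(status)  # shell $? convention for signal deaths
else:
    code = None                       # stopped, not terminated
print(code)
```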
@@ -1006,19 +1044,11 @@ class RunQueueExecute:
for k, v in self.build_pids.iteritems():
try:
os.kill(-k, signal.SIGTERM)
os.waitpid(-1, 0)
except:
pass
for pipe in self.build_pipes:
self.build_pipes[pipe].read()

if len(self.failed_fnids) != 0:
self.rq.state = runQueueFailed
return

self.rq.state = runQueueComplete
return

def finish(self):
self.rq.state = runQueueCleanUp

@@ -1066,12 +1096,6 @@ class RunQueueExecute:

logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
(fn, taskname, ', '.join(fakedirs)))
else:
envvars = (self.rqdata.dataCache.fakerootnoenv[fn] or "").split()
for key, value in (var.split('=') for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value

sys.stdout.flush()
sys.stderr.flush()
@@ -1101,7 +1125,9 @@ class RunQueueExecute:
if umask:
os.umask(umask)

self.cooker.configuration.data.setVar("BB_WORKERCONTEXT", "1")
bb.data.setVar("BB_WORKERCONTEXT", "1", self.cooker.configuration.data)
bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data)
bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn, self.cooker.configuration.data)
bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps)
ret = 0
try:
@@ -1176,34 +1202,33 @@ class RunQueueExecuteTasks(RunQueueExecute):
for task in xrange(self.stats.total):
if task in self.rq.scenequeue_covered:
continue
logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))

if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
ok = True
for revdep in self.rqdata.runq_revdeps[task]:
if self.rqdata.runq_fnid[task] != self.rqdata.runq_fnid[revdep]:
logger.debug(1, 'Found "bad" dep %s (%s) for %s (%s)' % (revdep, self.rqdata.get_user_idstring(revdep), task, self.rqdata.get_user_idstring(task)))

ok = False
break
if ok:
found = True
self.rq.scenequeue_covered.add(task)

logger.debug(1, 'Skip list (pre setsceneverify) %s', sorted(self.rq.scenequeue_covered))

# Allow the metadata to elect for setscene tasks to run anyway
# Detect when the real task needs to be run anyway by looking to see
# if any of its dependencies within the same package are scheduled
# to be run.
covered_remove = set()
if self.rq.setsceneverify:
call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.configuration.data }
covered_remove = bb.utils.better_eval(call, locs)
for task in self.rq.scenequeue_covered:
task_fnid = self.rqdata.runq_fnid[task]
for dep in self.rqdata.runq_depends[task]:
if self.rqdata.runq_fnid[dep] == task_fnid:
if dep not in self.rq.scenequeue_covered:
covered_remove.add(task)
break

for task in covered_remove:
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task] + '_setscene'
bb.build.del_stamp(taskname, self.rqdata.dataCache, fn)
logger.debug(1, 'Not skipping task %s due to setsceneverify', task)
logger.debug(1, 'Not skipping task %s because it will have to be run anyway', task)
self.rq.scenequeue_covered.remove(task)

logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
@@ -1226,7 +1251,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
if type(obj) is type and
issubclass(obj, RunQueueScheduler))

user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True)
user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True)
if user_schedulers:
for sched in user_schedulers.split():
if not "." in sched:
@@ -1309,7 +1334,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.task_skip(task)
return True

if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
if self.rq.check_stamp_task(task, taskname):
logger.debug(2, "Stamp current task %s (%s)", task,
self.rqdata.get_user_idstring(task))
self.task_skip(task)
@@ -1450,14 +1475,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
dep = self.rqdata.taskData.fn_index[depdata]
taskid = self.rqdata.get_task_id(self.rqdata.taskData.getfn_id(dep), idependtask.replace("_setscene", ""))
if taskid is None:
bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (self.rqdata.taskData.tasks_name[realid], dep, idependtask))
bb.msg.fatal("RunQueue", "Task %s depends upon nonexistant task %s:%s" % (self.rqdata.taskData.tasks_name[realid], dep, idependtask))

sq_revdeps_squash[self.rqdata.runq_setscene.index(task)].add(self.rqdata.runq_setscene.index(taskid))
# Have to zero this to avoid circular dependencies
sq_revdeps_squash[self.rqdata.runq_setscene.index(taskid)] = set()

#for task in xrange(len(sq_revdeps_squash)):
# print "Task %s: %s.%s is %s " % (task, self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[self.rqdata.runq_setscene[task]]], self.rqdata.runq_task[self.rqdata.runq_setscene[task]] + "_setscene", sq_revdeps_squash[task])
# print "Task %s: %s.%s is %s " % (task, self.taskData.fn_index[self.runq_fnid[self.runq_setscene[task]]], self.runq_task[self.runq_setscene[task]] + "_setscene", sq_revdeps_squash[task])

self.sq_deps = []
self.sq_revdeps = sq_revdeps_squash
@@ -1493,7 +1518,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
continue

if self.rq.check_stamp_task(realtask, taskname + "_setscene", cache=self.stampcache):
if self.rq.check_stamp_task(realtask, taskname + "_setscene"):
logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
stamppresent.append(task)
self.task_skip(task)
@@ -1549,7 +1574,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):

def task_fail(self, task, result):
self.stats.taskFailed()
bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
index = self.rqdata.runq_setscene[task]
bb.event.fire(sceneQueueTaskFailed(index, self.stats, result, self), self.cfgData)
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task)

@@ -1586,7 +1612,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]

taskname = self.rqdata.runq_task[realtask] + "_setscene"
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True, cache=self.stampcache):
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]):
logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
task, self.rqdata.get_user_idstring(realtask))
self.task_failoutright(task)
@@ -1598,14 +1624,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
self.task_failoutright(task)
return True

if self.rq.check_stamp_task(realtask, taskname, cache=self.stampcache):
if self.rq.check_stamp_task(realtask, taskname):
logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
task, self.rqdata.get_user_idstring(realtask))
self.task_skip(task)
return True

startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
logger.info("Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
self.stats.total, fn, taskname))

pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)

@@ -1666,15 +1692,6 @@ class runQueueEvent(bb.event.Event):
self.stats = stats.copy()
bb.event.Event.__init__(self)

class sceneQueueEvent(runQueueEvent):
"""
Base sceneQueue event class
"""
def __init__(self, task, stats, rq, noexec=False):
runQueueEvent.__init__(self, task, stats, rq)
realtask = rq.rqdata.runq_setscene[task]
self.taskstring = rq.rqdata.get_user_idstring(realtask, "_setscene")

class runQueueTaskStarted(runQueueEvent):
"""
Event notifying a task was started
@@ -1683,14 +1700,6 @@ class runQueueTaskStarted(runQueueEvent):
runQueueEvent.__init__(self, task, stats, rq)
self.noexec = noexec

class sceneQueueTaskStarted(sceneQueueEvent):
"""
Event notifying a setscene task was started
"""
def __init__(self, task, stats, rq, noexec=False):
sceneQueueEvent.__init__(self, task, stats, rq)
self.noexec = noexec

class runQueueTaskFailed(runQueueEvent):
"""
Event notifying a task failed
@@ -1699,19 +1708,28 @@ class runQueueTaskFailed(runQueueEvent):
runQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode

class sceneQueueTaskFailed(sceneQueueEvent):
class sceneQueueTaskFailed(runQueueTaskFailed):
"""
Event notifying a setscene task failed
"""
def __init__(self, task, stats, exitcode, rq):
sceneQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode
runQueueTaskFailed.__init__(self, task, stats, exitcode, rq)
self.taskstring = rq.rqdata.get_user_idstring(task, "_setscene")

class runQueueTaskCompleted(runQueueEvent):
"""
Event notifying a task completed
"""

def check_stamp_fn(fn, taskname, d):
rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d)
fnid = rqexe.rqdata.taskData.getfn_id(fn)
taskid = rqexe.rqdata.get_task_id(fnid, taskname)
if taskid is not None:
return rqexe.rq.check_stamp_task(taskid)
return None

class runQueuePipe():
"""
Abstraction for a pipe between a worker thread and the server
@@ -1719,7 +1737,7 @@ class runQueuePipe():
def __init__(self, pipein, pipeout, d):
self.input = pipein
pipeout.close()
bb.utils.nonblockingfd(self.input)
fcntl.fcntl(self.input, fcntl.F_SETFL, fcntl.fcntl(self.input, fcntl.F_GETFL) | os.O_NONBLOCK)
self.queue = ""
self.d = d
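The replaced fcntl line is the stock recipe for a non-blocking read end, which the new code tucks behind bb.utils.nonblockingfd. The same recipe against a plain os.pipe:

```python
import fcntl
import os

readfd, writefd = os.pipe()
flags = fcntl.fcntl(readfd, fcntl.F_GETFL)
fcntl.fcntl(readfd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

try:
    os.read(readfd, 1024)        # an empty pipe would normally block here
except (IOError, OSError):       # non-blocking: EAGAIN is raised instead
    print("nothing to read yet")
```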

@@ -163,7 +163,7 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
# remove this when you're done with debugging
# allow_reuse_address = True

def __init__(self, interface):
def __init__(self, interface = ("localhost", 0)):
"""
Constructor
"""
@@ -242,14 +242,14 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
return

class BitbakeServerInfo():
def __init__(self, host, port):
self.host = host
self.port = port
def __init__(self, server):
self.host = server.host
self.port = server.port

class BitBakeServerConnection():
def __init__(self, serverinfo, clientinfo=("localhost", 0)):
def __init__(self, serverinfo):
self.connection = _create_server(serverinfo.host, serverinfo.port)
self.events = uievent.BBUIEventQueue(self.connection, clientinfo)
self.events = uievent.BBUIEventQueue(self.connection)
for event in bb.event.ui_queue:
self.events.queue_event(event)

@@ -267,8 +267,8 @@ class BitBakeServerConnection():
pass

class BitBakeServer(object):
def initServer(self, interface = ("localhost", 0)):
self.server = BitBakeXMLRPCServer(interface)
def initServer(self):
self.server = BitBakeXMLRPCServer()

def addcooker(self, cooker):
self.cooker = cooker
@@ -278,7 +278,7 @@ class BitBakeServer(object):
return self.server.register_idle_function

def saveConnectionDetails(self):
self.serverinfo = BitbakeServerInfo(self.server.host, self.server.port)
self.serverinfo = BitbakeServerInfo(self.server)

def detach(self, cooker_logfile):
daemonize.createDaemon(self.server.serve_forever, cooker_logfile)

@@ -16,7 +16,7 @@ def init(d):
siggens = [obj for obj in globals().itervalues()
if type(obj) is type and issubclass(obj, SignatureGenerator)]

desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
for sg in siggens:
if desired == sg.name:
return sg(d)
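init() picks the generator by matching each subclass's name attribute against BB_SIGNATURE_HANDLER, so a third-party generator only needs to be visible in this module's globals when init() runs (how it gets injected there is left aside). An illustrative, not shipped, subclass; the override policy shown is invented:

```python
# Illustrative only: a custom generator selected via BB_SIGNATURE_HANDLER = "custom".
from bb.siggen import SignatureGeneratorBasicHash

class SignatureGeneratorCustom(SignatureGeneratorBasicHash):
    name = "custom"  # matched against BB_SIGNATURE_HANDLER by init()

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
        # Hypothetical policy: never let -native recipes perturb hashes.
        if depname.endswith("-native"):
            return False
        return SignatureGeneratorBasicHash.rundep_check(
            self, fn, recipename, task, dep, depname, dataCache)
```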
@@ -50,10 +50,6 @@ class SignatureGenerator(object):
def dump_sigtask(self, fn, task, stampbase, runtime):
return

def invalidate_task(self, task, d, fn):
bb.build.del_stamp(task, d, fn)

class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
@@ -64,16 +60,11 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taskhash = {}
self.taskdeps = {}
self.runtaskdeps = {}
self.file_checksum_values = {}
self.gendeps = {}
self.lookupcache = {}
self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
self.taskwhitelist = None
self.init_rundepcheck(data)

def init_rundepcheck(self, data):
self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None

if self.taskwhitelist:
self.twl = re.compile(self.taskwhitelist)
else:
@@ -112,10 +103,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
data = data + dep
if dep in lookupcache:
var = lookupcache[dep]
elif dep[-1] == ']':
vf = dep[:-1].split('[')
var = d.getVarFlag(vf[0], vf[1], False)
lookupcache[dep] = var
else:
var = d.getVar(dep, False)
lookupcache[dep] = var
@@ -135,11 +122,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
if variant:
fn = "virtual:" + variant + ":" + fn

try:
taskdeps = self._build_data(fn, d)
except:
bb.note("Error during finalise of %s" % fn)
raise
taskdeps = self._build_data(fn, d)

#Slow but can be useful for debugging mismatched basehashes
#for task in self.taskdeps[fn]:
@@ -148,49 +131,21 @@ class SignatureGeneratorBasic(SignatureGenerator):
for task in taskdeps:
d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])

def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
# Return True if we should keep the dependency, False to drop it
# We only manipulate the dependencies for packages not in the whitelist
if self.twl and not self.twl.search(recipename):
# then process the actual dependencies
if self.twl.search(depname):
return False
return True
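The whitelist logic reads backwards at first glance: BB_HASHTASK_WHITELIST marks dependencies that may be dropped from the hash, but only when the recipe being hashed is itself outside the whitelist. The same test in isolation (pattern invented):

```python
import re

# Hypothetical BB_HASHTASK_WHITELIST value.
twl = re.compile(r".*-native$|.*-cross$")

def keep_dependency(recipename, depname):
    # Whitelisted recipes keep everything; everyone else drops
    # whitelisted dependencies from their task hash.
    if twl.search(recipename):
        return True
    return not twl.search(depname)

print(keep_dependency("busybox", "gcc-cross"))  # False: dropped from the hash
print(keep_dependency("busybox", "zlib"))       # True: kept
```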

def read_taint(self, fn, task, stampbase):
taint = None
try:
with open(stampbase + '.' + task + '.taint', 'r') as taintf:
taint = taintf.read()
except IOError:
pass
return taint
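read_taint's counterpart is bb.build.write_taint, used from invalidate_task further down: the taint file's contents are mixed into the task hash, so writing a fresh value forces a rebuild without deleting any stamps. A self-contained sketch of the pair (that a random value is written is an assumption in this sketch):

```python
import uuid

def write_taint(stampbase, task):
    # Sketch of the write side; assumes any fresh random value suffices.
    with open(stampbase + '.' + task + '.taint', 'w') as f:
        f.write(str(uuid.uuid4()))

def read_taint(stampbase, task):
    try:
        with open(stampbase + '.' + task + '.taint') as f:
            return f.read()
    except IOError:
        return None

write_taint('/tmp/demo', 'do_compile')
print(read_taint('/tmp/demo', 'do_compile'))  # the value just written
```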

def get_taskhash(self, fn, task, deps, dataCache):
k = fn + "." + task
data = dataCache.basetaskhash[k]
self.runtaskdeps[k] = []
self.file_checksum_values[k] = {}
recipename = dataCache.pkg_fn[fn]
for dep in sorted(deps, key=clean_basepath):
depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
continue
# We only manipulate the dependencies for packages not in the whitelist
if self.twl and not self.twl.search(dataCache.pkg_fn[fn]):
# then process the actual dependencies
dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn')
if self.twl.search(dataCache.pkg_fn[dep_fn]):
continue
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
data = data + self.taskhash[dep]
self.runtaskdeps[k].append(dep)

if task in dataCache.file_checksums[fn]:
checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
for (f,cs) in checksums:
self.file_checksum_values[k][f] = cs
data = data + cs

taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint

h = hashlib.md5(data).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
@@ -227,19 +182,12 @@ class SignatureGeneratorBasic(SignatureGenerator):

if runtime and k in self.taskhash:
data['runtaskdeps'] = self.runtaskdeps[k]
data['file_checksum_values'] = self.file_checksum_values[k]
data['runtaskhashes'] = {}
for dep in data['runtaskdeps']:
data['runtaskhashes'][dep] = self.taskhash[dep]

taint = self.read_taint(fn, task, stampbase)
if taint:
data['taint'] = taint

with open(sigfile, "wb") as f:
p = pickle.Pickler(f, -1)
p.dump(data)
os.chmod(sigfile, 0664)
p = pickle.Pickler(file(sigfile, "wb"), -1)
p.dump(data)

def dump_sigs(self, dataCache):
for fn in self.taskdeps:
@@ -260,17 +208,9 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
k = fn + "." + taskname[:-9]
else:
k = fn + "." + taskname
if k in self.taskhash:
h = self.taskhash[k]
else:
# If k is not in basehash, then error
h = self.basehash[k]
h = self.taskhash[k]
return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

def invalidate_task(self, task, d, fn):
bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
bb.build.write_taint(task, d, fn)

def dump_this_task(outfile, d):
import bb.parse
fn = d.getVar("BB_FILENAME", True)
@@ -291,9 +231,9 @@ def clean_basepaths(a):
return b

def compare_sigfiles(a, b):
p1 = pickle.Unpickler(open(a, "rb"))
p1 = pickle.Unpickler(file(a, "rb"))
a_data = p1.load()
p2 = pickle.Unpickler(open(b, "rb"))
p2 = pickle.Unpickler(file(b, "rb"))
b_data = p2.load()

def dict_diff(a, b, whitelist=set()):
@@ -310,13 +250,11 @@ def compare_sigfiles(a, b):

if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
if a_data['basewhitelist'] and b_data['basewhitelist']:
print "changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])
print "changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])

if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
print "taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist'])
if a_data['taskwhitelist'] and b_data['taskwhitelist']:
print "changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])
print "changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])

if a_data['taskdeps'] != b_data['taskdeps']:
print "Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))
@@ -328,8 +266,7 @@ def compare_sigfiles(a, b):
if changed:
for dep in changed:
print "List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])
if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
print "changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])
print "changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])
if added:
for dep in added:
print "Dependency on variable %s was added" % (dep)
@@ -343,55 +280,25 @@ def compare_sigfiles(a, b):
for dep in changed:
print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])

changed, added, removed = dict_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
if changed:
for f in changed:
print "Checksum for file %s changed from %s to %s" % (f, a_data['file_checksum_values'][f], b_data['file_checksum_values'][f])
if added:
for f in added:
print "Dependency on checksum of file %s was added" % (f)
if removed:
for f in removed:
print "Dependency on checksum of file %s was removed" % (f)

if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = clean_basepaths(a_data['runtaskhashes'])
b = clean_basepaths(b_data['runtaskhashes'])
changed, added, removed = dict_diff(a, b)
if added:
for dep in added:
bdep_found = False
if removed:
for bdep in removed:
if a[dep] == b[bdep]:
#print "Dependency on task %s was replaced by %s with same hash" % (dep, bdep)
bdep_found = True
if not bdep_found:
print "Dependency on task %s was added with hash %s" % (dep, a[dep])
print "Dependency on task %s was added" % (dep)
if removed:
for dep in removed:
adep_found = False
if added:
for adep in added:
if a[adep] == b[dep]:
#print "Dependency on task %s was replaced by %s with same hash" % (adep, dep)
adep_found = True
if not adep_found:
print "Dependency on task %s was removed with hash %s" % (dep, b[dep])
print "Dependency on task %s was removed" % (dep)
if changed:
for dep in changed:
print "Hash for dependent task %s changed from %s to %s" % (dep, a[dep], b[dep])

a_taint = a_data.get('taint', None)
b_taint = b_data.get('taint', None)
if a_taint != b_taint:
print "Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint)

elif 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))
print "changed items: %s" % a_data['runtaskdeps'].symmetric_difference(b_data['runtaskdeps'])

def dump_sigfile(a):
p1 = pickle.Unpickler(open(a, "rb"))
p1 = pickle.Unpickler(file(a, "rb"))
a_data = p1.load()

print "basewhitelist: %s" % (a_data['basewhitelist'])
@@ -411,12 +318,6 @@ def dump_sigfile(a):
if 'runtaskdeps' in a_data:
print "Tasks this task depends on: %s" % (a_data['runtaskdeps'])

if 'file_checksum_values' in a_data:
print "This task depends on the checksums of files: %s" % (a_data['file_checksum_values'])

if 'runtaskhashes' in a_data:
for dep in a_data['runtaskhashes']:
print "Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])

if 'taint' in a_data:
print "Tainted (by forced/invalidated task): %s" % a_data['taint']

@@ -55,7 +55,6 @@ class TaskData:
self.tasks_name = []
self.tasks_tdepends = []
self.tasks_idepends = []
self.tasks_irdepends = []
# Cache to speed up task ID lookups
self.tasks_lookup = {}

@@ -116,16 +115,6 @@ class TaskData:
ids.append(self.tasks_lookup[fnid][task])
return ids

def gettask_id_fromfnid(self, fnid, task):
"""
Return an ID number for the task matching fnid and task.
"""
if fnid in self.tasks_lookup:
if task in self.tasks_lookup[fnid]:
return self.tasks_lookup[fnid][task]

return None

def gettask_id(self, fn, task, create = True):
"""
Return an ID number for the task matching fn and task.
@@ -145,7 +134,6 @@ class TaskData:
self.tasks_fnid.append(fnid)
self.tasks_tdepends.append([])
self.tasks_idepends.append([])
self.tasks_irdepends.append([])

listid = len(self.tasks_name) - 1

@@ -190,15 +178,6 @@ class TaskData:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
self.tasks_idepends[taskid].extend(ids)
if 'rdepends' in task_deps and task in task_deps['rdepends']:
ids = []
for dep in task_deps['rdepends'][task].split():
if dep:
if ":" not in dep:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
self.tasks_irdepends[taskid].extend(ids)

# Work out build dependencies
if not fnid in self.depids:
@@ -482,7 +461,6 @@ class TaskData:
providers_list.append(dataCache.pkg_fn[fn])
bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
self.consider_msgs_cache.append(item)
raise bb.providers.MultipleRProvider(item)

# run through the list until we find one that we can build
for fn in eligible:
@@ -555,11 +533,6 @@ class TaskData:
dependees = self.get_rdependees(targetid)
for fnid in dependees:
self.fail_fnid(fnid, missing_list)
for taskid in xrange(len(self.tasks_irdepends)):
irdepends = self.tasks_irdepends[taskid]
for (idependid, idependtask) in irdepends:
if idependid == targetid:
self.fail_fnid(self.tasks_fnid[taskid], missing_list)

def add_unresolved(self, cfgData, dataCache):
"""
@@ -581,7 +554,7 @@ class TaskData:
try:
self.add_rprovider(cfgData, dataCache, target)
added = added + 1
except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
except bb.providers.NoRProvider:
self.remove_runtarget(self.getrun_id(target))
logger.debug(1, "Resolved " + str(added) + " extra dependencies")
if added == 0:

@@ -1,369 +0,0 @@
#
# BitBake Test for codeparser.py
#
# Copyright (C) 2010 Chris Larson
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import logging
import bb

logger = logging.getLogger('BitBake.TestCodeParser')

import bb.data

class ReferenceTest(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()

def setEmptyVars(self, varlist):
for k in varlist:
self.d.setVar(k, "")

def setValues(self, values):
for k, v in values.items():
self.d.setVar(k, v)

def assertReferences(self, refs):
self.assertEqual(self.references, refs)

def assertExecs(self, execs):
self.assertEqual(self.execs, execs)

class VariableReferenceTest(ReferenceTest):

def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
self.references = parsedvar.references

def test_simple_reference(self):
self.setEmptyVars(["FOO"])
self.parseExpression("${FOO}")
self.assertReferences(set(["FOO"]))

def test_nested_reference(self):
self.setEmptyVars(["BAR"])
self.d.setVar("FOO", "BAR")
self.parseExpression("${${FOO}}")
self.assertReferences(set(["FOO", "BAR"]))

def test_python_reference(self):
self.setEmptyVars(["BAR"])
self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}")
self.assertReferences(set(["BAR"]))

class ShellReferenceTest(ReferenceTest):

def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
parser = bb.codeparser.ShellParser("ParserTest", logger)
parser.parse_shell(parsedvar.value)

self.references = parsedvar.references
self.execs = parser.execs

def test_quotes_inside_assign(self):
self.parseExpression('foo=foo"bar"baz')
self.assertReferences(set([]))

def test_quotes_inside_arg(self):
self.parseExpression('sed s#"bar baz"#"alpha beta"#g')
self.assertExecs(set(["sed"]))

def test_arg_continuation(self):
self.parseExpression("sed -i -e s,foo,bar,g \\\n *.pc")
self.assertExecs(set(["sed"]))

def test_dollar_in_quoted(self):
self.parseExpression('sed -i -e "foo$" *.pc')
self.assertExecs(set(["sed"]))

def test_quotes_inside_arg_continuation(self):
self.setEmptyVars(["bindir", "D", "libdir"])
self.parseExpression("""
sed -i -e s#"moc_location=.*$"#"moc_location=${bindir}/moc4"# \\
-e s#"uic_location=.*$"#"uic_location=${bindir}/uic4"# \\
${D}${libdir}/pkgconfig/*.pc
""")
self.assertReferences(set(["bindir", "D", "libdir"]))

def test_assign_subshell_expansion(self):
self.parseExpression("foo=$(echo bar)")
self.assertExecs(set(["echo"]))

def test_shell_unexpanded(self):
self.setEmptyVars(["QT_BASE_NAME"])
self.parseExpression('echo "${QT_BASE_NAME}"')
self.assertExecs(set(["echo"]))
self.assertReferences(set(["QT_BASE_NAME"]))

def test_incomplete_varexp_single_quotes(self):
self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
self.assertExecs(set(["sed"]))

def test_until(self):
self.parseExpression("until false; do echo true; done")
self.assertExecs(set(["false", "echo"]))
self.assertReferences(set())

def test_case(self):
self.parseExpression("""
case $foo in
*)
bar
;;
esac
""")
self.assertExecs(set(["bar"]))
self.assertReferences(set())

def test_assign_exec(self):
self.parseExpression("a=b c='foo bar' alpha 1 2 3")
self.assertExecs(set(["alpha"]))

def test_redirect_to_file(self):
self.setEmptyVars(["foo"])
self.parseExpression("echo foo >${foo}/bar")
self.assertExecs(set(["echo"]))
self.assertReferences(set(["foo"]))

def test_heredoc(self):
self.setEmptyVars(["theta"])
self.parseExpression("""
cat <<END
alpha
beta
${theta}
END
""")
self.assertReferences(set(["theta"]))

def test_redirect_from_heredoc(self):
v = ["B", "SHADOW_MAILDIR", "SHADOW_MAILFILE", "SHADOW_UTMPDIR", "SHADOW_LOGDIR", "bindir"]
self.setEmptyVars(v)
self.parseExpression("""
cat <<END >${B}/cachedpaths
shadow_cv_maildir=${SHADOW_MAILDIR}
shadow_cv_mailfile=${SHADOW_MAILFILE}
shadow_cv_utmpdir=${SHADOW_UTMPDIR}
shadow_cv_logdir=${SHADOW_LOGDIR}
shadow_cv_passwd_dir=${bindir}
END
""")
self.assertReferences(set(v))
self.assertExecs(set(["cat"]))

# def test_incomplete_command_expansion(self):
# self.assertRaises(reftracker.ShellSyntaxError, reftracker.execs,
# bbvalue.shparse("cp foo`", self.d), self.d)

# def test_rogue_dollarsign(self):
# self.setValues({"D" : "/tmp"})
# self.parseExpression("install -d ${D}$")
# self.assertReferences(set(["D"]))
# self.assertExecs(set(["install"]))

class PythonReferenceTest(ReferenceTest):
|
||||
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
if hasattr(bb.utils, "_context"):
|
||||
self.context = bb.utils._context
|
||||
else:
|
||||
import __builtin__
|
||||
self.context = __builtin__.__dict__
|
||||
|
||||
def parseExpression(self, exp):
|
||||
parsedvar = self.d.expandWithRefs(exp, None)
|
||||
parser = bb.codeparser.PythonParser("ParserTest", logger)
|
||||
parser.parse_python(parsedvar.value)
|
||||
|
||||
self.references = parsedvar.references | parser.references
|
||||
self.execs = parser.execs
|
||||
|
||||
@staticmethod
|
||||
def indent(value):
|
||||
"""Python Snippets have to be indented, python values don't have to
|
||||
be. These unit tests are testing snippets."""
|
||||
return " " + value
|
||||
|
||||
def test_getvar_reference(self):
|
||||
self.parseExpression("bb.data.getVar('foo', d, True)")
|
||||
self.assertReferences(set(["foo"]))
|
||||
self.assertExecs(set())
|
||||
|
||||
def test_getvar_computed_reference(self):
|
||||
self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)")
|
||||
self.assertReferences(set())
|
||||
self.assertExecs(set())
|
||||
|
||||
def test_getvar_exec_reference(self):
|
||||
self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')")
|
||||
self.assertReferences(set())
|
||||
self.assertExecs(set(["eval"]))
|
||||
|
||||
def test_var_reference(self):
|
||||
self.context["foo"] = lambda x: x
|
||||
self.setEmptyVars(["FOO"])
|
||||
self.parseExpression("foo('${FOO}')")
|
||||
self.assertReferences(set(["FOO"]))
|
||||
self.assertExecs(set(["foo"]))
|
||||
del self.context["foo"]
|
||||
|
||||
def test_var_exec(self):
|
||||
for etype in ("func", "task"):
|
||||
self.d.setVar("do_something", "echo 'hi mom! ${FOO}'")
|
||||
self.d.setVarFlag("do_something", etype, True)
|
||||
self.parseExpression("bb.build.exec_func('do_something', d)")
|
||||
self.assertReferences(set(["do_something"]))
|
||||
|
||||
def test_function_reference(self):
|
||||
self.context["testfunc"] = lambda msg: bb.msg.note(1, None, msg)
|
||||
self.d.setVar("FOO", "Hello, World!")
|
||||
self.parseExpression("testfunc('${FOO}')")
|
||||
self.assertReferences(set(["FOO"]))
|
||||
self.assertExecs(set(["testfunc"]))
|
||||
del self.context["testfunc"]
|
||||
|
||||
def test_qualified_function_reference(self):
|
||||
self.parseExpression("time.time()")
|
||||
self.assertExecs(set(["time.time"]))
|
||||
|
||||
def test_qualified_function_reference_2(self):
|
||||
self.parseExpression("os.path.dirname('/foo/bar')")
|
||||
self.assertExecs(set(["os.path.dirname"]))
|
||||
|
||||
def test_qualified_function_reference_nested(self):
|
||||
self.parseExpression("time.strftime('%Y%m%d',time.gmtime())")
|
||||
self.assertExecs(set(["time.strftime", "time.gmtime"]))
|
||||
|
||||
def test_function_reference_chained(self):
|
||||
self.context["testget"] = lambda: "\tstrip me "
|
||||
self.parseExpression("testget().strip()")
|
||||
self.assertExecs(set(["testget"]))
|
||||
del self.context["testget"]
|
||||
|
||||
|
||||
class DependencyReferenceTest(ReferenceTest):

    pydata = """
bb.data.getVar('somevar', d, True)
def test(d):
    foo = 'bar %s' % 'foo'
    def test2(d):
        d.getVar(foo, True)
    d.getVar('bar', False)
    test2(d)

def a():
    \"\"\"some
    stuff
    \"\"\"
    return "heh"

test(d)

bb.data.expand(bb.data.getVar("something", False, d), d)
bb.data.expand("${inexpand} somethingelse", d)
bb.data.getVar(a(), d, False)
"""

    def test_python(self):
        self.d.setVar("FOO", self.pydata)
        self.setEmptyVars(["inexpand", "a", "test2", "test"])
        self.d.setVarFlags("FOO", {"func": True, "python": True})

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))

    shelldata = """
foo () {
bar
}
{
echo baz
$(heh)
eval `moo`
}
a=b
c=d
(
true && false
test -f foo
testval=something
$testval
) || aiee
! inverted
echo ${somevar}

case foo in
bar)
echo bar
;;
baz)
echo baz
;;
foo*)
echo foo
;;
esac
"""
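
    # test_shell seeds each name executed by the shell snippet as an
    # (empty) variable so the dependency scanner can report it in deps.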

    def test_shell(self):
        execs = ["bar", "echo", "heh", "moo", "true", "aiee"]
        self.d.setVar("somevar", "heh")
        self.d.setVar("inverted", "echo inverted...")
        self.d.setVarFlag("inverted", "func", True)
        self.d.setVar("FOO", self.shelldata)
        self.d.setVarFlags("FOO", {"func": True})
        self.setEmptyVars(execs)

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEquals(deps, set(["somevar", "inverted"] + execs))

    def test_vardeps(self):
        self.d.setVar("oe_libinstall", "echo test")
        self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
        self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEquals(deps, set(["oe_libinstall"]))

    def test_vardeps_expand(self):
        self.d.setVar("oe_libinstall", "echo test")
        self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
        self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEquals(deps, set(["oe_libinstall"]))

    # Currently no wildcard support
    #def test_vardeps_wildcards(self):
    #    self.d.setVar("oe_libinstall", "echo test")
    #    self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
    #    self.d.setVarFlag("FOO", "vardeps", "oe_*")
    #    self.assertEquals(deps, set(["oe_libinstall"]))

@@ -1,134 +0,0 @@
#
# BitBake Tests for Copy-on-Write (cow.py)
#
# Copyright 2006 Holger Freyther <freyther@handhelds.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import os

class COWTestCase(unittest.TestCase):
    """
    Test case for the COW module from mithro
    """

    def testGetSet(self):
        """
        Test and set
        """
        from bb.COW import COWDictBase
        a = COWDictBase.copy()

        self.assertEquals(False, a.has_key('a'))

        a['a'] = 'a'
        a['b'] = 'b'
        self.assertEquals(True, a.has_key('a'))
        self.assertEquals(True, a.has_key('b'))
        self.assertEquals('a', a['a'])
        self.assertEquals('b', a['b'])

    def testCopyCopy(self):
        """
        Test the copy of copies
        """

        from bb.COW import COWDictBase

        # create two COW dict 'instances'
        b = COWDictBase.copy()
        c = COWDictBase.copy()

        # assign some keys to one instance, some keys to another
        b['a'] = 10
        b['c'] = 20
        c['a'] = 30

        # test separation of the two instances
        self.assertEquals(False, c.has_key('c'))
        self.assertEquals(30, c['a'])
        self.assertEquals(10, b['a'])

        # test copy
        b_2 = b.copy()
        c_2 = c.copy()

        self.assertEquals(False, c_2.has_key('c'))
        self.assertEquals(10, b_2['a'])

        b_2['d'] = 40
        self.assertEquals(False, c_2.has_key('d'))
        self.assertEquals(True, b_2.has_key('d'))
        self.assertEquals(40, b_2['d'])
        self.assertEquals(False, b.has_key('d'))
        self.assertEquals(False, c.has_key('d'))

        c_2['d'] = 30
        self.assertEquals(True, c_2.has_key('d'))
        self.assertEquals(True, b_2.has_key('d'))
        self.assertEquals(30, c_2['d'])
        self.assertEquals(40, b_2['d'])
        self.assertEquals(False, b.has_key('d'))
        self.assertEquals(False, c.has_key('d'))

        # test copy of the copy
        c_3 = c_2.copy()
        b_3 = b_2.copy()
        b_3_2 = b_2.copy()

        c_3['e'] = 4711
        self.assertEquals(4711, c_3['e'])
        self.assertEquals(False, c_2.has_key('e'))
        self.assertEquals(False, b_3.has_key('e'))
        self.assertEquals(False, b_3_2.has_key('e'))
        self.assertEquals(False, b_2.has_key('e'))

        b_3['e'] = 'viel'
        self.assertEquals('viel', b_3['e'])
        self.assertEquals(4711, c_3['e'])
        self.assertEquals(False, c_2.has_key('e'))
        self.assertEquals(True, b_3.has_key('e'))
        self.assertEquals(False, b_3_2.has_key('e'))
        self.assertEquals(False, b_2.has_key('e'))

    def testCow(self):
        from bb.COW import COWDictBase
        c = COWDictBase.copy()
        c['123'] = 1027
        c['other'] = 4711
        c['d'] = { 'abc' : 10, 'bcd' : 20 }

        copy = c.copy()

        self.assertEquals(1027, c['123'])
        self.assertEquals(4711, c['other'])
        self.assertEquals({'abc':10, 'bcd':20}, c['d'])
        self.assertEquals(1027, copy['123'])
        self.assertEquals(4711, copy['other'])
        self.assertEquals({'abc':10, 'bcd':20}, copy['d'])

        # cow it now
        copy['123'] = 1028
        copy['other'] = 4712
        copy['d']['abc'] = 20

        self.assertEquals(1027, c['123'])
        self.assertEquals(4711, c['other'])
        self.assertEquals({'abc':10, 'bcd':20}, c['d'])
        self.assertEquals(1028, copy['123'])
        self.assertEquals(4712, copy['other'])
        self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
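        # As the assertions above show, writes through the copy (including
        # the nested dict mutation) do not leak back into the parent dict.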
@@ -1,252 +0,0 @@
#
# BitBake Tests for the Data Store (data.py/data_smart.py)
#
# Copyright (C) 2010 Chris Larson
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import bb
import bb.data

class DataExpansions(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d["foo"] = "value of foo"
        self.d["bar"] = "value of bar"
        self.d["value of foo"] = "value of 'value of foo'"

    def test_one_var(self):
        val = self.d.expand("${foo}")
        self.assertEqual(str(val), "value of foo")

    def test_indirect_one_var(self):
        val = self.d.expand("${${foo}}")
        self.assertEqual(str(val), "value of 'value of foo'")

    def test_indirect_and_another(self):
        val = self.d.expand("${${foo}} ${bar}")
        self.assertEqual(str(val), "value of 'value of foo' value of bar")
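
    # ${@...} runs the enclosed expression as Python during expansion; the
    # tests below cover successful snippets and the error paths.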

    def test_python_snippet(self):
        val = self.d.expand("${@5*12}")
        self.assertEqual(str(val), "60")

    def test_expand_in_python_snippet(self):
        val = self.d.expand("${@'boo ' + '${foo}'}")
        self.assertEqual(str(val), "boo value of foo")

    def test_python_snippet_getvar(self):
        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
        self.assertEqual(str(val), "value of foo value of bar")

    def test_python_snippet_syntax_error(self):
        self.d.setVar("FOO", "${@foo = 5}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_python_snippet_runtime_error(self):
        self.d.setVar("FOO", "${@int('test')}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_python_snippet_error_path(self):
        self.d.setVar("FOO", "foo value ${BAR}")
        self.d.setVar("BAR", "bar value ${@int('test')}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_value_containing_value(self):
        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
        self.assertEqual(str(val), "value of foo value of bar")

    def test_reference_undefined_var(self):
        val = self.d.expand("${undefinedvar} meh")
        self.assertEqual(str(val), "${undefinedvar} meh")

    def test_double_reference(self):
        self.d.setVar("BAR", "bar value")
        self.d.setVar("FOO", "${BAR} foo ${BAR}")
        val = self.d.getVar("FOO", True)
        self.assertEqual(str(val), "bar value foo bar value")

    def test_direct_recursion(self):
        self.d.setVar("FOO", "${FOO}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_indirect_recursion(self):
        self.d.setVar("FOO", "${BAR}")
        self.d.setVar("BAR", "${BAZ}")
        self.d.setVar("BAZ", "${FOO}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_recursion_exception(self):
        self.d.setVar("FOO", "${BAR}")
        self.d.setVar("BAR", "${${@'FOO'}}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_incomplete_varexp_single_quotes(self):
        self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
        val = self.d.getVar("FOO", True)
        self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")

    def test_nonstring(self):
        self.d.setVar("TEST", 5)
        val = self.d.getVar("TEST", True)
        self.assertEqual(str(val), "5")

    def test_rename(self):
        self.d.renameVar("foo", "newfoo")
        self.assertEqual(self.d.getVar("newfoo"), "value of foo")
        self.assertEqual(self.d.getVar("foo"), None)

    def test_deletion(self):
        self.d.delVar("foo")
        self.assertEqual(self.d.getVar("foo"), None)

    def test_keys(self):
        keys = self.d.keys()
        self.assertEqual(keys, ['value of foo', 'foo', 'bar'])

class TestNestedExpansions(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d["foo"] = "foo"
        self.d["bar"] = "bar"
        self.d["value of foobar"] = "187"

    def test_refs(self):
        val = self.d.expand("${value of ${foo}${bar}}")
        self.assertEqual(str(val), "187")

    #def test_python_refs(self):
    #    val = self.d.expand("${@${@3}**2 + ${@4}**2}")
    #    self.assertEqual(str(val), "25")

    def test_ref_in_python_ref(self):
        val = self.d.expand("${@'${foo}' + 'bar'}")
        self.assertEqual(str(val), "foobar")

    def test_python_ref_in_ref(self):
        val = self.d.expand("${${@'f'+'o'+'o'}}")
        self.assertEqual(str(val), "foo")

    def test_deep_nesting(self):
        depth = 100
        val = self.d.expand("${" * depth + "foo" + "}" * depth)
        self.assertEqual(str(val), "foo")

    #def test_deep_python_nesting(self):
    #    depth = 50
    #    val = self.d.expand("${@" * depth + "1" + "+1}" * depth)
    #    self.assertEqual(str(val), str(depth + 1))

    def test_mixed(self):
        val = self.d.expand("${value of ${@('${foo}'+'bar')[0:3]}${${@'BAR'.lower()}}}")
        self.assertEqual(str(val), "187")

    def test_runtime(self):
        val = self.d.expand("${${@'value of' + ' f'+'o'+'o'+'b'+'a'+'r'}}")
        self.assertEqual(str(val), "187")

class TestMemoize(unittest.TestCase):
    def test_memoized(self):
        d = bb.data.init()
        d.setVar("FOO", "bar")
        self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))
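        # "is" rather than "==": the memoized lookup returns the identical
        # object each time, not just an equal string.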

    def test_not_memoized(self):
        d1 = bb.data.init()
        d2 = bb.data.init()
        d1.setVar("FOO", "bar")
        d2.setVar("FOO", "bar2")
        self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))

    def test_changed_after_memoized(self):
        d = bb.data.init()
        d.setVar("foo", "value of foo")
        self.assertEqual(str(d.getVar("foo")), "value of foo")
        d.setVar("foo", "second value of foo")
        self.assertEqual(str(d.getVar("foo")), "second value of foo")

    def test_same_value(self):
        d = bb.data.init()
        d.setVar("foo", "value of")
        d.setVar("bar", "value of")
        self.assertEqual(d.getVar("foo"),
                         d.getVar("bar"))

class TestConcat(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("FOO", "foo")
        self.d.setVar("VAL", "val")
        self.d.setVar("BAR", "bar")

    def test_prepend(self):
        self.d.setVar("TEST", "${VAL}")
        self.d.prependVar("TEST", "${FOO}:")
        self.assertEqual(self.d.getVar("TEST", True), "foo:val")

    def test_append(self):
        self.d.setVar("TEST", "${VAL}")
        self.d.appendVar("TEST", ":${BAR}")
        self.assertEqual(self.d.getVar("TEST", True), "val:bar")

    def test_multiple_append(self):
        self.d.setVar("TEST", "${VAL}")
        self.d.prependVar("TEST", "${FOO}:")
        self.d.appendVar("TEST", ":val2")
        self.d.appendVar("TEST", ":${BAR}")
        self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")

class TestOverrides(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("OVERRIDES", "foo:bar:local")
        self.d.setVar("TEST", "testvalue")

    def test_no_override(self):
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue")

    def test_one_override(self):
        self.d.setVar("TEST_bar", "testvalue2")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue2")

    def test_multiple_override(self):
        self.d.setVar("TEST_bar", "testvalue2")
        self.d.setVar("TEST_local", "testvalue3")
        self.d.setVar("TEST_foo", "testvalue4")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

class TestFlags(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("foo", "value of foo")
        self.d.setVarFlag("foo", "flag1", "value of flag1")
        self.d.setVarFlag("foo", "flag2", "value of flag2")

    def test_setflag(self):
        self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2"), "value of flag2")

    def test_delflag(self):
        self.d.delVarFlag("foo", "flag2")
        self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)

@@ -1,191 +0,0 @@
#
# BitBake Tests for the Fetcher (fetch2/)
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import tempfile
import subprocess
import os
import bb

class FetcherTest(unittest.TestCase):
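
    # Each key is a (source URI, match regex, replacement) triple; the value
    # is the mirror URI the fetcher is expected to construct from it.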
    replaceuris = {
        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
            : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
            : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
            : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
        ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
            : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
        ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
            : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
        ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
            : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
        ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
            : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
        ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
            : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
        ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
            : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
            : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",

        # Renaming files doesn't work
        #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz",
        #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
    }
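
    # MIRRORS/PREMIRRORS syntax exercised below: whitespace-separated
    # "match-regex replacement" pairs, one rule per line.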

    mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
                "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
                "https://.*/.* file:///someotherpath/downloads/ \n" \
                "http://.*/.* file:///someotherpath/downloads/ \n"

    def setUp(self):
        self.d = bb.data.init()
        self.tempdir = tempfile.mkdtemp()
        self.dldir = os.path.join(self.tempdir, "download")
        os.mkdir(self.dldir)
        self.d.setVar("DL_DIR", self.dldir)
        self.unpackdir = os.path.join(self.tempdir, "unpacked")
        os.mkdir(self.unpackdir)
        persistdir = os.path.join(self.tempdir, "persistdata")
        self.d.setVar("PERSISTENT_DIR", persistdir)

    def tearDown(self):
        bb.utils.prunedir(self.tempdir)

    def test_fetch(self):
        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
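        # With the tarballs already in DL_DIR, a second fetch must succeed
        # even with the network disabled.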
        self.d.setVar("BB_NO_NETWORK", "1")
        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
        fetcher.download()
        fetcher.unpack(self.unpackdir)
        self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
        self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.1/")), 9)

    def test_fetch_mirror(self):
        self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

    def test_fetch_premirror(self):
        self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

    def gitfetcher(self, url1, url2):
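        # Fetch url1 over the network, then wipe the clone and unpacked tree
        # and re-fetch url2 with the network disabled: the second fetch must
        # be satisfied by the generated mirror tarball alone.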
        def checkrevision(self, fetcher):
            fetcher.unpack(self.unpackdir)
            revision = subprocess.check_output("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git").strip()
            self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")

        self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
        self.d.setVar("SRCREV", "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
        fetcher = bb.fetch.Fetch([url1], self.d)
        fetcher.download()
        checkrevision(self, fetcher)
        # Wipe out the dldir clone and the unpacked source, turn off the network and check mirror tarball works
        bb.utils.prunedir(self.dldir + "/git2/")
        bb.utils.prunedir(self.unpackdir)
        self.d.setVar("BB_NO_NETWORK", "1")
        fetcher = bb.fetch.Fetch([url2], self.d)
        fetcher.download()
        checkrevision(self, fetcher)

    def test_gitfetch(self):
        url1 = url2 = "git://git.openembedded.org/bitbake"
        self.gitfetcher(url1, url2)

    def test_gitfetch_premirror(self):
        url1 = "git://git.openembedded.org/bitbake"
        url2 = "git://someserver.org/bitbake"
        self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
        self.gitfetcher(url1, url2)

    def test_gitfetch_premirror2(self):
        url1 = url2 = "git://someserver.org/bitbake"
        self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
        self.gitfetcher(url1, url2)

    def test_gitfetch_premirror3(self):
        realurl = "git://git.openembedded.org/bitbake"
        dummyurl = "git://someserver.org/bitbake"
        self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
        os.chdir(self.tempdir)
        subprocess.check_output("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True)
        self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (dummyurl, self.sourcedir))
        self.gitfetcher(dummyurl, dummyurl)

    def test_urireplace(self):
        for k, v in self.replaceuris.items():
            ud = bb.fetch.FetchData(k[0], self.d)
            ud.setup_localpath(self.d)
            mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
            newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
            self.assertEqual([v], newuris)

    def test_urilist1(self):
        fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
        mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
        self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz', 'file:///someotherpath/downloads/bitbake-1.0.tar.gz'])

    def test_urilist2(self):
        # Catch https:// -> files:// bug
        fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
        mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
        self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])

class URLHandle(unittest.TestCase):

    datatable = {
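        # URL -> (scheme, host, path, user, password, params)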
        "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
        "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
        "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'})
    }

    def test_decodeurl(self):
        for k, v in self.datatable.items():
            result = bb.fetch.decodeurl(k)
            self.assertEqual(result, v)

    def test_encodeurl(self):
        for k, v in self.datatable.items():
            result = bb.fetch.encodeurl(v)
            self.assertEqual(result, k)

@@ -1,36 +0,0 @@
#
# BitBake Tests for utils.py
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import bb

class VerCmpString(unittest.TestCase):

    def test_vercmpstring(self):
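        # vercmp_string behaves like strcmp: negative when the first
        # version is older, positive when it is newer.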
        result = bb.utils.vercmp_string('1', '2')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('2', '1')
        self.assertTrue(result > 0)
        result = bb.utils.vercmp_string('1', '1.0')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1', '1.1')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1_p2')
        self.assertTrue(result < 0)

@@ -1,350 +0,0 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import pango
import gobject
import bb.process
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText, HobButton
from bb.ui.crumbs.runningbuild import RunningBuildTreeView
from bb.ui.crumbs.runningbuild import BuildFailureTreeView
from bb.ui.crumbs.hobpages import HobPage
from bb.ui.crumbs.hobcolor import HobColors

class BuildConfigurationTreeView(gtk.TreeView):
    def __init__(self):
        gtk.TreeView.__init__(self)
        self.set_rules_hint(False)
        self.set_headers_visible(False)
        self.set_property("hover-expand", True)
        self.get_selection().set_mode(gtk.SELECTION_SINGLE)

        # The icon that indicates whether we're building or failed.
        renderer0 = gtk.CellRendererText()
        renderer0.set_property('font-desc', pango.FontDescription('courier bold 12'))
        col0 = gtk.TreeViewColumn("Name", renderer0, text=0)
        self.append_column(col0)

        # The message of configuration.
        renderer1 = HobWarpCellRendererText(col_number=1)
        col1 = gtk.TreeViewColumn("Values", renderer1, text=1)
        self.append_column(col1)

    def set_vars(self, key="", var=[""]):
        d = {}
        if type(var) == str:
            d = {key: [var]}
        elif type(var) == list and len(var) > 1:
            # create the sub item line
            l = []
            text = ""
            for item in var:
                text = " - " + item
                l.append(text)
            d = {key: var}

        return d

    def set_config_model(self, show_vars):
        listmodel = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
        parent = None
        for var in show_vars:
            for subitem in var.items():
                name = subitem[0]
                is_parent = True
                for value in subitem[1]:
                    if is_parent:
                        parent = listmodel.append(parent, (name, value))
                        is_parent = False
                    else:
                        listmodel.append(parent, (None, value))
                name = " - "
            parent = None
        # renew the tree model after getting the configuration messages
        self.set_model(listmodel)

    def show(self, src_config_info, src_params):
        vars = []
        vars.append(self.set_vars("BB version:", src_params.bb_version))
        vars.append(self.set_vars("Target arch:", src_params.target_arch))
        vars.append(self.set_vars("Target OS:", src_params.target_os))
        vars.append(self.set_vars("Machine:", src_config_info.curr_mach))
        vars.append(self.set_vars("Distro:", src_config_info.curr_distro))
        vars.append(self.set_vars("Distro version:", src_params.distro_version))
        vars.append(self.set_vars("SDK machine:", src_config_info.curr_sdk_machine))
        vars.append(self.set_vars("Tune features:", src_params.tune_pkgarch))
        vars.append(self.set_vars("Layers:", src_config_info.layers))

        for path in src_config_info.layers:
            import os, os.path
            if os.path.exists(path):
                branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
                if branch:
                    branch = branch.strip('\n')
                    vars.append(self.set_vars("Branch:", branch))
                break

        self.set_config_model(vars)

    def reset(self):
        self.set_model(None)

#
# BuildDetailsPage
#

class BuildDetailsPage (HobPage):

    def __init__(self, builder):
        super(BuildDetailsPage, self).__init__(builder, "Building ...")

        self.num_of_issues = 0
        self.endpath = (0,)
        # create visual elements
        self.create_visual_elements()

    def create_visual_elements(self):
        # create visual elements
        self.vbox = gtk.VBox(False, 12)

        self.progress_box = gtk.VBox(False, 12)
        self.task_status = gtk.Label("\n") # to ensure layout is correct
        self.task_status.set_alignment(0.0, 0.5)
        self.progress_box.pack_start(self.task_status, expand=False, fill=False)
        self.progress_hbox = gtk.HBox(False, 6)
        self.progress_box.pack_end(self.progress_hbox, expand=True, fill=True)
        self.progress_bar = HobProgressBar()
        self.progress_hbox.pack_start(self.progress_bar, expand=True, fill=True)
        self.stop_button = HobAltButton("Stop")
        self.stop_button.connect("clicked", self.stop_button_clicked_cb)
        self.stop_button.set_sensitive(False)
        self.progress_hbox.pack_end(self.stop_button, expand=False, fill=False)

        self.notebook = HobNotebook()
        self.config_tv = BuildConfigurationTreeView()
        self.scrolled_view_config = gtk.ScrolledWindow()
        self.scrolled_view_config.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_config.add(self.config_tv)
        self.notebook.append_page(self.scrolled_view_config, "Build configuration")

        self.failure_tv = BuildFailureTreeView()
        self.failure_model = self.builder.handler.build.model.failure_model()
        self.failure_tv.set_model(self.failure_model)
        self.scrolled_view_failure = gtk.ScrolledWindow()
        self.scrolled_view_failure.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_failure.add(self.failure_tv)
        self.notebook.append_page(self.scrolled_view_failure, "Issues")

        self.build_tv = RunningBuildTreeView(readonly=True, hob=True)
        self.build_tv.set_model(self.builder.handler.build.model)
        self.scrolled_view_build = gtk.ScrolledWindow()
        self.scrolled_view_build.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_build.add(self.build_tv)
        self.notebook.append_page(self.scrolled_view_build, "Log")

        self.builder.handler.build.model.connect_after("row-changed", self.scroll_to_present_row, self.scrolled_view_build.get_vadjustment(), self.build_tv)

        self.button_box = gtk.HBox(False, 6)
        self.back_button = HobAltButton("<< Back to image configuration")
        self.back_button.connect("clicked", self.back_button_clicked_cb)
        self.button_box.pack_start(self.back_button, expand=False, fill=False)

    def update_build_status(self, current, total, task):
        recipe_path, recipe_task = task.split(", ")
        recipe = os.path.basename(recipe_path).rstrip(".bb")
        tsk_msg = "<b>Running task %s of %s:</b> %s\n<b>Recipe:</b> %s" % (current, total, recipe_task, recipe)
        self.task_status.set_markup(tsk_msg)
        self.stop_button.set_sensitive(True)

    def reset_build_status(self):
        self.task_status.set_markup("\n") # to ensure layout is correct
        self.endpath = (0,)

    def show_issues(self):
        self.num_of_issues += 1
        self.notebook.show_indicator_icon("Issues", self.num_of_issues)

    def reset_issues(self):
        self.num_of_issues = 0
        self.notebook.hide_indicator_icon("Issues")

    def _remove_all_widget(self):
        children = self.vbox.get_children() or []
        for child in children:
            self.vbox.remove(child)
        children = self.box_group_area.get_children() or []
        for child in children:
            self.box_group_area.remove(child)
        children = self.get_children() or []
        for child in children:
            self.remove(child)

    def update_failures_sum_display(self):
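        # Walk the failure model and count the rows flagged with the
        # ERROR colour; the caller displays this as the failed-task total.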
        num = 0
        it = self.failure_model.get_iter_first()
        while it:
            color = self.failure_model.get_value(it, self.builder.handler.build.model.COL_COLOR)
            if color == HobColors.ERROR:
                num += 1
            it = self.failure_model.iter_next(it)

        return num

    def add_build_fail_top_bar(self, actions):
        mainly_action = "Edit %s" % actions
        if 'image' in actions:
            next_action = ""
        else:
            next_action = "Create new image"

        # switch to the issues page
        self.notebook.set_page("Issues")

        color = HobColors.ERROR
        build_fail_top = gtk.EventBox()
        build_fail_top.set_size_request(-1, 260)
        build_fail_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))

        build_fail_tab = gtk.Table(7, 40, True)
        build_fail_top.add(build_fail_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ERROR_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_fail_tab.attach(icon, 1, 4, 0, 3)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='x-large'>%s</span>" % self.title)
        build_fail_tab.attach(label, 4, 20, 0, 3)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        num_of_fails = self.update_failures_sum_display()
        current_fail, recipe_task_status = self.task_status.get_text().split('\n')
        label.set_markup(" %d tasks failed, %s, %s" % (num_of_fails, current_fail, recipe_task_status))
        build_fail_tab.attach(label, 4, 40, 2, 4)

        # create button 'Edit packages'
        action_button = HobButton(mainly_action)
        action_button.set_size_request(-1, 49)
        action_button.connect('clicked', self.failure_main_action_button_clicked_cb, mainly_action)
        build_fail_tab.attach(action_button, 4, 16, 4, 6)

        if next_action:
            next_button = HobAltButton(next_action)
            next_button.set_alignment(0.0, 0.5)
            next_button.connect('clicked', self.failure_next_action_button_clicked_cb, next_action)
            build_fail_tab.attach(next_button, 17, 24, 4, 5)

        file_bug_button = HobAltButton('File a bug')
        file_bug_button.set_alignment(0.0, 0.5)
        file_bug_button.connect('clicked', self.failure_file_bug_activate_link_cb)
        build_fail_tab.attach(file_bug_button, 17, 24, 4 + abs(next_action != ""), 6)

        return build_fail_top

    def show_fail_page(self, title, action_names):
        self._remove_all_widget()
        self.title = "Hob cannot build your %s" % title

        self.build_fail_bar = self.add_build_fail_top_bar(action_names)
        self.pack_start(self.build_fail_bar)
        self.pack_start(self.group_align, expand=True, fill=True)

        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)

        self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
        self.show_all()
        self.back_button.hide()

    def show_page(self, step):
        self._remove_all_widget()
        if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
            self.title = "Building packages ..."
        else:
            self.title = "Building image ..."
        self.build_details_top = self.add_onto_top_bar(None)
        self.pack_start(self.build_details_top, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.progress_bar.reset()
        self.config_tv.reset()
        self.vbox.pack_start(self.progress_box, expand=False, fill=False)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)

        self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
        self.show_all()
        self.back_button.hide()

        self.reset_build_status()
        self.reset_issues()

    def update_progress_bar(self, title, fraction, status=None):
        self.progress_bar.update(fraction)
        self.progress_bar.set_title(title)
        self.progress_bar.set_rcstyle(status)

    def back_button_clicked_cb(self, button):
        self.builder.show_configuration()

    def show_back_button(self):
        self.back_button.show()

    def stop_button_clicked_cb(self, button):
        self.builder.stop_build()

    def hide_stop_button(self):
        self.stop_button.set_sensitive(False)
        self.stop_button.hide()

    def scroll_to_present_row(self, model, path, iter, v_adj, treeview):
        if treeview and v_adj:
            if path[0] > self.endpath[0]: # check whether the event appended a new row
                self.endpath = path
                # check whether the gtk.adjustment position is at the end boundary
                if (v_adj.upper <= v_adj.page_size) or (v_adj.value == v_adj.upper - v_adj.page_size):
                    treeview.scroll_to_cell(path)

    def show_configurations(self, configurations, params):
        self.config_tv.show(configurations, params)

    def failure_main_action_button_clicked_cb(self, button, action):
        if "Edit recipes" in action:
            self.builder.show_recipes()
        elif "Edit packages" in action:
            self.builder.show_packages()
        elif "Edit image configuration" in action:
            self.builder.show_configuration()

    def failure_next_action_button_clicked_cb(self, button, action):
        if "Create new image" in action:
            self.builder.initiate_new_build_async()

    def failure_file_bug_activate_link_cb(self, button):
        button.child.emit('activate-link', "http://bugzilla.yoctoproject.org")
bitbake/lib/bb/ui/crumbs/configurator.py (new file)
@@ -0,0 +1,346 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gobject
import copy
import re, os
from bb import data

class Configurator(gobject.GObject):

    """
    A GObject to handle writing modified configuration values back
    to conf files.
    """
    __gsignals__ = {
        "layers-loaded"  : (gobject.SIGNAL_RUN_LAST,
                            gobject.TYPE_NONE,
                            ()),
        "layers-changed" : (gobject.SIGNAL_RUN_LAST,
                            gobject.TYPE_NONE,
                            ())
    }

    def __init__(self):
        gobject.GObject.__init__(self)
        self.bblayers = None
        self.enabled_layers = {}
        self.loaded_layers = {}
        self.config = {}
        self.orig_config = {}
        self.preconf = None
        self.postconf = None

    # NOTE: cribbed from the cooker...
    def _parse(self, f, data, include=False):
        try:
            return bb.parse.handle(f, data, include)
        except (IOError, bb.parse.ParseError) as exc:
            parselog.critical("Unable to parse %s: %s" % (f, exc))
            sys.exit(1)

    def _loadConf(self, path):
        def getString(var):
            return bb.data.getVar(var, data, True) or ""

        if self.orig_config:
            del self.orig_config
            self.orig_config = {}

        data = bb.data.init()
        data = self._parse(path, data)

        # We only need to care about certain variables
        mach = getString('MACHINE')
        if mach and mach != self.config.get('MACHINE', ''):
            self.config['MACHINE'] = mach
        sdkmach = getString('SDKMACHINE')
        if sdkmach and sdkmach != self.config.get('SDKMACHINE', ''):
            self.config['SDKMACHINE'] = sdkmach
        distro = getString('DISTRO')
        if not distro:
            distro = "defaultsetup"
        if distro and distro != self.config.get('DISTRO', ''):
            self.config['DISTRO'] = distro
        bbnum = getString('BB_NUMBER_THREADS')
        if bbnum and bbnum != self.config.get('BB_NUMBER_THREADS', ''):
            self.config['BB_NUMBER_THREADS'] = bbnum
        pmake = getString('PARALLEL_MAKE')
        if pmake and pmake != self.config.get('PARALLEL_MAKE', ''):
            self.config['PARALLEL_MAKE'] = pmake
        pclass = getString('PACKAGE_CLASSES')
        if pclass and pclass != self.config.get('PACKAGE_CLASSES', ''):
            self.config['PACKAGE_CLASSES'] = pclass
        fstypes = getString('IMAGE_FSTYPES')
        if fstypes and fstypes != self.config.get('IMAGE_FSTYPES', ''):
            self.config['IMAGE_FSTYPES'] = fstypes

        # Values which aren't always set in the conf must be explicitly
        # loaded as empty values for save to work
        incompat = getString('INCOMPATIBLE_LICENSE')
        if incompat and incompat != self.config.get('INCOMPATIBLE_LICENSE', ''):
            self.config['INCOMPATIBLE_LICENSE'] = incompat
        else:
            self.config['INCOMPATIBLE_LICENSE'] = ""

        # Non-standard, namespaced variables for GUI preferences
        toolchain = getString('HOB_BUILD_TOOLCHAIN')
        if toolchain and toolchain != self.config.get('HOB_BUILD_TOOLCHAIN', ''):
            self.config['HOB_BUILD_TOOLCHAIN'] = toolchain
        header = getString('HOB_BUILD_TOOLCHAIN_HEADERS')
        if header and header != self.config.get('HOB_BUILD_TOOLCHAIN_HEADERS', ''):
            self.config['HOB_BUILD_TOOLCHAIN_HEADERS'] = header

        self.orig_config = copy.deepcopy(self.config)

    def setConfVar(self, var, val):
        self.config[var] = val

    def getConfVar(self, var):
        if var in self.config:
            return self.config[var]
        else:
            return ""

    def _loadLayerConf(self, path):
        self.bblayers = path
        self.enabled_layers = {}
        self.loaded_layers = {}
        data = bb.data.init()
        data = self._parse(self.bblayers, data)
        layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
        for layer in layers:
            # TODO: we may be better off calling the layer by its
            # BBFILE_COLLECTIONS value?
            name = self._getLayerName(layer)
            self.loaded_layers[name] = layer

        self.enabled_layers = copy.deepcopy(self.loaded_layers)
        self.emit("layers-loaded")

    def _addConfigFile(self, path):
        conffiles = ["local.conf", "hob-pre.conf", "hob-post.conf"]
        pref, sep, filename = path.rpartition("/")

        if filename == "hob-pre.conf":
            self.preconf = path

        if filename == "hob-post.conf":
            self.postconf = path

        if filename in conffiles:
            self._loadConf(path)
        elif filename == "bblayers.conf":
            self._loadLayerConf(path)

    def _splitLayer(self, path):
        # we only care about the path up to /conf/layer.conf
        layerpath, conf, end = path.rpartition("/conf/")
        return layerpath

    def _getLayerName(self, path):
        # Should this be the collection name?
        layerpath, sep, name = path.rpartition("/")
        return name

    def disableLayer(self, layer):
        if layer in self.enabled_layers:
            del self.enabled_layers[layer]

    def addLayerConf(self, confpath):
        layerpath = self._splitLayer(confpath)
        name = self._getLayerName(layerpath)

        if not layerpath or not name:
            return None, None
        elif name not in self.enabled_layers:
            self.addLayer(name, layerpath)
            return name, layerpath
        else:
            return name, None

    def addLayer(self, name, path):
        self.enabled_layers[name] = path

    def _isLayerConfDirty(self):
        # if a different number of layers is enabled than was
        # loaded, definitely different
        if len(self.enabled_layers) != len(self.loaded_layers):
            return True

        for layer in self.loaded_layers:
            # if a layer was loaded but is no longer present, definitely dirty
            if layer not in self.enabled_layers:
                return True

        for layer in self.enabled_layers:
            # if this layer wasn't present at load, definitely dirty
            if layer not in self.loaded_layers:
                return True
            # if this layer's path has changed, definitely dirty
            if self.enabled_layers[layer] != self.loaded_layers[layer]:
                return True

        return False

    def _constructLayerEntry(self):
        """
        Returns a string representing the new layer selection
        """
        layers = self.enabled_layers.copy()
        # Construct BBLAYERS entry
        layer_entry = "BBLAYERS = \" \\\n"
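        # Keep the core 'meta' layer first so it stays at the top of the
        # generated BBLAYERS entry.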
        if 'meta' in layers:
            layer_entry = layer_entry + " %s \\\n" % layers['meta']
            del layers['meta']
        for layer in layers:
            layer_entry = layer_entry + " %s \\\n" % layers[layer]
        layer_entry = layer_entry + " \""

        return "".join(layer_entry)

    def writeConfFile(self, conffile, contents):
        """
        Make a backup copy of conffile and write a new file in its stead with
        the lines in the contents list.
        """
        # Create a backup of the conf file
        bkup = "%s~" % conffile
        os.rename(conffile, bkup)

        # Write the contents list object to the conf file
        with open(conffile, "w") as new:
            new.write("".join(contents))

    def updateConf(self, orig_lines, changed_values):
        new_config_lines = []
        for var in changed_values:
            # Convenience function for re.subn(). If the pattern matches,
            # return a string which contains an assignment using the same
            # assignment operator as the old assignment.
            def replace_val(matchobj):
                var = matchobj.group(1)   # config variable
                op = matchobj.group(2)    # assignment operator
                val = changed_values[var] # new config value
                return "%s %s \"%s\"" % (var, op, val)
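
            # The operator class below matches the BitBake assignment
            # flavours (=, +=, =+, ?=, ??=, .=, =.), so an existing
            # assignment of any style is commented out and replaced.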
            pattern = '^\s*(%s)\s*([+=?.]+)(.*)' % re.escape(var)
            p = re.compile(pattern)
            cnt = 0
            replaced = False

            # Iterate over the local.conf lines; if one matches the pattern,
            # comment it out and append a new line with the new
            # VAR op "value" entry
            for line in orig_lines:
                new_line, replacements = p.subn(replace_val, line)
                if replacements:
                    orig_lines[cnt] = "#%s" % line
                    new_config_lines.append(new_line)
                    replaced = True
                cnt = cnt + 1

            if not replaced:
                new_config_lines.append("%s = \"%s\"\n" % (var, changed_values[var]))

        # Add the modified variables
        orig_lines.extend(new_config_lines)
        return orig_lines

    def writeConf(self):
        pre_vars = ["MACHINE", "SDKMACHINE", "DISTRO",
                    "INCOMPATIBLE_LICENSE"]
        post_vars = ["BB_NUMBER_THREADS", "PARALLEL_MAKE", "PACKAGE_CLASSES",
                     "IMAGE_FSTYPES", "HOB_BUILD_TOOLCHAIN",
                     "HOB_BUILD_TOOLCHAIN_HEADERS"]
        pre_values = {}
        post_values = {}
        changed_values = {}
        pre_lines = None
        post_lines = None

        for var in self.config:
            val = self.config[var]
            if self.orig_config.get(var, None) != val:
                changed_values[var] = val

        if not len(changed_values):
            return

        for var in changed_values:
            if var in pre_vars:
                pre_values[var] = changed_values[var]
            elif var in post_vars:
                post_values[var] = changed_values[var]

        with open(self.preconf, 'r') as pre:
            pre_lines = pre.readlines()
            pre_lines = self.updateConf(pre_lines, pre_values)
            if len(pre_lines):
                self.writeConfFile(self.preconf, pre_lines)

        with open(self.postconf, 'r') as post:
            post_lines = post.readlines()
            post_lines = self.updateConf(post_lines, post_values)
            if len(post_lines):
                self.writeConfFile(self.postconf, post_lines)

        del self.orig_config
        self.orig_config = copy.deepcopy(self.config)

    def insertTempBBPath(self, bbpath, bbfiles):
        # read the original conf into a list
        with open(self.postconf, 'r') as config:
            config_lines = config.readlines()

        if bbpath:
            config_lines.append("BBPATH := \"${BBPATH}:%s\"\n" % bbpath)
        if bbfiles:
            config_lines.append("BBFILES := \"${BBFILES} %s\"\n" % bbfiles)

        self.writeConfFile(self.postconf, config_lines)

    def writeLayerConf(self):
        # If we've not added/removed any layers, don't write
        if not self._isLayerConfDirty():
            return

        # This pattern should find the existing BBLAYERS
        pattern = 'BBLAYERS\s=\s\".*\"'

        replacement = self._constructLayerEntry()

        with open(self.bblayers, "r") as f:
            contents = f.read()
            p = re.compile(pattern, re.DOTALL)
            new = p.sub(replacement, contents)

        self.writeConfFile(self.bblayers, new)

        # set loaded_layers for dirtiness tracking
        self.loaded_layers = copy.deepcopy(self.enabled_layers)

        self.emit("layers-changed")

    def configFound(self, handler, path):
        self._addConfigFile(path)

    def loadConfig(self, path):
        self._addConfigFile(path)
@@ -1,37 +0,0 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

class HobColors:
    WHITE       = "#ffffff"
    PALE_GREEN  = "#aaffaa"
    ORANGE      = "#eb8e68"
    PALE_RED    = "#ffaaaa"
    GRAY        = "#aaaaaa"
    LIGHT_GRAY  = "#dddddd"
    SLIGHT_DARK = "#5f5f5f"
    DARK        = "#3c3b37"
    BLACK       = "#000000"
    PALE_BLUE   = "#53b8ff"
    DEEP_RED    = "#aa3e3e"

    OK      = WHITE
    RUNNING = PALE_GREEN
    WARNING = ORANGE
    ERROR   = PALE_RED
@@ -4,7 +4,6 @@
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -21,8 +20,10 @@

import gobject
import logging
from bb.ui.crumbs.runningbuild import RunningBuild
from bb.ui.crumbs.hobwidget import hcc
import tempfile
import datetime

progress_total = 0

class HobHandler(gobject.GObject):

@@ -30,169 +31,147 @@ class HobHandler(gobject.GObject):
    This object does BitBake event handling for the hob gui.
    """
    __gsignals__ = {
        "package-formats-updated" : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "config-updated"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
        "command-succeeded"       : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_INT,)),
        "command-failed"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,)),
        "sanity-failed"           : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,)),
        "generating-data"         : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "data-generated"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "parsing-started"         : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "parsing"                 : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "parsing-completed"       : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "recipe-populated"        : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "package-populated"       : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "machines-updated"        : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "sdk-machines-updated"    : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "distros-updated"         : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "package-formats-found"   : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "config-found"            : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,)),
        "generating-data"         : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "data-generated"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "fatal-error"             : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,
                                      gobject.TYPE_STRING,)),
        "command-failed"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,)),
        "reload-triggered"        : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,
                                      gobject.TYPE_STRING,)),
    }

    (GENERATE_CONFIGURATION, GENERATE_RECIPES, GENERATE_PACKAGES, GENERATE_IMAGE, POPULATE_PACKAGEINFO, SANITY_CHECK) = range(6)
    (SUB_PATH_LAYERS, SUB_FILES_DISTRO, SUB_FILES_MACH, SUB_FILES_SDKMACH, SUB_MATCH_CLASS, SUB_PARSE_CONFIG, SUB_SANITY_CHECK, SUB_GNERATE_TGTS, SUB_GENERATE_PKGINFO, SUB_BUILD_RECIPES, SUB_BUILD_IMAGE) = range(11)
    (CFG_PATH_LOCAL, CFG_PATH_PRE, CFG_PATH_POST, CFG_PATH_LAYERS, CFG_FILES_DISTRO, CFG_FILES_MACH, CFG_FILES_SDK, FILES_MATCH_CLASS, GENERATE_TGTS, REPARSE_FILES, BUILD_IMAGE) = range(11)
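The __gsignals__ table is effectively the public surface of this object. A sketch of how a GTK front-end would subscribe, using standard gobject connect() (the callback names are illustrative; handler is a HobHandler instance as constructed below):

def on_command_failed(handler, msg):
    print "bitbake command failed: %s" % msg

def on_parsing(handler, message):
    # message is the dict built in handle_event(): eventname/current/total/title
    print "%s%s/%s" % (message["title"], message["current"], message["total"])

handler.connect("command-failed", on_command_failed)
handler.connect("parsing", on_parsing)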
    def __init__(self, server, recipe_model, package_model):
        super(HobHandler, self).__init__()
    def __init__(self, taskmodel, server):
        gobject.GObject.__init__(self)

        self.build = RunningBuild(sequential=True)

        self.recipe_model = recipe_model
        self.package_model = package_model

        self.commands_async = []
        self.generating = False
        self.current_phase = None
        self.current_command = None
        self.building = False
        self.recipe_queue = []
        self.package_queue = []
        self.build_toolchain = False
        self.build_toolchain_headers = False
        self.generating = False
        self.build_queue = []
        self.current_phase = None
        self.bbpath_ok = False
        self.bbfiles_ok = False
        self.build_type = "image"
        self.image_dir = os.path.join(tempfile.gettempdir(), 'hob-images')

        self.model = taskmodel
        self.server = server
        self.error_msg = ""
        self.initcmd = None

    def set_busy(self):
        if not self.generating:
            deploy_dir = self.server.runCommand(["getVariable", "DEPLOY_DIR"])
            self.image_out_dir = os.path.join(deploy_dir, "images")
            self.image_output_types = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"]).split(" ")
            self.bbpath = self.server.runCommand(["getVariable", "BBPATH"])
            self.bbfiles = self.server.runCommand(["getVariable", "BBFILES"])

    def run_next_command(self):
        if self.current_command and not self.generating:
            self.emit("generating-data")
            self.generating = True

    def clear_busy(self):
        if self.generating:
            self.emit("data-generated")
            self.generating = False

    def runCommand(self, commandline):
        try:
            return self.server.runCommand(commandline)
        except Exception as e:
            self.commands_async = []
            self.clear_busy()
            self.emit("command-failed", "Hob Exception - %s" % (str(e)))
            return None

    def run_next_command(self, initcmd=None):
        if initcmd != None:
            self.initcmd = initcmd

        if self.commands_async:
            self.set_busy()
            next_command = self.commands_async.pop(0)
        else:
            self.clear_busy()
            if self.initcmd != None:
                self.emit("command-succeeded", self.initcmd)
            return

        if next_command == self.SUB_PATH_LAYERS:
            self.runCommand(["findConfigFilePath", "bblayers.conf"])
        elif next_command == self.SUB_FILES_DISTRO:
            self.runCommand(["findConfigFiles", "DISTRO"])
        elif next_command == self.SUB_FILES_MACH:
            self.runCommand(["findConfigFiles", "MACHINE"])
        elif next_command == self.SUB_FILES_SDKMACH:
            self.runCommand(["findConfigFiles", "MACHINE-SDK"])
        elif next_command == self.SUB_MATCH_CLASS:
            self.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
        elif next_command == self.SUB_PARSE_CONFIG:
            self.runCommand(["parseConfigurationFiles", "", ""])
        elif next_command == self.SUB_GNERATE_TGTS:
            self.runCommand(["generateTargetsTree", "classes/image.bbclass", []])
        elif next_command == self.SUB_GENERATE_PKGINFO:
            self.runCommand(["triggerEvent", "bb.event.RequestPackageInfo()"])
        elif next_command == self.SUB_SANITY_CHECK:
            self.runCommand(["triggerEvent", "bb.event.SanityCheck()"])
        elif next_command == self.SUB_BUILD_RECIPES:
            self.clear_busy()
        if self.current_command == self.CFG_PATH_LOCAL:
            self.current_command = self.CFG_PATH_PRE
            self.server.runCommand(["findConfigFilePath", "hob-pre.conf"])
        elif self.current_command == self.CFG_PATH_PRE:
            self.current_command = self.CFG_PATH_POST
            self.server.runCommand(["findConfigFilePath", "hob-post.conf"])
        elif self.current_command == self.CFG_PATH_POST:
            self.current_command = self.CFG_PATH_LAYERS
            self.server.runCommand(["findConfigFilePath", "bblayers.conf"])
        elif self.current_command == self.CFG_PATH_LAYERS:
            self.current_command = self.CFG_FILES_DISTRO
            self.server.runCommand(["findConfigFiles", "DISTRO"])
        elif self.current_command == self.CFG_FILES_DISTRO:
            self.current_command = self.CFG_FILES_MACH
            self.server.runCommand(["findConfigFiles", "MACHINE"])
        elif self.current_command == self.CFG_FILES_MACH:
            self.current_command = self.CFG_FILES_SDK
            self.server.runCommand(["findConfigFiles", "MACHINE-SDK"])
        elif self.current_command == self.CFG_FILES_SDK:
            self.current_command = self.FILES_MATCH_CLASS
            self.server.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
        elif self.current_command == self.FILES_MATCH_CLASS:
            self.current_command = self.GENERATE_TGTS
            self.server.runCommand(["generateTargetsTree", "classes/image.bbclass"])
        elif self.current_command == self.GENERATE_TGTS:
            if self.generating:
                self.emit("data-generated")
                self.generating = False
            self.current_command = None
        elif self.current_command == self.REPARSE_FILES:
            if self.build_queue:
                self.current_command = self.BUILD_IMAGE
            else:
                self.current_command = self.CFG_PATH_LAYERS
            self.server.runCommand(["resetCooker"])
            self.server.runCommand(["reparseFiles"])
        elif self.current_command == self.BUILD_IMAGE:
            if self.generating:
                self.emit("data-generated")
                self.generating = False
            self.building = True
            self.runCommand(["buildTargets", self.recipe_queue, self.default_task])
            self.recipe_queue = []
        elif next_command == self.SUB_BUILD_IMAGE:
            self.clear_busy()
            self.building = True
            targets = [self.image]
            if self.package_queue:
                self.runCommand(["setVariable", "LINGUAS_INSTALL", ""])
                self.runCommand(["setVariable", "PACKAGE_INSTALL", " ".join(self.package_queue)])
            if self.toolchain_packages:
                self.runCommand(["setVariable", "TOOLCHAIN_TARGET_TASK", " ".join(self.toolchain_packages)])
                targets.append(self.toolchain)
            self.runCommand(["buildTargets", targets, self.default_task])
            self.server.runCommand(["buildTargets", self.build_queue, "build"])
            self.build_queue = []
        self.current_command = None

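Both generations of run_next_command() implement the same idea: a FIFO of cheap server commands, advanced one step per completed command. A sketch of kicking the queue, mirroring generate_configuration() later in this hunk:

# Sketch: queue the configuration lookups, then start the pump.
handler.commands_async.append(handler.SUB_PARSE_CONFIG)
handler.commands_async.append(handler.SUB_PATH_LAYERS)
handler.run_next_command(handler.GENERATE_CONFIGURATION)
# Each bb.command.CommandCompleted event re-enters run_next_command(),
# popping the next SUB_* constant until the queue drains, at which point
# "command-succeeded" is emitted with the initiating GENERATE_* id.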
    def handle_event(self, event):
    def handle_event(self, event, running_build, pbar):
        if not event:
            return
            return

        # If we're running a build, use the RunningBuild event handler
        if self.building:
            self.current_phase = "building"
            self.build.handle_event(event)

        if isinstance(event, bb.event.PackageInfo):
            self.package_model.populate(event._pkginfolist)
            self.emit("package-populated")
            self.run_next_command()

        elif isinstance(event, bb.event.SanityCheckPassed):
            self.run_next_command()

        elif isinstance(event, bb.event.SanityCheckFailed):
            self.emit("sanity-failed", event._msg)

        elif isinstance(event, logging.LogRecord):
            if event.levelno >= logging.ERROR:
                self.error_msg += event.msg + '\n'

            running_build.handle_event(event)
        elif isinstance(event, bb.event.TargetsTreeGenerated):
            self.current_phase = "data generation"
            if event._model:
                self.recipe_model.populate(event._model)
                self.emit("recipe-populated")
                self.model.populate(event._model)
        elif isinstance(event, bb.event.ConfigFilesFound):
            self.current_phase = "configuration lookup"
            var = event._variable
            values = event._values
            values.sort()
            self.emit("config-updated", var, values)
            if var == "distro":
                distros = event._values
                distros.sort()
                self.emit("distros-updated", distros)
            elif var == "machine":
                machines = event._values
                machines.sort()
                self.emit("machines-updated", machines)
            elif var == "machine-sdk":
                sdk_machines = event._values
                sdk_machines.sort()
                self.emit("sdk-machines-updated", sdk_machines)
        elif isinstance(event, bb.event.ConfigFilePathFound):
            self.current_phase = "configuration lookup"
            path = event._path
            self.emit("config-found", path)
        elif isinstance(event, bb.event.FilesMatchingFound):
            self.current_phase = "configuration lookup"
            # FIXME: hard coding, should at least be a variable shared between
@@ -204,314 +183,161 @@ class HobHandler(gobject.GObject):
            fs, sep, format = classname.rpartition("_")
            formats.append(format)
            formats.sort()
            self.emit("package-formats-updated", formats)
            self.emit("package-formats-found", formats)
        elif isinstance(event, bb.command.CommandCompleted):
            self.current_phase = None
            self.run_next_command()
        elif isinstance(event, bb.command.CommandFailed):
            self.commands_async = []
            self.clear_busy()
            self.emit("command-failed", self.error_msg)
            self.error_msg = ""
            if self.building:
                self.building = False
        elif isinstance(event, (bb.event.ParseStarted,
                                bb.event.CacheLoadStarted,
                                bb.event.TreeDataPreparationStarted,
                                )):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = 0
            message["total"] = None
            message["title"] = "Parsing recipes: "
            self.emit("parsing-started", message)
        elif isinstance(event, (bb.event.ParseProgress,
                                bb.event.CacheLoadProgress,
                                bb.event.TreeDataPreparationProgress)):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = event.current
            message["total"] = event.total
            message["title"] = "Parsing recipes: "
            self.emit("parsing", message)
        elif isinstance(event, (bb.event.ParseCompleted,
                                bb.event.CacheLoadCompleted,
                                bb.event.TreeDataPreparationCompleted)):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = event.total
            message["total"] = event.total
            message["title"] = "Parsing recipes: "
            self.emit("parsing-completed", message)

            self.emit("command-failed", event.error)
        elif isinstance(event, bb.event.CacheLoadStarted):
            self.current_phase = "cache loading"
            bb.ui.crumbs.hobeventhandler.progress_total = event.total
            pbar.set_text("Loading cache: %s/%s" % (0, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.CacheLoadProgress):
            self.current_phase = "cache loading"
            pbar.set_text("Loading cache: %s/%s" % (event.current, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.CacheLoadCompleted):
            self.current_phase = None
            pbar.set_text("Loading...")
        elif isinstance(event, bb.event.ParseStarted):
            self.current_phase = "recipe parsing"
            if event.total == 0:
                return
            bb.ui.crumbs.hobeventhandler.progress_total = event.total
            pbar.set_text("Processing recipes: %s/%s" % (0, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.ParseProgress):
            self.current_phase = "recipe parsing"
            pbar.set_text("Processing recipes: %s/%s" % (event.current, bb.ui.crumbs.hobeventhandler.progress_total))
        elif isinstance(event, bb.event.ParseCompleted):
            self.current_phase = None
            pbar.set_fraction(1.0)
            pbar.set_text("Loading...")
        elif isinstance(event, logging.LogRecord):
            format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
            if event.levelno >= format.CRITICAL:
                self.emit("fatal-error", event.getMessage(), self.current_phase)
        return

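handle_event() is driven from the GLib main loop; the older code in this hunk keeps the pump (event_handle_idle_func, defined just below) inside the handler. A sketch of the wiring, assuming the usual BitBake UI entry-point arguments:

import gobject

# Sketch: drain BitBake's event queue whenever the GTK main loop is idle.
# eventHandler, running_build and pbar come from the surrounding UI code;
# event_handle_idle_func returns True so glib keeps rescheduling it.
gobject.idle_add(handler.event_handle_idle_func, eventHandler, running_build, pbar)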
    def init_cooker(self):
        self.runCommand(["initCooker"])

    def set_extra_inherit(self, bbclass):
        inherits = self.runCommand(["getVariable", "INHERIT"]) or ""
        inherits = inherits + " " + bbclass
        self.runCommand(["setVariable", "INHERIT", inherits])

    def set_bblayers(self, bblayers):
        self.runCommand(["setVariable", "BBLAYERS_HOB", " ".join(bblayers)])
    def event_handle_idle_func(self, eventHandler, running_build, pbar):
        # Consume as many messages as we can in the time available to us
        event = eventHandler.getEvent()
        while event:
            self.handle_event(event, running_build, pbar)
            event = eventHandler.getEvent()
        return True

    def set_machine(self, machine):
        if machine:
            self.runCommand(["setVariable", "MACHINE_HOB", machine])
        self.server.runCommand(["setVariable", "MACHINE", machine])

    def set_sdk_machine(self, sdk_machine):
        self.runCommand(["setVariable", "SDKMACHINE_HOB", sdk_machine])

    def set_image_fstypes(self, image_fstypes):
        self.runCommand(["setVariable", "IMAGE_FSTYPES", image_fstypes])
        self.server.runCommand(["setVariable", "SDKMACHINE", sdk_machine])

    def set_distro(self, distro):
        self.runCommand(["setVariable", "DISTRO_HOB", distro])
        self.server.runCommand(["setVariable", "DISTRO", distro])

    def set_package_format(self, format):
        package_classes = ""
        for pkgfmt in format.split():
            package_classes += ("package_%s" % pkgfmt + " ")
        self.runCommand(["setVariable", "PACKAGE_CLASSES_HOB", package_classes])
        self.server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_%s" % format])

    def reload_data(self, config=None):
        img = self.model.selected_image
        selected_packages, _ = self.model.get_selected_packages()
        self.emit("reload-triggered", img, " ".join(selected_packages))
        self.current_command = self.REPARSE_FILES
        self.run_next_command()

    def set_bbthreads(self, threads):
        self.runCommand(["setVariable", "BB_NUMBER_THREADS_HOB", threads])
        self.server.runCommand(["setVariable", "BB_NUMBER_THREADS", threads])

    def set_pmake(self, threads):
        pmake = "-j %s" % threads
        self.runCommand(["setVariable", "PARALLEL_MAKE_HOB", pmake])
        self.server.runCommand(["setVariable", "PARALLEL_MAKE", pmake])

    def set_dl_dir(self, directory):
        self.runCommand(["setVariable", "DL_DIR_HOB", directory])

    def set_sstate_dir(self, directory):
        self.runCommand(["setVariable", "SSTATE_DIR_HOB", directory])

    def set_sstate_mirror(self, url):
        self.runCommand(["setVariable", "SSTATE_MIRROR_HOB", url])

    def set_extra_size(self, image_extra_size):
        self.runCommand(["setVariable", "IMAGE_ROOTFS_EXTRA_SPACE", str(image_extra_size)])

    def set_rootfs_size(self, image_rootfs_size):
        self.runCommand(["setVariable", "IMAGE_ROOTFS_SIZE", str(image_rootfs_size)])

    def set_incompatible_license(self, incompat_license):
        self.runCommand(["setVariable", "INCOMPATIBLE_LICENSE_HOB", incompat_license])

    def set_extra_config(self, extra_setting):
        for key in extra_setting.keys():
            value = extra_setting[key]
            self.runCommand(["setVariable", key, value])

    def set_config_filter(self, config_filter):
        self.runCommand(["setConfFilter", config_filter])

    def set_http_proxy(self, http_proxy):
        self.runCommand(["setVariable", "http_proxy", http_proxy])

    def set_https_proxy(self, https_proxy):
        self.runCommand(["setVariable", "https_proxy", https_proxy])

    def set_ftp_proxy(self, ftp_proxy):
        self.runCommand(["setVariable", "ftp_proxy", ftp_proxy])

    def set_git_proxy(self, host, port):
        self.runCommand(["setVariable", "GIT_PROXY_HOST", host])
        self.runCommand(["setVariable", "GIT_PROXY_PORT", port])

    def set_cvs_proxy(self, host, port):
        self.runCommand(["setVariable", "CVS_PROXY_HOST", host])
        self.runCommand(["setVariable", "CVS_PROXY_PORT", port])

    def request_package_info(self):
        self.commands_async.append(self.SUB_GENERATE_PKGINFO)
        self.run_next_command(self.POPULATE_PACKAGEINFO)

    def trigger_sanity_check(self):
        self.commands_async.append(self.SUB_SANITY_CHECK)
        self.run_next_command(self.SANITY_CHECK)

    def generate_configuration(self):
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_PATH_LAYERS)
        self.commands_async.append(self.SUB_FILES_DISTRO)
        self.commands_async.append(self.SUB_FILES_MACH)
        self.commands_async.append(self.SUB_FILES_SDKMACH)
        self.commands_async.append(self.SUB_MATCH_CLASS)
        self.run_next_command(self.GENERATE_CONFIGURATION)

    def generate_recipes(self):
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_GNERATE_TGTS)
        self.run_next_command(self.GENERATE_RECIPES)

    def generate_packages(self, tgts, default_task="build"):
    def build_targets(self, tgts, configurator, build_type="image"):
        self.build_type = build_type
        targets = []
        nbbp = None
        nbbf = None
        targets.extend(tgts)
        self.recipe_queue = targets
        self.default_task = default_task
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_BUILD_RECIPES)
        self.run_next_command(self.GENERATE_PACKAGES)
        if self.build_toolchain and self.build_toolchain_headers:
            targets.append("meta-toolchain-sdk")
        elif self.build_toolchain:
            targets.append("meta-toolchain")
        self.build_queue = targets

    def generate_image(self, image, toolchain, image_packages=[], toolchain_packages=[], default_task="build"):
        self.image = image
        self.toolchain = toolchain
        self.package_queue = image_packages
        self.toolchain_packages = toolchain_packages
        self.default_task = default_task
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_BUILD_IMAGE)
        self.run_next_command(self.GENERATE_IMAGE)
        if not self.bbpath_ok:
            if self.image_dir in self.bbpath.split(":"):
                self.bbpath_ok = True
            else:
                nbbp = self.image_dir

    def build_succeeded_async(self):
        self.building = False
        if not self.bbfiles_ok:
            import re
            pattern = r"%s/\*.bb" % self.image_dir

    def build_failed_async(self):
        self.initcmd = None
        self.commands_async = []
        self.building = False
            for files in self.bbfiles.split(" "):
                if re.match(pattern, files):
                    self.bbfiles_ok = True

    def cancel_parse(self):
        self.runCommand(["stateStop"])
            if not self.bbfiles_ok:
                nbbf = "%s/*.bb" % self.image_dir

        if nbbp or nbbf:
            configurator.insertTempBBPath(nbbp, nbbf)
            self.bbpath_ok = True
            self.bbfiles_ok = True

        self.current_command = self.REPARSE_FILES
        self.run_next_command()
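A sketch of the image-build entry point as the newer code shapes it (the target and package names are illustrative):

# Sketch: queue a core-image build plus a toolchain, then let the
# command pump reparse configuration and hand off to buildTargets.
handler.generate_image("core-image-minimal",
                       "meta-toolchain",
                       image_packages=["dropbear"],
                       toolchain_packages=["gcc"],
                       default_task="build")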
    def cancel_build(self, force=False):
        if force:
            # Force the cooker to stop as quickly as possible
            self.runCommand(["stateStop"])
            self.server.runCommand(["stateStop"])
        else:
            # Wait for tasks to complete before shutting down, this helps
            # leave the workdir in a usable state
            self.runCommand(["stateShutdown"])
            self.server.runCommand(["stateShutdown"])

    def reset_build(self):
        self.build.reset()
    def set_incompatible_license(self, incompatible):
        self.server.runCommand(["setVariable", "INCOMPATIBLE_LICENSE", incompatible])

    def _remove_redundant(self, string):
        ret = []
        for i in string.split():
            if i not in ret:
                ret.append(i)
        return " ".join(ret)
    def toggle_toolchain(self, enabled):
        if self.build_toolchain != enabled:
            self.build_toolchain = enabled

    def get_parameters(self):
        # retrieve the parameters from bitbake
        params = {}
        params["core_base"] = self.runCommand(["getVariable", "COREBASE"]) or ""
        hob_layer = params["core_base"] + "/meta-hob"
        params["layer"] = self.runCommand(["getVariable", "BBLAYERS"]) or ""
        if hob_layer not in params["layer"].split():
            params["layer"] += (" " + hob_layer)
        params["dldir"] = self.runCommand(["getVariable", "DL_DIR"]) or ""
        params["machine"] = self.runCommand(["getVariable", "MACHINE"]) or ""
        params["distro"] = self.runCommand(["getVariable", "DISTRO"]) or "defaultsetup"
        params["pclass"] = self.runCommand(["getVariable", "PACKAGE_CLASSES"]) or ""
        params["sstatedir"] = self.runCommand(["getVariable", "SSTATE_DIR"]) or ""
        params["sstatemirror"] = self.runCommand(["getVariable", "SSTATE_MIRROR"]) or ""
    def toggle_toolchain_headers(self, enabled):
        if self.build_toolchain_headers != enabled:
            self.build_toolchain_headers = enabled

        num_threads = self.runCommand(["getCpuCount"])
        if not num_threads:
            num_threads = 1
            max_threads = 65536
        else:
            try:
                num_threads = int(num_threads)
                max_threads = 16 * num_threads
            except:
                num_threads = 1
                max_threads = 65536
        params["max_threads"] = max_threads
    def set_fstypes(self, fstypes):
        self.server.runCommand(["setVariable", "IMAGE_FSTYPES", fstypes])

        bbthread = self.runCommand(["getVariable", "BB_NUMBER_THREADS"])
        if not bbthread:
            bbthread = num_threads
        else:
            try:
                bbthread = int(bbthread)
            except:
                bbthread = num_threads
        params["bbthread"] = bbthread
    def add_image_output_type(self, output_type):
        if output_type not in self.image_output_types:
            self.image_output_types.append(output_type)
            fstypes = " ".join(self.image_output_types).lstrip(" ")
            self.set_fstypes(fstypes)
        return self.image_output_types

        pmake = self.runCommand(["getVariable", "PARALLEL_MAKE"])
        if not pmake:
            pmake = num_threads
        elif isinstance(pmake, int):
            pass
        else:
            try:
                pmake = int(pmake.lstrip("-j "))
            except:
                pmake = num_threads
        params["pmake"] = "-j %s" % pmake
    def remove_image_output_type(self, output_type):
        if output_type in self.image_output_types:
            ind = self.image_output_types.index(output_type)
            self.image_output_types.pop(ind)
            fstypes = " ".join(self.image_output_types).lstrip(" ")
            self.set_fstypes(fstypes)
        return self.image_output_types

        params["image_addr"] = self.runCommand(["getVariable", "DEPLOY_DIR_IMAGE"]) or ""
    def get_image_deploy_dir(self):
        return self.image_out_dir

        image_extra_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_EXTRA_SPACE"])
        if not image_extra_size:
            image_extra_size = 0
        else:
            try:
                image_extra_size = int(image_extra_size)
            except:
                image_extra_size = 0
        params["image_extra_size"] = image_extra_size
    def make_temp_dir(self):
        bb.utils.mkdirhier(self.image_dir)

        image_rootfs_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_SIZE"])
        if not image_rootfs_size:
            image_rootfs_size = 0
        else:
            try:
                image_rootfs_size = int(image_rootfs_size)
            except:
                image_rootfs_size = 0
        params["image_rootfs_size"] = image_rootfs_size
    def remove_temp_dir(self):
        bb.utils.remove(self.image_dir, True)

        image_overhead_factor = self.runCommand(["getVariable", "IMAGE_OVERHEAD_FACTOR"])
        if not image_overhead_factor:
            image_overhead_factor = 1
        else:
            try:
                image_overhead_factor = float(image_overhead_factor)
            except:
                image_overhead_factor = 1
        params['image_overhead_factor'] = image_overhead_factor

        params["incompat_license"] = self._remove_redundant(self.runCommand(["getVariable", "INCOMPATIBLE_LICENSE"]) or "")
        params["sdk_machine"] = self.runCommand(["getVariable", "SDKMACHINE"]) or self.runCommand(["getVariable", "SDK_ARCH"]) or ""

        params["image_fstypes"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_FSTYPES"]) or "")

        params["image_types"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_TYPES"]) or "")

        params["conf_version"] = self.runCommand(["getVariable", "CONF_VERSION"]) or ""
        params["lconf_version"] = self.runCommand(["getVariable", "LCONF_VERSION"]) or ""

        params["runnable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_IMAGE_TYPES"]) or "")
        params["runnable_machine_patterns"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_MACHINE_PATTERNS"]) or "")
        params["deployable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "DEPLOYABLE_IMAGE_TYPES"]) or "")
        params["kernel_image_type"] = self.runCommand(["getVariable", "KERNEL_IMAGETYPE"]) or ""
        params["tmpdir"] = self.runCommand(["getVariable", "TMPDIR"]) or ""
        params["distro_version"] = self.runCommand(["getVariable", "DISTRO_VERSION"]) or ""
        params["target_os"] = self.runCommand(["getVariable", "TARGET_OS"]) or ""
        params["target_arch"] = self.runCommand(["getVariable", "TARGET_ARCH"]) or ""
        params["tune_pkgarch"] = self.runCommand(["getVariable", "TUNE_PKGARCH"]) or ""
        params["bb_version"] = self.runCommand(["getVariable", "BB_MIN_VERSION"]) or ""

        params["default_task"] = self.runCommand(["getVariable", "BB_DEFAULT_TASK"]) or "build"

        params["git_proxy_host"] = self.runCommand(["getVariable", "GIT_PROXY_HOST"]) or ""
        params["git_proxy_port"] = self.runCommand(["getVariable", "GIT_PROXY_PORT"]) or ""

        params["http_proxy"] = self.runCommand(["getVariable", "http_proxy"]) or ""
        params["ftp_proxy"] = self.runCommand(["getVariable", "ftp_proxy"]) or ""
        params["https_proxy"] = self.runCommand(["getVariable", "https_proxy"]) or ""

        params["cvs_proxy_host"] = self.runCommand(["getVariable", "CVS_PROXY_HOST"]) or ""
        params["cvs_proxy_port"] = self.runCommand(["getVariable", "CVS_PROXY_PORT"]) or ""

        params["image_white_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_WHITE_PATTERN"]) or ""
        params["image_black_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_BLACK_PATTERN"]) or ""
        return params
    def get_temp_recipe_path(self, name):
        timestamp = datetime.date.today().isoformat()
        image_file = "hob-%s-variant-%s.bb" % (name, timestamp)
        recipepath = os.path.join(self.image_dir, image_file)
        return recipepath
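get_parameters() repeats one defensive pattern over and over: read a variable, coerce it, and fall back on any failure. A condensed sketch of that pattern (the helper name is illustrative, not part of the class):

def _get_int_var(handler, name, fallback):
    # Sketch of the coercion used throughout get_parameters(): missing
    # variables and unparsable values both degrade to the fallback.
    value = handler.runCommand(["getVariable", name])
    try:
        return int(value)
    except (TypeError, ValueError):
        return fallback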
@@ -1,753 +0,0 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
from bb.ui.crumbs.hobpages import HobPage

#
# PackageListModel
#
class PackageListModel(gtk.TreeStore):
    """
    This class defines a gtk.TreeStore subclass which will convert the output
    of the bb.event.TargetsTreeGenerated event into a gtk.TreeStore whilst also
    providing convenience functions to access gtk.TreeModel subclasses which
    provide filtered views of the data.
    """
    (COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_BINB, COL_INC, COL_FADE_INC, COL_FONT) = range(13)

    __gsignals__ = {
        "package-selection-changed" : (gobject.SIGNAL_RUN_LAST,
                                       gobject.TYPE_NONE,
                                       ()),
    }

    __toolchain_required_packages__ = ["task-core-standalone-sdk-target", "task-core-standalone-sdk-target-dbg"]

    def __init__(self):

        self.contents = None
        self.images = None
        self.pkgs_size = 0
        self.pn_path = {}
        self.pkg_path = {}
        self.rprov_pkg = {}

        gtk.TreeStore.__init__ (self,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_BOOLEAN,
                                gobject.TYPE_BOOLEAN,
                                gobject.TYPE_STRING)

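The thirteen gobject types line up positionally with the COL_* indices declared above. A sketch of reading one row back out through the cached tree paths (standard pygtk model indexing; the package name is illustrative):

# Sketch: fetch a package row by its cached tree path.
model = PackageListModel()
path = model.find_path_for_item("dropbear")
if path:
    name = model[path][PackageListModel.COL_NAME]
    size = model[path][PackageListModel.COL_SIZE]
    print "%s (%s)" % (name, size)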
"""
|
||||
Find the model path for the item_name
|
||||
Returns the path in the model or None
|
||||
"""
|
||||
def find_path_for_item(self, item_name):
|
||||
pkg = item_name
|
||||
if item_name not in self.pkg_path.keys():
|
||||
if item_name not in self.rprov_pkg.keys():
|
||||
return None
|
||||
pkg = self.rprov_pkg[item_name]
|
||||
if pkg not in self.pkg_path.keys():
|
||||
return None
|
||||
|
||||
return self.pkg_path[pkg]
|
||||
|
||||
def find_item_for_path(self, item_path):
|
||||
return self[item_path][self.COL_NAME]
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is an item specified by filter
|
||||
"""
|
||||
def tree_model_filter(self, model, it, filter):
|
||||
for key in filter.keys():
|
||||
if model.get_value(it, key) not in filter[key]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModelSort
|
||||
containing only the items specified by filter
|
||||
"""
|
||||
def tree_model(self, filter):
|
||||
model = self.filter_new()
|
||||
model.set_visible_func(self.tree_model_filter, filter)
|
||||
|
||||
sort = gtk.TreeModelSort(model)
|
||||
sort.set_sort_column_id(PackageListModel.COL_NAME, gtk.SORT_ASCENDING)
|
||||
sort.set_default_sort_func(None)
|
||||
return sort
|
||||
|
||||
def convert_vpath_to_path(self, view_model, view_path):
|
||||
# view_model is the model sorted
|
||||
# get the path of the model filtered
|
||||
filtered_model_path = view_model.convert_path_to_child_path(view_path)
|
||||
# get the model filtered
|
||||
filtered_model = view_model.get_model()
|
||||
# get the path of the original model
|
||||
path = filtered_model.convert_path_to_child_path(filtered_model_path)
|
||||
return path
|
||||
|
||||
def convert_path_to_vpath(self, view_model, path):
|
||||
name = self.find_item_for_path(path)
|
||||
it = view_model.get_iter_first()
|
||||
while it:
|
||||
child_it = view_model.iter_children(it)
|
||||
while child_it:
|
||||
view_name = view_model.get_value(child_it, self.COL_NAME)
|
||||
if view_name == name:
|
||||
view_path = view_model.get_path(child_it)
|
||||
return view_path
|
||||
child_it = view_model.iter_next(child_it)
|
||||
it = view_model.iter_next(it)
|
||||
return None
|
||||
|
||||
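tree_model() expects filter to map column indices onto lists of acceptable values; that is exactly what tree_model_filter() checks. A sketch of wiring a filtered, sorted view onto a tree widget:

import gtk

# Sketch: show only rows already marked as included.
filter = { PackageListModel.COL_INC: [True] }
sorted_model = model.tree_model(filter)
view = gtk.TreeView(sorted_model)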
"""
|
||||
The populate() function takes as input the data from a
|
||||
bb.event.PackageInfo event and populates the package list.
|
||||
"""
|
||||
def populate(self, pkginfolist):
|
||||
self.clear()
|
||||
self.pkgs_size = 0
|
||||
self.pn_path = {}
|
||||
self.pkg_path = {}
|
||||
self.rprov_pkg = {}
|
||||
|
||||
for pkginfo in pkginfolist:
|
||||
pn = pkginfo['PN']
|
||||
pv = pkginfo['PV']
|
||||
pr = pkginfo['PR']
|
||||
if pn in self.pn_path.keys():
|
||||
pniter = self.get_iter(self.pn_path[pn])
|
||||
else:
|
||||
pniter = self.append(None)
|
||||
self.set(pniter, self.COL_NAME, pn + '-' + pv + '-' + pr,
|
||||
self.COL_INC, False)
|
||||
self.pn_path[pn] = self.get_path(pniter)
|
||||
|
||||
pkg = pkginfo['PKG']
|
||||
pkgv = pkginfo['PKGV']
|
||||
pkgr = pkginfo['PKGR']
|
||||
pkgsize = pkginfo['PKGSIZE_%s' % pkg] if 'PKGSIZE_%s' % pkg in pkginfo.keys() else "0"
|
||||
pkg_rename = pkginfo['PKG_%s' % pkg] if 'PKG_%s' % pkg in pkginfo.keys() else ""
|
||||
section = pkginfo['SECTION_%s' % pkg] if 'SECTION_%s' % pkg in pkginfo.keys() else ""
|
||||
summary = pkginfo['SUMMARY_%s' % pkg] if 'SUMMARY_%s' % pkg in pkginfo.keys() else ""
|
||||
rdep = pkginfo['RDEPENDS_%s' % pkg] if 'RDEPENDS_%s' % pkg in pkginfo.keys() else ""
|
||||
rrec = pkginfo['RRECOMMENDS_%s' % pkg] if 'RRECOMMENDS_%s' % pkg in pkginfo.keys() else ""
|
||||
rprov = pkginfo['RPROVIDES_%s' % pkg] if 'RPROVIDES_%s' % pkg in pkginfo.keys() else ""
|
||||
for i in rprov.split():
|
||||
self.rprov_pkg[i] = pkg
|
||||
|
||||
if 'ALLOW_EMPTY_%s' % pkg in pkginfo.keys():
|
||||
allow_empty = pkginfo['ALLOW_EMPTY_%s' % pkg]
|
||||
elif 'ALLOW_EMPTY' in pkginfo.keys():
|
||||
allow_empty = pkginfo['ALLOW_EMPTY']
|
||||
else:
|
||||
allow_empty = ""
|
||||
|
||||
if pkgsize == "0" and not allow_empty:
|
||||
continue
|
||||
|
||||
# pkgsize is in KB
|
||||
size = HobPage._size_to_string(HobPage._string_to_size(pkgsize + ' KB'))
|
||||
|
||||
it = self.append(pniter)
|
||||
self.pkg_path[pkg] = self.get_path(it)
|
||||
self.set(it, self.COL_NAME, pkg, self.COL_VER, pkgv,
|
||||
self.COL_REV, pkgr, self.COL_RNM, pkg_rename,
|
||||
self.COL_SEC, section, self.COL_SUM, summary,
|
||||
self.COL_RDEP, rdep + ' ' + rrec,
|
||||
self.COL_RPROV, rprov, self.COL_SIZE, size,
|
||||
self.COL_BINB, "", self.COL_INC, False, self.COL_FONT, '10')
|
||||
|
||||
"""
|
||||
Check whether the item at item_path is included or not
|
||||
"""
|
||||
def path_included(self, item_path):
|
||||
return self[item_path][self.COL_INC]
|
||||
|
||||
"""
|
||||
Update the model, send out the notification.
|
||||
"""
|
||||
def selection_change_notification(self):
|
||||
self.emit("package-selection-changed")
|
||||
|
||||
"""
|
||||
Mark a certain package as selected.
|
||||
All its dependencies are marked as selected.
|
||||
The recipe provides the package is marked as selected.
|
||||
If user explicitly selects a recipe, all its providing packages are selected
|
||||
"""
|
||||
def include_item(self, item_path, binb=""):
|
||||
if self.path_included(item_path):
|
||||
return
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_rdep = self[item_path][self.COL_RDEP]
|
||||
|
||||
self[item_path][self.COL_INC] = True
|
||||
|
||||
it = self.get_iter(item_path)
|
||||
|
||||
# If user explicitly selects a recipe, all its providing packages are selected.
|
||||
if not self[item_path][self.COL_VER] and binb == "User Selected":
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
child_path = self.get_path(child_it)
|
||||
child_included = self.path_included(child_path)
|
||||
if not child_included:
|
||||
self.include_item(child_path, binb="User Selected")
|
||||
child_it = self.iter_next(child_it)
|
||||
return
|
||||
|
||||
# The recipe provides the package is also marked as selected
|
||||
parent_it = self.iter_parent(it)
|
||||
if parent_it:
|
||||
parent_path = self.get_path(parent_it)
|
||||
self[parent_path][self.COL_INC] = True
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if binb and not binb in item_bin:
|
||||
item_bin.append(binb)
|
||||
self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
|
||||
|
||||
if item_rdep:
|
||||
# Ensure all of the items deps are included and, where appropriate,
|
||||
# add this item to their COL_BINB
|
||||
for dep in item_rdep.split(" "):
|
||||
if dep.startswith('('):
|
||||
continue
|
||||
# If the contents model doesn't already contain dep, add it
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_included = self.path_included(dep_path)
|
||||
|
||||
if dep_included and not dep in item_bin:
|
||||
# don't set the COL_BINB to this item if the target is an
|
||||
# item in our own COL_BINB
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if not item_name in dep_bin:
|
||||
dep_bin.append(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
elif not dep_included:
|
||||
self.include_item(dep_path, binb=item_name)
|
||||
|
||||
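include_item() walks RDEPENDS recursively and records who brought each package in via COL_BINB ("brought in by"). A sketch of the observable effect (the dependency names are illustrative and depend on the populated model):

# Sketch: selecting one package pulls in its runtime dependencies and
# tags each with the name of the package that brought it in.
model.include_item(model.find_path_for_item("dropbear"), binb="User Selected")
dep_path = model.find_path_for_item("update-rc.d")   # illustrative dep
if dep_path:
    print model[dep_path][PackageListModel.COL_BINB]  # -> "dropbear"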
"""
|
||||
Mark a certain package as de-selected.
|
||||
All other packages that depends on this package are marked as de-selected.
|
||||
If none of the packages provided by the recipe, the recipe should be marked as de-selected.
|
||||
If user explicitly de-select a recipe, all its providing packages are de-selected.
|
||||
"""
|
||||
def exclude_item(self, item_path):
|
||||
if not self.path_included(item_path):
|
||||
return
|
||||
|
||||
self[item_path][self.COL_INC] = False
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_rdep = self[item_path][self.COL_RDEP]
|
||||
it = self.get_iter(item_path)
|
||||
|
||||
# If user explicitly de-select a recipe, all its providing packages are de-selected.
|
||||
if not self[item_path][self.COL_VER]:
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
child_path = self.get_path(child_it)
|
||||
child_included = self[child_path][self.COL_INC]
|
||||
if child_included:
|
||||
self.exclude_item(child_path)
|
||||
child_it = self.iter_next(child_it)
|
||||
return
|
||||
|
||||
# If none of the packages provided by the recipe, the recipe should be marked as de-selected.
|
||||
parent_it = self.iter_parent(it)
|
||||
peer_iter = self.iter_children(parent_it)
|
||||
enabled = 0
|
||||
while peer_iter:
|
||||
peer_path = self.get_path(peer_iter)
|
||||
if self[peer_path][self.COL_INC]:
|
||||
enabled = 1
|
||||
break
|
||||
peer_iter = self.iter_next(peer_iter)
|
||||
if not enabled:
|
||||
parent_path = self.get_path(parent_it)
|
||||
self[parent_path][self.COL_INC] = False
|
||||
|
||||
# All packages that depends on this package are de-selected.
|
||||
if item_rdep:
|
||||
for dep in item_rdep.split(" "):
|
||||
if dep.startswith('('):
|
||||
continue
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if item_name in dep_bin:
|
||||
dep_bin.remove(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if item_bin:
|
||||
for binb in item_bin:
|
||||
binb_path = self.find_path_for_item(binb)
|
||||
if not binb_path:
|
||||
continue
|
||||
self.exclude_item(binb_path)
|
||||
|
||||
"""
|
||||
Package model may be incomplete, therefore when calling the
|
||||
set_selected_packages(), some packages will not be set included.
|
||||
Return the un-set packages list.
|
||||
"""
|
||||
def set_selected_packages(self, packagelist):
|
||||
left = []
|
||||
for pn in packagelist:
|
||||
if pn in self.pkg_path.keys():
|
||||
path = self.pkg_path[pn]
|
||||
self.include_item(item_path=path,
|
||||
binb="User Selected")
|
||||
else:
|
||||
left.append(pn)
|
||||
|
||||
self.selection_change_notification()
|
||||
return left
|
||||
|
||||
def get_user_selected_packages(self):
|
||||
packagelist = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
if self.get_value(child_it, self.COL_INC):
|
||||
binb = self.get_value(child_it, self.COL_BINB)
|
||||
if not binb or binb == "User Selected":
|
||||
name = self.get_value(child_it, self.COL_NAME)
|
||||
packagelist.append(name)
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return packagelist
|
||||
|
||||
def get_selected_packages(self):
|
||||
packagelist = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
if self.get_value(child_it, self.COL_INC):
|
||||
name = self.get_value(child_it, self.COL_NAME)
|
||||
packagelist.append(name)
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return packagelist
|
||||
|
||||
def get_selected_packages_toolchain(self):
|
||||
packagelist = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
name = self.get_value(child_it, self.COL_NAME)
|
||||
inc = self.get_value(child_it, self.COL_INC)
|
||||
if inc or name.endswith("-dev") or name.endswith("-dbg"):
|
||||
packagelist.append(name)
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return list(set(packagelist + self.__toolchain_required_packages__));
|
||||
"""
|
||||
Return the selected package size, unit is B.
|
||||
"""
|
||||
def get_packages_size(self):
|
||||
packages_size = 0
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
if self.get_value(child_it, self.COL_INC):
|
||||
str_size = self.get_value(child_it, self.COL_SIZE)
|
||||
if not str_size:
|
||||
continue
|
||||
|
||||
packages_size += HobPage._string_to_size(str_size)
|
||||
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
return packages_size
|
||||
|
||||
"""
|
||||
Empty self.contents by setting the include of each entry to None
|
||||
"""
|
||||
def reset(self):
|
||||
self.pkgs_size = 0
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
self.set(it, self.COL_INC, False)
|
||||
child_it = self.iter_children(it)
|
||||
while child_it:
|
||||
self.set(child_it,
|
||||
self.COL_INC, False,
|
||||
self.COL_BINB, "")
|
||||
child_it = self.iter_next(child_it)
|
||||
it = self.iter_next(it)
|
||||
|
||||
self.selection_change_notification()
|
||||
|
||||
"""
|
||||
Resync the state of included items to a backup column before performing the fadeout visible effect
|
||||
"""
|
||||
def resync_fadeout_column(self, model_first_iter=None):
|
||||
it = model_first_iter
|
||||
while it:
|
||||
active = self.get_value(it, self.COL_INC)
|
||||
self.set(it, self.COL_FADE_INC, active)
|
||||
if self.iter_has_child(it):
|
||||
self.resync_fadeout_column(self.iter_children(it))
|
||||
|
||||
it = self.iter_next(it)
|
||||
|
||||
#
# RecipeListModel
#
class RecipeListModel(gtk.ListStore):
    """
    This class defines a gtk.ListStore subclass which will convert the output
    of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
    providing convenience functions to access gtk.TreeModel subclasses which
    provide filtered views of the data.
    """
    (COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_INSTALL, COL_PN, COL_FADE_INC) = range(12)

    __custom_image__ = "Create your own image"

    __gsignals__ = {
        "recipe-selection-changed" : (gobject.SIGNAL_RUN_LAST,
                                      gobject.TYPE_NONE,
                                      ()),
    }

    def __init__(self):
        gtk.ListStore.__init__ (self,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_BOOLEAN,
                                gobject.TYPE_BOOLEAN,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_BOOLEAN)

    """
    Find the model path for the item_name
    Returns the path in the model or None
    """
    def find_path_for_item(self, item_name):
        if self.non_target_name(item_name) or item_name not in self.pn_path.keys():
            return None
        else:
            return self.pn_path[item_name]

    def find_item_for_path(self, item_path):
        return self[item_path][self.COL_NAME]

"""
|
||||
Helper method to determine whether name is a target pn
|
||||
"""
|
||||
def non_target_name(self, name):
|
||||
if name and ('-native' in name):
|
||||
return True
|
||||
return False
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is an item specified by filter
|
||||
"""
|
||||
def tree_model_filter(self, model, it, filter):
|
||||
name = model.get_value(it, self.COL_NAME)
|
||||
if self.non_target_name(name):
|
||||
return False
|
||||
|
||||
for key in filter.keys():
|
||||
if model.get_value(it, key) not in filter[key]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def exclude_item_sort_func(self, model, iter1, iter2):
|
||||
val1 = model.get_value(iter1, RecipeListModel.COL_FADE_INC)
|
||||
val2 = model.get_value(iter2, RecipeListModel.COL_INC)
|
||||
return ((val1 == True) and (val2 == False))
|
||||
|
||||
"""
|
||||
Create, if required, and return a filtered gtk.TreeModelSort
|
||||
containing only the items which are items specified by filter
|
||||
"""
|
||||
def tree_model(self, filter, excluded_items_ahead=False):
|
||||
model = self.filter_new()
|
||||
model.set_visible_func(self.tree_model_filter, filter)
|
||||
|
||||
sort = gtk.TreeModelSort(model)
|
||||
if excluded_items_ahead:
|
||||
sort.set_default_sort_func(self.exclude_item_sort_func)
|
||||
else:
|
||||
sort.set_sort_column_id(RecipeListModel.COL_NAME, gtk.SORT_ASCENDING)
|
||||
sort.set_default_sort_func(None)
|
||||
return sort
|
||||
|
||||
def convert_vpath_to_path(self, view_model, view_path):
|
||||
filtered_model_path = view_model.convert_path_to_child_path(view_path)
|
||||
filtered_model = view_model.get_model()
|
||||
|
||||
# get the path of the original model
|
||||
path = filtered_model.convert_path_to_child_path(filtered_model_path)
|
||||
return path
|
||||
|
||||
def convert_path_to_vpath(self, view_model, path):
|
||||
it = view_model.get_iter_first()
|
||||
while it:
|
||||
name = self.find_item_for_path(path)
|
||||
view_name = view_model.get_value(it, RecipeListModel.COL_NAME)
|
||||
if view_name == name:
|
||||
view_path = view_model.get_path(it)
|
||||
return view_path
|
||||
it = view_model.iter_next(it)
|
||||
return None
|
||||
|
||||
"""
|
||||
The populate() function takes as input the data from a
|
||||
bb.event.TargetsTreeGenerated event and populates the RecipeList.
|
||||
"""
|
||||
def populate(self, event_model):
|
||||
# First clear the model, in case repopulating
|
||||
self.clear()
|
||||
|
||||
# dummy image for prompt
|
||||
self.set(self.append(), self.COL_NAME, self.__custom_image__,
|
||||
self.COL_DESC, "Use the 'View recipes' and 'View packages' " \
|
||||
"options to select what you want to include " \
|
||||
"in your image.",
|
||||
self.COL_LIC, "", self.COL_GROUP, "",
|
||||
self.COL_DEPS, "", self.COL_BINB, "",
|
||||
self.COL_TYPE, "image", self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, self.__custom_image__)
|
||||
|
||||
for item in event_model["pn"]:
|
||||
name = item
|
||||
desc = event_model["pn"][item]["description"]
|
||||
lic = event_model["pn"][item]["license"]
|
||||
group = event_model["pn"][item]["section"]
|
||||
inherits = event_model["pn"][item]["inherits"]
|
||||
install = []
|
||||
|
||||
depends = event_model["depends"].get(item, []) + event_model["rdepends-pn"].get(item, [])
|
||||
|
||||
if ('task-' in name):
|
||||
atype = 'task'
|
||||
elif ('image.bbclass' in " ".join(inherits)):
|
||||
if name != "hob-image":
|
||||
atype = 'image'
|
||||
install = event_model["rdepends-pkg"].get(item, []) + event_model["rrecs-pkg"].get(item, [])
|
||||
elif ('meta-' in name):
|
||||
atype = 'toolchain'
|
||||
elif (name == 'dummy-image' or name == 'dummy-toolchain'):
|
||||
atype = 'dummy'
|
||||
else:
|
||||
atype = 'recipe'
|
||||
|
||||
self.set(self.append(), self.COL_NAME, item, self.COL_DESC, desc,
|
||||
self.COL_LIC, lic, self.COL_GROUP, group,
|
||||
self.COL_DEPS, " ".join(depends), self.COL_BINB, "",
|
||||
self.COL_TYPE, atype, self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, " ".join(install), self.COL_PN, item)
|
||||
|
||||
self.pn_path = {}
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
pn = self.get_value(it, self.COL_NAME)
|
||||
path = self.get_path(it)
|
||||
self.pn_path[pn] = path
|
||||
it = self.iter_next(it)
|
||||
|
||||
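populate() assumes a specific shape for the TargetsTreeGenerated payload. A sketch of the minimum structure it reads (the keys are taken from the loop above; the values are illustrative):

# Sketch of the event_model dict populate() walks:
event_model = {
    "pn": {
        "core-image-minimal": {
            "description": "A small boot-capable image...",
            "license": "MIT",
            "section": "",
            "inherits": ["/path/to/classes/image.bbclass"],
        },
    },
    "depends":      { "core-image-minimal": ["packagegroup-core-boot"] },
    "rdepends-pn":  { "core-image-minimal": [] },
    "rdepends-pkg": { "core-image-minimal": [] },
    "rrecs-pkg":    { "core-image-minimal": [] },
}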
"""
|
||||
Update the model, send out the notification.
|
||||
"""
|
||||
def selection_change_notification(self):
|
||||
self.emit("recipe-selection-changed")
|
||||
|
||||
def path_included(self, item_path):
|
||||
return self[item_path][self.COL_INC]
|
||||
|
||||
"""
|
||||
Add this item, and any of its dependencies, to the image contents
|
||||
"""
|
||||
    def include_item(self, item_path, binb="", image_contents=False):
        if self.path_included(item_path):
            return

        item_name = self[item_path][self.COL_NAME]
        item_deps = self[item_path][self.COL_DEPS]

        self[item_path][self.COL_INC] = True

        item_bin = self[item_path][self.COL_BINB].split(', ')
        if binb and not binb in item_bin:
            item_bin.append(binb)
            self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')

        # We want to do some magic with things which are brought in by the
        # base image, so tag them as such
        if image_contents:
            self[item_path][self.COL_IMG] = True

        if item_deps:
            # Ensure all of the item's deps are included and, where appropriate,
            # add this item to their COL_BINB
            for dep in item_deps.split(" "):
                # If the contents model doesn't already contain dep, add it
                dep_path = self.find_path_for_item(dep)
                if not dep_path:
                    continue
                dep_included = self.path_included(dep_path)

                if dep_included and not dep in item_bin:
                    # don't set the COL_BINB to this item if the target is an
                    # item in our own COL_BINB
                    dep_bin = self[dep_path][self.COL_BINB].split(', ')
                    if not item_name in dep_bin:
                        dep_bin.append(item_name)
                        self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
                elif not dep_included:
                    self.include_item(dep_path, binb=item_name, image_contents=image_contents)

    def exclude_item(self, item_path):
        if not self.path_included(item_path):
            return

        self[item_path][self.COL_INC] = False

        item_name = self[item_path][self.COL_NAME]
        item_deps = self[item_path][self.COL_DEPS]
        if item_deps:
            for dep in item_deps.split(" "):
                dep_path = self.find_path_for_item(dep)
                if not dep_path:
                    continue
                dep_bin = self[dep_path][self.COL_BINB].split(', ')
                if item_name in dep_bin:
                    dep_bin.remove(item_name)
                    self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')

        item_bin = self[item_path][self.COL_BINB].split(', ')
        if item_bin:
            for binb in item_bin:
                binb_path = self.find_path_for_item(binb)
                if not binb_path:
                    continue
                self.exclude_item(binb_path)

    def reset(self):
        it = self.get_iter_first()
        while it:
            self.set(it,
                     self.COL_INC, False,
                     self.COL_BINB, "",
                     self.COL_IMG, False)
            it = self.iter_next(it)

        self.selection_change_notification()

    """
    Returns two lists: one of user-selected recipes and the other containing
    all selected recipes
    """
    def get_selected_recipes(self):
        allrecipes = []
        userrecipes = []

        it = self.get_iter_first()
        while it:
            if self.get_value(it, self.COL_INC):
                name = self.get_value(it, self.COL_PN)
                type = self.get_value(it, self.COL_TYPE)
                if type != "image":
                    allrecipes.append(name)
                    sel = "User Selected" in self.get_value(it, self.COL_BINB)
                    if sel:
                        userrecipes.append(name)
            it = self.iter_next(it)

        return list(set(userrecipes)), list(set(allrecipes))

    def set_selected_recipes(self, recipelist):
        for pn in recipelist:
            if pn in self.pn_path.keys():
                path = self.pn_path[pn]
                self.include_item(item_path=path,
                                  binb="User Selected")
        self.selection_change_notification()

    def get_selected_image(self):
        it = self.get_iter_first()
        while it:
            if self.get_value(it, self.COL_INC):
                name = self.get_value(it, self.COL_PN)
                type = self.get_value(it, self.COL_TYPE)
                if type == "image":
                    sel = "User Selected" in self.get_value(it, self.COL_BINB)
                    if sel:
                        return name
            it = self.iter_next(it)
        return None

    def set_selected_image(self, img):
        if not img:
            return
        self.reset()
        path = self.find_path_for_item(img)
        self.include_item(item_path=path,
                          binb="User Selected",
                          image_contents=True)
        self.selection_change_notification()
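# A minimal, standalone sketch (not part of Hob) of the COL_BINB bookkeeping
# idiom used by include_item() and exclude_item() above: the "brought in by"
# field is a comma-separated string, so adding or removing a reverse
# dependency is a split/append/join round trip. Names are illustrative only.
binb = "task-base"                      # current COL_BINB value
entries = binb.split(', ')
if "core-image-minimal" not in entries:
    entries.append("core-image-minimal")
binb = ', '.join(entries).lstrip(', ')  # -> "task-base, core-image-minimal"

entries = binb.split(', ')
if "task-base" in entries:
    entries.remove("task-base")
binb = ', '.join(entries).lstrip(', ')  # -> "core-image-minimal"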
@@ -1,124 +0,0 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import hwc

#
# HobPage: the super class for all Hob-related pages
#
class HobPage (gtk.VBox):

    def __init__(self, builder, title = None):
        super(HobPage, self).__init__(False, 0)
        self.builder = builder
        self.builder_width, self.builder_height = self.builder.size_request()

        if not title:
            self.title = "Hob -- Image Creator"
        else:
            self.title = title

        self.box_group_area = gtk.VBox(False, 12)
        self.box_group_area.set_size_request(self.builder_width - 73 - 73, self.builder_height - 88 - 15 - 15)
        self.group_align = gtk.Alignment(xalign = 0, yalign=0.5, xscale=1, yscale=1)
        self.group_align.set_padding(15, 15, 73, 73)
        self.group_align.add(self.box_group_area)
        self.box_group_area.set_homogeneous(False)

    def add_onto_top_bar(self, widget = None, padding = 0):
        # the top bar occupies 1/7 of the page height
        # set up an event box
        eventbox = gtk.EventBox()
        style = eventbox.get_style().copy()
        style.bg[gtk.STATE_NORMAL] = eventbox.get_colormap().alloc_color(HobColors.LIGHT_GRAY, False, False)
        eventbox.set_style(style)
        eventbox.set_size_request(-1, 88)

        hbox = gtk.HBox()

        label = gtk.Label()
        label.set_markup("<span size='x-large'>%s</span>" % self.title)
        hbox.pack_start(label, expand=False, fill=False, padding=20)

        if widget:
            # add the widget to the event box
            hbox.pack_end(widget, expand=False, fill=False, padding=padding)
        eventbox.add(hbox)

        return eventbox

    def span_tag(self, size="medium", weight="normal", foreground="#1c1c1c"):
        span_tag = "weight='%s' foreground='%s' size='%s'" % (weight, foreground, size)
        return span_tag

    def append_toolbar_button(self, toolbar, buttonname, icon_disp, icon_hover, tip, cb):
        # Create a button and append it to the toolbar according to the button
        # name; note the hover icon is not currently applied to the button
        icon = gtk.Image()
        pix_buffer = gtk.gdk.pixbuf_new_from_file(icon_disp)
        icon.set_from_pixbuf(pix_buffer)
        button = toolbar.append_item(buttonname, tip, None, icon, cb)
        return button

    @staticmethod
    def _size_to_string(size):
        try:
            if not size:
                size_str = "0 B"
            else:
                if len(str(int(size))) > 6:
                    size_str = '%.1f' % (size*1.0/(1024*1024)) + ' MB'
                elif len(str(int(size))) > 3:
                    size_str = '%.1f' % (size*1.0/1024) + ' KB'
                else:
                    size_str = str(size) + ' B'
        except Exception:
            size_str = "0 B"
        return size_str

    @staticmethod
    def _string_to_size(str_size):
        try:
            if not str_size:
                size = 0
            else:
                unit = str_size.split()
                if len(unit) > 1:
                    if unit[1] == 'MB':
                        size = float(unit[0])*1024*1024
                    elif unit[1] == 'KB':
                        size = float(unit[0])*1024
                    elif unit[1] == 'B':
                        size = float(unit[0])
                    else:
                        size = 0
                else:
                    size = float(unit[0])
        except Exception:
            size = 0
        return size
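# A standalone mirror (illustrative only; the real helpers live on HobPage as
# static methods) of the size-conversion rules above. The thresholds are digit
# counts on the integer part: more than 6 digits -> MB, more than 3 -> KB,
# otherwise bytes.
def size_to_string(size):
    if not size:
        return "0 B"
    digits = len(str(int(size)))
    if digits > 6:
        return '%.1f MB' % (size / (1024.0 * 1024.0))
    elif digits > 3:
        return '%.1f KB' % (size / 1024.0)
    return '%s B' % size

assert size_to_string(512) == '512 B'
assert size_to_string(2048) == '2.0 KB'
assert size_to_string(3 * 1024 * 1024) == '3.0 MB'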
bitbake/lib/bb/ui/crumbs/hobprefs.py (Normal file, 335 lines)
@@ -0,0 +1,335 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import glib
from bb.ui.crumbs.configurator import Configurator

class HobPrefs(gtk.Dialog):
    """
    Preferences dialog for Hob: distro, package format, image output types,
    BitBake parallelism and external toolchain settings.
    """
    def empty_combo_text(self, combo_text):
        model = combo_text.get_model()
        if model:
            model.clear()

    def output_type_toggled_cb(self, check, handler):
        ot = check.get_label()
        enabled = check.get_active()
        if enabled:
            self.selected_image_types = handler.add_image_output_type(ot)
        else:
            self.selected_image_types = handler.remove_image_output_type(ot)

        self.configurator.setConfVar('IMAGE_FSTYPES', "%s" % " ".join(self.selected_image_types).lstrip(" "))
        self.reload_required = True

    def sdk_machine_combo_changed_cb(self, combo, handler):
        sdk_mach = combo.get_active_text()
        if sdk_mach != self.curr_sdk_mach:
            self.curr_sdk_mach = sdk_mach
            self.configurator.setConfVar('SDKMACHINE', sdk_mach)
            handler.set_sdk_machine(sdk_mach)

    def update_sdk_machines(self, handler, sdk_machines):
        active = 0
        # disconnect the signal handler before updating the combo model
        if self.sdk_machine_handler_id:
            self.sdk_machine_combo.disconnect(self.sdk_machine_handler_id)
            self.sdk_machine_handler_id = None

        self.empty_combo_text(self.sdk_machine_combo)
        for sdk_machine in sdk_machines:
            self.sdk_machine_combo.append_text(sdk_machine)
            if sdk_machine == self.curr_sdk_mach:
                self.sdk_machine_combo.set_active(active)
            active = active + 1

        self.sdk_machine_handler_id = self.sdk_machine_combo.connect("changed", self.sdk_machine_combo_changed_cb, handler)

    def distro_combo_changed_cb(self, combo, handler):
        distro = combo.get_active_text()
        if distro != self.curr_distro:
            self.curr_distro = distro
            self.configurator.setConfVar('DISTRO', distro)
            handler.set_distro(distro)
            self.reload_required = True

    def update_distros(self, handler, distros):
        active = 0
        # disconnect the signal handler before updating the combo model
        if self.distro_handler_id:
            self.distro_combo.disconnect(self.distro_handler_id)
            self.distro_handler_id = None

        self.empty_combo_text(self.distro_combo)
        for distro in distros:
            self.distro_combo.append_text(distro)
            if distro == self.curr_distro:
                self.distro_combo.set_active(active)
            active = active + 1

        self.distro_handler_id = self.distro_combo.connect("changed", self.distro_combo_changed_cb, handler)

    def package_format_combo_changed_cb(self, combo, handler):
        package_format = combo.get_active_text()
        if package_format != self.curr_package_format:
            self.curr_package_format = package_format
            self.configurator.setConfVar('PACKAGE_CLASSES', 'package_%s' % package_format)
            handler.set_package_format(package_format)
            self.reload_required = True

    def update_package_formats(self, handler, formats):
        active = 0
        # disconnect the signal handler before updating the model
        if self.package_handler_id:
            self.package_combo.disconnect(self.package_handler_id)
            self.package_handler_id = None

        self.empty_combo_text(self.package_combo)
        for format in formats:
            self.package_combo.append_text(format)
            if format == self.curr_package_format:
                self.package_combo.set_active(active)
            active = active + 1

        self.package_handler_id = self.package_combo.connect("changed", self.package_format_combo_changed_cb, handler)

    def include_gplv3_cb(self, toggle):
        excluded = toggle.get_active()
        orig_incompatible = self.configurator.getConfVar('INCOMPATIBLE_LICENSE')
        new_incompatible = ""
        if excluded:
            if not orig_incompatible:
                new_incompatible = "GPLv3"
            elif 'GPLv3' not in orig_incompatible:
                # only append GPLv3 when it isn't already listed (the original
                # str.find() test fired only when GPLv3 led the string)
                new_incompatible = "%s GPLv3" % orig_incompatible
            else:
                new_incompatible = orig_incompatible
        else:
            new_incompatible = orig_incompatible.replace('GPLv3', '')

        if new_incompatible != orig_incompatible:
            self.handler.set_incompatible_license(new_incompatible)
            self.configurator.setConfVar('INCOMPATIBLE_LICENSE', new_incompatible)
            self.reload_required = True

    def change_bb_threads_cb(self, spinner):
        val = spinner.get_value_as_int()
        self.handler.set_bbthreads(val)
        self.configurator.setConfVar('BB_NUMBER_THREADS', val)

    def change_make_threads_cb(self, spinner):
        val = spinner.get_value_as_int()
        self.handler.set_pmake(val)
        self.configurator.setConfVar('PARALLEL_MAKE', "-j %s" % val)

    def toggle_toolchain_cb(self, check):
        enabled = check.get_active()
        toolchain = '0'
        if enabled:
            toolchain = '1'
        self.handler.toggle_toolchain(enabled)
        self.configurator.setConfVar('HOB_BUILD_TOOLCHAIN', toolchain)

    def toggle_headers_cb(self, check):
        enabled = check.get_active()
        headers = '0'
        if enabled:
            headers = '1'
        self.handler.toggle_toolchain_headers(enabled)
        self.configurator.setConfVar('HOB_BUILD_TOOLCHAIN_HEADERS', headers)

    def set_parent_window(self, parent):
        self.set_transient_for(parent)

    def write_changes(self):
        self.configurator.writeConf()

    def prefs_response_cb(self, dialog, response):
        if self.reload_required:
            glib.idle_add(self.handler.reload_data)
            self.reload_required = False

    def __init__(self, configurator, handler, curr_sdk_mach, curr_distro, pclass,
                 pmake, bbthread, selected_image_types, all_image_types,
                 gplv3disabled, build_toolchain, build_toolchain_headers):
        """
        Build the preferences dialog from the current configuration values.
        """
        gtk.Dialog.__init__(self, "Preferences", None,
                            gtk.DIALOG_DESTROY_WITH_PARENT,
                            (gtk.STOCK_CLOSE, gtk.RESPONSE_OK))

        self.set_border_width(6)
        self.vbox.set_property("spacing", 12)
        self.action_area.set_property("spacing", 12)
        self.action_area.set_property("border-width", 6)

        self.handler = handler
        self.configurator = configurator

        self.curr_sdk_mach = curr_sdk_mach
        self.curr_distro = curr_distro
        self.curr_package_format = pclass
        self.pmake = pmake
        self.bbthread = bbthread
        self.selected_image_types = selected_image_types.split(" ")
        self.gplv3disabled = gplv3disabled
        self.build_toolchain = build_toolchain
        self.build_toolchain_headers = build_toolchain_headers

        self.reload_required = False
        self.distro_handler_id = None
        self.sdk_machine_handler_id = None
        self.package_handler_id = None

        left = gtk.SizeGroup(gtk.SIZE_GROUP_HORIZONTAL)
        right = gtk.SizeGroup(gtk.SIZE_GROUP_HORIZONTAL)

        label = gtk.Label()
        label.set_markup("<b>Policy</b>")
        label.show()
        frame = gtk.Frame()
        frame.set_label_widget(label)
        frame.set_shadow_type(gtk.SHADOW_NONE)
        frame.show()
        self.vbox.pack_start(frame)
        pbox = gtk.VBox(False, 12)
        pbox.show()
        frame.add(pbox)
        hbox = gtk.HBox(False, 12)
        hbox.show()
        pbox.pack_start(hbox, expand=False, fill=False, padding=6)
        # Distro selector
        label = gtk.Label("Distribution:")
        label.show()
        hbox.pack_start(label, expand=False, fill=False, padding=6)
        self.distro_combo = gtk.combo_box_new_text()
        self.distro_combo.set_tooltip_text("Select the Yocto distribution you would like to use")
        self.distro_combo.show()
        hbox.pack_start(self.distro_combo, expand=False, fill=False, padding=6)
        # Exclude GPLv3
        check = gtk.CheckButton("Exclude GPLv3 packages")
        check.set_tooltip_text("Check this box to prevent GPLv3 packages from being included in your image")
        check.show()
        check.set_active(self.gplv3disabled)
        check.connect("toggled", self.include_gplv3_cb)
        hbox.pack_start(check, expand=False, fill=False, padding=6)
        hbox = gtk.HBox(False, 12)
        hbox.show()
        pbox.pack_start(hbox, expand=False, fill=False, padding=6)
        # Package format selector
        label = gtk.Label("Package format:")
        label.show()
        hbox.pack_start(label, expand=False, fill=False, padding=6)
        self.package_combo = gtk.combo_box_new_text()
        self.package_combo.set_tooltip_text("""The package format is that used in creation
of the root filesystem and also dictates the package manager used in your image""")
        self.package_combo.show()
        hbox.pack_start(self.package_combo, expand=False, fill=False, padding=6)
        if all_image_types:
            # Image output type selector
            label = gtk.Label("Image output types:")
            label.show()
            hbox.pack_start(label, expand=False, fill=False, padding=6)
            chk_cnt = 3
            for it in all_image_types.split(" "):
                chk_cnt = chk_cnt + 1
                if chk_cnt % 6 == 0:
                    hbox = gtk.HBox(False, 12)
                    hbox.show()
                    pbox.pack_start(hbox, expand=False, fill=False, padding=6)
                chk = gtk.CheckButton(it)
                if it in self.selected_image_types:
                    chk.set_active(True)
                chk.set_tooltip_text("Build an %s image" % it)
                chk.connect("toggled", self.output_type_toggled_cb, handler)
                chk.show()
                hbox.pack_start(chk, expand=False, fill=False, padding=3)
        # BitBake
        label = gtk.Label()
        label.set_markup("<b>BitBake</b>")
        label.show()
        frame = gtk.Frame()
        frame.set_label_widget(label)
        frame.set_shadow_type(gtk.SHADOW_NONE)
        frame.show()
        self.vbox.pack_start(frame)
        pbox = gtk.VBox(False, 12)
        pbox.show()
        frame.add(pbox)
        hbox = gtk.HBox(False, 12)
        hbox.show()
        pbox.pack_start(hbox, expand=False, fill=False, padding=6)
        label = gtk.Label("BitBake threads:")
        label.show()
        # NOTE: may be a good idea in future to intelligently cap the maximum
        # values, but we need more data to make an educated decision; for now
        # set a high maximum, as a value for the upper bound is required by
        # gtk.Adjustment
        spin_max = 30 # seems like a high enough arbitrary number
        hbox.pack_start(label, expand=False, fill=False, padding=6)
        bbadj = gtk.Adjustment(value=self.bbthread, lower=1, upper=spin_max, step_incr=1)
        bbspinner = gtk.SpinButton(adjustment=bbadj, climb_rate=1, digits=0)
        bbspinner.show()
        bbspinner.connect("value-changed", self.change_bb_threads_cb)
        hbox.pack_start(bbspinner, expand=False, fill=False, padding=6)
        label = gtk.Label("Make threads:")
        label.show()
        hbox.pack_start(label, expand=False, fill=False, padding=6)
        madj = gtk.Adjustment(value=self.pmake, lower=1, upper=spin_max, step_incr=1)
        makespinner = gtk.SpinButton(adjustment=madj, climb_rate=1, digits=0)
        makespinner.connect("value-changed", self.change_make_threads_cb)
        makespinner.show()
        hbox.pack_start(makespinner, expand=False, fill=False, padding=6)
        # Toolchain
        label = gtk.Label()
        label.set_markup("<b>External Toolchain</b>")
        label.show()
        frame = gtk.Frame()
        frame.set_label_widget(label)
        frame.set_shadow_type(gtk.SHADOW_NONE)
        frame.show()
        self.vbox.pack_start(frame)
        pbox = gtk.VBox(False, 12)
        pbox.show()
        frame.add(pbox)
        hbox = gtk.HBox(False, 12)
        hbox.show()
        pbox.pack_start(hbox, expand=False, fill=False, padding=6)
        toolcheck = gtk.CheckButton("Build external development toolchain with image")
        toolcheck.show()
        toolcheck.set_active(self.build_toolchain)
        toolcheck.connect("toggled", self.toggle_toolchain_cb)
        hbox.pack_start(toolcheck, expand=False, fill=False, padding=6)
        hbox = gtk.HBox(False, 12)
        hbox.show()
        pbox.pack_start(hbox, expand=False, fill=False, padding=6)
        label = gtk.Label("Toolchain host:")
        label.show()
        hbox.pack_start(label, expand=False, fill=False, padding=6)
        self.sdk_machine_combo = gtk.combo_box_new_text()
        self.sdk_machine_combo.set_tooltip_text("Select the host architecture of the external machine")
        self.sdk_machine_combo.show()
        hbox.pack_start(self.sdk_machine_combo, expand=False, fill=False, padding=6)
        # headerscheck = gtk.CheckButton("Include development headers with toolchain")
        # headerscheck.show()
        # headerscheck.set_active(self.build_toolchain_headers)
        # headerscheck.connect("toggled", self.toggle_headers_cb)
        # hbox.pack_start(headerscheck, expand=False, fill=False, padding=6)
        self.connect("response", self.prefs_response_cb)
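# A standalone sketch (illustrative, not Hob API) of the INCOMPATIBLE_LICENSE
# string handling performed by include_gplv3_cb above, including the
# containment check that replaces the original str.find() test.
def toggle_gplv3(orig, excluded):
    if excluded:
        if not orig:
            return "GPLv3"
        elif 'GPLv3' not in orig:
            return "%s GPLv3" % orig
        return orig
    return orig.replace('GPLv3', '').strip()

assert toggle_gplv3("", True) == "GPLv3"
assert toggle_gplv3("Proprietary", True) == "Proprietary GPLv3"
assert toggle_gplv3("Proprietary GPLv3", False) == "Proprietary"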
@@ -1,860 +0,0 @@
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gtk
import gobject
import os
import os.path
import sys
import pango, pangocairo
import cairo
import math

from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.persistenttooltip import PersistentTooltip

class hwc:

    MAIN_WIN_WIDTH = 1024
    MAIN_WIN_HEIGHT = 700

class hic:

    HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("ui/icons/"))

    ICON_RCIPE_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_display.png'))
    ICON_RCIPE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_hover.png'))
    ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_display.png'))
    ICON_PACKAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_hover.png'))
    ICON_LAYERS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_display.png'))
    ICON_LAYERS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_hover.png'))
    ICON_TEMPLATES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('templates/templates_display.png'))
    ICON_TEMPLATES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('templates/templates_hover.png'))
    ICON_IMAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_display.png'))
    ICON_IMAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_hover.png'))
    ICON_SETTINGS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_display.png'))
    ICON_SETTINGS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_hover.png'))
    ICON_INFO_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
    ICON_INFO_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_hover.png'))
    ICON_INDI_CONFIRM_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/confirmation.png'))
    ICON_INDI_ERROR_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/denied.png'))
    ICON_INDI_REMOVE_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove.png'))
    ICON_INDI_REMOVE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove-hover.png'))
    ICON_INDI_ADD_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add.png'))
    ICON_INDI_ADD_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add-hover.png'))
    ICON_INDI_REFRESH_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/refresh.png'))
    ICON_INDI_ALERT_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/alert.png'))
    ICON_INDI_TICK_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/tick.png'))
    ICON_INDI_INFO_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/info.png'))

class hcc:

    SUPPORTED_IMAGE_TYPES = {
        "jffs2" : ["jffs2"],
        "sum.jffs2" : ["sum.jffs2"],
        "cramfs" : ["cramfs"],
        "ext2" : ["ext2"],
        "ext2.gz" : ["ext2.gz"],
        "ext2.bz2" : ["ext2.bz2"],
        "ext3" : ["ext3"],
        "ext3.gz" : ["ext3.gz"],
        "ext2.lzma" : ["ext2.lzma"],
        "btrfs" : ["btrfs"],
        "live" : ["hddimg", "iso"],
        "squashfs" : ["squashfs"],
        "squashfs-lzma" : ["squashfs-lzma"],
        "ubi" : ["ubi"],
        "tar" : ["tar"],
        "tar.gz" : ["tar.gz"],
        "tar.bz2" : ["tar.bz2"],
        "tar.xz" : ["tar.xz"],
        "cpio" : ["cpio"],
        "cpio.gz" : ["cpio.gz"],
        "cpio.xz" : ["cpio.xz"],
        "vmdk" : ["vmdk"],
        "cpio.lzma" : ["cpio.lzma"],
        "elf" : ["elf"],
    }

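# Illustrative helper (not in the original file): expand user-facing image
# type choices into the IMAGE_FSTYPES tokens they imply, using the table
# above. "live" is the one entry that expands to two filesystem images.
def expand_image_types(choices):
    fstypes = []
    for choice in choices:
        fstypes.extend(hcc.SUPPORTED_IMAGE_TYPES.get(choice, []))
    return " ".join(fstypes)

# expand_image_types(["live", "ext3"]) -> "hddimg iso ext3"
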
class HobViewTable (gtk.VBox):
    """
    A VBox to contain the table for different recipe views and package view
    """
    __gsignals__ = {
        "toggled"       : (gobject.SIGNAL_RUN_LAST,
                           gobject.TYPE_NONE,
                          (gobject.TYPE_PYOBJECT,
                           gobject.TYPE_STRING,
                           gobject.TYPE_INT,
                           gobject.TYPE_PYOBJECT,)),
        "row-activated" : (gobject.SIGNAL_RUN_LAST,
                           gobject.TYPE_NONE,
                          (gobject.TYPE_PYOBJECT,
                           gobject.TYPE_PYOBJECT,)),
        "cell-fadeinout-stopped" : (gobject.SIGNAL_RUN_LAST,
                           gobject.TYPE_NONE,
                          (gobject.TYPE_PYOBJECT,
                           gobject.TYPE_PYOBJECT,
                           gobject.TYPE_PYOBJECT,)),
    }

    def __init__(self, columns):
        gtk.VBox.__init__(self, False, 6)
        self.table_tree = gtk.TreeView()
        self.table_tree.set_headers_visible(True)
        self.table_tree.set_headers_clickable(True)
        self.table_tree.set_enable_search(True)
        self.table_tree.set_rules_hint(True)
        self.table_tree.set_enable_tree_lines(True)
        self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
        self.toggle_columns = []
        self.table_tree.connect("row-activated", self.row_activated_cb)

        for i, column in enumerate(columns):
            col = gtk.TreeViewColumn(column['col_name'])
            col.set_clickable(True)
            col.set_resizable(True)
            col.set_sort_column_id(column['col_id'])
            if 'col_min' in column.keys():
                col.set_min_width(column['col_min'])
            if 'col_max' in column.keys():
                col.set_max_width(column['col_max'])
            if 'expand' in column.keys():
                col.set_expand(True)
            self.table_tree.append_column(col)

            if (not 'col_style' in column.keys()) or column['col_style'] == 'text':
                cell = gtk.CellRendererText()
                col.pack_start(cell, True)
                col.set_attributes(cell, text=column['col_id'])
                if 'col_t_id' in column.keys():
                    col.add_attribute(cell, 'font', column['col_t_id'])
            elif column['col_style'] == 'check toggle':
                cell = HobCellRendererToggle()
                cell.set_property('activatable', True)
                cell.connect("toggled", self.toggled_cb, i, self.table_tree)
                cell.connect_render_state_changed(self.stop_cell_fadeinout_cb, self.table_tree)
                self.toggle_id = i
                col.pack_end(cell, True)
                col.set_attributes(cell, active=column['col_id'])
                self.toggle_columns.append(column['col_name'])
                if 'col_group' in column.keys():
                    col.set_cell_data_func(cell, self.set_group_number_cb)
            elif column['col_style'] == 'radio toggle':
                cell = gtk.CellRendererToggle()
                cell.set_property('activatable', True)
                cell.set_radio(True)
                cell.connect("toggled", self.toggled_cb, i, self.table_tree)
                self.toggle_id = i
                col.pack_end(cell, True)
                col.set_attributes(cell, active=column['col_id'])
                self.toggle_columns.append(column['col_name'])
            elif column['col_style'] == 'binb':
                cell = gtk.CellRendererText()
                col.pack_start(cell, True)
                col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
                if 'col_t_id' in column.keys():
                    col.add_attribute(cell, 'font', column['col_t_id'])

        scroll = gtk.ScrolledWindow()
        scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        scroll.add(self.table_tree)
        self.pack_start(scroll, True, True, 0)

    def display_binb_cb(self, col, cell, model, it, col_id):
        binb = model.get_value(it, col_id)
        # Just display the first item
        if binb:
            bin = binb.split(', ')
            cell.set_property('text', bin[0])
        else:
            cell.set_property('text', "")
        return True

    def set_model(self, tree_model):
        self.table_tree.set_model(tree_model)

    def set_search_entry(self, search_column_id, entry):
        self.table_tree.set_search_column(search_column_id)
        self.table_tree.set_search_entry(entry)

    def toggle_default(self):
        model = self.table_tree.get_model()
        if not model:
            return
        it = model.get_iter_first()
        if it:
            rowpath = model.get_path(it)
            model[rowpath][self.toggle_id] = True

    def toggled_cb(self, cell, path, columnid, tree):
        self.emit("toggled", cell, path, columnid, tree)

    def row_activated_cb(self, tree, path, view_column):
        if not view_column.get_title() in self.toggle_columns:
            self.emit("row-activated", tree.get_model(), path)

    def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
        self.emit("cell-fadeinout-stopped", ctrl, cell, tree)

    def set_group_number_cb(self, col, cell, model, it):
        if model and (model.iter_parent(it) == None):
            cell.cell_attr["number_of_children"] = model.iter_n_children(it)
        else:
            cell.cell_attr["number_of_children"] = 0

    def connect_group_selection(self, cb_func):
        self.table_tree.get_selection().connect("changed", cb_func)

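# A sketch (column values are illustrative) of the dict schema HobViewTable's
# constructor consumes: 'col_name' is the header, 'col_id' the model column,
# and 'col_style' picks the renderer ('text' by default, 'check toggle',
# 'radio toggle' or 'binb'); 'col_min'/'col_max' bound the column width and
# 'col_group' enables the grouped "n pkg" rendering.
example_columns = [
    {'col_name': 'Included', 'col_id': 0, 'col_style': 'check toggle', 'col_min': 50, 'col_max': 50},
    {'col_name': 'Package', 'col_id': 1, 'col_style': 'text', 'expand': 'True'},
    {'col_name': 'Brought in by', 'col_id': 2, 'col_style': 'binb', 'col_min': 100, 'col_max': 350},
]
# table = HobViewTable(example_columns)   # requires a running GTK main loop
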
"""
|
||||
A method to calculate a softened value for the colour of widget when in the
|
||||
provided state.
|
||||
|
||||
widget: the widget whose style to use
|
||||
state: the state of the widget to use the style for
|
||||
|
||||
Returns a string value representing the softened colour
|
||||
"""
|
||||
def soften_color(widget, state=gtk.STATE_NORMAL):
|
||||
# this colour munging routine is heavily inspired bu gdu_util_get_mix_color()
|
||||
# from gnome-disk-utility:
|
||||
# http://git.gnome.org/browse/gnome-disk-utility/tree/src/gdu-gtk/gdu-gtk.c?h=gnome-3-0
|
||||
blend = 0.7
|
||||
style = widget.get_style()
|
||||
color = style.text[state]
|
||||
color.red = color.red * blend + style.base[state].red * (1.0 - blend)
|
||||
color.green = color.green * blend + style.base[state].green * (1.0 - blend)
|
||||
color.blue = color.blue * blend + style.base[state].blue * (1.0 - blend)
|
||||
return color.to_string()
|
||||
|
||||
class HobButton(gtk.Button):
|
||||
"""
|
||||
A gtk.Button subclass which follows the visual design of Hob for primary
|
||||
action buttons
|
||||
|
||||
label: the text to display as the button's label
|
||||
"""
|
||||
def __init__(self, label):
|
||||
gtk.Button.__init__(self, label)
|
||||
HobButton.style_button(self)
|
||||
|
||||
@staticmethod
|
||||
def style_button(button):
|
||||
style = button.get_style()
|
||||
button_color = gtk.gdk.Color(HobColors.ORANGE)
|
||||
button.modify_bg(gtk.STATE_NORMAL, button_color)
|
||||
button.modify_bg(gtk.STATE_PRELIGHT, button_color)
|
||||
button.modify_bg(gtk.STATE_SELECTED, button_color)
|
||||
|
||||
button.set_flags(gtk.CAN_DEFAULT)
|
||||
button.grab_default()
|
||||
|
||||
label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
|
||||
button.set_label(label)
|
||||
button.child.set_use_markup(True)
|
||||
|
||||
class HobAltButton(gtk.Button):
|
||||
"""
|
||||
A gtk.Button subclass which has no relief, and so is more discrete
|
||||
"""
|
||||
def __init__(self, label):
|
||||
gtk.Button.__init__(self, label)
|
||||
HobAltButton.style_button(self)
|
||||
|
||||
"""
|
||||
A callback for the state-changed event to ensure the text is displayed
|
||||
differently when the widget is not sensitive
|
||||
"""
|
||||
@staticmethod
|
||||
def desensitise_on_state_change_cb(button, state):
|
||||
if not button.get_property("sensitive"):
|
||||
HobAltButton.set_text(button, False)
|
||||
else:
|
||||
HobAltButton.set_text(button, True)
|
||||
|
||||
"""
|
||||
Set the button label with an appropriate colour for the current widget state
|
||||
"""
|
||||
@staticmethod
|
||||
def set_text(button, sensitive=True):
|
||||
if sensitive:
|
||||
colour = HobColors.PALE_BLUE
|
||||
else:
|
||||
colour = HobColors.LIGHT_GRAY
|
||||
button.set_label("<span size='large' color='%s'><b>%s</b></span>" % (colour, gobject.markup_escape_text(button.text)))
|
||||
button.child.set_use_markup(True)
|
||||
|
||||
@staticmethod
|
||||
def style_button(button):
|
||||
button.text = button.get_label()
|
||||
button.connect("state-changed", HobAltButton.desensitise_on_state_change_cb)
|
||||
HobAltButton.set_text(button)
|
||||
button.child.set_use_markup(True)
|
||||
button.set_relief(gtk.RELIEF_NONE)
|
||||
|
||||
class HobImageButton(gtk.Button):
    """
    A gtk.Button with an icon and two rows of text, the second of which is
    displayed in a blended colour.

    primary_text: the main button label
    secondary_text: optional second line of text
    icon_path: path to the icon file to display on the button
    """
    def __init__(self, primary_text, secondary_text="", icon_path="", hover_icon_path=""):
        gtk.Button.__init__(self)
        self.set_relief(gtk.RELIEF_NONE)

        self.icon_path = icon_path
        self.hover_icon_path = hover_icon_path

        hbox = gtk.HBox(False, 10)
        hbox.show()
        self.add(hbox)
        self.icon = gtk.Image()
        self.icon.set_from_file(self.icon_path)
        self.icon.set_alignment(0.5, 0.0)
        self.icon.show()
        if self.hover_icon_path and len(self.hover_icon_path):
            self.connect("enter-notify-event", self.set_hover_icon_cb)
            self.connect("leave-notify-event", self.set_icon_cb)
        hbox.pack_start(self.icon, False, False, 0)
        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        colour = soften_color(label)
        mark = "<span size='x-large'>%s</span>\n<span size='medium' fgcolor='%s' weight='ultralight'>%s</span>" % (primary_text, colour, secondary_text)
        label.set_markup(mark)
        label.show()
        hbox.pack_start(label, True, True, 0)

    def set_hover_icon_cb(self, widget, event):
        self.icon.set_from_file(self.hover_icon_path)

    def set_icon_cb(self, widget, event):
        self.icon.set_from_file(self.icon_path)

class HobInfoButton(gtk.EventBox):
    """
    This class implements a button-like widget per the Hob visual and UX designs
    which will display a persistent tooltip, with the contents of tip_markup, when
    clicked.

    tip_markup: the Pango Markup to be displayed in the persistent tooltip
    """
    def __init__(self, tip_markup, parent=None):
        gtk.EventBox.__init__(self)
        self.image = gtk.Image()
        self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
        self.image.show()
        self.add(self.image)

        # use the event *masks* here; the original passed the
        # gtk.gdk.BUTTON_RELEASE event type constant by mistake
        self.set_events(gtk.gdk.BUTTON_RELEASE_MASK |
                        gtk.gdk.ENTER_NOTIFY_MASK |
                        gtk.gdk.LEAVE_NOTIFY_MASK)

        self.ptip = PersistentTooltip(tip_markup)

        if parent:
            self.ptip.set_parent(parent)
            self.ptip.set_transient_for(parent)
            self.ptip.set_destroy_with_parent(True)

        self.connect("button-release-event", self.button_release_cb)
        self.connect("enter-notify-event", self.mouse_in_cb)
        self.connect("leave-notify-event", self.mouse_out_cb)

    """
    When the mouse click is released, emulate a button click and show the
    associated PersistentTooltip
    """
    def button_release_cb(self, widget, event):
        self.ptip.show()

    """
    Change to the prelight image when the mouse enters the widget
    """
    def mouse_in_cb(self, widget, event):
        self.image.set_from_file(hic.ICON_INFO_HOVER_FILE)

    """
    Change back to the stock image when the mouse leaves the widget
    """
    def mouse_out_cb(self, widget, event):
        self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)

class HobIndicator(gtk.DrawingArea):
    def __init__(self, count):
        gtk.DrawingArea.__init__(self)
        # Set no window for transparent background
        self.set_has_window(False)
        self.set_size_request(38, 38)
        # We need to pass through button clicks
        self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)

        self.connect('expose-event', self.expose)

        self.count = count
        self.color = HobColors.GRAY

    def expose(self, widget, event):
        if self.count and self.count > 0:
            ctx = widget.window.cairo_create()

            x, y, w, h = self.allocation

            ctx.set_operator(cairo.OPERATOR_OVER)
            ctx.set_source_color(gtk.gdk.color_parse(self.color))
            ctx.translate(w/2, h/2)
            ctx.arc(x, y, min(w, h)/2 - 2, 0, 2*math.pi)
            ctx.fill_preserve()

            layout = self.create_pango_layout(str(self.count))
            textw, texth = layout.get_pixel_size()
            x = (w/2)-(textw/2) + x
            y = (h/2) - (texth/2) + y
            ctx.move_to(x, y)
            self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)

    def set_count(self, count):
        self.count = count

    def set_active(self, active):
        if active:
            self.color = HobColors.DEEP_RED
        else:
            self.color = HobColors.GRAY

class HobTabLabel(gtk.HBox):
    def __init__(self, text, count=0):
        gtk.HBox.__init__(self, False, 0)
        self.indicator = HobIndicator(count)
        self.indicator.show()
        self.pack_end(self.indicator, False, False)
        self.lbl = gtk.Label(text)
        self.lbl.set_alignment(0.0, 0.5)
        self.lbl.show()
        self.pack_end(self.lbl, True, True, 6)

    def set_count(self, count):
        self.indicator.set_count(count)

    def set_active(self, active=True):
        self.indicator.set_active(active)

class HobNotebook(gtk.Notebook):
    def __init__(self):
        gtk.Notebook.__init__(self)
        self.set_property('homogeneous', True)

        self.pages = []

        self.search = None
        self.search_name = ""

        self.connect("switch-page", self.page_changed_cb)

        self.show_all()

    def page_changed_cb(self, nb, page, page_num):
        for p, lbl in enumerate(self.pages):
            if p == page_num:
                lbl.set_active()
            else:
                lbl.set_active(False)

    def append_page(self, child, tab_label, tab_tooltip=None):
        label = HobTabLabel(tab_label)
        if tab_tooltip:
            label.set_tooltip_text(tab_tooltip)
        label.set_active(False)
        self.pages.append(label)
        gtk.Notebook.append_page(self, child, label)

    def set_entry(self, name="Search:"):
        self.search = gtk.Entry()
        self.search_name = name
        style = self.search.get_style()
        style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
        self.search.set_style(style)
        self.search.set_text(name)
        self.search.set_editable(False)
        self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
        self.search.connect("icon-release", self.set_search_entry_clear_cb)
        self.search.show()

        self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
        self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
        self.set_action_widget(self.search, gtk.PACK_END)

    def show_indicator_icon(self, title, number):
        for child in self.pages:
            if child.lbl.get_label() == title:
                child.set_count(number)

    def hide_indicator_icon(self, title):
        for child in self.pages:
            if child.lbl.get_label() == title:
                child.set_count(0)

    def set_search_entry_editable_cb(self, search, event):
        search.set_editable(True)
        search.set_text("")
        style = self.search.get_style()
        style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.BLACK, False, False)
        search.set_style(style)

    def reset_entry(self, entry):
        style = entry.get_style()
        style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
        entry.set_style(style)
        entry.set_text(self.search_name)
        entry.set_editable(False)

    def set_search_entry_reset_cb(self, search, event):
        self.reset_entry(search)

    def set_search_entry_clear_cb(self, search, icon_pos, event):
        if search.get_editable() == True:
            search.set_text("")

    def set_page(self, title):
        for child in self.pages:
            if child.lbl.get_label() == title:
                child.grab_focus()
                self.set_current_page(self.page_num(child))

class HobWarpCellRendererText(gtk.CellRendererText):
    def __init__(self, col_number):
        gtk.CellRendererText.__init__(self)
        self.set_property("wrap-mode", pango.WRAP_WORD_CHAR)
        self.set_property("wrap-width", 300) # default wrap width is 300
        self.col_n = col_number

    def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
        if widget:
            self.props.wrap_width = self.get_resized_wrap_width(widget, widget.get_column(self.col_n))
        return gtk.CellRendererText.do_render(self, window, widget, background_area, cell_area, expose_area, flags)

    def get_resized_wrap_width(self, treeview, column):
        otherCols = []
        for col in treeview.get_columns():
            if col != column:
                otherCols.append(col)
        adjwidth = treeview.allocation.width - sum(c.get_width() for c in otherCols)
        adjwidth -= treeview.style_get_property("horizontal-separator") * 4
        if self.props.wrap_width == adjwidth or adjwidth <= 0:
            adjwidth = self.props.wrap_width
        return adjwidth

gobject.type_register(HobWarpCellRendererText)

class HobIconChecker(hic):
    def set_hob_icon_to_stock_icon(self, file_path, stock_id=""):
        try:
            pixbuf = gtk.gdk.pixbuf_new_from_file(file_path)
        except Exception:
            return None

        if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None):
            icon_factory = gtk.IconFactory()
            icon_factory.add_default()
            icon_factory.add(stock_id, gtk.IconSet(pixbuf))
            gtk.stock_add([(stock_id, '_label', 0, 0, '')])

            return icon_factory.lookup(stock_id)

        return None

    """
    To keep Hob's icons consistent and avoid rendering differences across
    systems or GTK versions, some stock GTK icons are replaced with Hob icons.
    This function checks stock_name, substitutes the Hob equivalent where one
    exists, and returns the resulting stock id.
    """
    def check_stock_icon(self, stock_name=""):
        HOB_CHECK_STOCK_NAME = {
            ('hic-dialog-info', 'gtk-dialog-info', 'dialog-info') : self.ICON_INDI_INFO_FILE,
            ('hic-ok', 'gtk-ok', 'ok') : self.ICON_INDI_TICK_FILE,
            ('hic-dialog-error', 'gtk-dialog-error', 'dialog-error') : self.ICON_INDI_ERROR_FILE,
            ('hic-dialog-warning', 'gtk-dialog-warning', 'dialog-warning') : self.ICON_INDI_ALERT_FILE,
            ('hic-task-refresh', 'gtk-execute', 'execute') : self.ICON_INDI_REFRESH_FILE,
        }
        valid_stock_id = stock_name
        if stock_name:
            for names, path in HOB_CHECK_STOCK_NAME.iteritems():
                if stock_name in names:
                    valid_stock_id = names[0]
                    if not gtk.icon_factory_lookup_default(valid_stock_id):
                        self.set_hob_icon_to_stock_icon(path, valid_stock_id)

        return valid_stock_id

class HobCellRendererController(gobject.GObject):
    (MODE_CYCLE_RUNNING, MODE_ONE_SHORT) = range(2)
    __gsignals__ = {
        "run-timer-stopped" : (gobject.SIGNAL_RUN_LAST,
                               gobject.TYPE_NONE,
                               ()),
    }
    def __init__(self, runningmode=MODE_CYCLE_RUNNING, is_draw_row=False):
        gobject.GObject.__init__(self)
        self.timeout_id = None
        self.current_angle_pos = 0.0
        self.step_angle = 0.0
        self.tree_headers_height = 0
        self.running_cell_areas = []
        self.running_mode = runningmode
        self.is_queue_draw_row_area = is_draw_row
        self.force_stop_enable = False

    def is_active(self):
        if self.timeout_id:
            return True
        else:
            return False

    def reset_run(self):
        self.force_stop()
        self.running_cell_areas = []
        self.current_angle_pos = 0.0
        self.step_angle = 0.0

    ''' time_iterval: (1~1000)ms, used as the basic interval for the timer
        init_usrdata: the user-data value at which the progress starts
        min_usrdata: the minimum of the user-data range
        max_usrdata: the maximum of the user-data range
        step: the user-data increment applied on each tick
        Note: init_usrdata should lie within the range from min to max,
        max should be > min, and step should be < (max - min)
    '''
    def start_run(self, time_iterval, init_usrdata, min_usrdata, max_usrdata, step, tree):
        if (not time_iterval) or (not max_usrdata):
            return
        usr_range = (max_usrdata - min_usrdata) * 1.0
        self.current_angle_pos = (init_usrdata * 1.0) / usr_range
        self.step_angle = (step * 1) / usr_range
        self.timeout_id = gobject.timeout_add(int(time_iterval),
                                              self.make_image_on_progressing_cb, tree)
        self.tree_headers_height = self.get_treeview_headers_height(tree)
        self.force_stop_enable = False

    def force_stop(self):
        self.emit("run-timer-stopped")
        self.force_stop_enable = True
        if self.timeout_id:
            if gobject.source_remove(self.timeout_id):
                self.timeout_id = None

    def on_draw_pixbuf_cb(self, pixbuf, cr, x, y, img_width, img_height, do_refresh=True):
        if pixbuf:
            r = max(img_width/2, img_height/2)
            cr.translate(x + r, y + r)
            if do_refresh:
                cr.rotate(2 * math.pi * self.current_angle_pos)

            cr.set_source_pixbuf(pixbuf, -img_width/2, -img_height/2)
            cr.paint()

    def on_draw_fadeinout_cb(self, cr, color, x, y, width, height, do_fadeout=True):
        if do_fadeout:
            alpha = self.current_angle_pos * 0.8
        else:
            alpha = (1.0 - self.current_angle_pos) * 0.8

        cr.set_source_rgba(color.red, color.green, color.blue, alpha)
        cr.rectangle(x, y, width, height)
        cr.fill()

    def get_treeview_headers_height(self, tree):
        if tree and (tree.get_property("headers-visible") == True):
            height = tree.get_allocation().height - tree.get_bin_window().get_size()[1]
            return height

        return 0

    def make_image_on_progressing_cb(self, tree):
        self.current_angle_pos += self.step_angle
        if self.running_mode == self.MODE_CYCLE_RUNNING:
            if (self.current_angle_pos >= 1):
                self.current_angle_pos = self.step_angle
        else:
            if self.current_angle_pos > 1:
                self.force_stop()
                return False

        if self.is_queue_draw_row_area:
            for path in self.running_cell_areas:
                rect = tree.get_cell_area(path, tree.get_column(0))
                row_x, _, row_width, _ = tree.get_visible_rect()
                tree.queue_draw_area(row_x, rect.y + self.tree_headers_height, row_width, rect.height)
        else:
            for rect in self.running_cell_areas:
                tree.queue_draw_area(rect.x, rect.y + self.tree_headers_height, rect.width, rect.height)

        return (not self.force_stop_enable)

    def append_running_cell_area(self, cell_area):
        if cell_area and (cell_area not in self.running_cell_areas):
            self.running_cell_areas.append(cell_area)

    def remove_running_cell_area(self, cell_area):
        if cell_area in self.running_cell_areas:
            self.running_cell_areas.remove(cell_area)
        if not self.running_cell_areas:
            self.reset_run()

gobject.type_register(HobCellRendererController)
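# Worked example (illustrative, not part of the original file) of the timer
# arithmetic start_run() performs before scheduling
# make_image_on_progressing_cb with gobject.timeout_add: the user-data range
# is normalised to [0, 1] and each tick advances current_angle_pos by
# step / range.
def _controller_tick_math():
    init_usrdata, min_usrdata, max_usrdata, step = 0, 0, 1000, 150
    usr_range = (max_usrdata - min_usrdata) * 1.0         # 1000.0
    current_angle_pos = (init_usrdata * 1.0) / usr_range  # starts at 0.0
    step_angle = (step * 1) / usr_range                   # 0.15 per 200ms tick
    # a full cycle (reaching 1.0) therefore takes 7 ticks, roughly 1.4s
    return current_angle_pos, step_angle
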
class HobCellRendererPixbuf(gtk.CellRendererPixbuf):
    def __init__(self):
        gtk.CellRendererPixbuf.__init__(self)
        self.control = HobCellRendererController()
        # use the icon checker to map stock gtk icons to hob icons
        self.checker = HobIconChecker()
        self.set_property("stock-size", gtk.ICON_SIZE_DND)

    def get_pixbuf_from_stock_icon(self, widget, stock_id="", size=gtk.ICON_SIZE_DIALOG):
        if widget and stock_id and gtk.icon_factory_lookup_default(stock_id):
            return widget.render_icon(stock_id, size)

        return None

    def set_icon_name_to_id(self, new_name):
        if new_name and type(new_name) == str:
            # check whether the name needs to be mapped to a hob icon
            name = self.checker.check_stock_icon(new_name)
            if name.startswith("hic") or name.startswith("gtk"):
                stock_id = name
            else:
                stock_id = 'gtk-' + name

            return stock_id

    ''' Render the cell; "icon-name" takes priority.
        The 'hic-task-refresh' icon is animated (rotated pixbuf);
        otherwise the pixbuf from the stock icon or image is drawn as-is.
    '''
    def do_render(self, window, tree, background_area, cell_area, expose_area, flags):
        if (not self.control) or (not tree):
            return

        x, y, w, h = self.on_get_size(tree, cell_area)
        x += cell_area.x
        y += cell_area.y
        w -= 2 * self.get_property("xpad")
        h -= 2 * self.get_property("ypad")

        stock_id = ""
        if self.props.icon_name:
            stock_id = self.set_icon_name_to_id(self.props.icon_name)
        elif self.props.stock_id:
            stock_id = self.props.stock_id
        elif self.props.pixbuf:
            pix = self.props.pixbuf
        else:
            return

        if stock_id:
            pix = self.get_pixbuf_from_stock_icon(tree, stock_id, self.props.stock_size)
        if stock_id == 'hic-task-refresh':
            self.control.append_running_cell_area(cell_area)
            if self.control.is_active():
                self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, True)
            else:
                self.control.start_run(200, 0, 0, 1000, 150, tree)
        else:
            self.control.remove_running_cell_area(cell_area)
            self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, False)

    def on_get_size(self, widget, cell_area):
        if self.props.icon_name or self.props.pixbuf or self.props.stock_id:
            w, h = gtk.icon_size_lookup(self.props.stock_size)
            calc_width = self.get_property("xpad") * 2 + w
            calc_height = self.get_property("ypad") * 2 + h
            x_offset = 0
            y_offset = 0
            if cell_area and w > 0 and h > 0:
                x_offset = self.get_property("xalign") * (cell_area.width - calc_width - self.get_property("xpad"))
                y_offset = self.get_property("yalign") * (cell_area.height - calc_height - self.get_property("ypad"))

            return x_offset, y_offset, w, h

        return 0, 0, 0, 0

gobject.type_register(HobCellRendererPixbuf)

class HobCellRendererToggle(gtk.CellRendererToggle):
    def __init__(self):
        gtk.CellRendererToggle.__init__(self)
        self.ctrl = HobCellRendererController(is_draw_row=True)
        self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
        self.cell_attr = {"fadeout": False, "number_of_children": 0}

    def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
        if (not self.ctrl) or (not widget):
            return

        if flags & gtk.CELL_RENDERER_SELECTED:
            state = gtk.STATE_SELECTED
        else:
            state = gtk.STATE_NORMAL

        if self.ctrl.is_active():
            path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
            # cell_area coordinates can go negative (e.g. when the scroll bar
            # is dragged beyond the tree container range), in which case the
            # looked-up path is invalid
            if not path: return
            path = path[0]
            if path in self.ctrl.running_cell_areas:
                cr = window.cairo_create()
                color = widget.get_style().base[state]

                row_x, _, row_width, _ = widget.get_visible_rect()
                border_y = self.get_property("ypad")
                self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
                                               cell_area.height + border_y * 2, self.cell_attr["fadeout"])
        # draw the number of items in a group
        if self.cell_attr["number_of_children"]:
            text = "%d pkg" % self.cell_attr["number_of_children"]
            pangolayout = widget.create_pango_layout(text)
            textw, texth = pangolayout.get_pixel_size()
            x = cell_area.x + (cell_area.width/2) - (textw/2)
            y = cell_area.y + (cell_area.height/2) - (texth/2)

            widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
        else:
            return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)

    '''delay: normally the delay time is 1000ms
       cell_list: which cells need to be rendered
    '''
    def fadeout(self, tree, delay, cell_list=None):
        if (delay < 200) or (not tree):
            return
        self.cell_attr["fadeout"] = True
        self.ctrl.running_cell_areas = cell_list
        self.ctrl.start_run(200, 0, 0, delay, (delay * 200 / 1000), tree)

    def connect_render_state_changed(self, func, usrdata=None):
        if not func:
            return
        if usrdata:
            self.ctrl.connect("run-timer-stopped", func, self, usrdata)
        else:
            self.ctrl.connect("run-timer-stopped", func, self)

gobject.type_register(HobCellRendererToggle)
@@ -1,457 +0,0 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import glib
import re
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import hic, HobImageButton, HobInfoButton, HobAltButton, HobButton
from bb.ui.crumbs.hoblistmodel import RecipeListModel
from bb.ui.crumbs.hobpages import HobPage

#
# ImageConfigurationPage
#
class ImageConfigurationPage (HobPage):

    __dummy_machine__ = "--select a machine--"
    __dummy_image__ = "--select a base image--"

    def __init__(self, builder):
        super(ImageConfigurationPage, self).__init__(builder, "Image configuration")

        self.image_combo_id = None
        # machine_combo_changed_by_manual records whether the machine was changed
        # by code or manually by the user. If it was changed manually, all of the
        # user's recipe and package selections are cleared.
        self.machine_combo_changed_by_manual = True
        self.create_visual_elements()

    def create_visual_elements(self):
        # create visual elements
        self.toolbar = gtk.Toolbar()
        self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
        self.toolbar.set_style(gtk.TOOLBAR_BOTH)

        template_button = self.append_toolbar_button(self.toolbar,
            "Templates",
            hic.ICON_TEMPLATES_DISPLAY_FILE,
            hic.ICON_TEMPLATES_HOVER_FILE,
            "Load a previously saved template",
            self.template_button_clicked_cb)
        my_images_button = self.append_toolbar_button(self.toolbar,
            "Images",
            hic.ICON_IMAGES_DISPLAY_FILE,
            hic.ICON_IMAGES_HOVER_FILE,
            "Open previously built images",
            self.my_images_button_clicked_cb)
        settings_button = self.append_toolbar_button(self.toolbar,
            "Settings",
            hic.ICON_SETTINGS_DISPLAY_FILE,
            hic.ICON_SETTINGS_HOVER_FILE,
            "View additional build settings",
            self.settings_button_clicked_cb)

        self.config_top_button = self.add_onto_top_bar(self.toolbar)

        self.gtable = gtk.Table(40, 40, True)
        self.create_config_machine()
        self.create_config_baseimg()
        self.config_build_button = self.create_config_build_button()

    def _remove_all_widget(self):
        children = self.gtable.get_children() or []
        for child in children:
            self.gtable.remove(child)
        children = self.box_group_area.get_children() or []
        for child in children:
            self.box_group_area.remove(child)
        children = self.get_children() or []
        for child in children:
            self.remove(child)

    def _pack_components(self, pack_config_build_button = False):
        self._remove_all_widget()
        self.pack_start(self.config_top_button, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        self.box_group_area.pack_start(self.gtable, expand=True, fill=True)
        if pack_config_build_button:
            self.box_group_area.pack_end(self.config_build_button, expand=False, fill=False)
        else:
            box = gtk.HBox(False, 6)
            box.show()
            subbox = gtk.HBox(False, 0)
            subbox.set_size_request(205, 49)
            subbox.show()
            box.add(subbox)
            self.box_group_area.pack_end(box, False, False)
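            # This empty 205x49 placeholder matches the build button's size
            # request, so the page keeps a constant height whether or not the
            # button is shown.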

    def show_machine(self):
        self.progress_bar.reset()
        self._pack_components(pack_config_build_button = False)
        self.set_config_machine_layout(show_progress_bar = False)
        self.show_all()

    def update_progress_bar(self, title, fraction, status=None):
        self.progress_bar.update(fraction)
        self.progress_bar.set_title(title)
        self.progress_bar.set_rcstyle(status)

    def show_info_populating(self):
        self._pack_components(pack_config_build_button = False)
        self.set_config_machine_layout(show_progress_bar = True)
        self.show_all()

    def show_info_populated(self):
        self.progress_bar.reset()
        self._pack_components(pack_config_build_button = False)
        self.set_config_machine_layout(show_progress_bar = False)
        self.set_config_baseimg_layout()
        self.show_all()

    def show_baseimg_selected(self):
        self.progress_bar.reset()
        self._pack_components(pack_config_build_button = True)
        self.set_config_machine_layout(show_progress_bar = False)
        self.set_config_baseimg_layout()
        self.set_rcppkg_layout()
        self.show_all()

    def create_config_machine(self):
        self.machine_title = gtk.Label()
        self.machine_title.set_alignment(0.0, 0.5)
        mark = "<span %s>Select a machine</span>" % self.span_tag('x-large', 'bold')
        self.machine_title.set_markup(mark)

        self.machine_title_desc = gtk.Label()
        self.machine_title_desc.set_alignment(0.0, 0.5)
        mark = ("<span %s>Your selection is the profile of the target machine for which you"
                " are building the image.\n</span>") % (self.span_tag('medium'))
        self.machine_title_desc.set_markup(mark)

        self.machine_combo = gtk.combo_box_new_text()
        self.machine_combo.connect("changed", self.machine_combo_changed_cb)

        icon_file = hic.ICON_LAYERS_DISPLAY_FILE
        hover_file = hic.ICON_LAYERS_HOVER_FILE
        self.layer_button = HobImageButton("Layers", "Add support for machines, software, etc.",
                                           icon_file, hover_file)
        self.layer_button.connect("clicked", self.layer_button_clicked_cb)

        markup = "Layers are a powerful mechanism to extend the Yocto Project "
        markup += "with your own functionality.\n"
        markup += "For more on layers, check the <a href=\""
        markup += "http://www.yoctoproject.org/docs/current/dev-manual/"
        markup += "dev-manual.html#understanding-and-using-layers\">reference manual</a>."
        self.layer_info_icon = HobInfoButton(markup, self.get_parent())

        self.progress_box = gtk.HBox(False, 6)
        self.progress_bar = HobProgressBar()
        self.progress_box.pack_start(self.progress_bar, expand=True, fill=True)
        self.stop_button = HobAltButton("Stop")
        self.stop_button.connect("clicked", self.stop_button_clicked_cb)
        self.progress_box.pack_end(self.stop_button, expand=False, fill=False)

        self.machine_separator = gtk.HSeparator()
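
    # gtk.Table.attach() arguments below are (widget, left, right, top, bottom)
    # in cells of the 40x40 gtable created in create_visual_elements().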
    def set_config_machine_layout(self, show_progress_bar = False):
        self.gtable.attach(self.machine_title, 0, 40, 0, 4)
        self.gtable.attach(self.machine_title_desc, 0, 40, 4, 6)
        self.gtable.attach(self.machine_combo, 0, 12, 7, 10)
        self.gtable.attach(self.layer_button, 14, 36, 7, 12)
        self.gtable.attach(self.layer_info_icon, 36, 40, 7, 11)
        if show_progress_bar:
            self.gtable.attach(self.progress_box, 0, 40, 15, 19)
        self.gtable.attach(self.machine_separator, 0, 40, 13, 14)

    def create_config_baseimg(self):
        self.image_title = gtk.Label()
        self.image_title.set_alignment(0, 1.0)
        mark = "<span %s>Select a base image</span>" % self.span_tag('x-large', 'bold')
        self.image_title.set_markup(mark)

        self.image_title_desc = gtk.Label()
        self.image_title_desc.set_alignment(0, 0.5)
        mark = ("<span %s>Base images are a starting point for the type of image you want. "
                "You can build them as \n"
                "they are or customize them to your specific needs.\n</span>") % self.span_tag('medium')
        self.image_title_desc.set_markup(mark)

        self.image_combo = gtk.combo_box_new_text()
        self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)

        self.image_desc = gtk.Label()
        self.image_desc.set_alignment(0.0, 0.5)
        self.image_desc.set_size_request(360, -1)
        self.image_desc.set_justify(gtk.JUSTIFY_LEFT)
        self.image_desc.set_line_wrap(True)

        # button to view recipes
        icon_file = hic.ICON_RCIPE_DISPLAY_FILE
        hover_file = hic.ICON_RCIPE_HOVER_FILE
        self.view_recipes_button = HobImageButton("View recipes",
                                                  "Add/remove recipes and tasks",
                                                  icon_file, hover_file)
        self.view_recipes_button.connect("clicked", self.view_recipes_button_clicked_cb)

        # button to view packages
        icon_file = hic.ICON_PACKAGES_DISPLAY_FILE
        hover_file = hic.ICON_PACKAGES_HOVER_FILE
        self.view_packages_button = HobImageButton("View packages",
                                                   "Add/remove previously built packages",
                                                   icon_file, hover_file)
        self.view_packages_button.connect("clicked", self.view_packages_button_clicked_cb)

        self.image_separator = gtk.HSeparator()

    def set_config_baseimg_layout(self):
        self.gtable.attach(self.image_title, 0, 40, 15, 17)
        self.gtable.attach(self.image_title_desc, 0, 40, 18, 22)
        self.gtable.attach(self.image_combo, 0, 12, 23, 26)
        self.gtable.attach(self.image_desc, 13, 38, 23, 28)
        self.gtable.attach(self.image_separator, 0, 40, 35, 36)

    def set_rcppkg_layout(self):
        self.gtable.attach(self.view_recipes_button, 0, 20, 28, 33)
        self.gtable.attach(self.view_packages_button, 20, 40, 28, 33)

    def create_config_build_button(self):
        # Create the "Build packages" and "Build image" buttons at the bottom
        button_box = gtk.HBox(False, 6)

        # create button "Build image"
        just_bake_button = HobButton("Build image")
        just_bake_button.set_size_request(205, 49)
        just_bake_button.set_tooltip_text("Build target image")
        just_bake_button.connect("clicked", self.just_bake_button_clicked_cb)
        button_box.pack_end(just_bake_button, expand=False, fill=False)

        label = gtk.Label(" or ")
        button_box.pack_end(label, expand=False, fill=False)

        # create button "Build Packages"
        build_packages_button = HobAltButton("Build packages")
        build_packages_button.connect("clicked", self.build_packages_button_clicked_cb)
        build_packages_button.set_tooltip_text("Build recipes into packages")
        button_box.pack_end(build_packages_button, expand=False, fill=False)

        return button_box

    def stop_button_clicked_cb(self, button):
        self.builder.cancel_parse_sync()

    def machine_combo_changed_cb(self, machine_combo):
        combo_item = machine_combo.get_active_text()
        if not combo_item or combo_item == self.__dummy_machine__:
            return

        # remove the __dummy_machine__ item from the store list after the first
        # user selection because it is no longer valid
        combo_store = machine_combo.get_model()
        if len(combo_store) and (combo_store[0][0] == self.__dummy_machine__):
            machine_combo.remove_text(0)

        self.builder.configuration.curr_mach = combo_item
        if self.machine_combo_changed_by_manual:
            self.builder.configuration.clear_selection()
        # reset machine_combo_changed_by_manual
        self.machine_combo_changed_by_manual = True

        # reparse the recipes
        self.builder.populate_recipe_package_info_async()

    def update_machine_combo(self):
        all_machines = [self.__dummy_machine__] + self.builder.parameters.all_machines

        model = self.machine_combo.get_model()
        model.clear()
        for machine in all_machines:
            self.machine_combo.append_text(machine)
        self.machine_combo.set_active(0)

    def switch_machine_combo(self):
        self.machine_combo_changed_by_manual = False
        model = self.machine_combo.get_model()
        active = 0
        while active < len(model):
            if model[active][0] == self.builder.configuration.curr_mach:
                self.machine_combo.set_active(active)
                return
            active += 1

        if model[0][0] != self.__dummy_machine__:
            self.machine_combo.insert_text(0, self.__dummy_machine__)

        self.machine_combo.set_active(0)

    def update_image_desc(self):
        desc = ""
        selected_image = self.image_combo.get_active_text()
        if selected_image and selected_image in self.builder.recipe_model.pn_path.keys():
            image_path = self.builder.recipe_model.pn_path[selected_image]
            image_iter = self.builder.recipe_model.get_iter(image_path)
            desc = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_DESC)

        mark = ("<span %s>%s</span>\n") % (self.span_tag('small'), desc)
        self.image_desc.set_markup(mark)

    def image_combo_changed_idle_cb(self, selected_image, selected_recipes, selected_packages):
        self.builder.update_recipe_model(selected_image, selected_recipes)
        self.builder.update_package_model(selected_packages)
        self.builder.window_sensitive(True)

    def image_combo_changed_cb(self, combo):
        selected_image = self.image_combo.get_active_text()
        if not selected_image or (selected_image == self.__dummy_image__):
            return
        self.builder.window_sensitive(False)

        # remove the __dummy_image__ item from the store list after the first
        # user selection because it is no longer valid
        combo_store = combo.get_model()
        if len(combo_store) and (combo_store[0][0] == self.__dummy_image__):
            combo.remove_text(0)

        self.builder.customized = False

        selected_recipes = []

        image_path = self.builder.recipe_model.pn_path[selected_image]
        image_iter = self.builder.recipe_model.get_iter(image_path)
        selected_packages = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_INSTALL).split()
        self.update_image_desc()

        self.builder.recipe_model.reset()
        self.builder.package_model.reset()

        self.show_baseimg_selected()

        glib.idle_add(self.image_combo_changed_idle_cb, selected_image, selected_recipes, selected_packages)
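
    # The "changed" signal is disconnected while update_image_combo() rebuilds
    # the combo's model, so that model.clear(), append_text() and set_active()
    # do not fire image_combo_changed_cb for purely programmatic changes.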
    def _image_combo_connect_signal(self):
        if not self.image_combo_id:
            self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)

    def _image_combo_disconnect_signal(self):
        if self.image_combo_id:
            self.image_combo.disconnect(self.image_combo_id)
            self.image_combo_id = None
    def update_image_combo(self, recipe_model, selected_image):
        # Update the image combo according to the images in the recipe_model
        # populate image combo
        filter = {RecipeListModel.COL_TYPE : ['image']}
        image_model = recipe_model.tree_model(filter)
        active = 0
        cnt = 1

        white_pattern = []
        if self.builder.parameters.image_white_pattern:
            for i in self.builder.parameters.image_white_pattern.split():
                white_pattern.append(re.compile(i))

        black_pattern = []
        if self.builder.parameters.image_black_pattern:
            for i in self.builder.parameters.image_black_pattern.split():
                black_pattern.append(re.compile(i))
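
        # Filtering precedence below: a black pattern, if set, wins (any match
        # hides the image); otherwise a white pattern, if set, must match; with
        # neither configured every image is shown.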
        it = image_model.get_iter_first()
        self._image_combo_disconnect_signal()
        model = self.image_combo.get_model()
        model.clear()
        # Set an indicator text in the combo store when it is first opened
        self.image_combo.append_text(self.__dummy_image__)
        # append and set active
        while it:
            path = image_model.get_path(it)
            it = image_model.iter_next(it)
            image_name = image_model[path][recipe_model.COL_NAME]
            if image_name == self.builder.recipe_model.__custom_image__:
                continue

            if black_pattern:
                allow = True
                for pattern in black_pattern:
                    if pattern.search(image_name):
                        allow = False
                        break
            elif white_pattern:
                allow = False
                for pattern in white_pattern:
                    if pattern.search(image_name):
                        allow = True
                        break
            else:
                allow = True

            if allow:
                self.image_combo.append_text(image_name)
                if image_name == selected_image:
                    active = cnt
                cnt = cnt + 1

        self.image_combo.append_text(self.builder.recipe_model.__custom_image__)
        if selected_image == self.builder.recipe_model.__custom_image__:
            active = cnt

        self.image_combo.set_active(active)

        if active != 0:
            self.show_baseimg_selected()

        self._image_combo_connect_signal()

    def layer_button_clicked_cb(self, button):
        # Create a layer selection dialog
        self.builder.show_layer_selection_dialog()

    def view_recipes_button_clicked_cb(self, button):
        self.builder.show_recipes()

    def view_packages_button_clicked_cb(self, button):
        self.builder.show_packages()

    def just_bake_button_clicked_cb(self, button):
        self.builder.just_bake()

    def build_packages_button_clicked_cb(self, button):
        self.builder.build_packages()

    def template_button_clicked_cb(self, button):
        response, path = self.builder.show_load_template_dialog()
        if not response:
            return
        if path:
            self.builder.load_template(path)

    def my_images_button_clicked_cb(self, button):
        self.builder.show_load_my_images_dialog()

    def settings_button_clicked_cb(self, button):
        # Create an advanced settings dialog
        response, settings_changed = self.builder.show_adv_settings_dialog()
        if not response:
            return
        if settings_changed:
            self.builder.reparse_post_adv_settings()
@@ -1,572 +0,0 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import gobject
import gtk
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import hic, HobViewTable, HobAltButton, HobButton
from bb.ui.crumbs.hobpages import HobPage
import subprocess
from bb.ui.crumbs.hig import CrumbsDialog
#
# ImageDetailsPage
#
class ImageDetailsPage (HobPage):
    class DetailBox (gtk.EventBox):
        def __init__(self, widget = None, varlist = None, vallist = None, icon = None, button = None, color = HobColors.LIGHT_GRAY):
            gtk.EventBox.__init__(self)

            # set color
            style = self.get_style().copy()
            style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(color, False, False)
            self.set_style(style)

            self.hbox = gtk.HBox()
            self.hbox.set_border_width(10)
            self.add(self.hbox)

            total_rows = 0
            if widget:
                total_rows = 10
            if varlist and vallist:
                # pack the icon and the text on the left
                total_rows += len(varlist)
            self.table = gtk.Table(total_rows, 20, True)
            self.table.set_row_spacings(6)
            self.table.set_size_request(100, -1)
            self.hbox.pack_start(self.table, expand=True, fill=True, padding=15)

            colid = 0
            rowid = 0
            self.line_widgets = {}
            if icon:
                self.table.attach(icon, colid, colid + 2, 0, 1)
                colid = colid + 2
            if widget:
                self.table.attach(widget, colid, 20, 0, 10)
                rowid = 10
            if varlist and vallist:
                for row in range(rowid, total_rows):
                    index = row - rowid
                    self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
                    self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)
            # pack the button on the right
            if button:
                self.hbox.pack_end(button, expand=False, fill=False)

        def update_line_widgets(self, variable, value):
            if len(self.line_widgets) == 0:
                return
            if not isinstance(self.line_widgets[variable], gtk.Label):
                return
            self.line_widgets[variable].set_markup(self.format_line(variable, value))

        def wrap_line(self, inputs):
            # wrap long input text every wrap_width_chars characters
            wrap_width_chars = 75
            outputs = ""
            tmps = inputs
            less_chars = len(inputs)
            while (less_chars - wrap_width_chars) > 0:
                less_chars -= wrap_width_chars
                outputs += tmps[:wrap_width_chars] + "\n "
                tmps = tmps[wrap_width_chars:]
            outputs += tmps
            return outputs

        def format_line(self, variable, value):
            wrapped_value = self.wrap_line(value)
            markup = "<span weight=\'bold\'>%s</span>" % variable
            markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % wrapped_value
            return markup

        def text2label(self, variable, value):
            # append the name:value pair to the left box,
            # such as "Name: hob-core-minimal-variant-2011-12-15-beagleboard"
            label = gtk.Label()
            label.set_alignment(0.0, 0.5)
            label.set_markup(self.format_line(variable, value))
            return label
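
    # Illustrative DetailBox use (mirroring the calls later in this file): a
    # table of "Variable: value" labels on the left, an optional button on the
    # right, e.g.
    #   box = self.DetailBox(varlist=["Name: "], vallist=["my-image"], button=some_button)
    #   self.box_group_area.pack_start(box, expand=False, fill=False)
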
    def __init__(self, builder):
        super(ImageDetailsPage, self).__init__(builder, "Image details")

        self.image_store = []
        self.button_ids = {}
        self.details_bottom_buttons = gtk.HBox(False, 6)
        self.create_visual_elements()

    def create_visual_elements(self):
        # create visual elements
        # create the toolbar
        self.toolbar = gtk.Toolbar()
        self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
        self.toolbar.set_style(gtk.TOOLBAR_BOTH)

        template_button = self.append_toolbar_button(self.toolbar,
            "Templates",
            hic.ICON_TEMPLATES_DISPLAY_FILE,
            hic.ICON_TEMPLATES_HOVER_FILE,
            "Load a previously saved template",
            self.template_button_clicked_cb)
        my_images_button = self.append_toolbar_button(self.toolbar,
            "Images",
            hic.ICON_IMAGES_DISPLAY_FILE,
            hic.ICON_IMAGES_HOVER_FILE,
            "Open previously built images",
            self.my_images_button_clicked_cb)
        settings_button = self.append_toolbar_button(self.toolbar,
            "Settings",
            hic.ICON_SETTINGS_DISPLAY_FILE,
            hic.ICON_SETTINGS_HOVER_FILE,
            "View additional build settings",
            self.settings_button_clicked_cb)

        self.details_top_buttons = self.add_onto_top_bar(self.toolbar)

    def _remove_all_widget(self):
        children = self.get_children() or []
        for child in children:
            self.remove(child)
        children = self.box_group_area.get_children() or []
        for child in children:
            self.box_group_area.remove(child)
        children = self.details_bottom_buttons.get_children() or []
        for child in children:
            self.details_bottom_buttons.remove(child)

    def show_page(self, step):
        self.build_succeeded = (step == self.builder.IMAGE_GENERATED)
        image_addr = self.builder.parameters.image_addr
        image_names = self.builder.parameters.image_names
        if self.build_succeeded:
            machine = self.builder.configuration.curr_mach
            base_image = self.builder.recipe_model.get_selected_image()
            layers = self.builder.configuration.layers
            pkg_num = "%s" % len(self.builder.package_model.get_selected_packages())
        else:
            pkg_num = "N/A"

        # disconnect the old button handlers and remove all widgets
        for button_id, button in self.button_ids.items():
            button.disconnect(button_id)
        self._remove_all_widget()

        # repack
        self.pack_start(self.details_top_buttons, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        self.build_result = None
        if self.build_succeeded:
            # building is the previous step
            icon = gtk.Image()
            pixmap_path = hic.ICON_INDI_CONFIRM_FILE
            color = HobColors.RUNNING
            pix_buffer = gtk.gdk.pixbuf_new_from_file(pixmap_path)
            icon.set_from_pixbuf(pix_buffer)
            varlist = [""]
            vallist = ["Your image is ready"]
            self.build_result = self.DetailBox(varlist=varlist, vallist=vallist, icon=icon, color=color)
            self.box_group_area.pack_start(self.build_result, expand=False, fill=False)

        # create the buttons at the bottom first because the buttons are used in apply_button_per_image()
        if self.build_succeeded:
            self.buttonlist = ["Build new image", "Save as template", "Run image", "Deploy image"]
        else: # we got to this page from "My images"
            self.buttonlist = ["Build new image", "Run image", "Deploy image"]

        # Name
        self.image_store = []
        self.toggled_image = ""
        default_image_size = 0
        self.num_toggled = 0
        i = 0
        for image_name in image_names:
            image_size = HobPage._size_to_string(os.stat(os.path.join(image_addr, image_name)).st_size)

            image_attr = ("run" if (self.test_type_runnable(image_name) and self.test_mach_runnable(image_name)) else \
                          ("deploy" if self.test_deployable(image_name) else ""))
            is_toggled = (image_attr != "")

            if not self.toggled_image:
                if i == (len(image_names) - 1):
                    is_toggled = True
                if is_toggled:
                    default_image_size = image_size
                    self.toggled_image = image_name

            split_stuff = image_name.split('.')
            if "rootfs" in split_stuff:
                image_type = image_name[(len(split_stuff[0]) + len(".rootfs") + 1):]
            else:
                image_type = image_name[(len(split_stuff[0]) + 1):]

            self.image_store.append({'name': image_name,
                                     'type': image_type,
                                     'size': image_size,
                                     'is_toggled': is_toggled,
                                     'action_attr': image_attr,})

            i = i + 1
            self.num_toggled += is_toggled
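
        # image_store now holds one dict per generated file:
        #   name / type / size / is_toggled / action_attr ('run', 'deploy' or '')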

        is_runnable = self.create_bottom_buttons(self.buttonlist, self.toggled_image)

        # Generated image files info
        varlist = ["Name: ", "FileCreated: ", "Directory: "]
        vallist = []

        vallist.append(image_name.split('.')[0])
        vallist.append(', '.join(fileitem['type'] for fileitem in self.image_store))
        vallist.append(image_addr)

        view_files_button = HobAltButton("View files")
        view_files_button.connect("clicked", self.view_files_clicked_cb, image_addr)
        view_files_button.set_tooltip_text("Open the directory containing the image files")
        self.image_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=view_files_button)
        self.box_group_area.pack_start(self.image_detail, expand=False, fill=True)

        # The default kernel box for the qemu images
        self.sel_kernel = ""
        if 'qemu' in image_name:
            self.sel_kernel = self.get_kernel_file_name()

        varlist = ["Kernel: "]
        vallist = []
        vallist.append(self.sel_kernel)

        change_kernel_button = HobAltButton("Change")
        change_kernel_button.connect("clicked", self.change_kernel_cb)
        change_kernel_button.set_tooltip_text("Change qemu kernel file")
        self.kernel_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=change_kernel_button)
        self.box_group_area.pack_start(self.kernel_detail, expand=False, fill=False)

        # Machine, Base image and Layers
        layer_num_limit = 15
        varlist = ["Machine: ", "Base image: ", "Layers: "]
        vallist = []
        self.setting_detail = None
        if self.build_succeeded:
            vallist.append(machine)
            vallist.append(base_image)
            i = 0
            for layer in layers:
                varlist.append(" - ")
                if i > layer_num_limit:
                    break
                i += 1
            vallist.append("")
            i = 0
            for layer in layers:
                if i > layer_num_limit:
                    break
                elif i == layer_num_limit:
                    vallist.append("and more...")
                else:
                    vallist.append(layer)
                i += 1

            edit_config_button = HobAltButton("Edit configuration")
            edit_config_button.set_tooltip_text("Edit machine, base image and recipes")
            edit_config_button.connect("clicked", self.edit_config_button_clicked_cb)
            self.setting_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_config_button)
            self.box_group_area.pack_start(self.setting_detail, expand=True, fill=True)

        # Packages included, and Total image size
        varlist = ["Packages included: ", "Total image size: "]
        vallist = []
        vallist.append(pkg_num)
        vallist.append(default_image_size)
        if self.build_succeeded:
            edit_packages_button = HobAltButton("Edit packages")
            edit_packages_button.set_tooltip_text("Edit the packages included in your image")
            edit_packages_button.connect("clicked", self.edit_packages_button_clicked_cb)
        else: # we got to this page from "My images"
            edit_packages_button = None
        self.package_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_packages_button)
        self.box_group_area.pack_start(self.package_detail, expand=False, fill=False)

        # pack the buttons at the bottom; at this point they have already been created
        if self.build_succeeded:
            self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)
        else: # for the "My images" page
            self.details_separator = gtk.HSeparator()
            self.box_group_area.pack_start(self.details_separator, expand=False, fill=False)
            self.box_group_area.pack_start(self.details_bottom_buttons, expand=False, fill=False)

        self.show_all()
        if not is_runnable:
            self.kernel_detail.hide()

    def view_files_clicked_cb(self, button, image_addr):
        subprocess.call("xdg-open /%s" % image_addr, shell=True)

    def refresh_package_detail_box(self, image_size):
        self.package_detail.update_line_widgets("Total image size: ", image_size)

    def test_type_runnable(self, image_name):
        type_runnable = False
        for t in self.builder.parameters.runnable_image_types:
            if image_name.endswith(t):
                type_runnable = True
                break
        return type_runnable

    def test_mach_runnable(self, image_name):
        mach_runnable = False
        for t in self.builder.parameters.runnable_machine_patterns:
            if t in image_name:
                mach_runnable = True
                break
        return mach_runnable

    def test_deployable(self, image_name):
        deployable = False
        for t in self.builder.parameters.deployable_image_types:
            if image_name.endswith(t):
                deployable = True
                break
        return deployable

    def get_kernel_file_name(self, kernel_addr=""):
        kernel_name = ""

        if not kernel_addr:
            kernel_addr = self.builder.parameters.image_addr

        files = [f for f in os.listdir(kernel_addr) if f[0] != '.']
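        # e.g. with kernel_image_type 'bzImage', 'bzImage-qemux86.bin' matches:
        # the file must end in '.bin' and the type must be one of the
        # '-'-separated fields of the file name's first '.'-separated part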
        for check_file in files:
            if check_file.endswith(".bin"):
                name_splits = check_file.split(".")[0]
                if self.builder.parameters.kernel_image_type in name_splits.split("-"):
                    kernel_name = check_file
                    break

        return kernel_name
    def show_builded_images_dialog(self, widget, primary_action=""):
        title = primary_action if primary_action else "Your built images"
        dialog = CrumbsDialog(title, self.builder,
                              gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
        dialog.set_border_width(12)

        label = gtk.Label()
        label.set_use_markup(True)
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span font_desc='12'>Select the image file you want to %s</span>" % primary_action)
        dialog.vbox.pack_start(label, expand=False, fill=False)

        # filter the created images by their action attribute (deploy or run)
        action_attr = ""
        action_images = []
        for fileitem in self.image_store:
            action_attr = fileitem['action_attr']
            if (action_attr == 'run' and primary_action == "Run image") \
               or (action_attr == 'deploy' and primary_action == "Deploy image"):
                action_images.append(fileitem)

        # pack a radio button for each corresponding 'runnable' or 'deployable'
        # file. By design we assume a single build result does not contain both
        # 'deployable' and 'runnable' files.
        curr_row = 0
        rows = (len(action_images)) if len(action_images) < 10 else 10
        table = gtk.Table(rows, 10, True)
        table.set_row_spacings(6)
        table.set_col_spacing(0, 12)
        table.set_col_spacing(5, 12)

        sel_parent_btn = None
        for fileitem in action_images:
            sel_btn = gtk.RadioButton(sel_parent_btn, fileitem['type'])
            if not sel_parent_btn:
                sel_parent_btn = sel_btn
            sel_btn.set_active(fileitem['is_toggled'])
            sel_btn.connect('toggled', self.table_selected_cb, fileitem)
            if curr_row < 10:
                table.attach(sel_btn, 2, 5, curr_row, curr_row + 1)
            else:
                table.attach(sel_btn, 7, 10, curr_row - 10, curr_row - 9)
            curr_row += 1

        dialog.vbox.pack_start(table, expand=False, fill=False, padding = 6)

        button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
        HobAltButton.style_button(button)

        if primary_action:
            button = dialog.add_button(primary_action, gtk.RESPONSE_YES)
            HobButton.style_button(button)

        dialog.show_all()

        response = dialog.run()
        dialog.destroy()

        if response != gtk.RESPONSE_YES:
            return

        for fileitem in self.image_store:
            if fileitem['is_toggled']:
                if fileitem['action_attr'] == 'run':
                    self.builder.runqemu_image(fileitem['name'], self.sel_kernel)
                elif fileitem['action_attr'] == 'deploy':
                    self.builder.deploy_image(fileitem['name'])
    def table_selected_cb(self, tbutton, image):
        image['is_toggled'] = tbutton.get_active()
        if image['is_toggled']:
            self.toggled_image = image['name']

    def change_kernel_cb(self, widget):
        kernel_path = self.builder.show_load_kernel_dialog()
        if kernel_path and self.kernel_detail:
            import os.path
            self.sel_kernel = os.path.basename(kernel_path)
            markup = self.kernel_detail.format_line("Kernel: ", self.sel_kernel)
            label = ((self.kernel_detail.get_children()[0]).get_children()[0]).get_children()[0]
            label.set_markup(markup)

    def create_bottom_buttons(self, buttonlist, image_name):
        # Create the buttons at the bottom
        created = False
        packed = False
        self.button_ids = {}
        is_runnable = False

        # create button "Deploy image"
        name = "Deploy image"
        if name in buttonlist and self.test_deployable(image_name):
            deploy_button = HobButton('Deploy image')
            deploy_button.set_size_request(205, 49)
            deploy_button.set_tooltip_text("Burn a live image to a USB drive or flash memory")
            deploy_button.set_flags(gtk.CAN_DEFAULT)
            button_id = deploy_button.connect("clicked", self.deploy_button_clicked_cb)
            self.button_ids[button_id] = deploy_button
            self.details_bottom_buttons.pack_end(deploy_button, expand=False, fill=False)
            created = True
            packed = True

        name = "Run image"
        if name in buttonlist and self.test_type_runnable(image_name) and self.test_mach_runnable(image_name):
            if created == True:
                # separator
                label = gtk.Label(" or ")
                self.details_bottom_buttons.pack_end(label, expand=False, fill=False)

                # create button "Run image"
                run_button = HobAltButton("Run image")
            else:
                # create button "Run image" as the primary button
                run_button = HobButton("Run image")
                run_button.set_size_request(205, 49)
                run_button.set_flags(gtk.CAN_DEFAULT)
                packed = True
            run_button.set_tooltip_text("Start up an image with qemu emulator")
            button_id = run_button.connect("clicked", self.run_button_clicked_cb)
            self.button_ids[button_id] = run_button
            self.details_bottom_buttons.pack_end(run_button, expand=False, fill=False)
            created = True
            is_runnable = True

        name = "Save as template"
        if name in buttonlist:
            if created == True:
                # separator
                label = gtk.Label(" or ")
                self.details_bottom_buttons.pack_end(label, expand=False, fill=False)

                # create button "Save as template"
                save_button = HobAltButton("Save as template")
            else:
                save_button = HobButton("Save as template")
                save_button.set_size_request(205, 49)
                save_button.set_flags(gtk.CAN_DEFAULT)
                packed = True
            save_button.set_tooltip_text("Save the image configuration for reuse")
            button_id = save_button.connect("clicked", self.save_button_clicked_cb)
            self.button_ids[button_id] = save_button
            self.details_bottom_buttons.pack_end(save_button, expand=False, fill=False)
            created = True

        name = "Build new image"
        if name in buttonlist:
            # create button "Build new image"
            if packed:
                build_new_button = HobAltButton("Build new image")
            else:
                build_new_button = HobButton("Build new image")
                build_new_button.set_flags(gtk.CAN_DEFAULT)
                build_new_button.set_size_request(205, 49)
            self.details_bottom_buttons.pack_end(build_new_button, expand=False, fill=False)
            build_new_button.set_tooltip_text("Create a new image from scratch")
            button_id = build_new_button.connect("clicked", self.build_new_button_clicked_cb)
            self.button_ids[button_id] = build_new_button

        return is_runnable
    def save_button_clicked_cb(self, button):
        self.builder.show_save_template_dialog()

    def deploy_button_clicked_cb(self, button):
        if self.toggled_image:
            if self.num_toggled > 1:
                self.set_sensitive(False)
                self.show_builded_images_dialog(None, "Deploy image")
                self.set_sensitive(True)
            else:
                self.builder.deploy_image(self.toggled_image)

    def run_button_clicked_cb(self, button):
        if self.toggled_image:
            if self.num_toggled > 1:
                self.set_sensitive(False)
                self.show_builded_images_dialog(None, "Run image")
                self.set_sensitive(True)
            else:
                self.builder.runqemu_image(self.toggled_image, self.sel_kernel)

    def build_new_button_clicked_cb(self, button):
        self.builder.initiate_new_build_async()

    def edit_config_button_clicked_cb(self, button):
        self.builder.show_configuration()

    def edit_packages_button_clicked_cb(self, button):
        self.builder.show_packages(ask=False)

    def template_button_clicked_cb(self, button):
        response, path = self.builder.show_load_template_dialog()
        if not response:
            return
        if path:
            self.builder.load_template(path)

    def my_images_button_clicked_cb(self, button):
        self.builder.show_load_my_images_dialog()

    def settings_button_clicked_cb(self, button):
        # Create an advanced settings dialog
        response, settings_changed = self.builder.show_adv_settings_dialog()
        if not response:
            return
        if settings_changed:
            self.builder.reparse_post_adv_settings()
bitbake/lib/bb/ui/crumbs/layereditor.py
@@ -0,0 +1,153 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gobject
import gtk
from bb.ui.crumbs.configurator import Configurator
from bb.ui.crumbs.hig import CrumbsDialog

class LayerEditor(gtk.Dialog):
    """
    Gtk+ widget for enabling and disabling layers.
    Layers are added by using an open dialog to locate the layer's layer.conf.
    Disabled layers are deleted from conf/bblayers.conf.
    """
    def __init__(self, configurator, parent=None):
        gtk.Dialog.__init__(self, "Layers", None,
                            gtk.DIALOG_DESTROY_WITH_PARENT,
                            (gtk.STOCK_CLOSE, gtk.RESPONSE_OK))

        # We want to show a little more of the treeview in the default,
        # emptier, case
        self.set_size_request(-1, 300)
        self.set_border_width(6)
        self.vbox.set_property("spacing", 0)
        self.action_area.set_property("border-width", 6)

        self.configurator = configurator
        self.newly_added = {}

        # Label to inform users that meta is enabled but that they can't
        # disable it, as doing so would be a *bad* idea
        msg = "As the core of the build system the <i>meta</i> layer must always be included and therefore can't be viewed or edited here."
        lbl = gtk.Label()
        lbl.show()
        lbl.set_use_markup(True)
        lbl.set_markup(msg)
        lbl.set_line_wrap(True)
        lbl.set_justify(gtk.JUSTIFY_FILL)
        self.vbox.pack_start(lbl, expand=False, fill=False, padding=6)

        # Create a treeview in which to list layers
        # ListStore of Name, Path, Enabled
        self.layer_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
        self.tv = gtk.TreeView(self.layer_store)
        self.tv.set_headers_visible(True)

        col0 = gtk.TreeViewColumn('Name')
        self.tv.append_column(col0)
        col1 = gtk.TreeViewColumn('Path')
        self.tv.append_column(col1)
        col2 = gtk.TreeViewColumn('Enabled')
        self.tv.append_column(col2)

        cell0 = gtk.CellRendererText()
        col0.pack_start(cell0, True)
        col0.set_attributes(cell0, text=0)
        cell1 = gtk.CellRendererText()
        col1.pack_start(cell1, True)
        col1.set_attributes(cell1, text=1)
        cell2 = gtk.CellRendererToggle()
        cell2.connect("toggled", self._toggle_layer_cb)
        col2.pack_start(cell2, True)
        col2.set_attributes(cell2, active=2)
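
        # The columns map onto the ListStore as: text=0 -> Name, text=1 -> Path
        # and active=2 -> the Enabled checkbox rendered by the toggle cell.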
        self.tv.show()
        self.vbox.pack_start(self.tv, expand=True, fill=True, padding=0)

        tb = gtk.Toolbar()
        tb.set_icon_size(gtk.ICON_SIZE_SMALL_TOOLBAR)
        tb.set_style(gtk.TOOLBAR_BOTH)
        tb.set_tooltips(True)
        tb.show()
        icon = gtk.Image()
        icon.set_from_stock(gtk.STOCK_ADD, gtk.ICON_SIZE_SMALL_TOOLBAR)
        icon.show()
        tb.insert_item("Add Layer", "Add new layer", None, icon,
                       self._find_layer_cb, None, -1)
        self.vbox.pack_start(tb, expand=False, fill=False, padding=0)

    def set_parent_window(self, parent):
        self.set_transient_for(parent)

    def load_current_layers(self, data):
        for layer, path in self.configurator.enabled_layers.items():
            if layer != 'meta':
                self.layer_store.append([layer, path, True])

    def save_current_layers(self):
        self.configurator.writeLayerConf()

    def _toggle_layer_cb(self, cell, path):
        name = self.layer_store[path][0]
        toggle = not self.layer_store[path][2]
        if toggle:
            self.configurator.addLayer(name, path)
        else:
            self.configurator.disableLayer(name)
        self.layer_store[path][2] = toggle

    def _find_layer_cb(self, button):
        self.find_layer(self)

    def find_layer(self, parent):
        def conf_error(parent, lbl):
            dialog = CrumbsDialog(parent, lbl)
            dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
            response = dialog.run()
            dialog.destroy()

        dialog = gtk.FileChooserDialog("Add new layer", parent,
                                       gtk.FILE_CHOOSER_ACTION_OPEN,
                                       (gtk.STOCK_CANCEL, gtk.RESPONSE_NO,
                                        gtk.STOCK_OPEN, gtk.RESPONSE_YES))
        label = gtk.Label("Select the layer.conf of the layer you wish to add")
        label.show()
        dialog.set_extra_widget(label)
        response = dialog.run()
        path = dialog.get_filename()
        dialog.destroy()

        lbl = "<b>Error</b>\nUnable to load layer <i>%s</i> because " % path
        if response == gtk.RESPONSE_YES:
            # FIXME: verify we've actually got a layer conf?
            if path.endswith("layer.conf"):
                name, layerpath = self.configurator.addLayerConf(path)
                if name and layerpath:
                    self.newly_added[name] = layerpath
                    self.layer_store.append([name, layerpath, True])
                    return
                elif name:
                    return
                else:
                    lbl += "there was a problem parsing the layer.conf."
            else:
                lbl += "it is not a layer.conf file."
            conf_error(parent, lbl)
@@ -1,277 +0,0 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import glib
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import HobViewTable, HobNotebook, HobAltButton, HobButton
from bb.ui.crumbs.hoblistmodel import PackageListModel
from bb.ui.crumbs.hobpages import HobPage

#
# PackageSelectionPage
#
class PackageSelectionPage (HobPage):

    pages = [
        {
            'name'    : 'Included',
            'filter'  : { PackageListModel.COL_INC : [True] },
            'columns' : [{
                'col_name' : 'Package name',
                'col_id'   : PackageListModel.COL_NAME,
                'col_t_id' : PackageListModel.COL_FONT,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 300,
                'expand'   : 'True'
            }, {
                'col_name' : 'Brought in by',
                'col_id'   : PackageListModel.COL_BINB,
                'col_t_id' : PackageListModel.COL_FONT,
                'col_style': 'binb',
                'col_min'  : 100,
                'col_max'  : 350,
                'expand'   : 'True'
            }, {
                'col_name' : 'Size',
                'col_id'   : PackageListModel.COL_SIZE,
                'col_t_id' : PackageListModel.COL_FONT,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 300,
                'expand'   : 'True'
            }, {
                'col_name' : 'Included',
                'col_id'   : PackageListModel.COL_INC,
                'col_t_id' : PackageListModel.COL_FONT,
                'col_style': 'check toggle',
                'col_group': 'tree store group',
                'col_min'  : 100,
                'col_max'  : 100
            }]
        }, {
            'name'    : 'All packages',
            'filter'  : {},
            'columns' : [{
                'col_name' : 'Package name',
                'col_id'   : PackageListModel.COL_NAME,
                'col_t_id' : PackageListModel.COL_FONT,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 400,
                'expand'   : 'True'
            }, {
                'col_name' : 'Size',
                'col_id'   : PackageListModel.COL_SIZE,
                'col_t_id' : PackageListModel.COL_FONT,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 500,
                'expand'   : 'True'
            }, {
                'col_name' : 'Included',
                'col_id'   : PackageListModel.COL_INC,
                'col_style': 'check toggle',
                'col_group': 'tree store group',
                'col_min'  : 100,
                'col_max'  : 100
            }]
        }
    ]
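
    # Each entry in 'pages' describes one notebook tab: 'filter' is the model
    # filter passed to PackageListModel.tree_model() and 'columns' the column
    # specs (title, model column ids, renderer style, widths) consumed by
    # HobViewTable in create_visual_elements() below.
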
    def __init__(self, builder):
        super(PackageSelectionPage, self).__init__(builder, "Packages")

        # set invisible members
        self.recipe_model = self.builder.recipe_model
        self.package_model = self.builder.package_model

        # create visual elements
        self.create_visual_elements()

    def included_clicked_cb(self, button):
        self.ins.set_current_page(0)

    def create_visual_elements(self):
        self.label = gtk.Label("Packages included: 0\nSelected packages size: 0 MB")
        self.eventbox = self.add_onto_top_bar(self.label, 73)
        self.pack_start(self.eventbox, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        # set visible members
        self.ins = HobNotebook()
        self.tables = [] # we need to modify the tables when the dialog is shown
        # append the tabs
        for page in self.pages:
            columns = page['columns']
            tab = HobViewTable(columns)
            filter = page['filter']
            tab.set_model(self.package_model.tree_model(filter))
            tab.connect("toggled", self.table_toggled_cb, page['name'])
            tab.connect_group_selection(self.table_selected_cb)
            if page['name'] == "Included":
                tab.connect("button-release-event", self.button_click_cb)
                tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
            self.ins.append_page(tab, page['name'])
            self.tables.append(tab)

        self.ins.set_entry("Search packages:")
        # set the search entry for each table
        for tab in self.tables:
            tab.set_search_entry(0, self.ins.search)

        # add all into the dialog
        self.box_group_area.pack_start(self.ins, expand=True, fill=True)

        button_box = gtk.HBox(False, 6)
        self.box_group_area.pack_start(button_box, expand=False, fill=False)

        self.build_image_button = HobButton('Build image')
        self.build_image_button.set_size_request(205, 49)
        self.build_image_button.set_tooltip_text("Build target image")
        self.build_image_button.set_flags(gtk.CAN_DEFAULT)
        self.build_image_button.grab_default()
        self.build_image_button.connect("clicked", self.build_image_clicked_cb)
        button_box.pack_end(self.build_image_button, expand=False, fill=False)

        self.back_button = HobAltButton("<< Back to image configuration")
        self.back_button.connect("clicked", self.back_button_clicked_cb)
        button_box.pack_start(self.back_button, expand=False, fill=False)

    def button_click_cb(self, widget, event):
        path, col = widget.table_tree.get_cursor()
        tree_model = widget.table_tree.get_model()
        if path: # else activation is likely a removal
            binb = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_BINB)
            if binb:
                self.builder.show_binb_dialog(binb)

    def build_image_clicked_cb(self, button):
        self.builder.build_image()

    def back_button_clicked_cb(self, button):
        self.builder.show_configuration()

    def _expand_all(self):
        for tab in self.tables:
            tab.table_tree.expand_all()

    def refresh_selection(self):
        self._expand_all()

        self.builder.configuration.selected_packages = self.package_model.get_selected_packages()
        self.builder.configuration.user_selected_packages = self.package_model.get_user_selected_packages()
        selected_packages_num = len(self.builder.configuration.selected_packages)
        selected_packages_size = self.package_model.get_packages_size()
        selected_packages_size_str = HobPage._size_to_string(selected_packages_size)

        image_overhead_factor = self.builder.configuration.image_overhead_factor
        image_rootfs_size = self.builder.configuration.image_rootfs_size * 1024 # image_rootfs_size is in KB
        image_extra_size = self.builder.configuration.image_extra_size * 1024 # image_extra_size is in KB
        base_size = image_overhead_factor * selected_packages_size
        image_total_size = max(base_size, image_rootfs_size) + image_extra_size
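        # i.e. total = max(overhead_factor * packages_size, rootfs_size) + extra_size,
        # plus 50 MB (51200 KB) of head-room whenever zypper is selected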
if "zypper" in self.builder.configuration.selected_packages:
|
||||
image_total_size += (51200 * 1024)
|
||||
image_total_size_str = HobPage._size_to_string(image_total_size)
|
||||
|
||||
self.label.set_label("Packages included: %s\nSelected packages size: %s\nTotal image size: %s" %
|
||||
(selected_packages_num, selected_packages_size_str, image_total_size_str))
|
||||
self.ins.show_indicator_icon("Included", selected_packages_num)
|
||||
|
||||
def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
|
||||
if not self.package_model.path_included(path):
|
||||
self.package_model.include_item(item_path=path, binb="User Selected")
|
||||
else:
|
||||
if pagename == "Included":
|
||||
self.pre_fadeout_checkout_include(view_tree)
|
||||
self.package_model.exclude_item(item_path=path)
|
||||
self.render_fadeout(view_tree, cell)
|
||||
else:
|
||||
self.package_model.exclude_item(item_path=path)
|
||||
|
||||
self.refresh_selection()
|
||||
if not self.builder.customized:
|
||||
self.builder.customized = True
|
||||
self.builder.configuration.selected_image = self.recipe_model.__custom_image__
|
||||
self.builder.rcppkglist_populated()
|
||||
|
||||
self.builder.window_sensitive(True)
|
||||
|
||||
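
    # Toggling happens in two steps: table_toggled_cb greys the window out and
    # defers the model update to toggle_item_idle_cb via glib.idle_add, so the
    # UI can repaint before the potentially slow include/exclude work runs.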
    def table_toggled_cb(self, table, cell, view_path, toggled_columnid, view_tree, pagename):
        # Click to include or exclude a package
        self.builder.window_sensitive(False)
        view_model = view_tree.get_model()
        path = self.package_model.convert_vpath_to_path(view_model, view_path)
        glib.idle_add(self.toggle_item_idle_cb, path, view_tree, cell, pagename)
    def pre_fadeout_checkout_include(self, tree):
        self.package_model.resync_fadeout_column(self.package_model.get_iter_first())
        # Check out a model based on the COL_FADE_INC column, which saves the
        # previous state of COL_INC from before exclude_item is called
        filter = { PackageListModel.COL_FADE_INC : [True]}
        new_model = self.package_model.tree_model(filter)
        tree.set_model(new_model)
        tree.expand_all()

    def get_excluded_rows(self, to_render_cells, model, it):
        while it:
            path = model.get_path(it)
            prev_cell_is_active = model.get_value(it, PackageListModel.COL_FADE_INC)
            curr_cell_is_active = model.get_value(it, PackageListModel.COL_INC)
            if (prev_cell_is_active == True) and (curr_cell_is_active == False):
                to_render_cells.append(path)
            if model.iter_has_child(it):
                self.get_excluded_rows(to_render_cells, model, model.iter_children(it))
            it = model.iter_next(it)

        return to_render_cells

    def render_fadeout(self, tree, cell):
        if (not cell) or (not tree):
            return
        to_render_cells = []
        view_model = tree.get_model()
        self.get_excluded_rows(to_render_cells, view_model, view_model.get_iter_first())

        cell.fadeout(tree, 1000, to_render_cells)

    def after_fadeout_checkin_include(self, table, ctrl, cell, tree):
        tree.set_model(self.package_model.tree_model(self.pages[0]['filter']))
        tree.expand_all()

    def foreach_cell_change_font(self, model, path, iter, paths=None):
        # Change the font for a group of cells
        if path and iter and path[0] == paths[0]:
            self.package_model.set(iter, self.package_model.COL_FONT, "bold")
        else:
            if iter and model.iter_parent(iter) == None:
                self.package_model.set(iter, self.package_model.COL_FONT, '11')
            else:
                self.package_model.set(iter, self.package_model.COL_FONT, '10')

    def table_selected_cb(self, selection):
        model, paths = selection.get_selected_rows()
        if paths:
            child_path = self.package_model.convert_vpath_to_path(model, paths[0])
            self.package_model.foreach(self.foreach_cell_change_font, child_path)
@@ -1,186 +0,0 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gobject
import gtk
try:
    import gconf
except ImportError:
    pass

class PersistentTooltip(gtk.Window):
    """
    A tooltip which persists once shown until the user dismisses it with the Esc
    key or by clicking the close button.

    # FIXME: the PersistentTooltip should be disabled when the user clicks anywhere off
    # it. We can't do this with focus-out-event because modal ensures we have focus?

    markup: some Pango text markup to display in the tooltip
    """
    def __init__(self, markup, parent_win=None):
        gtk.Window.__init__(self, gtk.WINDOW_POPUP)

        # Inherit the system theme for a tooltip
        style = gtk.rc_get_style_by_paths(gtk.settings_get_default(),
                                          'gtk-tooltip', 'gtk-tooltip', gobject.TYPE_NONE)
        self.set_style(style)

        # The placement of the close button on the tip should reflect how the
        # window manager of the user's system places close buttons. Try to read
        # the metacity gconf key to determine whether the close button is on the
        # left or the right.
        # In the case that we can't determine the user's configuration we default
        # to close buttons being on the right.
        __button_right = True
        try:
            client = gconf.client_get_default()
            order = client.get_string("/apps/metacity/general/button_layout")
            if order and order.endswith(":"):
                __button_right = False
        except NameError:
            pass
|
||||
# We need to ensure we're only shown once
|
||||
self.shown = False
|
||||
|
||||
# We don't want any WM decorations
|
||||
self.set_decorated(False)
|
||||
# We don't want to show in the taskbar or window switcher
|
||||
self.set_skip_pager_hint(True)
|
||||
self.set_skip_taskbar_hint(True)
|
||||
# We must be modal to ensure we grab focus when presented from a gtk.Dialog
|
||||
self.set_modal(True)
|
||||
|
||||
self.set_border_width(0)
|
||||
self.set_position(gtk.WIN_POS_MOUSE)
|
||||
self.set_opacity(0.95)
|
||||
|
||||
# Ensure a reasonable minimum size
|
||||
self.set_geometry_hints(self, 100, 50)
|
||||
|
||||
# Set this window as a transient window for parent(main window)
|
||||
if parent_win:
|
||||
self.set_transient_for(parent_win)
|
||||
self.set_destroy_with_parent(True)
|
||||
# Draw our label and close buttons
|
||||
hbox = gtk.HBox(False, 0)
|
||||
hbox.show()
|
||||
self.add(hbox)
|
||||
|
||||
img = gtk.Image()
|
||||
img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_BUTTON)
|
||||
|
||||
self.button = gtk.Button()
|
||||
self.button.set_image(img)
|
||||
self.button.connect("clicked", self._dismiss_cb)
|
||||
self.button.set_flags(gtk.CAN_DEFAULT)
|
||||
self.button.grab_focus()
|
||||
self.button.show()
|
||||
vbox = gtk.VBox(False, 0)
|
||||
vbox.show()
|
||||
vbox.pack_start(self.button, False, False, 0)
|
||||
if __button_right:
|
||||
hbox.pack_end(vbox, True, True, 0)
|
||||
else:
|
||||
hbox.pack_start(vbox, True, True, 0)
|
||||
|
||||
self.set_default(self.button)
|
||||
|
||||
bin = gtk.HBox(True, 6)
|
||||
bin.set_border_width(6)
|
||||
bin.show()
|
||||
self.label = gtk.Label()
|
||||
self.label.set_line_wrap(True)
|
||||
# We want to match the colours of the normal tooltips, as dictated by
|
||||
# the users gtk+-2.0 theme, wherever possible - on some systems this
|
||||
# requires explicitly setting a fg_color for the label which matches the
|
||||
# tooltip_fg_color
|
||||
settings = gtk.settings_get_default()
|
||||
colours = settings.get_property('gtk-color-scheme').split('\n')
|
||||
# remove any empty lines, there's likely to be a trailing one after
|
||||
# calling split on a dictionary-like string
|
||||
colours = filter(None, colours)
|
||||
for col in colours:
|
||||
item, val = col.split(': ')
|
||||
if item == 'tooltip_fg_color':
|
||||
style = self.label.get_style()
|
||||
style.fg[gtk.STATE_NORMAL] = gtk.gdk.color_parse(val)
|
||||
self.label.set_style(style)
|
||||
break # we only care for the tooltip_fg_color
|
||||
|
||||
self.label.set_markup(markup)
|
||||
self.label.show()
|
||||
bin.add(self.label)
|
||||
hbox.pack_end(bin, True, True, 6)
|
||||
|
||||
# add the original URL display for user reference
|
||||
if 'a href' in markup:
|
||||
hbox.set_tooltip_text(self.get_markup_url(markup))
|
||||
hbox.show()
|
||||
|
||||
self.connect("key-press-event", self._catch_esc_cb)
|
||||
|
||||
"""
|
||||
Callback when the PersistentTooltip's close button is clicked.
|
||||
Hides the PersistentTooltip.
|
||||
"""
|
||||
def _dismiss_cb(self, button):
|
||||
self.hide()
|
||||
return True
|
||||
|
||||
"""
|
||||
Callback when the Esc key is detected. Hides the PersistentTooltip.
|
||||
"""
|
||||
def _catch_esc_cb(self, widget, event):
|
||||
keyname = gtk.gdk.keyval_name(event.keyval)
|
||||
if keyname == "Escape":
|
||||
self.hide()
|
||||
return True
|
||||
|
||||
"""
|
||||
Called to present the PersistentTooltip.
|
||||
Overrides the superclasses show() method to include state tracking.
|
||||
"""
|
||||
def show(self):
|
||||
if not self.shown:
|
||||
self.shown = True
|
||||
gtk.Window.show(self)
|
||||
|
||||
"""
|
||||
Called to hide the PersistentTooltip.
|
||||
Overrides the superclasses hide() method to include state tracking.
|
||||
"""
|
||||
def hide(self):
|
||||
self.shown = False
|
||||
gtk.Window.hide(self)
|
||||
|
||||
"""
|
||||
Called to get the hyperlink URL from markup text.
|
||||
"""
|
||||
def get_markup_url(self, markup):
|
||||
url = "http:"
|
||||
if markup and type(markup) == str:
|
||||
s = markup
|
||||
if 'http:' in s:
|
||||
import re
|
||||
url = re.search('(http:[^,\\ "]+)', s).group(0)
|
||||
|
||||
return url
|
||||
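The get_markup_url() regex above pulls the first http: URL out of Pango markup, stopping at a comma, space, or double quote. A quick standalone check of that behaviour (standard library only; the example markup is illustrative):

import re

def get_markup_url(markup):
    # Mirrors the method above: first 'http:...' run not containing a
    # comma, space, or double quote; falls back to the bare scheme.
    url = "http:"
    if isinstance(markup, str) and 'http:' in markup:
        url = re.search('(http:[^,\\ "]+)', markup).group(0)
    return url

markup = 'See <a href="http://www.yoctoproject.org/docs">the docs</a>'
print(get_markup_url(markup))  # -> http://www.yoctoproject.org/docs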
@@ -1,54 +0,0 @@
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
from bb.ui.crumbs.hobcolor import HobColors

class HobProgressBar (gtk.ProgressBar):
    def __init__(self):
        gtk.ProgressBar.__init__(self)
        self.set_rcstyle(True)
        self.percentage = 0

    def set_rcstyle(self, status):
        rcstyle = gtk.RcStyle()
        rcstyle.fg[2] = gtk.gdk.Color(HobColors.BLACK)
        if status == "stop":
            rcstyle.bg[3] = gtk.gdk.Color(HobColors.WARNING)
        elif status == "fail":
            rcstyle.bg[3] = gtk.gdk.Color(HobColors.ERROR)
        else:
            rcstyle.bg[3] = gtk.gdk.Color(HobColors.RUNNING)
        self.modify_style(rcstyle)

    def set_title(self, text=None):
        if not text:
            text = ""
        text += " %.0f%%" % self.percentage
        self.set_text(text)

    def reset(self):
        self.set_fraction(0)
        self.set_text("")
        self.set_rcstyle(True)
        self.percentage = 0

    def update(self, fraction):
        self.percentage = int(fraction * 100)
        self.set_fraction(fraction)
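HobProgressBar keeps an integer percentage alongside gtk's fractional state so the title can always carry a readable "NN%" suffix. The arithmetic, isolated from gtk (sketch only; the function name is illustrative):

def progress_label(fraction, title=""):
    # update() stores int(fraction * 100); set_title() appends " NN%".
    percentage = int(fraction * 100)
    return "%s %.0f%%" % (title, percentage)

print(progress_label(0.428, "Loading cache"))  # -> 'Loading cache 42%'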
@@ -1,263 +0,0 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import glib
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import HobViewTable, HobNotebook, HobAltButton, HobButton
from bb.ui.crumbs.hoblistmodel import RecipeListModel
from bb.ui.crumbs.hobpages import HobPage

#
# RecipeSelectionPage
#
class RecipeSelectionPage (HobPage):
    pages = [
        {
            'name'    : 'Included',
            'tooltip' : 'The recipes currently included for your image',
            'filter'  : { RecipeListModel.COL_INC  : [True],
                          RecipeListModel.COL_TYPE : ['recipe', 'task'] },
            'columns' : [{
                'col_name' : 'Recipe name',
                'col_id'   : RecipeListModel.COL_NAME,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 400,
                'expand'   : 'True'
            }, {
                'col_name' : 'Brought in by',
                'col_id'   : RecipeListModel.COL_BINB,
                'col_style': 'binb',
                'col_min'  : 100,
                'col_max'  : 500,
                'expand'   : 'True'
            }, {
                'col_name' : 'Group',
                'col_id'   : RecipeListModel.COL_GROUP,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 300,
                'expand'   : 'True'
            }, {
                'col_name' : 'Included',
                'col_id'   : RecipeListModel.COL_INC,
                'col_style': 'check toggle',
                'col_min'  : 100,
                'col_max'  : 100
            }]
        }, {
            'name'    : 'All recipes',
            'tooltip' : 'All recipes available in the Yocto Project',
            'filter'  : { RecipeListModel.COL_TYPE : ['recipe'] },
            'columns' : [{
                'col_name' : 'Recipe name',
                'col_id'   : RecipeListModel.COL_NAME,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 400,
                'expand'   : 'True'
            }, {
                'col_name' : 'License',
                'col_id'   : RecipeListModel.COL_LIC,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 400,
                'expand'   : 'True'
            }, {
                'col_name' : 'Group',
                'col_id'   : RecipeListModel.COL_GROUP,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 400,
                'expand'   : 'True'
            }, {
                'col_name' : 'Included',
                'col_id'   : RecipeListModel.COL_INC,
                'col_style': 'check toggle',
                'col_min'  : 100,
                'col_max'  : 100
            }]
        }, {
            'name'    : 'Tasks',
            'tooltip' : 'All tasks available in the Yocto Project',
            'filter'  : { RecipeListModel.COL_TYPE : ['task'] },
            'columns' : [{
                'col_name' : 'Task name',
                'col_id'   : RecipeListModel.COL_NAME,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 400,
                'expand'   : 'True'
            }, {
                'col_name' : 'Description',
                'col_id'   : RecipeListModel.COL_DESC,
                'col_style': 'text',
                'col_min'  : 100,
                'col_max'  : 400,
                'expand'   : 'True'
            }, {
                'col_name' : 'Included',
                'col_id'   : RecipeListModel.COL_INC,
                'col_style': 'check toggle',
                'col_min'  : 100,
                'col_max'  : 100
            }]
        }
    ]

    def __init__(self, builder = None):
        super(RecipeSelectionPage, self).__init__(builder, "Recipes")

        # set invisible members
        self.recipe_model = self.builder.recipe_model

        # create visual elements
        self.create_visual_elements()

    def included_clicked_cb(self, button):
        self.ins.set_current_page(0)

    def create_visual_elements(self):
        self.eventbox = self.add_onto_top_bar(None, 73)
        self.pack_start(self.eventbox, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        # set visible members
        self.ins = HobNotebook()
        self.tables = [] # we need to modify the tables when the dialog is shown
        # append the tabs in order
        for page in self.pages:
            columns = page['columns']
            tab = HobViewTable(columns)
            filter = page['filter']
            tab.set_model(self.recipe_model.tree_model(filter))
            tab.connect("toggled", self.table_toggled_cb, page['name'])
            if page['name'] == "Included":
                tab.connect("button-release-event", self.button_click_cb)
                tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
            self.ins.append_page(tab, page['name'], page['tooltip'])
            self.tables.append(tab)

        self.ins.set_entry("Search recipes:")
        # set the search entry for each table
        for tab in self.tables:
            search_tip = "Enter a recipe's or task's name to find it"
            self.ins.search.set_tooltip_text(search_tip)
            self.ins.search.props.has_tooltip = True
            tab.set_search_entry(0, self.ins.search)

        # add all into the window
        self.box_group_area.pack_start(self.ins, expand=True, fill=True)

        button_box = gtk.HBox(False, 6)
        self.box_group_area.pack_end(button_box, expand=False, fill=False)

        self.build_packages_button = HobButton('Build packages')
        self.build_packages_button.set_size_request(205, 49)
        self.build_packages_button.set_tooltip_text("Build selected recipes into packages")
        self.build_packages_button.set_flags(gtk.CAN_DEFAULT)
        self.build_packages_button.grab_default()
        self.build_packages_button.connect("clicked", self.build_packages_clicked_cb)
        button_box.pack_end(self.build_packages_button, expand=False, fill=False)

        self.back_button = HobAltButton("<< Back to image configuration")
        self.back_button.connect("clicked", self.back_button_clicked_cb)
        button_box.pack_start(self.back_button, expand=False, fill=False)

    def button_click_cb(self, widget, event):
        path, col = widget.table_tree.get_cursor()
        tree_model = widget.table_tree.get_model()
        if path: # else activation is likely a removal
            binb = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_BINB)
            if binb:
                self.builder.show_binb_dialog(binb)

    def build_packages_clicked_cb(self, button):
        self.builder.build_packages()

    def back_button_clicked_cb(self, button):
        self.builder.show_configuration()

    def refresh_selection(self):
        self.builder.configuration.selected_image = self.recipe_model.get_selected_image()
        _, self.builder.configuration.selected_recipes = self.recipe_model.get_selected_recipes()
        self.ins.show_indicator_icon("Included", len(self.builder.configuration.selected_recipes))

    def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
        if not self.recipe_model.path_included(path):
            self.recipe_model.include_item(item_path=path, binb="User Selected", image_contents=False)
        else:
            if pagename == "Included":
                self.pre_fadeout_checkout_include(view_tree)
                self.recipe_model.exclude_item(item_path=path)
                self.render_fadeout(view_tree, cell)
            else:
                self.recipe_model.exclude_item(item_path=path)

        self.refresh_selection()
        if not self.builder.customized:
            self.builder.customized = True
            self.builder.configuration.selected_image = self.recipe_model.__custom_image__
            self.builder.rcppkglist_populated()

        self.builder.window_sensitive(True)

    def table_toggled_cb(self, table, cell, view_path, toggled_columnid, view_tree, pagename):
        # Click to include a recipe
        self.builder.window_sensitive(False)
        view_model = view_tree.get_model()
        path = self.recipe_model.convert_vpath_to_path(view_model, view_path)
        glib.idle_add(self.toggle_item_idle_cb, path, view_tree, cell, pagename)

    def pre_fadeout_checkout_include(self, tree):
        # resync the included items to a backup fade include column
        it = self.recipe_model.get_iter_first()
        while it:
            active = self.recipe_model.get_value(it, self.recipe_model.COL_INC)
            self.recipe_model.set(it, self.recipe_model.COL_FADE_INC, active)
            it = self.recipe_model.iter_next(it)
        # Check out a model based on the COL_FADE_INC column, which saves
        # the previous state of COL_INC before exclude_item is called
        filter = { RecipeListModel.COL_FADE_INC : [True],
                   RecipeListModel.COL_TYPE     : ['recipe', 'task'] }
        new_model = self.recipe_model.tree_model(filter, excluded_items_ahead=True)
        tree.set_model(new_model)

    def render_fadeout(self, tree, cell):
        if (not cell) or (not tree):
            return
        to_render_cells = []
        model = tree.get_model()
        it = model.get_iter_first()
        while it:
            path = model.get_path(it)
            prev_cell_is_active = model.get_value(it, RecipeListModel.COL_FADE_INC)
            curr_cell_is_active = model.get_value(it, RecipeListModel.COL_INC)
            # rows that were included before the toggle but are excluded now
            if prev_cell_is_active and not curr_cell_is_active:
                to_render_cells.append(path)
            it = model.iter_next(it)

        cell.fadeout(tree, 1000, to_render_cells)

    def after_fadeout_checkin_include(self, table, ctrl, cell, tree):
        tree.set_model(self.recipe_model.tree_model(self.pages[0]['filter']))
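Each tab above is driven by a declarative 'filter' dict that maps model columns to their allowed values. A minimal sketch of how such a filter selects rows (plain tuples stand in for the RecipeListModel; the column indices are illustrative):

COL_INC, COL_TYPE = 0, 1

def row_visible(row, flt):
    # A row passes when every constrained column holds an allowed value,
    # mirroring the {column: [allowed values]} filters used by the pages.
    return all(row[col] in allowed for col, allowed in flt.items())

rows = [(True, 'recipe'), (False, 'recipe'), (True, 'task'), (True, 'image')]
included = {COL_INC: [True], COL_TYPE: ['recipe', 'task']}
print([r for r in rows if row_visible(r, included)])
# -> [(True, 'recipe'), (True, 'task')]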
@@ -25,9 +25,12 @@ import logging
import time
import urllib
import urllib2
import pango
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf

class Colors(object):
    OK = "#ffffff"
    RUNNING = "#aaffaa"
    WARNING = "#f88017"
    ERROR = "#ffaaaa"

class RunningBuildModel (gtk.TreeStore):
    (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
@@ -42,49 +45,20 @@ class RunningBuildModel (gtk.TreeStore):
                                gobject.TYPE_STRING,
                                gobject.TYPE_INT)

    def failure_model_filter(self, model, it):
        color = model.get(it, self.COL_COLOR)[0]
        if not color:
            return False
        if color == HobColors.ERROR:
            return True
        return False

    def failure_model(self):
        model = self.filter_new()
        model.set_visible_func(self.failure_model_filter)
        return model

    def foreach_cell_func(self, model, path, iter, usr_data=None):
        if model.get_value(iter, self.COL_ICON) == "gtk-execute":
            model.set(iter, self.COL_ICON, "")

    def close_task_refresh(self):
        self.foreach(self.foreach_cell_func, None)

class RunningBuild (gobject.GObject):
    __gsignals__ = {
        'build-started'   : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
        'build-started'   : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             ()),
        'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
        'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             ()),
        'build-failed'    : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             ()),
        'build-complete'  : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             ()),
        'task-started'    : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             (gobject.TYPE_PYOBJECT,)),
        'log-error'       : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             ()),
        'no-provider'     : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             (gobject.TYPE_PYOBJECT,)),
        'build-failed'    : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             ()),
        'build-complete'  : (gobject.SIGNAL_RUN_LAST,
                             gobject.TYPE_NONE,
                             ())
    }
    pids_to_task = {}
    tasks_to_iter = {}
@@ -134,14 +108,13 @@ class RunningBuild (gobject.GObject):

            if event.levelno >= logging.ERROR:
                icon = "dialog-error"
                color = HobColors.ERROR
                self.emit("log-error")
                color = Colors.ERROR
            elif event.levelno >= logging.WARNING:
                icon = "dialog-warning"
                color = HobColors.WARNING
                color = Colors.WARNING
            else:
                icon = None
                color = HobColors.OK
                color = Colors.OK

            # if we know which package we belong to, we'll append onto its list.
            # otherwise, we'll jump to the top of the master list
@@ -179,7 +152,7 @@ class RunningBuild (gobject.GObject):
                                           None,
                                           "Package: %s" % (package),
                                           None,
                                           HobColors.OK,
                                           Colors.OK,
                                           0))
                self.tasks_to_iter[(package, None)] = parent

@@ -187,7 +160,7 @@ class RunningBuild (gobject.GObject):
            # such.
            # @todo if parent is already in error, don't mark it green
            self.model.set(parent, self.model.COL_ICON, "gtk-execute",
                           self.model.COL_COLOR, HobColors.RUNNING)
                           self.model.COL_COLOR, Colors.RUNNING)

            # Add an entry in the model for this task
            i = self.model.append (parent, (None,
@@ -195,7 +168,7 @@ class RunningBuild (gobject.GObject):
                                            task,
                                            "Task: %s" % (task),
                                            "gtk-execute",
                                            HobColors.RUNNING,
                                            Colors.RUNNING,
                                            0))

            # update the parent's active task count
@@ -206,6 +179,10 @@ class RunningBuild (gobject.GObject):
            # that we need to attach to a task.
            self.tasks_to_iter[(package, task)] = i

        # If we don't handle these the GUI does not proceed
        elif isinstance(event, bb.build.TaskInvalid):
            return

        elif isinstance(event, bb.build.TaskBase):
            current = self.tasks_to_iter[(package, task)]
            parent = self.tasks_to_iter[(package, None)]
@@ -217,20 +194,20 @@ class RunningBuild (gobject.GObject):
            if isinstance(event, bb.build.TaskFailed):
                # Mark the task and parent as failed
                icon = "dialog-error"
                color = HobColors.ERROR
                color = Colors.ERROR

                logfile = event.logfile
                if logfile and os.path.exists(logfile):
                    with open(logfile) as f:
                        logdata = f.read()
                        self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', HobColors.OK, 0))
                        self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', Colors.OK, 0))

                for i in (current, parent):
                    self.model.set(i, self.model.COL_ICON, icon,
                                   self.model.COL_COLOR, color)
            else:
                icon = None
                color = HobColors.OK
                color = Colors.OK

            # Mark the task as inactive
            self.model.set(current, self.model.COL_ICON, icon,
@@ -242,7 +219,7 @@ class RunningBuild (gobject.GObject):
            if self.model.get(parent, self.model.COL_ICON) != 'dialog-error':
                self.model.set(parent, self.model.COL_ICON, icon)
                if num_active == 0:
                    self.model.set(parent, self.model.COL_COLOR, HobColors.OK)
                    self.model.set(parent, self.model.COL_COLOR, Colors.OK)

            # Clear the iters and the pids since when the task goes away the
            # pid will no longer be used for messages
@@ -257,12 +234,8 @@ class RunningBuild (gobject.GObject):
                                       None,
                                       "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
                                       None,
                                       HobColors.OK,
                                       Colors.OK,
                                       0))
            if pbar:
                pbar.update(0, self.progress_total)
                pbar.set_title(bb.event.getName(event))

        elif isinstance(event, bb.event.BuildCompleted):
            failures = int (event._failures)
            self.model.prepend(None, (None,
@@ -270,7 +243,7 @@ class RunningBuild (gobject.GObject):
                                      None,
                                      "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
                                      None,
                                      HobColors.OK,
                                      Colors.OK,
                                      0))

            # Emit the appropriate signal depending on the number of failures
@@ -281,18 +254,12 @@ class RunningBuild (gobject.GObject):
            # Emit a generic "build-complete" signal for things wishing to
            # handle when the build is finished
            self.emit("build-complete")
            # reset all cells' icon indicators
            self.model.close_task_refresh()
            if pbar:
                pbar.set_text(event.msg)

        elif isinstance(event, bb.command.CommandFailed):
            if event.error.startswith("Exited with"):
                # If the command fails with an exit code we're done, emit the
                # generic signal for the UI to notify the user
                self.emit("build-complete")
                # reset all cells' icon indicators
                self.model.close_task_refresh()

        elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
            pbar.set_title("Loading cache")
@@ -313,30 +280,6 @@ class RunningBuild (gobject.GObject):
            pbar.update(event.current, self.progress_total)
        elif isinstance(event, bb.event.ParseCompleted) and pbar:
            pbar.hide()
        # use as many runqueue events as possible to update the progress bar
        elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
            message = {}
            message["eventname"] = bb.event.getName(event)
            num_of_completed = event.stats.completed + event.stats.failed
            message["current"] = num_of_completed
            message["total"] = event.stats.total
            message["title"] = ""
            message["task"] = event.taskstring
            self.emit("task-started", message)
        elif isinstance(event, bb.event.NoProvider):
            msg = ""
            if event._runtime:
                r = "R"
            else:
                r = ""
            if event._dependees:
                msg = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)\n" % (r, event._item, ", ".join(event._dependees), r)
            else:
                msg = "Nothing %sPROVIDES '%s'\n" % (r, event._item)
            if event._reasons:
                for reason in event._reasons:
                    msg += ("%s\n" % reason)
            self.emit("no-provider", msg)

        return

@@ -356,26 +299,18 @@ class RunningBuildTreeView (gtk.TreeView):
    __gsignals__ = {
        "button_press_event" : "override"
    }
    def __init__ (self, readonly=False, hob=False):
    def __init__ (self, readonly=False):
        gtk.TreeView.__init__ (self)
        self.readonly = readonly

        # The icon that indicates whether we're building or failed.
        # add a 'hob' flag because Hob is not the only consumer of this code
        if hob:
            renderer = HobCellRendererPixbuf ()
        else:
            renderer = gtk.CellRendererPixbuf()
        renderer = gtk.CellRendererPixbuf ()
        col = gtk.TreeViewColumn ("Status", renderer)
        col.add_attribute (renderer, "icon-name", 4)
        self.append_column (col)

        # The message of the build.
        # add a 'hob' flag because Hob is not the only consumer of this code
        if hob:
            self.message_renderer = HobWarpCellRendererText (col_number=1)
        else:
            self.message_renderer = gtk.CellRendererText ()
        self.message_renderer = gtk.CellRendererText ()
        self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
        self.message_column.add_attribute(self.message_renderer, 'background', 5)
        self.message_renderer.set_property('editable', (not self.readonly))
@@ -432,22 +367,3 @@ class RunningBuildTreeView (gtk.TreeView):
        message = model.get(it, model.COL_MESSAGE)[0]

        self._add_to_clipboard(message)

class BuildFailureTreeView(gtk.TreeView):

    def __init__ (self):
        gtk.TreeView.__init__(self)
        self.set_rules_hint(False)
        self.set_headers_visible(False)
        self.get_selection().set_mode(gtk.SELECTION_SINGLE)

        # The icon that indicates whether we're building or failed.
        renderer = HobCellRendererPixbuf ()
        col = gtk.TreeViewColumn ("Status", renderer)
        col.add_attribute (renderer, "icon-name", RunningBuildModel.COL_ICON)
        self.append_column (col)

        # The message of the build.
        self.message_renderer = HobWarpCellRendererText (col_number=1)
        self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=RunningBuildModel.COL_MESSAGE, background=RunningBuildModel.COL_COLOR)
        self.append_column (self.message_column)
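The log handling above buckets events by logging level before picking an icon and row colour. The same thresholding in isolation (colour values copied from the Colors class above; the function name is illustrative):

import logging

COLORS = {"OK": "#ffffff", "WARNING": "#f88017", "ERROR": "#ffaaaa"}

def classify(levelno):
    # Mirrors the level checks above: ERROR and above get the error
    # styling, WARNING the warning styling, everything else plain.
    if levelno >= logging.ERROR:
        return "dialog-error", COLORS["ERROR"]
    elif levelno >= logging.WARNING:
        return "dialog-warning", COLORS["WARNING"]
    return None, COLORS["OK"]

print(classify(logging.CRITICAL))  # -> ('dialog-error', '#ffaaaa')
print(classify(logging.INFO))      # -> (None, '#ffffff')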
bitbake/lib/bb/ui/crumbs/tasklistmodel.py (new file, 620 lines)
@@ -0,0 +1,620 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
import os
import re

class BuildRep(gobject.GObject):

    def __init__(self, userpkgs, allpkgs, base_image=None):
        gobject.GObject.__init__(self)
        self.base_image = base_image
        self.allpkgs = allpkgs
        self.userpkgs = userpkgs

    def loadRecipe(self, pathname):
        contents = []
        packages = ""
        base_image = ""

        with open(pathname, 'r') as f:
            contents = f.readlines()

        pkg_pattern = "^\s*(IMAGE_INSTALL)\s*([+=.?]+)\s*(\".*?\")"
        img_pattern = "^\s*(require)\s+(\S+.bb)"

        for line in contents:
            matchpkg = re.search(pkg_pattern, line)
            matchimg = re.search(img_pattern, line)
            if matchpkg:
                packages = packages + matchpkg.group(3).strip('"')
            if matchimg:
                base_image = os.path.basename(matchimg.group(2)).split(".")[0]

        self.base_image = base_image
        self.userpkgs = packages

    def writeRecipe(self, writepath, model):
        template = """
# Recipe generated by the HOB

require %s

IMAGE_INSTALL += "%s"
"""
        empty_template = """
# Recipe generated by the HOB

inherit core-image

IMAGE_INSTALL = "%s"
"""
        if self.base_image and not self.base_image == "empty":
            meta_path = model.find_image_path(self.base_image)
            recipe = template % (meta_path, self.userpkgs)
        else:
            recipe = empty_template % self.allpkgs

        if os.path.exists(writepath):
            os.rename(writepath, "%s~" % writepath)

        with open(writepath, 'w') as r:
            r.write(recipe)

        return writepath

class TaskListModel(gtk.ListStore):
    """
    This class defines a gtk.ListStore subclass which will convert the output
    of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
    providing convenience functions to access gtk.TreeModel subclasses which
    provide filtered views of the data.
    """
    (COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_PATH, COL_PN) = range(11)

    __gsignals__ = {
        "tasklist-populated" : (gobject.SIGNAL_RUN_LAST,
                                gobject.TYPE_NONE,
                                ()),
        "contents-changed"   : (gobject.SIGNAL_RUN_LAST,
                                gobject.TYPE_NONE,
                                (gobject.TYPE_INT,)),
        "image-changed"      : (gobject.SIGNAL_RUN_LAST,
                                gobject.TYPE_NONE,
                                (gobject.TYPE_STRING,)),
    }

    def __init__(self):
        self.contents = None
        self.tasks = None
        self.packages = None
        self.images = None
        self.selected_image = None

        gtk.ListStore.__init__ (self,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_BOOLEAN,
                                gobject.TYPE_BOOLEAN,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING)

    """
    Helper method to determine whether name is a non-target pn,
    e.g. a -native, -cross, or virtual/ item
    """
    def non_target_name(self, name):
        if ('-native' in name) or ('-cross' in name) or name.startswith('virtual/'):
            return True
        return False

    def contents_changed_cb(self, tree_model, path, it=None):
        pkg_cnt = self.contents.iter_n_children(None)
        self.emit("contents-changed", pkg_cnt)

    def contents_model_filter(self, model, it):
        if not model.get_value(it, self.COL_INC) or model.get_value(it, self.COL_TYPE) == 'image':
            return False
        name = model.get_value(it, self.COL_NAME)
        if self.non_target_name(name):
            return False
        else:
            return True

    """
    Create, if required, and return a filtered gtk.TreeModel
    containing only the items which are to be included in the
    image
    """
    def contents_model(self):
        if not self.contents:
            self.contents = self.filter_new()
            self.contents.set_visible_func(self.contents_model_filter)
            self.contents.connect("row-inserted", self.contents_changed_cb)
            self.contents.connect("row-deleted", self.contents_changed_cb)
        return self.contents

    """
    Helper function to determine whether an item is a task
    """
    def task_model_filter(self, model, it):
        if model.get_value(it, self.COL_TYPE) == 'task':
            return True
        else:
            return False

    """
    Create, if required, and return a filtered gtk.TreeModel
    containing only the items which are tasks
    """
    def tasks_model(self):
        if not self.tasks:
            self.tasks = self.filter_new()
            self.tasks.set_visible_func(self.task_model_filter)
        return self.tasks

    """
    Helper function to determine whether an item is an image
    """
    def image_model_filter(self, model, it):
        if model.get_value(it, self.COL_TYPE) == 'image':
            return True
        else:
            return False

    """
    Create, if required, and return a filtered gtk.TreeModel
    containing only the items which are images
    """
    def images_model(self):
        if not self.images:
            self.images = self.filter_new()
            self.images.set_visible_func(self.image_model_filter)
        return self.images

    """
    Helper function to determine whether an item is a package
    """
    def package_model_filter(self, model, it):
        if model.get_value(it, self.COL_TYPE) != 'package':
            return False
        else:
            name = model.get_value(it, self.COL_NAME)
            if self.non_target_name(name):
                return False
            return True

    """
    Create, if required, and return a filtered gtk.TreeModel
    containing only the items which are packages
    """
    def packages_model(self):
        if not self.packages:
            self.packages = self.filter_new()
            self.packages.set_visible_func(self.package_model_filter)
        return self.packages

    """
    The populate() function takes as input the data from a
    bb.event.TargetsTreeGenerated event and populates the TaskList.
    Once the population is done it emits gsignal tasklist-populated
    to notify any listeners that the model is ready
    """
    def populate(self, event_model):
        # First clear the model, in case we are repopulating
        self.clear()
        for item in event_model["pn"]:
            atype = 'package'
            name = item
            summary = event_model["pn"][item]["summary"]
            lic = event_model["pn"][item]["license"]
            group = event_model["pn"][item]["section"]
            filename = event_model["pn"][item]["filename"]
            if ('task-' in name):
                atype = 'task'
            elif ('-image-' in name):
                atype = 'image'

            # Create a combined list of build and runtime dependencies and
            # then remove any duplicate entries and any entries for -dev
            # packages
            depends = event_model["depends"].get(item, [])
            rdepends = event_model["rdepends-pn"].get(item, [])
            packages = {}
            for pkg in event_model["packages"]:
                if event_model["packages"][pkg]["pn"] == name:
                    deps = []
                    deps.extend(depends)
                    deps.extend(event_model["rdepends-pkg"].get(pkg, []))
                    deps.extend(rdepends)
                    deps = self.squish(deps)
                    # rdepends-pn includes pn-dev
                    if ("%s-dev" % item) in deps:
                        deps.remove("%s-dev" % item)
                    # rdepends-on includes pn
                    if pkg in deps:
                        deps.remove(pkg)
                    packages[pkg] = deps

            for p in packages:
                self.set(self.append(), self.COL_NAME, p, self.COL_DESC, summary,
                         self.COL_LIC, lic, self.COL_GROUP, group,
                         self.COL_DEPS, " ".join(packages[p]), self.COL_BINB, "",
                         self.COL_TYPE, atype, self.COL_INC, False,
                         self.COL_IMG, False, self.COL_PATH, filename,
                         self.COL_PN, item)

        self.emit("tasklist-populated")

    """
    Load a BuildRep into the model
    """
    def load_image_rep(self, rep):
        # Unset everything
        it = self.get_iter_first()
        while it:
            path = self.get_path(it)
            self[path][self.COL_INC] = False
            self[path][self.COL_IMG] = False
            it = self.iter_next(it)

        # Iterate the images and disable them all
        it = self.images.get_iter_first()
        while it:
            path = self.images.convert_path_to_child_path(self.images.get_path(it))
            name = self[path][self.COL_NAME]
            if name == rep.base_image:
                self.include_item(path, image_contents=True)
            else:
                self[path][self.COL_INC] = False
            it = self.images.iter_next(it)

        # Mark all of the additional packages for inclusion
        packages = rep.userpkgs.split(" ")
        it = self.get_iter_first()
        while it:
            path = self.get_path(it)
            name = self[path][self.COL_NAME]
            if name in packages:
                self.include_item(path, binb="User Selected")
                packages.remove(name)
            it = self.iter_next(it)

        self.emit("image-changed", rep.base_image)

    """
    squish lst so that it doesn't contain any duplicate entries
    """
    def squish(self, lst):
        seen = {}
        for l in lst:
            seen[l] = 1
        return seen.keys()

    """
    Mark the item at path as not included
    NOTE:
    path should be a gtk.TreeModelPath into self (not a filtered model)
    """
    def remove_item_path(self, path):
        self[path][self.COL_BINB] = ""
        self[path][self.COL_INC] = False

    """
    Recursively called to mark the item at opath and any package which
    depends on it for removal.
    NOTE: This method dumbly removes user selected packages and since we don't
    do significant reverse dependency tracking it's easier and simpler to save
    the items marked as user selected and re-add them once the removal sweep is
    complete.
    """
    def mark(self, opath):
        usersel = {}
        removed = []

        it = self.get_iter_first()
        # The name of the item we're removing, so that we can use it to find
        # other items which either depend on it, or were brought in by it
        marked_name = self[opath][self.COL_NAME]

        # Remove the passed item
        self.remove_item_path(opath)

        # Remove all dependent packages, update binb
        while it:
            path = self.get_path(it)
            it = self.iter_next(it)

            inc = self[path][self.COL_INC]
            deps = self[path][self.COL_DEPS]
            binb = self[path][self.COL_BINB].split(', ')
            itype = self[path][self.COL_TYPE]
            itname = self[path][self.COL_NAME]

            # We ignore anything that isn't a package
            if not itype == "package":
                continue

            # If the user added this item and it's not the item we're removing
            # we should keep it and its dependencies, the easiest way to do so
            # is to save its name and re-mark it for inclusion once dependency
            # processing is complete
            if "User Selected" in binb:
                usersel[itname] = self[path][self.COL_IMG]

            # If the iterated item is included and depends on the removed
            # item it should also be removed.
            # FIXME: need to ensure partial name matching doesn't happen
            if inc and marked_name in deps and itname not in removed:
                # found a dependency, remove it
                removed.append(itname)
                self.mark(path)

            # If the iterated item was brought in by the removed (passed) item
            # try and find an alternative dependee and update the binb column
            if inc and marked_name in binb:
                binb.remove(marked_name)
                self[path][self.COL_BINB] = ', '.join(binb).lstrip(', ')

        # Re-add any removed user selected items
        for u in usersel:
            npath = self.find_path_for_item(u)
            self.include_item(item_path=npath,
                              binb="User Selected",
                              image_contents=usersel[u])

    """
    Remove items from contents if they have an empty COL_BINB (brought in by),
    caused by every package they were a dependency of being removed.
    If the item isn't a package we leave it included.
    """
    def sweep_up(self):
        it = self.contents.get_iter_first()
        while it:
            binb = self.contents.get_value(it, self.COL_BINB)
            itype = self.contents.get_value(it, self.COL_TYPE)
            remove = False

            if itype == 'package' and not binb:
                oit = self.contents.convert_iter_to_child_iter(it)
                opath = self.get_path(oit)
                self.mark(opath)
                remove = True

            # When we remove a package from the contents model we alter the
            # model, so continuing to iterate is bad. *Furthermore* it's
            # likely that the removal has affected an already iterated item
            # so we should start from the beginning anyway.
            # Only when we've managed to iterate the entire contents model
            # without removing any items do we allow the loop to exit.
            if remove:
                it = self.contents.get_iter_first()
            else:
                it = self.contents.iter_next(it)

    """
    Check whether the item at item_path is included or not
    """
    def contents_includes_path(self, item_path):
        return self[item_path][self.COL_INC]

    """
    Add this item, and any of its dependencies, to the image contents
    """
    def include_item(self, item_path, binb="", image_contents=False):
        item_name = self[item_path][self.COL_NAME]
        item_deps = self[item_path][self.COL_DEPS]

        self[item_path][self.COL_INC] = True

        item_bin = self[item_path][self.COL_BINB].split(', ')
        if binb and not binb in item_bin:
            item_bin.append(binb)
            self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')

        # We want to do some magic with things which are brought in by the
        # base image so tag them as such
        if image_contents:
            self[item_path][self.COL_IMG] = True
            if self[item_path][self.COL_TYPE] == 'image':
                self.selected_image = item_name

        if item_deps:
            # Ensure all of the item's deps are included and, where appropriate,
            # add this item to their COL_BINB
            for dep in item_deps.split(" "):
                # If the contents model doesn't already contain dep, add it
                dep_path = self.find_path_for_item(dep)
                if not dep_path:
                    continue
                dep_included = self.contents_includes_path(dep_path)

                if dep_included and not dep in item_bin:
                    # don't set the COL_BINB to this item if the target is an
                    # item in our own COL_BINB
                    dep_bin = self[dep_path][self.COL_BINB].split(', ')
                    if not item_name in dep_bin:
                        dep_bin.append(item_name)
                        self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
                elif not dep_included:
                    self.include_item(dep_path, binb=item_name, image_contents=image_contents)

    """
    Find the model path for the item_name
    Returns the path in the model or None
    """
    def find_path_for_item(self, item_name):
        # We don't include virtual/* or *-native items in the model so save a
        # heavy iteration loop by exiting early for these items
        if self.non_target_name(item_name):
            return None

        it = self.get_iter_first()
        while it:
            if (self.get_value(it, self.COL_NAME) == item_name):
                return self.get_path(it)
            else:
                it = self.iter_next(it)
        return None

    """
    Empty self.contents by clearing the include flag of each entry
    """
    def reset(self):
        # Deselect images - slightly more complex logic so that we don't
        # have to iterate all of the contents of the main model, instead
        # just iterate the images model.
        if self.selected_image:
            iit = self.images.get_iter_first()
            while iit:
                pit = self.images.convert_iter_to_child_iter(iit)
                self.set(pit, self.COL_INC, False)
                iit = self.images.iter_next(iit)
            self.selected_image = None

        it = self.contents.get_iter_first()
        while it:
            oit = self.contents.convert_iter_to_child_iter(it)
            self.set(oit,
                     self.COL_INC, False,
                     self.COL_BINB, "",
                     self.COL_IMG, False)
            # As we've just removed the first item...
            it = self.contents.get_iter_first()

    """
    Returns two lists. One of user selected packages and the other containing
    all selected packages
    """
    def get_selected_packages(self):
        allpkgs = []
        userpkgs = []

        it = self.contents.get_iter_first()
        while it:
            sel = "User Selected" in self.contents.get_value(it, self.COL_BINB)
            name = self.contents.get_value(it, self.COL_NAME)
            allpkgs.append(name)
            if sel:
                userpkgs.append(name)
            it = self.contents.iter_next(it)
        return userpkgs, allpkgs

    """
    Return a squished (uniquified) list of the PNs of all selected items
    """
    def get_selected_pn(self):
        pns = []

        it = self.contents.get_iter_first()
        while it:
            if self.contents.get_value(it, self.COL_BINB):
                pns.append(self.contents.get_value(it, self.COL_PN))
            it = self.contents.iter_next(it)

        return self.squish(pns)

    def image_contents_removed(self):
        it = self.get_iter_first()
        while it:
            sel = self.get_value(it, self.COL_INC)
            img = self.get_value(it, self.COL_IMG)
            if img and not sel:
                return True
            it = self.iter_next(it)
        return False

    def get_build_rep(self):
        userpkgs, allpkgs = self.get_selected_packages()
        # If base image contents have been removed start from an empty rootfs
        if not self.selected_image or self.image_contents_removed():
            image = "empty"
        else:
            image = self.selected_image

        return BuildRep(" ".join(userpkgs), " ".join(allpkgs), image)

    def find_reverse_depends(self, pn):
        revdeps = []
        it = self.contents.get_iter_first()

        while it:
            name = self.contents.get_value(it, self.COL_NAME)
            itype = self.contents.get_value(it, self.COL_TYPE)
            deps = self.contents.get_value(it, self.COL_DEPS)

            it = self.contents.iter_next(it)

            if not itype == 'package':
                continue

            if pn in deps:
                revdeps.append(name)

        if pn in revdeps:
            revdeps.remove(pn)
        return revdeps

    def set_selected_image(self, img):
        self.selected_image = img
        path = self.find_path_for_item(img)
        self.include_item(item_path=path,
                          binb="User Selected",
                          image_contents=True)

        self.emit("image-changed", self.selected_image)

    def set_selected_packages(self, pkglist):
        selected = pkglist
        it = self.get_iter_first()

        while it:
            name = self.get_value(it, self.COL_NAME)
            if name in pkglist:
                pkglist.remove(name)
                path = self.get_path(it)
                self.include_item(item_path=path,
                                  binb="User Selected")
            if len(pkglist) == 0:
                return
            it = self.iter_next(it)

    def find_image_path(self, image):
        it = self.images.get_iter_first()

        while it:
            image_name = self.images.get_value(it, self.COL_NAME)
            if image_name == image:
                path = self.images.get_value(it, self.COL_PATH)
                meta_pattern = "(\S*)/(meta*/)(\S*)"
                meta_match = re.search(meta_pattern, path)
                if meta_match:
                    _, lyr, bbrel = path.partition(meta_match.group(2))
                    if bbrel:
                        path = bbrel
                return path
            it = self.images.iter_next(it)
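squish() above dedupes via dict keys, which does not preserve the original order of the list. Where order matters, an order-preserving variant costs no more (sketch only; the package names in the example are illustrative):

def squish(lst):
    # Order-preserving dedupe: keep the first occurrence of each entry.
    seen = set()
    out = []
    for item in lst:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out

print(squish(["glibc", "zlib", "glibc", "busybox", "zlib"]))
# -> ['glibc', 'zlib', 'busybox']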
@@ -1,211 +0,0 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gobject
import os
import re

class File(gobject.GObject):

    def __init__(self, pathfilename, suffix):
        if not pathfilename.endswith(suffix):
            pathfilename = "%s%s" % (pathfilename, suffix)
        gobject.GObject.__init__(self)
        self.pathfilename = pathfilename

    def readFile(self):
        if not os.path.isfile(self.pathfilename):
            return None
        if not os.path.exists(self.pathfilename):
            return None

        with open(self.pathfilename, 'r') as f:
            contents = f.readlines()

        return contents

    def writeFile(self, contents):
        if os.path.exists(self.pathfilename):
            orig = "%s.orig" % self.pathfilename
            if os.path.exists(orig):
                os.remove(orig)
            os.rename(self.pathfilename, orig)

        with open(self.pathfilename, 'w') as f:
            f.write(contents)

class ConfigFile(File):
    """
    This object saves a general config file (say bblayers.conf or local.conf).
    It is also the base class for other template files and image bb files.
    """
    def __init__(self, pathfilename, suffix = None, header = None):
        if suffix:
            File.__init__(self, pathfilename, suffix)
        else:
            File.__init__(self, pathfilename, ".conf")
        if header:
            self.header = header
        else:
            self.header = "# Config generated by Hob\n\n"
        self.dictionary = {}

    def setVar(self, var, val):
        if isinstance(val, list):
            liststr = ""
            if val:
                i = 0
                for value in val:
                    if i < len(val) - 1:
                        liststr += "%s " % value
                    else:
                        liststr += "%s" % value
                    i += 1
            self.dictionary[var] = liststr
        else:
            self.dictionary[var] = val

    def save(self):
        contents = self.header
        for var, val in self.dictionary.items():
            contents += "%s = \"%s\"\n" % (var, val)
        File.writeFile(self, contents)

class HobTemplateFile(ConfigFile):
    """
    This object saves or loads a Hob-specific template file.
    """
    def __init__(self, pathfilename):
        ConfigFile.__init__(self, pathfilename, ".hob", "# Hob Template generated by Hob\n\n")

    def getVar(self, var):
        if var in self.dictionary:
            return self.dictionary[var]
        else:
            return ""

    def getVersion(self):
        contents = ConfigFile.readFile(self)

        pattern = "^\s*(\S+)\s*=\s*(\".*?\")"

        for line in contents:
            match = re.search(pattern, line)
            if match:
                if match.group(1) == "VERSION":
                    return match.group(2).strip('"')
        return None

    def load(self):
        contents = ConfigFile.readFile(self)
        self.dictionary.clear()

        pattern = "^\s*(\S+)\s*=\s*(\".*?\")"

        for line in contents:
            match = re.search(pattern, line)
            if match:
                var = match.group(1)
                val = match.group(2).strip('"')
                self.dictionary[var] = val
        return self.dictionary

class RecipeFile(ConfigFile):
    """
    This object represents an image bb file.
    """
    def __init__(self, pathfilename):
        ConfigFile.__init__(self, pathfilename, ".bb", "# Recipe generated by Hob\n\ninherit core-image\n")

class TemplateMgr(gobject.GObject):

    __gLocalVars__ = ["MACHINE", "PACKAGE_CLASSES", "DISTRO", "DL_DIR", "SSTATE_DIR", "SSTATE_MIRROR", "PARALLEL_MAKE", "BB_NUMBER_THREADS", "CONF_VERSION"]
    __gBBLayersVars__ = ["BBLAYERS", "LCONF_VERSION"]
    __gRecipeVars__ = ["DEPENDS", "IMAGE_INSTALL"]

    def __init__(self):
        gobject.GObject.__init__(self)
        self.template_hob = None
        self.bblayers_conf = None
        self.local_conf = None
        self.image_bb = None

    @classmethod
    def convert_to_template_pathfilename(cls, filename, path):
        return "%s/%s%s%s" % (path, "template-", filename, ".hob")

    @classmethod
    def convert_to_bblayers_pathfilename(cls, filename, path):
        return "%s/%s%s%s" % (path, "bblayers-", filename, ".conf")

    @classmethod
    def convert_to_local_pathfilename(cls, filename, path):
        return "%s/%s%s%s" % (path, "local-", filename, ".conf")

    @classmethod
    def convert_to_image_pathfilename(cls, filename, path):
        return "%s/%s%s%s" % (path, "hob-image-", filename, ".bb")

    def open(self, filename, path):
        self.template_hob = HobTemplateFile(TemplateMgr.convert_to_template_pathfilename(filename, path))
        self.bblayers_conf = ConfigFile(TemplateMgr.convert_to_bblayers_pathfilename(filename, path))
        self.local_conf = ConfigFile(TemplateMgr.convert_to_local_pathfilename(filename, path))
        self.image_bb = RecipeFile(TemplateMgr.convert_to_image_pathfilename(filename, path))

    def setVar(self, var, val):
        if var in TemplateMgr.__gLocalVars__:
            self.local_conf.setVar(var, val)
        if var in TemplateMgr.__gBBLayersVars__:
            self.bblayers_conf.setVar(var, val)
        if var in TemplateMgr.__gRecipeVars__:
            self.image_bb.setVar(var, val)

        self.template_hob.setVar(var, val)

    def save(self):
        self.local_conf.save()
        self.bblayers_conf.save()
        self.image_bb.save()
        self.template_hob.save()

    def getVersion(self, path):
        return HobTemplateFile(path).getVersion()

    def load(self, path):
        self.template_hob = HobTemplateFile(path)
        self.dictionary = self.template_hob.load()

    def getVar(self, var):
        return self.template_hob.getVar(var)

    def destroy(self):
        if self.template_hob:
            del self.template_hob
            self.template_hob = None
        if self.bblayers_conf:
            del self.bblayers_conf
            self.bblayers_conf = None
        if self.local_conf:
            del self.local_conf
            self.local_conf = None
        if self.image_bb:
            del self.image_bb
            self.image_bb = None
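setVar() above flattens a list value into a space-separated string with an index-tracking loop; str.join does the same thing in one step (sketch only; the function name is illustrative):

def flatten(val):
    # Equivalent to the setVar() loop: space-separated, no trailing space.
    if isinstance(val, list):
        return " ".join(str(v) for v in val)
    return val

print(flatten(["meta", "meta-yocto"]))  # -> 'meta meta-yocto'
print(flatten("qemux86"))               # -> 'qemux86'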
@@ -1,34 +0,0 @@
#
# BitBake UI Utils
#
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# This utility method looks for xterm or vte and returns the first one
# found. Currently we keep this simple, but we will likely move the
# oe.terminal implementation into bitbake, which will allow more
# flexibility.

import os
import bb

def which_terminal():
    term = bb.utils.which(os.environ["PATH"], "xterm")
    if term:
        return term + " -e "
    term = bb.utils.which(os.environ["PATH"], "vte")
    if term:
        return term + " -c "
    return None
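
A usage sketch, assuming a caller that wants to run a command in whichever terminal is found; the command itself is hypothetical:

prefix = which_terminal()
if prefix:
    # e.g. "xterm -e bitbake -s" or "vte -c bitbake -s"
    os.system(prefix + "bitbake -s")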

@@ -24,7 +24,7 @@ import threading
import xmlrpclib
import bb
import bb.event
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.progress import ProgressBar

# Package Model
(COL_PKG_NAME) = (0)
@@ -220,12 +220,8 @@ def main(server, eventHandler):

    gtk.gdk.threads_enter()
    dep = DepExplorer()
    bardialog = gtk.Dialog(parent=dep)
    bardialog.set_default_size(400, 50)
    pbar = HobProgressBar()
    bardialog.vbox.pack_start(pbar)
    bardialog.show_all()
    bardialog.connect("delete-event", gtk.main_quit)
    pbar = ProgressBar(dep)
    pbar.connect("delete-event", gtk.main_quit)
    gtk.gdk.threads_leave()

    progress_total = 0
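
The hunk above replaces the hand-assembled gtk.Dialog plus HobProgressBar pair with a single ProgressBar(dep) widget. A sketch of the interface the call sites in this diff imply (not the actual bb.ui.crumbs.progress source):

import gtk

class ProgressBarSketch(gtk.Dialog):
    # connect()/set_title()/hide() then come from gtk.Dialog itself
    def __init__(self, parent):
        gtk.Dialog.__init__(self, parent=parent)
        self.set_default_size(400, 50)
        self.bar = gtk.ProgressBar()
        self.vbox.pack_start(self.bar)
        self.show_all()

    def update(self, current, total):
        # call sites pass raw counts; the widget does the normalization
        self.bar.set_fraction(min(1.0, current * 1.0 / total))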
@@ -242,20 +238,19 @@ def main(server, eventHandler):
            if isinstance(event, bb.event.CacheLoadStarted):
                progress_total = event.total
                gtk.gdk.threads_enter()
                bardialog.set_title("Loading Cache")
                pbar.update(0)
                pbar.set_title("Loading Cache")
                pbar.update(0, progress_total)
                gtk.gdk.threads_leave()

            if isinstance(event, bb.event.CacheLoadProgress):
                x = event.current
                gtk.gdk.threads_enter()
                pbar.update(x * 1.0 / progress_total)
                pbar.set_title('')
                pbar.update(x, progress_total)
                gtk.gdk.threads_leave()
                continue

            if isinstance(event, bb.event.CacheLoadCompleted):
                bardialog.hide()
                pbar.hide()
                continue

            if isinstance(event, bb.event.ParseStarted):
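
Note the rewrite pattern repeated in this hunk and the next: the fraction arithmetic moves out of the event handlers, which now pass raw counts and a title instead of a precomputed ratio:

# before: pbar.update(event.current * 1.0 / progress_total)
# after:  pbar.set_title('')
#         pbar.update(event.current, progress_total)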
@@ -263,21 +258,19 @@ def main(server, eventHandler):
                if progress_total == 0:
                    continue
                gtk.gdk.threads_enter()
                pbar.update(0)
                bardialog.set_title("Processing recipes")

                pbar.set_title("Processing recipes")
                pbar.update(0, progress_total)
                gtk.gdk.threads_leave()

            if isinstance(event, bb.event.ParseProgress):
                x = event.current
                gtk.gdk.threads_enter()
                pbar.update(x * 1.0 / progress_total)
                pbar.set_title('')
                pbar.update(x, progress_total)
                gtk.gdk.threads_leave()
                continue

            if isinstance(event, bb.event.ParseCompleted):
                bardialog.hide()
                pbar.hide()
                continue

            if isinstance(event, bb.event.DepTreeGenerated):

1124 bitbake/lib/bb/ui/hob.py (Executable file → Normal file)

[Seven image files removed; only their sizes survive: 6.7, 6.9, 1.2, 1.1, 3.9, 5.7 and 3.9 KiB.]